repo_name (string, length 5..92) | path (string, length 4..232) | copies (string, 19 classes) | size (string, length 4..7) | content (string, length 721..1.04M) | license (string, 15 classes) | hash (int64, -9,223,277,421,539,062,000..9,223,102,107B) | line_mean (float64, 6.51..99.9) | line_max (int64, 15..997) | alpha_frac (float64, 0.25..0.97) | autogenerated (bool, 1 class)
---|---|---|---|---|---|---|---|---|---|---|
yingchi/fastai-notes | deeplearning1/nbs/vgg16.py | 1 | 8793 | from __future__ import division, print_function
import os, json
from glob import glob
import numpy as np
from scipy import misc, ndimage
from scipy.ndimage.interpolation import zoom
from keras import backend as K
from keras.layers.normalization import BatchNormalization
from keras.utils.data_utils import get_file
from keras.models import Sequential
from keras.layers.core import Flatten, Dense, Dropout, Lambda
from keras.layers.convolutional import Convolution2D, MaxPooling2D, ZeroPadding2D
from keras.layers.pooling import GlobalAveragePooling2D
from keras.optimizers import SGD, RMSprop, Adam
from keras.preprocessing import image
# Even when the TensorFlow backend is in use, explicitly set Theano ('th') image
# dim ordering, since the pretrained VGG16 weights loaded below expect channels-first data.
K.set_image_dim_ordering('th')
vgg_mean = np.array([123.68, 116.779, 103.939], dtype=np.float32).reshape((3,1,1))
def vgg_preprocess(x):
"""
Subtracts the mean RGB value, and transposes RGB to BGR.
The mean RGB was computed on the image set used to train the VGG model.
        Args:
            x: Batch of images in channels-first order (batch x 3 (RGB) x height x width)
        Returns:
            Mean-subtracted batch with the channel axis reversed (RGB -> BGR)
"""
x = x - vgg_mean
return x[:, ::-1] # reverse axis rgb->bgr
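
# --- Hedged example (added for illustration; not part of the original file) ---
# vgg_preprocess is wired into Vgg16.create() below as a Lambda over *batched*,
# channels-first tensors. The dummy batch here is an illustrative assumption.
#
#   import numpy as np
#   batch = np.zeros((1, 3, 224, 224), dtype=np.float32)   # N x C x H x W
#   out = vgg_preprocess(batch)   # mean-subtracted, channel axis flipped RGB -> BGR
#   assert out.shape == (1, 3, 224, 224)
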
class Vgg16():
"""
The VGG 16 Imagenet model
"""
def __init__(self):
# self.FILE_PATH = 'https://github.com/fchollet/deep-learning-models/releases/download/v0.1/'
self.FILE_PATH = 'http://files.fast.ai/models/'
self.create()
self.get_classes()
def get_classes(self):
"""
Downloads the Imagenet classes index file and loads it to self.classes.
            The file is downloaded only if it is not already in the cache.
"""
fname = 'imagenet_class_index.json'
fpath = get_file(fname, self.FILE_PATH+fname, cache_subdir='models')
with open(fpath) as f:
class_dict = json.load(f)
self.classes = [class_dict[str(i)][1] for i in range(len(class_dict))]
def predict(self, imgs, details=False):
"""
Predict the labels of a set of images using the VGG16 model.
Args:
imgs (ndarray) : An array of N images (size: N x width x height x channels).
                details : currently unused.
Returns:
preds (np.array) : Highest confidence value of the predictions for each image.
idxs (np.ndarray): Class index of the predictions with the max confidence.
classes (list) : Class labels of the predictions with the max confidence.
"""
# predict probability of each class for each image
all_preds = self.model.predict(imgs)
# for each image get the index of the class with max probability
idxs = np.argmax(all_preds, axis=1)
# get the values of the highest probability for each image
preds = [all_preds[i, idxs[i]] for i in range(len(idxs))]
# get the label of the class with the highest probability for each image
classes = [self.classes[idx] for idx in idxs]
return np.array(preds), idxs, classes
def ConvBlock(self, layers, filters):
"""
            Adds a specified number of ZeroPadding and Convolution layers
to the model, and a MaxPooling layer at the very end.
Args:
layers (int): The number of zero padded convolution layers
to be added to the model.
filters (int): The number of convolution filters to be
created for each layer.
"""
model = self.model
for i in range(layers):
model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(filters, 3, 3, activation='relu'))
model.add(MaxPooling2D((2, 2), strides=(2, 2)))
def FCBlock(self):
"""
Adds a fully connected layer of 4096 neurons to the model with a
Dropout of 0.5
Args: None
Returns: None
"""
model = self.model
model.add(Dense(4096, activation='relu'))
model.add(Dropout(0.5))
def create(self):
"""
            Creates the VGG16 network architecture and loads the pretrained weights.
Args: None
Returns: None
"""
model = self.model = Sequential()
model.add(Lambda(vgg_preprocess, input_shape=(3,224,224), output_shape=(3,224,224)))
self.ConvBlock(2, 64)
self.ConvBlock(2, 128)
self.ConvBlock(3, 256)
self.ConvBlock(3, 512)
self.ConvBlock(3, 512)
model.add(Flatten())
self.FCBlock()
self.FCBlock()
model.add(Dense(1000, activation='softmax'))
fname = 'vgg16_weights_th_dim_ordering_th_kernels.h5'
# fname = 'vgg16.h5'
model.load_weights(get_file(fname, self.FILE_PATH+fname, cache_subdir='models'))
def get_batches(self, path, gen=image.ImageDataGenerator(), shuffle=True, batch_size=8, class_mode='categorical'):
"""
            Takes the path to a directory and generates batches of augmented/normalized data, yielding batches indefinitely.
See Keras documentation: https://keras.io/preprocessing/image/
"""
return gen.flow_from_directory(path, target_size=(224,224),
class_mode=class_mode, shuffle=shuffle, batch_size=batch_size)
def ft(self, num):
"""
Replace the last layer of the model with a Dense (fully connected) layer of num neurons.
Will also lock the weights of all layers except the new layer so that we only learn
weights for the last layer in subsequent training.
Args:
num (int) : Number of neurons in the Dense layer
Returns:
None
"""
model = self.model
model.pop()
for layer in model.layers: layer.trainable=False
model.add(Dense(num, activation='softmax'))
self.compile()
def finetune(self, batches):
"""
Modifies the original VGG16 network architecture and updates self.classes for new training data.
Args:
batches : A keras.preprocessing.image.ImageDataGenerator object.
See definition for get_batches().
"""
self.ft(batches.nb_class)
classes = list(iter(batches.class_indices)) # get a list of all the class labels
# batches.class_indices is a dict with the class name as key and an index as value
# eg. {'cats': 0, 'dogs': 1}
# sort the class labels by index according to batches.class_indices and update model.classes
for c in batches.class_indices:
classes[batches.class_indices[c]] = c
self.classes = classes
def compile(self, lr=0.001):
"""
Configures the model for training.
See Keras documentation: https://keras.io/models/model/
"""
self.model.compile(optimizer=Adam(lr=lr),
loss='categorical_crossentropy', metrics=['accuracy'])
def fit_data(self, trn, labels, val, val_labels, nb_epoch=1, batch_size=64):
"""
Trains the model for a fixed number of epochs (iterations on a dataset).
See Keras documentation: https://keras.io/models/model/
"""
self.model.fit(trn, labels, nb_epoch=nb_epoch,
validation_data=(val, val_labels), batch_size=batch_size)
def fit(self, batches, val_batches, nb_epoch=1):
"""
Fits the model on data yielded batch-by-batch by a Python generator.
See Keras documentation: https://keras.io/models/model/
"""
self.model.fit_generator(batches, samples_per_epoch=batches.nb_sample, nb_epoch=nb_epoch,
validation_data=val_batches, nb_val_samples=val_batches.nb_sample)
def test(self, path, batch_size=8):
"""
Predicts the classes using the trained model on data yielded batch-by-batch.
Args:
path (string): Path to the target directory. It should contain one subdirectory
per class.
batch_size (int): The number of images to be considered in each batch.
Returns:
test_batches, numpy array(s) of predictions for the test_batches.
"""
test_batches = self.get_batches(path, shuffle=False, batch_size=batch_size, class_mode=None)
return test_batches, self.model.predict_generator(test_batches, test_batches.nb_sample)
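
# --- Hedged usage sketch (added for illustration; not part of the original fastai code) ---
# Only the Vgg16 methods defined above are used; the data directory layout,
# batch sizes, and epoch count are assumptions, not values from the notebooks.
if __name__ == '__main__':
    vgg = Vgg16()                                                 # builds the model, downloads weights
    batches = vgg.get_batches('data/train', batch_size=64)       # hypothetical path
    val_batches = vgg.get_batches('data/valid', batch_size=128)  # hypothetical path
    vgg.finetune(batches)                                         # replace the 1000-way softmax
    vgg.fit(batches, val_batches, nb_epoch=1)                     # one epoch of fine-tuning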
| apache-2.0 | -1,971,491,146,242,214,100 | 36.738197 | 144 | 0.607984 | false |
googlearchive/simian | src/tests/simian/mac/common/util_test.py | 1 | 3717 | #!/usr/bin/env python
#
# Copyright 2018 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""util module tests."""
import datetime
import time
import mock
import stubout
from google.apputils import app
from google.apputils import basetest
from simian.mac.common import util
class DatetimeTest(basetest.TestCase):
def setUp(self):
self.dt = util.Datetime
def testUtcFromTimestampInt(self):
"""Tests utcfromtimestamp()."""
expected_datetime = datetime.datetime(2011, 8, 8, 15, 42, 59)
epoch = 1312818179
self.assertEqual(expected_datetime, self.dt.utcfromtimestamp(epoch))
def testUtcFromTimestampFloat(self):
"""Tests utcfromtimestamp()."""
expected_datetime = datetime.datetime(2011, 8, 8, 15, 42, 59)
epoch = 1312818179.1415989
self.assertEqual(expected_datetime, self.dt.utcfromtimestamp(epoch))
def testUtcFromTimestampString(self):
"""Tests utcfromtimestamp()."""
expected_datetime = datetime.datetime(2011, 8, 8, 15, 42, 59)
epoch = '1312818179.1415989'
self.assertEqual(expected_datetime, self.dt.utcfromtimestamp(epoch))
def testUtcFromTimestampNone(self):
"""Tests utcfromtimestamp() with None as epoch time."""
self.assertRaises(ValueError, self.dt.utcfromtimestamp, None)
def testUtcFromTimestampInvalid(self):
"""Tests utcfromtimestamp() with None as epoch time."""
self.assertRaises(ValueError, self.dt.utcfromtimestamp, 'zz')
def testUtcFromTimestampUnderOneHourInFuture(self):
"""Tests utcfromtimestamp() with epoch under one hour in the future."""
epoch = time.time() + 600.0 # add ten minutes
self.assertRaises(
util.EpochFutureValueError, self.dt.utcfromtimestamp, epoch)
def testUtcFromTimestampOverOneHourInFuture(self):
"""Tests utcfromtimestamp() with epoch over one hour in the future."""
epoch = time.time() + 4000.0 # add a bit more than 1 hour
self.assertRaises(
util.EpochExtremeFutureValueError,
self.dt.utcfromtimestamp, epoch)
class UtilModuleTest(basetest.TestCase):
def testSerializeJson(self):
"""Test Serialize()."""
with mock.patch.object(util.json, 'dumps', return_value='serial1'):
self.assertEqual('serial1', util.Serialize('object1'))
with mock.patch.object(util.json, 'dumps', side_effect=TypeError):
self.assertRaises(util.SerializeError, util.Serialize, 'object2')
def testDeserializeJson(self):
"""Test Deserialize()."""
with mock.patch.object(util.json, 'loads', return_value='object1'):
self.assertEqual('object1', util.Deserialize('serial1'))
with mock.patch.object(util.json, 'loads', side_effect=ValueError):
self.assertRaises(util.DeserializeError, util.Deserialize, 'serial2')
def testDeserializeWhenNone(self):
"""Test Deserialize()."""
self.assertRaises(util.DeserializeError, util.Deserialize, None)
def testUrlUnquote(self):
"""Test UrlUnquote()."""
self.assertEqual(util.UrlUnquote('foo'), 'foo')
self.assertEqual(util.UrlUnquote('foo%2F'), 'foo/')
self.assertEqual(util.UrlUnquote('foo<ohcrap>'), 'foo<ohcrap>')
def main(unused_argv):
basetest.main()
if __name__ == '__main__':
app.run()
| apache-2.0 | 6,284,869,909,259,407,000 | 33.416667 | 75 | 0.719666 | false |
Cadasta/django-jsonattrs | example/exampleapp/views.py | 1 | 10512 | import django.views.generic as generic
import django.views.generic.edit as edit
from django.shortcuts import redirect
from django.core.urlresolvers import reverse_lazy
from django.forms import ModelChoiceField
from django.contrib.contenttypes.models import ContentType
from django.db import IntegrityError
import django.db.transaction as transaction
from django.db.utils import OperationalError, ProgrammingError
from jsonattrs.models import Schema
from jsonattrs.forms import AttributeModelForm
from .models import Division, Department, Party, Contract
from .forms import (
SchemaForm, AttributeFormSet, PartyForm, DivisionDepartmentForm
)
try:
div_t = ContentType.objects.get(app_label='exampleapp',
model='division')
dept_t = ContentType.objects.get(app_label='exampleapp',
model='department')
# These can happen when constructing database migrations from scratch.
except OperationalError:
pass
except ProgrammingError:
pass
# ----------------------------------------------------------------------
#
# HOME PAGE
#
class IndexView(generic.TemplateView):
template_name = 'exampleapp/index.html'
# ----------------------------------------------------------------------
#
# SCHEMATA
#
class SchemaList(generic.ListView):
model = Schema
def get_context_data(self, *args, **kwargs):
def row_key(row):
key = str(row['content_type'])
key += ':'
key += row['division'] if row['division'] else ' '
key += ':'
key += row['department'] if row['department'] else ' '
return key
context = super().get_context_data(*args, **kwargs)
table_data = []
for schema in context['object_list']:
sel = schema.selectors
nsel = len(sel)
div_selector = (Division.objects.get(pk=sel[0]).name
if nsel > 0 else None)
dept_selector = (Department.objects.get(pk=sel[1]).name
if nsel > 1 else None)
table_data.append({'content_type': schema.content_type,
'division': div_selector,
'department': dept_selector,
'schema': schema})
context['table_data'] = sorted(table_data, key=row_key)
return context
class SchemaMixin:
form_class = SchemaForm
success_url = reverse_lazy('schema-list')
def get_initial(self):
obj = self.get_object()
if obj is not None:
sels = obj.selectors
return {'content_type': obj.content_type,
'division': (Division.objects.get(pk=sels[0])
if len(sels) > 0 else None),
'department': (Department.objects.get(pk=sels[1])
if len(sels) > 1 else None)}
else:
return {}
def get_form_kwargs(self):
kwargs = super().get_form_kwargs()
obj = self.get_object()
if obj is not None:
kwargs.update({'instance': obj})
return kwargs
def get_context_data(self, *args, **kwargs):
context = super().get_context_data(*args, **kwargs)
if not hasattr(self, 'formset'):
if self.get_object() is not None:
self.formset = AttributeFormSet(instance=self.get_object())
else:
self.formset = AttributeFormSet()
context['formset'] = self.formset
context['empty_row'] = self.formset.empty_form
return context
def form_valid(self, form):
content_type = ContentType.objects.get(
pk=self.request.POST['content_type']
)
selectors = ()
div = None
dept = None
if self.request.POST['division']:
div = self.request.POST['division']
selectors = (div,)
if self.request.POST['department']:
dept = self.request.POST['department']
selectors = (div, dept)
try:
with transaction.atomic():
self.set_up_schema(content_type, selectors)
self.formset = AttributeFormSet(
self.request.POST, self.request.FILES,
instance=self.schema
)
if not self.formset.is_valid():
raise IntegrityError
self.formset.save()
except:
return self.form_invalid(self.get_form())
else:
return redirect(self.success_url)
class SchemaCreate(SchemaMixin, generic.FormView):
template_name = 'jsonattrs/schema_create_form.html'
def get_object(self):
return self.schema if hasattr(self, 'schema') else None
def set_up_schema(self, content_type, selectors):
self.schema = Schema.objects.create(
content_type=content_type,
selectors=selectors
)
class SchemaUpdate(SchemaMixin, generic.FormView):
template_name = 'jsonattrs/schema_update_form.html'
def get_object(self):
return (self.schema if hasattr(self, 'schema')
else Schema.objects.get(pk=self.kwargs['pk']))
def set_up_schema(self, content_type, selectors):
self.schema = Schema.objects.get(
content_type=content_type,
selectors=selectors
)
class SchemaDelete(edit.DeleteView):
model = Schema
success_url = reverse_lazy('schema-list')
# ----------------------------------------------------------------------
#
# ENTITY ATTRIBUTES
#
class EntityAttributesMixin:
def get_context_data(self, *args, **kwargs):
context = super().get_context_data(*args, **kwargs)
context['attrs'] = context['object'].attrs.attributes.values()
return context
def form_valid(self, form):
print('EntityAttributesMixin.form_valid:', self.request.POST)
return super().form_valid(form)
def form_invalid(self, form):
print('EntityAttributesMixin.form_invalid:', self.request.POST)
return super().form_invalid(form)
# ----------------------------------------------------------------------
#
# DIVISION/DEPARTMENT HANDLING FOR OBJECT CREATION FORMS
#
class DivisionDepartmentMixin:
def get_context_data(self, *args, **kwargs):
context = super().get_context_data(*args, **kwargs)
divdepts = []
for div in Division.objects.all():
for dept in div.departments.all():
divdepts.append((str(div.pk) + ':' + str(dept.pk),
div.name + '/' + dept.name))
context['divdept_form'] = DivisionDepartmentForm(choices=divdepts)
return context
class DivisionDepartmentCreateMixin:
def get_form_kwargs(self):
kwargs = super().get_form_kwargs()
divpk, deptpk = self.request.GET['divdept'].split(':')
kwargs['schema_selectors'] = (
{'name': None,
'selector': divpk},
{'name': 'department',
'value': Department.objects.get(pk=deptpk),
'selector': deptpk}
)
return kwargs
def get_context_data(self, *args, **kwargs):
context = super().get_context_data(*args, **kwargs)
divpk, deptpk = self.request.GET['divdept'].split(':')
context['divname'] = Division.objects.get(pk=divpk).name
context['deptname'] = Department.objects.get(pk=deptpk).name
return context
# ----------------------------------------------------------------------
#
# DIVISIONS
#
class DivisionList(generic.ListView):
model = Division
class DivisionDetail(EntityAttributesMixin, generic.DetailView):
model = Division
class DivisionCreate(edit.CreateView):
model = Division
fields = ('name',)
success_url = reverse_lazy('division-list')
class DivisionDelete(edit.DeleteView):
model = Division
success_url = reverse_lazy('division-list')
# ----------------------------------------------------------------------
#
# DEPARTMENTS
#
class DepartmentList(generic.ListView):
model = Department
class DepartmentDetail(EntityAttributesMixin, generic.DetailView):
model = Department
class DepartmentForm(AttributeModelForm):
class Meta:
model = Department
fields = ('name', 'division')
def __init__(self, *args, **kwargs):
super(DepartmentForm, self).__init__(*args, **kwargs)
self.fields['division'] = ModelChoiceField(
queryset=Division.objects.all(), empty_label=None
)
class DepartmentCreate(edit.CreateView):
model = Department
form_class = DepartmentForm
success_url = reverse_lazy('department-list')
class DepartmentDelete(edit.DeleteView):
model = Department
success_url = reverse_lazy('department-list')
# ----------------------------------------------------------------------
#
# PARTIES
#
class PartyList(DivisionDepartmentMixin, generic.ListView):
model = Party
class PartyDetail(EntityAttributesMixin, generic.DetailView):
model = Party
class PartyCreate(DivisionDepartmentCreateMixin, edit.CreateView):
model = Party
form_class = PartyForm
class PartyUpdate(EntityAttributesMixin, edit.UpdateView):
model = Party
form_class = PartyForm
template_name_suffix = '_update_form'
class PartyDelete(edit.DeleteView):
model = Party
success_url = reverse_lazy('party-list')
# ----------------------------------------------------------------------
#
# CONTRACTS
#
class ContractList(DivisionDepartmentMixin, generic.ListView):
model = Contract
class ContractDetail(EntityAttributesMixin, generic.DetailView):
model = Contract
class ContractForm(AttributeModelForm):
class Meta:
model = Contract
fields = ('department',)
def __init__(self, *args, **kwargs):
super(ContractForm, self).__init__(*args, **kwargs)
self.fields['department'] = ModelChoiceField(
queryset=Department.objects.all(), empty_label=None
)
class ContractCreate(DivisionDepartmentCreateMixin, edit.CreateView):
model = Contract
form_class = ContractForm
class ContractUpdate(edit.UpdateView):
model = Contract
form_class = ContractForm
template_name_suffix = '_update_form'
class ContractDelete(edit.DeleteView):
model = Contract
success_url = reverse_lazy('contract-list')
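
# --- Hedged companion sketch (added for illustration; not part of the original module) ---
# The views above resolve URL names such as 'schema-list', 'division-list', and
# 'contract-list' via reverse_lazy(), so the project needs a urls.py along these
# lines. The exact patterns are assumptions; only the view classes come from this module.
#
#   from django.conf.urls import url
#   from exampleapp import views
#
#   urlpatterns = [
#       url(r'^$', views.IndexView.as_view(), name='index'),
#       url(r'^schemata/$', views.SchemaList.as_view(), name='schema-list'),
#       url(r'^divisions/$', views.DivisionList.as_view(), name='division-list'),
#       url(r'^contracts/$', views.ContractList.as_view(), name='contract-list'),
#   ]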
| agpl-3.0 | 1,201,420,258,175,254,500 | 28.611268 | 75 | 0.582097 | false |
flohorovicic/pynoddy | VisualizationExample.py | 1 | 1198 | """
Created on Wed Apr 15 19:33:09 2020
@author: [email protected]
This script shows how to generate a 3d visualization of a pynoddy model
in a vtk popup window.
note: every time the window pops up, you need to close it to free the terminal
There is another ipynb file which shows how to visualize such models in a jupyter notebook
"""
import pynoddy
import pynoddy.output
import vtkplotter as vtkP
# Determine the path to the noddy file
#(comment the first line and uncomment the second line to see the second model
# The second model takes around a minute to generate)
modelfile = 'examples/strike_slip.his'
#modelfile = 'examples/Scenario3_MedResolution.his'
# Determine the path to the noddy executable
noddy_path = 'noddyapp/noddy_win64.exe'
# Where you would like to place all your output files
outputfolder = 'sandbox/'
# create a plot in vtkplotter
plot = vtkP.Plotter(axes=1, bg='white', interactive=1)
# call the plotting function
points = pynoddy.output.CalculatePlotStructure(modelfile, plot, noddy_path,
outputfolder=outputfolder,
LithologyOpacity=0.2, outputOption=0)
plot.show(viewup='z')
| gpl-2.0 | -2,734,801,834,425,218,600 | 33.228571 | 90 | 0.724541 | false |
achimnol/aiotools | src/aiotools/func.py | 1 | 3631 | import collections
import functools
from .compat import get_running_loop
__all__ = (
'apartial',
'lru_cache',
)
_CacheEntry = collections.namedtuple('_CacheEntry', 'value expire_at')
def apartial(coro, *args, **kwargs):
"""
Wraps a coroutine function with pre-defined arguments (including keyword
arguments). It is an asynchronous version of :func:`functools.partial`.
"""
@functools.wraps(coro)
async def wrapped(*cargs, **ckwargs):
return await coro(*args, *cargs, **kwargs, **ckwargs)
return wrapped
def lru_cache(maxsize: int = 128,
typed: bool = False,
expire_after: float = None):
"""
A simple LRU cache just like :func:`functools.lru_cache`, but it works for
coroutines. This is not as heavily optimized as :func:`functools.lru_cache`
which uses an internal C implementation, as it targets async operations
that take a long time.
It follows the same API that the standard functools provides. The wrapped
function has ``cache_clear()`` method to flush the cache manually, but
leaves ``cache_info()`` for statistics unimplemented.
Note that calling the coroutine multiple times with the same arguments
before the first call returns may incur duplicate executions.
This function is not thread-safe.
Args:
maxsize: The maximum number of cached entries.
typed: Cache keys in different types separately (e.g., ``3`` and ``3.0`` will
be different keys).
expire_after: Re-calculate the value if the configured time has passed even
when the cache is hit. When re-calculation happens the
expiration timer is also reset.
"""
if maxsize is not None and not isinstance(maxsize, int):
raise TypeError('Expected maxsize to be an integer or None')
def wrapper(coro):
sentinel = object() # unique object to distinguish None as result
cache = collections.OrderedDict()
cache_get = cache.get
cache_del = cache.__delitem__
cache_set = cache.__setitem__
cache_len = cache.__len__
cache_move = cache.move_to_end
make_key = functools._make_key
# We don't use explicit locks like the standard functools,
# because this lru_cache is intended for use in asyncio coroutines.
# The only context interleaving happens when calling the user-defined
# coroutine, so there is no need to add extra synchronization guards.
@functools.wraps(coro)
async def wrapped(*args, **kwargs):
now = get_running_loop().time()
k = make_key(args, kwargs, typed)
entry = cache_get(k, sentinel)
if entry is not sentinel:
if entry.expire_at is None:
return entry.value
if entry.expire_at >= now:
return entry.value
cache_del(k)
result = await coro(*args, **kwargs)
if maxsize is not None and cache_len() >= maxsize:
cache.popitem(last=False)
if expire_after is not None:
expire_at = now + expire_after
else:
expire_at = None
cache_set(k, _CacheEntry(result, expire_at))
cache_move(k, last=True)
return result
def cache_clear():
cache.clear()
def cache_info():
raise NotImplementedError
wrapped.cache_clear = cache_clear
wrapped.cache_info = cache_info
return wrapped
return wrapper
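
# --- Hedged usage sketch (added for illustration; not part of the original module) ---
# Shows the intended call pattern for apartial() and lru_cache(). The fetch()
# coroutine, URL, and timings are made-up assumptions.
if __name__ == '__main__':
    import asyncio

    @lru_cache(maxsize=32, expire_after=60.0)
    async def fetch(url):
        await asyncio.sleep(0.1)          # stand-in for real I/O
        return 'payload for ' + url

    async def main():
        fetch_example = apartial(fetch, 'http://example.com')
        first = await fetch_example()     # computed and cached
        second = await fetch_example()    # served from the cache
        assert first == second

    asyncio.get_event_loop().run_until_complete(main())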
| mit | 2,900,340,229,955,730,000 | 32.934579 | 84 | 0.613054 | false |
MikhailMS/Final_Project | music_generation_module/shift_key.py | 1 | 8953 | # Import packages
import music21
from os import listdir
from os.path import isfile, join
# Import modules
#from utils import *
#== Main class ==#
# Main tonics for key shifting
c_tonic = dict([("G#", 4),("A-", 4),("A", 3),("A#", 2),("B-", 2),("B", 1),("C", 0),("C#", -1),("D-", -1),("D", -2),("D#", -3),
("E-", -3),("E", -4),("F", -5),("F#", 6),("G-", 6),("G", 5)])
c_s_tonic = dict([("G#", 5),("A-", 5),("A", 4),("A#", 3),("B-", 3),("B", 2),("C", 1),("C#", 0),("D-", 0),("D", -1),("D#", -2),
("E-", -2),("E", -3),("F", -4),("F#", -5),("G-", -5),("G", 6)])
d_f_tonic = dict([("G#", 5),("A-", 5),("A", 4),("A#", 3),("B-", 3),("B", 2),("C", 1),("C#", 0),("D-", 0),("D", -1),("D#", -2),
("E-", -2),("E", -3),("F", -4),("F#", -5),("G-", -5),("G", 6)])
d_tonic = dict([("G#", 6),("A-", 6),("A", 5),("A#", 4),("B-", 4),("B", 3),("C", 2),("C#", 1),("D-", 1),("D", 0),("D#", -1),
("E-", -1),("E", -2),("F", -3),("F#", -4),("G-", -4),("G", -5)])
d_s_tonic = dict([("G#", -5),("A-", -5),("A", 6),("A#", 5),("B-", 5),("B", 4),("C", 3),("C#", 2),("D-", 2),("D", 1),("D#", 0),
("E-", 0),("E", -1),("F", -2),("F#", -3),("G-", -3),("G", -4)])
e_f_tonic = dict([("G#", -5),("A-", -5),("A", 6),("A#", 5),("B-", 5),("B", 4),("C", 3),("C#", 2),("D-", 2),("D", 1),("D#", 0),
("E-", 0),("E", -1),("F", -2),("F#", -3),("G-", -3),("G", -4)])
e_tonic = dict([("G#", -4),("A-", -4),("A", -5),("A#", 6),("B-", 6),("B", 5),("C", 4),("C#", 3),("D-", 3),("D", 2),("D#", 1),
("E-", 1),("E", 0),("F", -1),("F#", -2),("G-", -2),("G", -3)])
f_tonic = dict([("G#", -3),("A-", -3),("A", -4),("A#", -5),("B-", -5),("B", 6),("C", 5),("C#", 4),("D-", 4),("D", 3),("D#", 2),
("E-", 2),("E", 1),("F", 0),("F#", -1),("G-", -1),("G", -2)])
f_s_tonic = dict([("G#", -2),("A-", -2),("A", -3),("A#", -4),("B-", -4),("B", -4),("C", 6),("C#", 5),("D-", 5),("D", 4),("D#", 3),
("E-", 3),("E", 2),("F", 1),("F#", 0),("G-", 0),("G", -1)])
g_f_tonic = dict([("G#", -2),("A-", -2),("A", -3),("A#", -4),("B-", -4),("B", -4),("C", 6),("C#", 5),("D-", 5),("D", 4),("D#", 3),
("E-", 3),("E", 2),("F", 1),("F#", 0),("G-", 0),("G", -1)])
g_tonic = dict([("G#", -1),("A-", -1),("A", -2),("A#", -3),("B-", -3),("B", -4),("C", -5),("C#", 6),("D-", 6),("D", 5),("D#", 4),
("E-", 4),("E", 3),("F", 2),("F#", 1),("G-", 1),("G", 0)])
g_s_tonic = dict([("G#", 0),("A-", 0),("A", -1),("A#", -2),("B-", -2),("B", -3),("C", -4),("C#", -5),("D-", -5),("D", 6),("D#", 5),
("E-", 5),("E", 4),("F", 3),("F#", 2),("G-", 2),("G", 1)])
a_f_tonic = dict([("G#", 0),("A-", 0),("A", -1),("A#", -2),("B-", -2),("B", -3),("C", -4),("C#", -5),("D-", -5),("D", 6),("D#", 5),
("E-", 5),("E", 4),("F", 3),("F#", 2),("G-", 2),("G", 1)])
a_tonic = dict([("G#", 1),("A-", 1),("A", 0),("A#", -1),("B-", -1),("B", -2),("C", -3),("C#", -4),("D-", -4),("D", -5),("D#", 6),
("E-", 6),("E", 5),("F", 4),("F#", 3),("G-", 3),("G", 2)]) # same as A minor scale
a_s_tonic = dict([("G#", 2),("A-", 2),("A", 1),("A#", 0),("B-", 0),("B", -1),("C", -2),("C#", -3),("D-", -3),("D", -4),("D#", -5),
("E-", -5),("E", 6),("F", 5),("F#", 4),("G-", 4),("G", 3)])
b_f_tonic = dict([("G#", 2),("A-", 2),("A", 1),("A#", 0),("B-", 0),("B", -1),("C", -2),("C#", -3),("D-", -3),("D", -4),("D#", -5),
("E-", -5),("E", 6),("F", 5),("F#", 4),("G-", 4),("G", 3)])
b_tonic = dict([("G#", 3),("A-", 3),("A", 2),("A#", 1),("B-", 1),("B", 0),("C", -1),("C#", -2),("D-", -2),("D", -3),("D#", -4),
("E-", -4),("E", -5),("F", 6),("F#", 5),("G-", 5),("G", 4)])
def shift_key():
"""Shifts music key by parallel key rules"""
# Get midi files
music_files = [f for f in listdir("./output") if (isfile(join("./output", f)) and (".mid" in f))]
for fname in music_files:
name = fname[:-4]
print "Loaded {}".format(name)
score = music21.converter.parse('./output/{}'.format(fname))
key = score.analyze('key')
print 'Before change: {}, {}'.format(key.tonic.name, key.mode)
        # Check whether a major -> minor shift should be done
if '{} {}'.format(key.tonic.name,key.mode) == "C major":
halfSteps = a_tonic[key.tonic.name]
print 'Minors: {}'.format(halfSteps)
elif '{} {}'.format(key.tonic.name,key.mode) == "C# major":
halfSteps = a_s_tonic[key.tonic.name]
print 'Minors: {}'.format(halfSteps)
elif '{} {}'.format(key.tonic.name,key.mode) == "D- major":
halfSteps = b_f_tonic[key.tonic.name]
print 'Minors: {}'.format(halfSteps)
elif '{} {}'.format(key.tonic.name,key.mode) == "D major":
halfSteps = b_tonic[key.tonic.name]
print 'Minors: {}'.format(halfSteps)
elif '{} {}'.format(key.tonic.name,key.mode) == "E- major":
halfSteps = c_tonic[key.tonic.name]
print 'Minors: {}'.format(halfSteps)
elif '{} {}'.format(key.tonic.name,key.mode) == "E major":
halfSteps = c_s_tonic[key.tonic.name]
print 'Minors: {}'.format(halfSteps)
elif '{} {}'.format(key.tonic.name,key.mode) == "F major":
halfSteps = d_tonic[key.tonic.name]
print 'Minors: {}'.format(halfSteps)
elif '{} {}'.format(key.tonic.name,key.mode) == "F# major":
halfSteps = d_s_tonic[key.tonic.name]
print 'Minors: {}'.format(halfSteps)
elif '{} {}'.format(key.tonic.name,key.mode) == "G major":
halfSteps = e_tonic[key.tonic.name]
print 'Minors: {}'.format(halfSteps)
elif '{} {}'.format(key.tonic.name,key.mode) == "A- major":
halfSteps = f_tonic[key.tonic.name]
print 'Minors: {}'.format(halfSteps)
elif '{} {}'.format(key.tonic.name,key.mode) == "A major":
halfSteps = f_s_tonic[key.tonic.name]
print 'Minors: {}'.format(halfSteps)
elif '{} {}'.format(key.tonic.name,key.mode) == "B- major":
halfSteps = g_tonic[key.tonic.name]
print 'Minors: {}'.format(halfSteps)
elif '{} {}'.format(key.tonic.name,key.mode) == "B major":
halfSteps = g_s_tonic[key.tonic.name]
print 'Minors: {}'.format(halfSteps)
        # Check whether a minor -> major shift should be done
elif '{} {}'.format(key.tonic.name,key.mode) == "C minor":
halfSteps = e_f_tonic[key.tonic.name]
print 'Majors: {}'.format(halfSteps)
elif '{} {}'.format(key.tonic.name,key.mode) == "C# minor":
halfSteps = e_tonic[key.tonic.name]
print 'Majors: {}'.format(halfSteps)
elif '{} {}'.format(key.tonic.name,key.mode) == "D minor":
halfSteps = f_tonic[key.tonic.name]
print 'Majors: {}'.format(halfSteps)
elif '{} {}'.format(key.tonic.name,key.mode) == "D# minor":
halfSteps = f_s_tonic[key.tonic.name]
print 'Majors: {}'.format(halfSteps)
elif '{} {}'.format(key.tonic.name,key.mode) == "E minor":
halfSteps = g_tonic[key.tonic.name]
print 'Majors: {}'.format(halfSteps)
elif '{} {}'.format(key.tonic.name,key.mode) == "F minor":
halfSteps = a_f_tonic[key.tonic.name]
print 'Majors: {}'.format(halfSteps)
elif '{} {}'.format(key.tonic.name,key.mode) == "F# minor":
halfSteps = a_tonic[key.tonic.name]
print 'Majors: {}'.format(halfSteps)
elif '{} {}'.format(key.tonic.name,key.mode) == "G minor":
halfSteps = b_f_tonic[key.tonic.name]
print 'Majors: {}'.format(halfSteps)
elif '{} {}'.format(key.tonic.name,key.mode) == "G# minor":
halfSteps = b_tonic[key.tonic.name]
print 'Majors: {}'.format(halfSteps)
elif '{} {}'.format(key.tonic.name,key.mode) == "A minor":
halfSteps = c_tonic[key.tonic.name]
print 'Majors: {}'.format(halfSteps)
elif '{} {}'.format(key.tonic.name,key.mode) == "A# minor":
halfSteps = c_s_tonic[key.tonic.name]
print 'Majors: {}'.format(halfSteps)
elif '{} {}'.format(key.tonic.name,key.mode) == "B- minor":
halfSteps = d_f_tonic[key.tonic.name]
print 'Majors: {}'.format(halfSteps)
elif '{} {}'.format(key.tonic.name,key.mode) == "B minor":
halfSteps = d_tonic[key.tonic.name]
print 'Majors: {}'.format(halfSteps)
newscore = score.transpose(halfSteps)
key = newscore.analyze('key')
print 'After change: {}, {}'.format(key.tonic.name, key.mode)
if name == 'composition':
newFileName = "Clean_{}.mid".format(name)
print '{} is saved!'.format(newFileName)
newscore.write('midi',newFileName)
shift_key()
| bsd-2-clause | 4,652,851,743,536,483,000 | 48.192308 | 131 | 0.418296 | false |
ernw/dizzy | dizzy/log.py | 1 | 3465 | # log.py
#
# Copyright 2017 Daniel Mende <[email protected]>
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of the nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from datetime import datetime
from traceback import print_tb
from threading import Lock
from sys import exc_info
from pprint import pprint
DEBUG = 5
VERBOSE_2 = 4
VERBOSE_1 = 3
NORMAL = 2
REDUCE = 1
NONE = 0
print_level = NORMAL
print_levels = {"DEBUG": DEBUG,
"VERBOSE_2": VERBOSE_2,
"VERBOSE_1": VERBOSE_1,
"NORMAL": NORMAL,
"REDUCE": REDUCE,
"NONE": NONE}
print_colors = { DEBUG: '\033[91m',
VERBOSE_2: '\033[94m',
VERBOSE_1: '\033[96m',
NORMAL: '\033[97m',
REDUCE: '\033[90m',
NONE: ''}
ENDC = '\033[0m'
print_lock = Lock()
def set_print_level(level):
global print_level
print_level = level
def print_dizzy(value, level=NORMAL):
if print_level >= level:
with print_lock:
print(print_colors[level], end='')
print(value)
if isinstance(value, Exception):
ex_type, ex, tb = exc_info()
print_tb(tb)
print(ENDC, end='')
def pprint_dizzy(value, level=NORMAL):
if print_level >= level:
with print_lock:
print(print_colors[level], end='')
pprint(value, width=1000, compact=True)
print(ENDC, end='')
class Logger(object):
def __init__(self, stream, logfile, buffered=False):
self.stream = stream
self.logfile = open(logfile, "a")
self.buffered = buffered
def write(self, data):
self.stream.write(data)
self.logfile.write(datetime.now().isoformat() + ": " + data + "\n")
if not self.buffered:
self.flush()
def flush(self):
self.stream.flush()
self.logfile.flush()
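
# --- Hedged usage sketch (added for illustration; not part of the original module) ---
# Demonstrates the leveled printing helpers and the Logger stream wrapper; the
# messages and log file name are arbitrary assumptions.
if __name__ == '__main__':
    from sys import stdout

    set_print_level(VERBOSE_1)
    print_dizzy("starting fuzzing run", NORMAL)               # printed at VERBOSE_1
    print_dizzy("per-packet details", DEBUG)                  # suppressed at VERBOSE_1
    pprint_dizzy({"target": "127.0.0.1", "port": 9999}, VERBOSE_1)
    log = Logger(stdout, "dizzy_example.log")                 # timestamped copy of each write
    log.write("session finished")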
| bsd-3-clause | 5,154,322,660,538,338,000 | 34 | 80 | 0.620202 | false |
0x1306e6d/Baekjoon | snippets/sort/heap.py | 1 | 1136 | from random import shuffle
n = int(input())
arr = list(range(n))
shuffle(arr)
print(arr)
heap = [0] * n
heap_size = 0
def heap_insert(i):
global heap
global heap_size
index = heap_size
next_index = (index - 1) // 2
heap[index] = i
heap_size += 1
while (next_index >= 0) and (i > heap[next_index]):
heap[index], heap[next_index] = heap[next_index], heap[index]
index = next_index
next_index = ((index - 1) // 2)
def heap_remove():
global heap
global heap_size
root = heap[0]
heap[0] = heap[heap_size - 1]
heap_size -= 1
index = 0
next_index = (index * 2) + 1
while (next_index < heap_size):
if (next_index < (heap_size - 1)) and (heap[next_index] < heap[next_index + 1]):
next_index += 1
if heap[index] < heap[next_index]:
heap[index], heap[next_index] = heap[next_index], heap[index]
index = next_index
next_index = (index * 2) + 1
else:
break
return root
for i in arr:
heap_insert(i)
for i in range(n):
arr[i] = heap_remove()
print(arr)
| gpl-2.0 | -4,637,286,306,008,387,000 | 18.254237 | 88 | 0.544014 | false |
davidswelt/dmvccm | lq-nlp-commons/wsj.py | 1 | 5072 | # Copyright (C) 2007-2011 Franco M. Luque
# URL: <http://www.cs.famaf.unc.edu.ar/~francolq/>
# For license information, see LICENSE.txt
import codecs
import itertools
import os
from nltk.corpus.reader.util import read_sexpr_block
from nltk.corpus.reader import bracket_parse
from nltk import tree
from nltk import Tree
from nltk.util import LazyMap
import treebank
word_tags = ['CC', 'CD', 'DT', 'EX', 'FW', 'IN', 'JJ', 'JJR', 'JJS', 'LS', 'MD', 'NN', 'NNS', 'NNP', 'NNPS', 'PDT',
'POS', 'PRP', 'PRP$', 'RB', 'RBR', 'RBS', 'RP', 'SYM', 'TO', 'UH', 'VB', 'VBD', 'VBG', 'VBN', 'VBP', 'VBZ', 'WDT', 'WP', 'WP$', 'WRB']
currency_tags_words = ['#', '$', 'C$', 'A$']
ellipsis = ['*', '*?*', '0', '*T*', '*ICH*', '*U*', '*RNR*', '*EXP*', '*PPA*', '*NOT*']
punctuation_tags = ['.', ',', ':', '-LRB-', '-RRB-', '\'\'', '``']
punctuation_words = ['.', ',', ':', '-LRB-', '-RRB-', '\'\'', '``', '--', ';', '-', '?', '!', '...', '-LCB-', '-RCB-']
# the tag for -- - ; ... is :
# the tag for ? ! is .
# ' is not punctuation but a POS tag (possessive pronoun?)
# '-LCB-', '-RCB-' are the curly brackets
# the damn tree ['07/wsj_0758.mrg', 74] (third from last) uses single quotes
# works whether the leaves are lexical items or POS tags.
def is_ellipsis(s):
return s == '-NONE-' or s.partition('-')[0] in ellipsis
# works whether the leaves are lexical items or POS tags.
def is_punctuation(s):
    # only compare against punctuation_words because it includes punctuation_tags.
return s in punctuation_words
class WSJTree(treebank.Tree):
def is_ellipsis(self, s):
return is_ellipsis(s)
def is_punctuation(self, s):
return is_punctuation(s)
# TODO: Rename this class to WSJ.
class WSJSents(bracket_parse.BracketParseCorpusReader):
def __init__(self):
bracket_parse.BracketParseCorpusReader.__init__(self, 'wsj_comb', '.*')
def tagged_sents(self):
# Remove punctuation, ellipsis and currency ($, #) at the same time:
f = lambda s: filter(lambda x: x[1] in word_tags, s)
return LazyMap(f, bracket_parse.BracketParseCorpusReader.tagged_sents(self))
# TODO: remove this class and rename WSJSents to WSJ.
class WSJ(treebank.SavedTreebank):
default_basedir = 'wsj_comb'
trees = []
filename = 'wsj.treebank'
def __init__(self, basedir=None):
if basedir == None:
self.basedir = self.default_basedir
else:
self.basedir = basedir
#self.reader = BracketParseCorpusReader(self.basedir, self.get_files())
def get_files(self):
l = os.listdir(self.basedir)
files = []
for d in l:
files = files + map(lambda s: d+'/'+s, os.listdir(self.basedir+'/'+d))
return files
"""def parsed(self, files=None):
if files is None:
files = self.get_files()
for (i, t) in itertools.izip(itertools.count(), treebank.SavedTreebank.parsed(self, files)):
yield WSJTree(t, labels=i)"""
def parsed(self, files=None):
"""
@param files: One or more WSJ treebank files to be processed
@type files: L{string} or L{tuple(string)}
@rtype: iterator over L{tree}
"""
if files is None or files == []:
files = self.get_files()
# Just one file to process? If so convert to a tuple so we can iterate
if isinstance(files, str):
files = (files,)
size = 0
for file in files:
path = os.path.join(self.basedir, file)
f = codecs.open(path, encoding='utf-8')
i = 0
t = read_parsed_tb_block(f)
#print "Parsing", len(t), "trees from file", file
print "Parsing file", file
while t != []:
size += 1
#yield treebank.Tree(t[0], [file, i])
yield WSJTree(t[0], [file, i])
i = i+1
t = t[1:]
if t == []:
t = read_parsed_tb_block(f)
print "Finished processing", size, "trees"
def get_tree(self, offset=0):
t = self.get_trees2(offset, offset+1)[0]
return t
    # Returns the trees located at position i with start <= i < end
def get_trees2(self, start=0, end=None):
lt = [t for t in itertools.islice(self.parsed(), start, end)]
return lt
def is_ellipsis(self, s):
return is_ellipsis(s)
def is_punctuation(self, s):
return is_punctuation(s)
def test():
tb = WSJ()
trees = tb.get_trees()
return tb
# STOLEN FROM nltk 0.8, nltk/corpus/treebank.py, later removed from nltk.
def treebank_bracket_parse(t):
try:
return Tree.fromstring(t, remove_empty_top_bracketing=True)
except IndexError:
# in case it's the real treebank format,
# strip first and last brackets before parsing
return tree.bracket_parse(t.strip()[1:-1])
def read_parsed_tb_block(stream):
return [treebank_bracket_parse(t) for t in read_sexpr_block(stream)]
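
# --- Hedged usage sketch (added for illustration; not part of the original module) ---
# Assumes a local 'wsj_comb/' directory with bracketed treebank files, as expected
# by WSJ.default_basedir; which tree is printed is arbitrary.
if __name__ == '__main__':
    tb = WSJ()
    print "Treebank files found:", len(tb.get_files())
    first_tree = tb.get_tree(0)
    print first_tree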
| gpl-3.0 | 7,973,924,912,216,270,000 | 32.368421 | 134 | 0.568612 | false |
canvasnetworks/canvas | website/canvas/details_models.py | 1 | 11765 | from django.conf import settings
from apps.client_details.models import ClientDetailsBase
from apps.tags.models import Tag
from canvas import stickers
from canvas.cache_patterns import CachedCall
from canvas.redis_models import RealtimeChannel
from canvas.util import base36encode, strip_template_chars
class ContentDetails(ClientDetailsBase):
UGC_IMAGES = [
('tiny_square', True),
('small_square', True),
('square', True),
('medium_square', True),
('thumbnail', True),
('stream', True),
('small_column', True),
('column', True),
('explore_column', True),
('giant', True),
('mobile', True),
('footer', True),
'ugc_original',
]
TO_CLIENT_WHITELIST = [
'id',
'timestamp',
'remix_text',
('original', True),
('remix_of_first_caption_url', True),
('remix_of_giant_url', True),
('remix_of_thumbnail_url', True),
] + UGC_IMAGES
    # TODO: Do this properly; this is a fix for the suggest widget
def to_dict(self):
return self._d
def __init__(self, details):
ClientDetailsBase.__init__(self, details)
self.ugc_original = details.get('original')
for size in ['giant', 'thumbnail']:
attr = 'remix_of_' + size + '_url'
url = None
if getattr(self, attr, None):
url = self.ugc_url(getattr(self, attr))
setattr(self, attr, url)
for name in self.UGC_IMAGES:
if isinstance(name, tuple):
name, v = name
if hasattr(self, name):
val = getattr(self, name)
setattr(self, name, self.ugc_content(val))
def ugc_url(self, content):
if content.startswith("https://"):
raise Exception("Creating a ContentDetails from to_client dict.")
protocol = 'https' if settings.UGC_HTTPS_ENABLED else 'http'
return "{}://{}/ugc/{}".format(protocol, settings.UGC_HOST, content)
def get_absolute_url_for_image_type(self, image_type):
try:
url = self[image_type].get('name', self[image_type].get('url'))
except (KeyError, IndexError,):
return ''
if image_type == 'original':
return self.ugc_url(url)
return url
def ugc_content(self, content):
if content:
url = self.ugc_url(content['name'])
return dict(content, url=url, name=url)
return {}
@property
def all_images(self):
images = {}
for name in self.UGC_IMAGES:
if isinstance(name, tuple):
name, _ = name
try:
images[name] = getattr(self, name)
except AttributeError:
pass
return images
def is_animated(self):
try:
return bool(self._d['original']['animated'])
except KeyError:
return False
def get_absolute_url_for_expanded(self):
if self.is_animated():
return self.get_absolute_url_for_image_type('original')
return self.get_absolute_url_for_image_type('giant')
#def __getattr__(self, name):
# try:
# return super(ContentDetails, self).__getattr__(name)
# except AttributeError as e:
# if name in self.UGC_IMAGES or name == 'original':
# return None
# raise e
def __getitem__(self, item):
try:
return getattr(self, item)
except AttributeError:
raise IndexError
class CommentDetailsStickersMixin(object):
def sorted_sticker_counts(self):
counts = dict([(stickers.details_for(type_id), count) for type_id, count in self._d['sticker_counts'].items()])
sorted_counts = stickers.sorted_counts(counts)
count_json = [
{
'type_id': sticker.type_id,
'count': count,
'name': sticker.name,
}
for sticker, count
in sorted_counts
]
return count_json
def top_sticker(self):
try:
return self.sorted_sticker_counts()[0]
except IndexError:
return None
class CommentDetailsRealtimeMixin(object):
updates_channel = property(lambda self: RealtimeChannel('cu:%s' % self.id, 5, ttl=24*60*60))
class CommentDetails(ClientDetailsBase, CommentDetailsStickersMixin, CommentDetailsRealtimeMixin):
TO_CLIENT_WHITELIST = [
'top_sticker',
'sorted_sticker_counts',
'author_name',
'category',
'category_pretty_name',
'category_url',
'flag_counts',
'id',
'external_content',
'is_collapsed',
('is_remix', True, '_is_real_remix'),
'judged',
'last_reply_id',
'last_reply_time',
'ot_hidden',
'parent_id',
'parent_url',
'replied_comment',
'reply_content',
'reply_content_id',
'reply_count',
('reply_text', False, 'ugc_reply_text'),
'repost_count',
'share_page_url',
'short_id',
'staff',
'sticker_counts',
'tags',
'thread_op_comment_id',
'timestamp',
('title', False, 'ugc_title'),
'url',
'visibility',
]
def __init__(self, details):
super(CommentDetails, self).__init__(details)
self.pins = None
self._thread = None
@property
def external_content(self):
return self._d.get('external_content', [])
#TODO temporary until we nail the new Details API
# Just needs a separate entry point from to_client,
# though this will be moved into CachedDetails internals.
def to_dict(self):
return self._d
@classmethod
def from_id(cls, comment_id):
""" Does not include user pins. """
from canvas.models import Comment
return Comment.details_by_id(comment_id)()
@classmethod
def from_ids(cls, comment_ids):
""" Returns a list of CommentDetails instances. Does not include user pins. """
from canvas.models import Comment
details = [Comment.details_by_id(comment_id) for comment_id in comment_ids]
return CachedCall.many_multicall(details)[0]
@property
def tags(self):
return [Tag(tag) for tag in self._d['tags']]
@property
def linked_url(self):
if self.url:
hash_ = '' if self.is_op() else '#current'
return self.url + hash_
def has_replies(self):
return self._d.get('last_reply_id') is not None
def has_small_image(self):
return self._d['reply_content'].get('small_square') is not None
def has_content(self):
return bool(self._d.get('reply_content'))
def author_is_canvas(self):
""" Whether this is by the special Canvas user account. Returns `False` if anonymous. """
return self.author_name.lower() == 'canvas'
def is_author(self, user):
return self.author_id == user.id
def is_op(self):
return not self.parent_id
def ugc_reply_text(self):
return strip_template_chars(self.reply_text)
def ugc_title(self):
return strip_template_chars(self.title)
def is_remix(self):
content = self._d.get('reply_content')
if content:
return bool(content.get('remix_of_giant_url'))
return False
def _is_real_remix(self):
return self._d.get('is_remix')
def is_animated(self):
try:
return bool(self._d['reply_content']['original']['animated'])
except KeyError:
return False
def is_visible(self):
from canvas.models import Visibility
return Visibility.is_visible(self.visibility)
@property
def is_anonymous(self):
return self.author_name.lower() == 'anonymous'
def is_monster_top(self, mobile=False):
from apps.monster.models import MONSTER_GROUP, MONSTER_MOBILE_GROUP
group = {True: MONSTER_MOBILE_GROUP, False: MONSTER_GROUP}[mobile]
return bool(self.category and self.category == group and not self.parent_id)
def is_monster_bottom(self, mobile=False):
from apps.monster.models import MONSTER_GROUP, MONSTER_MOBILE_GROUP
group = {True: MONSTER_MOBILE_GROUP, False: MONSTER_GROUP}
return bool(self.category and self.category == group and self.parent_id)
def get_last_reply(self):
if self._d.get('last_reply_id') is not None:
return CommentDetails.from_id(self._d['last_reply_id'])
@property
def thread(self):
""" The thread that owns this comment, whether this is an OP or a reply. """
from apps.threads.details_models import ThreadDetails
if self._thread is None:
self._thread = ThreadDetails(self)
return self._thread
@property
def reply_content(self):
if self._d.get('reply_content'):
if settings.PROJECT == 'canvas':
return ContentDetails(self._d['reply_content'])
elif settings.PROJECT == 'drawquest':
from drawquest.details_models import ContentDetails as DrawquestContentDetails
return DrawquestContentDetails(self._d['reply_content'])
return {}
@property
def author_profile_url(self):
if not self.is_anonymous:
return '/user/' + self.author_name
def get_footer_absolute_url(self):
from realtime.footer import CommentFooter
protocol = 'https' if settings.HTTPS_ENABLED else 'http'
return protocol + '://' + settings.UGC_HOST + '/ugc/' + CommentFooter.get_path_from_comment_details(self)
def get_feed_thumbnail_absolute_url(self):
return self.reply_content.get_absolute_url_for_image_type('column')
def get_thumbnail_absolute_url(self):
if self.reply_content:
return self.reply_content.get_absolute_url_for_image_type('small_square')
return '/static/img/text-post.png'
@property
def parent_comment(self):
return self._d.get('parent_comment')
@parent_comment.setter
def parent_comment(self, val):
self._d['parent_comment'] = val
def short_id(self):
return base36encode(self._d.get('id'))
class RealtimeCommentDetails(ClientDetailsBase):
TO_CLIENT_WHITELIST = [
'sticker_counts',
'sorted_sticker_counts',
'top_sticker',
'reply_count',
'id',
]
def __init__(self, comment_details):
for key in self.TO_CLIENT_WHITELIST:
setattr(self, key, getattr(comment_details, key))
class FlaggedCommentDetails(CommentDetails):
TO_CLIENT_WHITELIST = [
'top_sticker',
'sorted_sticker_counts',
'author_name',
'category',
'category_url',
'first_flag',
'flag_counts',
'flag_count',
'id',
'external_content',
'is_collapsed',
'is_disabled',
'is_downvoted',
'is_inappropriate',
'is_offtopic',
'is_removed',
'judged',
'last_reply_id',
'last_reply_time',
'ot_hidden',
'parent_id',
'parent_url',
'real_username',
'replied_comment',
'reply_content',
'reply_content_id',
'reply_count',
'reply_text',
'repost_count',
'share_page_url',
'short_id',
'staff',
'sticker_counts',
'thread_op_comment_id',
'timestamp',
'title' ,
'url',
'user_ban_count',
'user_warning_count',
'visibility',
'flag_count',
]
| bsd-3-clause | -5,271,667,882,162,043,000 | 29.012755 | 119 | 0.573311 | false |
egbertbouman/tribler-g | Tribler/Test/test_buddycast_msg8plus.py | 1 | 17070 | # Written by Nicolas Neubauer, Arno Bakker
# see LICENSE.txt for license information
#
# Test case for BuddyCast overlay version 12 (and 8). To be integrated into
# test_buddycast_msg.py
#
# Very sensitive to the order in which things are put into DB,
# so not a robust test
import unittest
import os
import sys
import time
import tempfile
import shutil
from sha import sha
from random import randint,shuffle
from traceback import print_exc
from types import StringType, ListType, DictType
from threading import Thread
from time import sleep
from M2Crypto import Rand,EC
from Tribler.Test.test_as_server import TestAsServer
from olconn import OLConnection
from Tribler.__init__ import LIBRARYNAME
from Tribler.Core.BitTornado.bencode import bencode,bdecode
from Tribler.Core.BitTornado.BT1.MessageID import *
from Tribler.Core.CacheDB.CacheDBHandler import BarterCastDBHandler
from Tribler.Core.BuddyCast.buddycast import BuddyCastFactory, BuddyCastCore
from Tribler.Core.Overlay.SecureOverlay import OLPROTO_VER_FIRST, OLPROTO_VER_SECOND, OLPROTO_VER_THIRD, OLPROTO_VER_FOURTH, OLPROTO_VER_FIFTH, OLPROTO_VER_SIXTH, OLPROTO_VER_SEVENTH, OLPROTO_VER_EIGHTH, OLPROTO_VER_ELEVENTH, OLPROTO_VER_CURRENT, OLPROTO_VER_LOWEST
from Tribler.Core.simpledefs import *
from Tribler.Core.CacheDB.SqliteCacheDBHandler import *
from Tribler.Core.CacheDB.sqlitecachedb import CURRENT_MAIN_DB_VERSION
DEBUG=True
class TestBuddyCastMsg8Plus(TestAsServer):
"""
Testing BuddyCast 5 / overlay protocol v12+v8 interactions:
swarm size info exchange.
"""
def setUp(self):
""" override TestAsServer """
TestAsServer.setUp(self)
Rand.load_file('randpool.dat', -1)
def setUpPreSession(self):
""" override TestAsServer """
TestAsServer.setUpPreSession(self)
# Enable buddycast
self.config.set_buddycast(True)
BuddyCastCore.TESTASSERVER = True
self.config.set_start_recommender(True)
self.config.set_bartercast(True)
# Arno, 2010-02-02: Install empty superpeers.txt so no interference from
# real BuddyCast.
self.config.set_crawler(False)
# Write superpeers.txt
self.install_path = tempfile.mkdtemp()
spdir = os.path.join(self.install_path, LIBRARYNAME, 'Core')
os.makedirs(spdir)
statsdir = os.path.join(self.install_path, LIBRARYNAME, 'Core', 'Statistics')
os.makedirs(statsdir)
superpeerfilename = os.path.join(spdir, 'superpeer.txt')
print >> sys.stderr,"test: writing empty superpeers to",superpeerfilename
f = open(superpeerfilename, "w")
f.write('# Leeg')
f.close()
self.config.set_install_dir(self.install_path)
srcfiles = []
srcfiles.append(os.path.join(LIBRARYNAME,"schema_sdb_v"+str(CURRENT_MAIN_DB_VERSION)+".sql"))
for srcfile in srcfiles:
sfn = os.path.join('..','..',srcfile)
dfn = os.path.join(self.install_path,srcfile)
print >>sys.stderr,"test: copying",sfn,dfn
shutil.copyfile(sfn,dfn)
def setUpPostSession(self):
""" override TestAsServer """
TestAsServer.setUpPostSession(self)
self.mypermid = str(self.my_keypair.pub().get_der())
self.hispermid = str(self.his_keypair.pub().get_der())
self.myhash = sha(self.mypermid).digest()
self.buddycast = BuddyCastFactory.getInstance(superpeer=True)
self.buddycast.olthread_register(True)
# arg0 = sys.argv[0].lower()
# if arg0.endswith('.exe'):
# installdir = os.path.abspath(os.path.dirname(sys.argv[0]))
# else:
# installdir = os.getcwd()
# self.utility = Utility(installdir)
        # wait for buddycast to have completed one run cycle,
# seems to create problems otherwise
while not self.buddycast.ranonce:
pass
def tearDown(self):
""" override TestAsServer """
TestAsServer.tearDown(self)
try:
os.remove('randpool.dat')
except:
pass
def singtest_all_olproto_ver_current(self):
self._test_all(OLPROTO_VER_CURRENT)
def singtest_all_olproto_ver_11(self):
self._test_all(11)
def singtest_all_olproto_ver_8(self):
self._test_all(8)
def _test_all(self,myoversion):
"""
I want to start a Tribler client once and then connect to
it many times. So there must be only one test method
to prevent setUp() from creating a new client every time.
The code is constructed so unittest will show the name of the
(sub)test where the error occured in the traceback it prints.
"""
# Arno, 2010-02-03: clicklog 1,2,3 must be run consecutively
# create_mypref() must be called after clicklog 1,2,3
self.subtest_good_buddycast_clicklog(1,myoversion)
self.subtest_good_buddycast_clicklog(2,myoversion)
self.subtest_good_buddycast_clicklog(3,myoversion)
self.subtest_terms(myoversion)
self.subtest_create_mypref()
self.subtest_create_bc(myoversion)
def get_good_clicklog_msg(self,n,myoversion=8):
if n==1:
# OLv8:
# infohash
# search terms
# click position
# reranking strategy
# OLv11:
# number of seeders
# number of leechers
# age of checking
# number of sources seen'
prec = ["hash1hash1hash1hash1", ["linux","ubuntu"], 1, 2]
if myoversion >= 11:
prec += [400, 500, 1000, 50]
preferences = [prec]
if myoversion >= 11:
prec = ['hash0hash0hash0hash0', 300, 800, 5000, 30]
collected_torrents = [prec]
else:
collected_torrents = ['hash0hash0hash0hash0']
elif n==2:
prec = ["hash2hash2hash2hash2", ["linux", "ubuntu"], 2, 2]
if myoversion >= 11:
prec += [600, 700,20000,60]
preferences = [prec]
if myoversion >= 11:
prec = ['hash2hash2hash2hash2', 500, 200, 70000, 8000]
collected_torrents = [prec]
else:
collected_torrents = ["hash2hash2hash2hash2"]
elif n==3:
prec = ["hash3hash3hash3hash3", ["linux","redhat"], 5 ,2 ]
if myoversion >= 11:
prec += [800, 900, 30000, 70]
preferences = [prec]
if myoversion >= 11:
prec = ['hash3hash3hash3hash3', 700, 200, 45000, 75]
collected_torrents = [prec]
else:
collected_torrents = ['hash3hash3hash3hash3']
return {
'preferences': preferences,
'ndls': 1,
'permid': self.mypermid,
'ip': '127.0.0.1', #'130.149.146.117',
'taste buddies': [],
'name': 'nic',
'random peers': [],
'collected torrents': collected_torrents,
'nfiles': 0,
'npeers': 0,
'port': self.hisport,
'connectable': 1}
def subtest_good_buddycast_clicklog(self, i, myoversion):
"""sends two buddy cast messages containing clicklog data,
then checks in the DB to find out whether the correct
data was stored.
This in fact checks quite a lot of things.
For example, the messages always contain terms [1,2]
"""
print >>sys.stderr,"\ntest: subtest_good_buddycast_clicklog",i,"selversion",myoversion
s = OLConnection(self.my_keypair,'localhost',self.hisport,myoversion=myoversion)
prefmsg = self.get_good_clicklog_msg(i,myoversion)
print >>sys.stderr,myoversion,`prefmsg`
msg = self.create_payload(prefmsg)
s.send(msg)
resp = s.recv()
if len(resp)>0:
print >>sys.stderr,"test: reply message %s:%s" % (getMessageName(resp[0]), resp[1:])
else:
print >>sys.stderr,"no reply message"
self.assert_(len(resp) > 0)
#if we have survived this, check if the content of the remote database is correct
search_db = self.session.open_dbhandler(NTFY_SEARCH)
term_db = self.session.open_dbhandler(NTFY_TERM)
pref_db = self.session.open_dbhandler(NTFY_PREFERENCES)
torrent_db = self.session.open_dbhandler(NTFY_TORRENTS)
torrent_id = None
while not torrent_id:
hash = prefmsg['preferences'][0][0]
print >> sys.stderr, "hash: %s, bin2str: %s" % (hash, bin2str(hash))
torrent_data = torrent_db.getTorrentID(hash)
print >> sys.stderr, "Torrent data for torrent %s: %s" % (prefmsg['preferences'][0][0], torrent_data)
torrent_id = torrent_data
if not torrent_id:
print >> sys.stderr, "torrent not yet saved, waiting..."
sleep(1)
# self.getAll("rowid, peer_id, torrent_id, click_position,reranking_strategy", order_by="peer_id, torrent_id")
real_prefs = pref_db.getAllEntries()
print >>sys.stderr,"test: getAllEntries returned",real_prefs
my_peer_id = real_prefs[0][1]
real_terms = term_db.getAllEntries()
real_search = search_db.getAllEntries()
if i==1:
wanted_prefs = [[1,my_peer_id,1,1,2]]
wanted_terms = [[1,u'linux'], [2,u'ubuntu']]
wanted_search = [[1,my_peer_id,'?',1,0],
[2,my_peer_id,'?',2,1]]
elif i==2:
# Arno, 2010-02-04: Nicolas assumed the collected torrent for i=1
# wouldn't be stored in DB?
wanted_prefs = [[1,my_peer_id,'?',1,2],[2,my_peer_id,torrent_id,2,2]]
wanted_terms = [[1,u'linux'], [2,u'ubuntu']]
wanted_search = [[1,my_peer_id,'?',1,0],
[2,my_peer_id,'?',2,1],
[3,my_peer_id,'?',1,0],
[4,my_peer_id,'?',2,1]]
elif i==3:
wanted_prefs = [[1,my_peer_id,'?',1,2],[2,my_peer_id,'?',2,2],[3,my_peer_id,torrent_id,5,2]]
wanted_terms = [[1,u'linux'], [2,u'ubuntu'], [3, u'redhat']]
wanted_search = [[1,my_peer_id,'?',1,0],
[2,my_peer_id,'?',2,1],
[3,my_peer_id,'?',1,0],
[4,my_peer_id,'?',2,1],
[5,my_peer_id,'?',1,0],
[6,my_peer_id,'?',3,1]]
print >> sys.stderr, "real_prefs: %s" % real_prefs
print >> sys.stderr, "real_terms: %s" % real_terms
print >> sys.stderr, "real_search: %s " % real_search
print >> sys.stderr, "wanted_prefs: %s" % wanted_prefs
print >> sys.stderr, "wanted_terms: %s" % wanted_terms
print >> sys.stderr, "wanted_search: %s " % wanted_search
self.assert_(self.lol_equals(real_search, wanted_search, "good buddycast %d: search" % i))
self.assert_(self.lol_equals(real_terms, wanted_terms, "good buddycast %d: terms" % i))
self.assert_(self.lol_equals(real_prefs, wanted_prefs, "good buddycast %d: prefs" % i))
def subtest_terms(self,myoversion):
"""assumes clicklog message 1 and 2 have been sent and digested"""
print >>sys.stderr,"\ntest: subtest_terms"
term_db = self.session.open_dbhandler(NTFY_TERM)
s = OLConnection(self.my_keypair,'localhost',self.hisport,myoversion=myoversion)
msg = self.get_good_clicklog_msg(3,myoversion)
msg = self.create_payload(msg)
s.send(msg)
resp = s.recv()
self.assert_(len(resp) > 0)
termid = term_db.getTermID(u"linux")
print >>sys.stderr, "TermID for Linux: %s" % termid
#self.assert_(termid == 1)
#self.assert_(term_db.getTerm(1)==bin2str(str(u"linux")))
completedTerms = term_db.getTermsStartingWith("li")
print >> sys.stderr, "terms starting with l: %s" % completedTerms
self.assert_(len(completedTerms)==1)
self.assert_(u'linux' in completedTerms)
term_db.insertTerm("asd#")
completedTerms = term_db.getTermsStartingWith("asd")
print >> sys.stderr, "terms starting with asd: %s" % completedTerms
self.assert_(len(completedTerms)==1)
# Arno, 2010-02-03: Nicolas had 'asd' here, but I don't see any place
# where the # should have been stripped.
#
self.assert_(u'asd#' in completedTerms)
def subtest_create_mypref(self):
print >>sys.stderr,"\ntest: creating test MyPreference data"
torrent_db = self.session.open_dbhandler(NTFY_TORRENTS)
torrent_db.addInfohash('mhashmhashmhashmhash')
torrent_id = torrent_db.getTorrentID('mhashmhashmhashmhash')
mypref_db = self.session.open_dbhandler(NTFY_MYPREFERENCES)
search_db = self.session.open_dbhandler(NTFY_SEARCH)
mypref_db.addMyPreference('mhashmhashmhashmhash', {'destination_path':''}, commit=True)
clicklog_data = {
'click_position': 1,
'reranking_strategy': 2,
'keywords': ['linux', 'fedora']
}
mypref_db.addClicklogToMyPreference('mhashmhashmhashmhash', clicklog_data, commit=True)
# self.getAll("torrent_id, click_position, reranking_strategy", order_by="torrent_id")
allEntries = mypref_db.getAllEntries()
print >> sys.stderr, "all mypref entries: %s" % allEntries
self.assert_(len(allEntries)==1)
# (torrent_id, click_pos, rerank_strategy)
mypref_wanted = [['?',1,2]]
self.assert_(self.lol_equals(allEntries, mypref_wanted, "create mypref all"))
# self.getAll("rowid, peer_id, torrent_id, term_id, term_order ", order_by="rowid")
real_search = search_db.getAllOwnEntries()
wanted_search = [[7,0,torrent_id,1,0],
[8,0,torrent_id,5,1]] # is now 5 for some reason
self.assert_(self.lol_equals(real_search, wanted_search, "create mypref allown"))
def subtest_create_bc(self,myoversion):
print >>sys.stderr,"\ntest: creating test create_bc"
torrent_db = self.session.open_dbhandler(NTFY_TORRENTS)
torrent_db._db.update("Torrent", status_id=1)
pref_db = self.session.open_dbhandler(NTFY_MYPREFERENCES)
pref_db.loadData()
msg = self.buddycast.buddycast_core.createBuddyCastMessage(0, myoversion, target_ip="127.0.0.1", target_port=80)
print >> sys.stderr, "created bc pref: %s" % msg
wantpref = ['mhashmhashmhashmhash',['linux','fedora'],1,2]
if myoversion >= OLPROTO_VER_ELEVENTH:
wantpref += [-1,-1,-1,-1]
wantprefs = [wantpref]
self.assert_(msg['preferences']==wantprefs)
def lol_equals(self, lol1, lol2, msg):
ok = True
for (l1, l2) in zip(lol1, lol2):
for (e1, e2) in zip(l1, l2):
if e1=='?' or e2=='?':
continue
if not e1==e2:
print >> sys.stderr, "%s != %s!" % (e1, e2)
ok = False
break
if not ok:
print >> sys.stderr, "%s: lol != lol:\nreal %s\nwanted %s" % (msg, lol1, lol2)
return ok
def create_payload(self,r):
return BUDDYCAST+bencode(r)
def test_suite():
suite = unittest.TestSuite()
# We should run the tests in a separate Python interpreter to prevent
# problems with our singleton classes, e.g. PeerDB, etc.
if len(sys.argv) != 2:
print "Usage: python test_buddycast_msg8plus.py <method name>"
else:
suite.addTest(TestBuddyCastMsg8Plus(sys.argv[1]))
return suite
def main():
unittest.main(defaultTest='test_suite',argv=[sys.argv[0]])
if __name__ == "__main__":
main()
| lgpl-2.1 | -2,051,229,991,025,018,400 | 37.513889 | 265 | 0.553017 | false |
cernops/keystone | keystone/resource/backends/base.py | 1 | 22834 | # Copyright 2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import abc
import copy
from oslo_log import log
from oslo_log import versionutils
import six
import keystone.conf
from keystone import exception
from keystone.i18n import _
from keystone.i18n import _LE
CONF = keystone.conf.CONF
LOG = log.getLogger(__name__)
def get_project_from_domain(domain_ref):
"""Create a project ref from the provided domain ref."""
project_ref = domain_ref.copy()
project_ref['is_domain'] = True
project_ref['domain_id'] = None
project_ref['parent_id'] = None
return project_ref
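# Illustrative example (values assumed, not from the original source): a domain
# ref such as {'id': 'default', 'name': 'Default', 'enabled': True} is returned
# by get_project_from_domain() as the same dict augmented with
# {'is_domain': True, 'domain_id': None, 'parent_id': None}, i.e. the domain
# expressed as a project acting as a domain.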
# The ResourceDriverBase class is the set of driver methods from earlier
# drivers that we still support, that have not been removed or modified. This
# class is then used to create the augmented V8 and V9 version abstract driver
# classes, without having to duplicate a lot of abstract method signatures.
# If you remove a method from V9, then move the abstract methods from this Base
# class to the V8 class. Do not modify any of the method signatures in the Base
# class - changes should only be made in the V8 and subsequent classes.
# Starting with V9, some drivers use a special value to represent a domain_id
# of None. See comment in Project class of resource/backends/sql.py for more
# details.
NULL_DOMAIN_ID = '<<keystone.domain.root>>'
@six.add_metaclass(abc.ABCMeta)
class ResourceDriverBase(object):
def _get_list_limit(self):
return CONF.resource.list_limit or CONF.list_limit
# project crud
@abc.abstractmethod
def list_projects(self, hints):
"""List projects in the system.
:param hints: filter hints which the driver should
implement if at all possible.
:returns: a list of project_refs or an empty list.
"""
raise exception.NotImplemented() # pragma: no cover
@abc.abstractmethod
def list_projects_from_ids(self, project_ids):
"""List projects for the provided list of ids.
:param project_ids: list of ids
:returns: a list of project_refs.
This method is used internally by the assignment manager to bulk read
a set of projects given their ids.
"""
raise exception.NotImplemented() # pragma: no cover
@abc.abstractmethod
def list_project_ids_from_domain_ids(self, domain_ids):
"""List project ids for the provided list of domain ids.
:param domain_ids: list of domain ids
:returns: a list of project ids owned by the specified domain ids.
This method is used internally by the assignment manager to bulk read
a set of project ids given a list of domain ids.
"""
raise exception.NotImplemented() # pragma: no cover
@abc.abstractmethod
def list_projects_in_domain(self, domain_id):
"""List projects in the domain.
:param domain_id: the driver MUST only return projects
within this domain.
:returns: a list of project_refs or an empty list.
"""
raise exception.NotImplemented() # pragma: no cover
@abc.abstractmethod
def get_project(self, project_id):
"""Get a project by ID.
:returns: project_ref
:raises keystone.exception.ProjectNotFound: if project_id does not
exist
"""
raise exception.NotImplemented() # pragma: no cover
@abc.abstractmethod
def update_project(self, project_id, project):
"""Update an existing project.
:raises keystone.exception.ProjectNotFound: if project_id does not
exist
:raises keystone.exception.Conflict: if project name already exists
"""
raise exception.NotImplemented() # pragma: no cover
@abc.abstractmethod
def delete_project(self, project_id):
"""Delete an existing project.
:raises keystone.exception.ProjectNotFound: if project_id does not
exist
"""
raise exception.NotImplemented() # pragma: no cover
@abc.abstractmethod
def list_project_parents(self, project_id):
"""List all parents from a project by its ID.
:param project_id: the driver will list the parents of this
project.
:returns: a list of project_refs or an empty list.
:raises keystone.exception.ProjectNotFound: if project_id does not
exist
"""
raise exception.NotImplemented()
@abc.abstractmethod
def list_projects_in_subtree(self, project_id):
"""List all projects in the subtree of a given project.
:param project_id: the driver will get the subtree under
this project.
:returns: a list of project_refs or an empty list
:raises keystone.exception.ProjectNotFound: if project_id does not
exist
"""
raise exception.NotImplemented()
@abc.abstractmethod
def is_leaf_project(self, project_id):
"""Check if a project is a leaf in the hierarchy.
:param project_id: the driver will check if this project
is a leaf in the hierarchy.
:raises keystone.exception.ProjectNotFound: if project_id does not
exist
"""
raise exception.NotImplemented()
def _validate_default_domain(self, ref):
"""Validate that either the default domain or nothing is specified.
Also removes the domain from the ref so that LDAP doesn't have to
persist the attribute.
"""
ref = ref.copy()
domain_id = ref.pop('domain_id', CONF.identity.default_domain_id)
self._validate_default_domain_id(domain_id)
return ref
def _validate_default_domain_id(self, domain_id):
"""Validate that the domain ID belongs to the default domain."""
if domain_id != CONF.identity.default_domain_id:
raise exception.DomainNotFound(domain_id=domain_id)
class ResourceDriverV8(ResourceDriverBase):
"""Removed or redefined methods from V8.
Move the abstract methods of any methods removed or modified in later
versions of the driver from ResourceDriverBase to here. We maintain this
so that legacy drivers, which will be a subclass of ResourceDriverV8, can
still reference them.
"""
@abc.abstractmethod
def create_project(self, tenant_id, tenant):
"""Create a new project.
:param tenant_id: This parameter can be ignored.
:param dict tenant: The new project
Project schema::
type: object
properties:
id:
type: string
name:
type: string
domain_id:
type: string
description:
type: string
enabled:
type: boolean
parent_id:
type: string
is_domain:
type: boolean
required: [id, name, domain_id]
additionalProperties: true
If project doesn't match the schema the behavior is undefined.
The driver can impose requirements such as the maximum length of a
field. If these requirements are not met the behavior is undefined.
:raises keystone.exception.Conflict: if the project id already exists
or the name already exists for the domain_id.
"""
raise exception.NotImplemented() # pragma: no cover
@abc.abstractmethod
def get_project_by_name(self, tenant_name, domain_id):
"""Get a tenant by name.
:returns: tenant_ref
:raises keystone.exception.ProjectNotFound: if a project with the
tenant_name does not exist within the domain
"""
raise exception.NotImplemented() # pragma: no cover
# Domain management functions for backends that only allow a single
# domain. Although we no longer use this, a custom legacy driver might
# have made use of it, so keep it here in case.
def _set_default_domain(self, ref):
"""If the domain ID has not been set, set it to the default."""
if isinstance(ref, dict):
if 'domain_id' not in ref:
ref = ref.copy()
ref['domain_id'] = CONF.identity.default_domain_id
return ref
elif isinstance(ref, list):
return [self._set_default_domain(x) for x in ref]
else:
raise ValueError(_('Expected dict or list: %s') % type(ref))
# domain crud
@abc.abstractmethod
def create_domain(self, domain_id, domain):
"""Create a new domain.
:raises keystone.exception.Conflict: if the domain_id or domain name
already exists
"""
raise exception.NotImplemented() # pragma: no cover
@abc.abstractmethod
def list_domains(self, hints):
"""List domains in the system.
:param hints: filter hints which the driver should
implement if at all possible.
:returns: a list of domain_refs or an empty list.
"""
raise exception.NotImplemented() # pragma: no cover
@abc.abstractmethod
def list_domains_from_ids(self, domain_ids):
"""List domains for the provided list of ids.
:param domain_ids: list of ids
:returns: a list of domain_refs.
This method is used internally by the assignment manager to bulk read
a set of domains given their ids.
"""
raise exception.NotImplemented() # pragma: no cover
@abc.abstractmethod
def get_domain(self, domain_id):
"""Get a domain by ID.
:returns: domain_ref
:raises keystone.exception.DomainNotFound: if domain_id does not exist
"""
raise exception.NotImplemented() # pragma: no cover
@abc.abstractmethod
def get_domain_by_name(self, domain_name):
"""Get a domain by name.
:returns: domain_ref
:raises keystone.exception.DomainNotFound: if domain_name does not
exist
"""
raise exception.NotImplemented() # pragma: no cover
@abc.abstractmethod
def update_domain(self, domain_id, domain):
"""Update an existing domain.
:raises keystone.exception.DomainNotFound: if domain_id does not exist
:raises keystone.exception.Conflict: if domain name already exists
"""
raise exception.NotImplemented() # pragma: no cover
@abc.abstractmethod
def delete_domain(self, domain_id):
"""Delete an existing domain.
:raises keystone.exception.DomainNotFound: if domain_id does not exist
"""
raise exception.NotImplemented() # pragma: no cover
class ResourceDriverV9(ResourceDriverBase):
"""New or redefined methods from V8.
Add any new V9 abstract methods (or those with modified signatures) to
this class.
"""
@abc.abstractmethod
def create_project(self, project_id, project):
"""Create a new project.
:param project_id: This parameter can be ignored.
:param dict project: The new project
Project schema::
type: object
properties:
id:
type: string
name:
type: string
domain_id:
type: [string, null]
description:
type: string
enabled:
type: boolean
parent_id:
type: string
is_domain:
type: boolean
required: [id, name, domain_id]
additionalProperties: true
If the project doesn't match the schema the behavior is undefined.
The driver can impose requirements such as the maximum length of a
field. If these requirements are not met the behavior is undefined.
:raises keystone.exception.Conflict: if the project id already exists
or the name already exists for the domain_id.
"""
raise exception.NotImplemented() # pragma: no cover
@abc.abstractmethod
def get_project_by_name(self, project_name, domain_id):
"""Get a project by name.
:returns: project_ref
:raises keystone.exception.ProjectNotFound: if a project with the
project_name does not exist within the domain
"""
raise exception.NotImplemented() # pragma: no cover
@abc.abstractmethod
def delete_projects_from_ids(self, project_ids):
"""Delete a given list of projects.
Deletes a list of projects. Ensures no project on the list exists
after it is successfully called. If an empty list is provided,
        it is silently ignored. In addition, if a project ID in the list
of project_ids is not found in the backend, no exception is raised,
but a message is logged.
"""
raise exception.NotImplemented() # pragma: no cover
@abc.abstractmethod
def list_projects_acting_as_domain(self, hints):
"""List all projects acting as domains.
:param hints: filter hints which the driver should
implement if at all possible.
:returns: a list of project_refs or an empty list.
"""
raise exception.NotImplemented() # pragma: no cover
class V9ResourceWrapperForV8Driver(ResourceDriverV9):
"""Wrapper class to supported a V8 legacy driver.
In order to support legacy drivers without having to make the manager code
driver-version aware, we wrap legacy drivers so that they look like the
latest version. For the various changes made in a new driver, here are the
actions needed in this wrapper:
Method removed from new driver - remove the call-through method from this
class, since the manager will no longer be
calling it.
Method signature (or meaning) changed - wrap the old method in a new
signature here, and munge the input
and output parameters accordingly.
New method added to new driver - add a method to implement the new
functionality here if possible. If that is
not possible, then return NotImplemented,
since we do not guarantee to support new
functionality with legacy drivers.
This wrapper contains the following support for newer manager code:
- The current manager code expects domains to be represented as projects
acting as domains, something that may not be possible in a legacy driver.
Hence the wrapper will map any calls for projects acting as a domain back
    onto the driver domain methods. The caveat for this is that it assumes
that there can not be a clash between a project_id and a domain_id, in
which case it may not be able to locate the correct entry.
"""
@versionutils.deprecated(
as_of=versionutils.deprecated.MITAKA,
what='keystone.resource.ResourceDriverV8',
in_favor_of='keystone.resource.ResourceDriverV9',
remove_in=+2)
def __init__(self, wrapped_driver):
self.driver = wrapped_driver
def _get_domain_from_project(self, project_ref):
"""Create a domain ref from a project ref.
Based on the provided project ref (or partial ref), creates a
domain ref, so that the result can be passed to the driver
domain methods.
"""
domain_ref = project_ref.copy()
for k in ['parent_id', 'domain_id', 'is_domain']:
domain_ref.pop(k, None)
return domain_ref
def get_project_by_name(self, project_name, domain_id):
if domain_id is None:
try:
domain_ref = self.driver.get_domain_by_name(project_name)
return get_project_from_domain(domain_ref)
except exception.DomainNotFound:
raise exception.ProjectNotFound(project_id=project_name)
else:
return self.driver.get_project_by_name(project_name, domain_id)
def create_project(self, project_id, project):
if project['is_domain']:
new_domain = self._get_domain_from_project(project)
domain_ref = self.driver.create_domain(project_id, new_domain)
return get_project_from_domain(domain_ref)
else:
return self.driver.create_project(project_id, project)
def list_projects(self, hints):
"""List projects and/or domains.
We use the hints filter to determine whether we are listing projects,
domains or both.
If the filter includes domain_id==None, then we should only list
domains (convert to a project acting as a domain) since regular
projects always have a non-None value for domain_id.
Likewise, if the filter includes domain_id==<non-None value>, then we
should only list projects.
        If there is no domain_id filter, then we need to do a combined listing
of domains and projects, converting domains to projects acting as a
domain.
"""
domain_listing_filter = None
for f in hints.filters:
if (f['name'] == 'domain_id'):
domain_listing_filter = f
if domain_listing_filter is not None:
if domain_listing_filter['value'] is not None:
proj_list = self.driver.list_projects(hints)
else:
domains = self.driver.list_domains(hints)
proj_list = [get_project_from_domain(p) for p in domains]
hints.filters.remove(domain_listing_filter)
return proj_list
else:
# No domain_id filter, so combine domains and projects. Although
# we hand any remaining filters into each driver, since each filter
# might need to be carried out more than once, we use copies of the
# filters, allowing the original filters to be passed back up to
# controller level where a final filter will occur.
local_hints = copy.deepcopy(hints)
proj_list = self.driver.list_projects(local_hints)
local_hints = copy.deepcopy(hints)
domains = self.driver.list_domains(local_hints)
for domain in domains:
proj_list.append(get_project_from_domain(domain))
return proj_list
def list_projects_from_ids(self, project_ids):
return [self.get_project(id) for id in project_ids]
def list_project_ids_from_domain_ids(self, domain_ids):
return self.driver.list_project_ids_from_domain_ids(domain_ids)
def list_projects_in_domain(self, domain_id):
return self.driver.list_projects_in_domain(domain_id)
def get_project(self, project_id):
try:
domain_ref = self.driver.get_domain(project_id)
return get_project_from_domain(domain_ref)
except exception.DomainNotFound:
return self.driver.get_project(project_id)
def _is_domain(self, project_id):
ref = self.get_project(project_id)
return ref.get('is_domain', False)
def update_project(self, project_id, project):
if self._is_domain(project_id):
update_domain = self._get_domain_from_project(project)
domain_ref = self.driver.update_domain(project_id, update_domain)
return get_project_from_domain(domain_ref)
else:
return self.driver.update_project(project_id, project)
def delete_project(self, project_id):
if self._is_domain(project_id):
try:
self.driver.delete_domain(project_id)
except exception.DomainNotFound:
raise exception.ProjectNotFound(project_id=project_id)
else:
self.driver.delete_project(project_id)
def delete_projects_from_ids(self, project_ids):
raise exception.NotImplemented() # pragma: no cover
def list_project_parents(self, project_id):
"""List a project's ancestors.
The current manager expects the ancestor tree to end with the project
acting as the domain (since that's now the top of the tree), but a
legacy driver will not have that top project in their projects table,
since it's still in the domain table. Hence we lift the algorithm for
traversing up the tree from the driver to here, so that our version of
get_project() is called, which will fetch the "project" from the right
table.
"""
project = self.get_project(project_id)
parents = []
examined = set()
while project.get('parent_id') is not None:
if project['id'] in examined:
msg = _LE('Circular reference or a repeated '
'entry found in projects hierarchy - '
'%(project_id)s.')
LOG.error(msg, {'project_id': project['id']})
return
examined.add(project['id'])
parent_project = self.get_project(project['parent_id'])
parents.append(parent_project)
project = parent_project
return parents
def list_projects_in_subtree(self, project_id):
return self.driver.list_projects_in_subtree(project_id)
def is_leaf_project(self, project_id):
return self.driver.is_leaf_project(project_id)
def list_projects_acting_as_domain(self, hints):
refs = self.driver.list_domains(hints)
return [get_project_from_domain(p) for p in refs]
| apache-2.0 | -2,394,938,092,894,301,000 | 35.07267 | 79 | 0.616055 | false |
sagiss/sardana | src/sardana/macroserver/macros/examples/__init__.py | 1 | 1078 | #!/usr/bin/env python
##############################################################################
##
## This file is part of Sardana
##
## http://www.sardana-controls.org/
##
## Copyright 2011 CELLS / ALBA Synchrotron, Bellaterra, Spain
##
## Sardana is free software: you can redistribute it and/or modify
## it under the terms of the GNU Lesser General Public License as published by
## the Free Software Foundation, either version 3 of the License, or
## (at your option) any later version.
##
## Sardana is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU Lesser General Public License for more details.
##
## You should have received a copy of the GNU Lesser General Public License
## along with Sardana. If not, see <http://www.gnu.org/licenses/>.
##
##############################################################################
"""This package contains the macroserver example macros"""
__docformat__ = 'restructuredtext'
| lgpl-3.0 | 7,363,242,013,853,924,000 | 36.172414 | 78 | 0.625232 | false |
Kouhia/pyyttoni | denymanage.py | 1 | 6830 | #! /usr/bin/python
# coding=UTF-8
# Denyhosts managements script v. 0.4a
# Works only with ipv4 addresses.
# You can search and remove IP addresses from denyhosts ban.
#
# NOTE! You should test the script first without real "TargetPaths".
# If you use other system than Debian then paths might be completely different.
#
# Only tested with Debian Linux 7.9, UTF-8 and DenyHosts 2.6-10
# Does not work with Python 3.x
#
# By: Kouhia
#
#
# Latest changes (0.4):
# - Changed os.popen to subprocess.popen
# - Fixed more reliable check for denyhosts
#
# Latest changes (0.3):
# - Fixed issues with UTF-8
# - Added check to see if denyhosts is running
# - Added starting and stopping function for denyhosts
# - Added check for Linux OS (Script does not work in windows).
# - Fixed horrible regex bug in IP search. Did not escape special chars :(
#
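#
# Example invocations (illustrative; root privileges are assumed since the
# script edits files under /etc and /var/lib and drives the init script):
#   sudo python denymanage.py search 192.0.2.10
#   sudo python denymanage.py remove 192.0.2.10
# 'search' only reports matches in TargetPaths; 'remove' stops denyhosts,
# rewrites the target files without the matching lines, then restarts it.
#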
import os, platform
import sys
import re
import subprocess
import time
from sys import argv
# Target files where IP addresses are saved.
TargetPaths = [
'/etc/hosts.deny',
'/var/lib/denyhosts/hosts',
'/var/lib/denyhosts/hosts-restricted',
'/var/lib/denyhosts/hosts-root',
'/var/lib/denyhosts/hosts-valid',
'/var/lib/denyhosts/users-hosts'
]
# MatchCounter
MatchCounter = 0
# Stop denyhosts daemon
def StopDenyHosts ():
print "INFO: Stopping DenyHosts... "
initresult = subprocess.Popen("/etc/init.d/denyhosts " + "stop", shell=True).wait()
if initresult == 0:
return True # Only means that script is executed. Not that denyhosts is really stopped.
else:
return False
# Start denuhosts daemon
def StartDenyHosts ():
print "INFO: Starting DenyHosts... "
initresult = subprocess.Popen("/etc/init.d/denyhosts " + "start", shell=True).wait()
if initresult == 0:
return True # Only means that script is executed. Not that denyhosts is really started.
else:
return False
# Check if denyhosts daemon is running
def StatusDenyHosts ():
initresult = subprocess.Popen("/etc/init.d/denyhosts " + "status", shell=True).wait()
if initresult == 0:
return True
elif initresult == 3:
return False
else:
print "ERROR: Failed to recognize denyhost status from init script. Abort..."
quit()
# Check if OS is Linux
def IsLinux ():
if platform.system() == "Linux":
return True
print "ERROR: No Linux OS detected! Exit script..."
quit()
# Help text for script usage
def ScriptHelp ():
print "Usage: python %s (mode) (ipv4 address)" % sys.argv[0]
print "Valid modes are: search and remove"
# Script mode select
def ScriptMode (first, second):
if first == 'search':
print "INFO: We have found %d matches for that IP." % SearchTargets(second)
elif first == 'remove':
print "INFO: Removed %d IPs from targets." % RemoveTargets(second)
else:
print "ERROR: Invalid switch. Exit script."
ScriptHelp()
quit()
# Validate ipv4 address
def validate_ip (ip):
try:
a = ip.split('.')
if len(a) != 4:
return False
for x in a:
if not x.isdigit():
return False
i = int(x)
if i < 0 or i > 255:
return False
return True
except:
print "ERROR: Unexpected error while validating IPv4 address."
print sys.exc_info() # Prints unexpected error
quit()
# Search IP from target (must be a file path)
def SearchIP (ip, target):
try:
SearchCounter = 0
if ip in open(target).read():
print "INFO: Found IP address from target %s." % (target)
SearchCounter += 1
return True
else:
print "INFO: Did not find IP from target %s." % (target)
return False
except:
print "ERROR: Fatal unexpected error while searching IP from target. %s" % (target)
print sys.exc_info() # Prints unexpected error
quit()
# Remove IP from target (must be a file path)
def RemoveIP (ip, target):
try:
# Escape regex special characters from IP (especially ".").
# Horrible things will happen without this :(
ip = re.escape(ip)
# Counters
LineCounter = 1
RemoveCounter = 0
original = open(target, 'r+') # Open file in read and write mode
textlines = original.readlines() # read lines to variable
original.seek(0) # Go to beginning of file
for line in textlines: # For every line
#If we do not find target ip from line just write the line.
if re.search(r'%s' % ip, line) == None:
original.write(line) # write original line back to file
else: # Do nothing and add remove counter
# TEST: original.write(line) # write original line back to file
print "INFO: Removed line: %s" % (line)
RemoveCounter += 1
LineCounter += 1
# Shrink file and close filehandle
original.truncate()
original.close()
print "INFO: Found and removed %d IP address. Checked %d lines from target %s." % (RemoveCounter, LineCounter, target)
if RemoveCounter != 0: # Return true if IPs have been removed
return True
else:
return False
except:
print "ERROR: Fatal unexpected error while modifying target file."
print sys.exc_info() # Prints unexpected error
quit()
# Search IP from TargetPaths list
def SearchTargets (ip):
MatchCounter = 0
try:
for target in TargetPaths:
if SearchIP(ip, target):
MatchCounter += 1
return MatchCounter
except:
print "ERROR: Fatal unexpected error while searching IP from targets. Abort..."
print sys.exc_info() # Prints unexpected error
quit()
# Remove IP from TargetPaths list
def RemoveTargets (ip):
# First check if denyhosts is running, try to close it and abort if needed.
if StatusDenyHosts():
print "WARNING: Denyhosts is still running! Trying to close it..."
StopDenyHosts()
time.sleep(2)
if StatusDenyHosts():
print "ERROR: Unable to close denyhosts. Abort..."
quit()
MatchCounter = 0
try:
for target in TargetPaths:
if RemoveIP(ip, target):
MatchCounter += 1
return MatchCounter
except:
print "ERROR: Fatal unexpected error while removing IP from targets."
print sys.exc_info() # Prints unexpected error
quit()
###############
# Main #
###############
# Do not run if OS is not Linux
if IsLinux():
try:
script, first, second = argv
except ValueError:
print "ERROR: Did not get all the required input."
ScriptHelp()
quit()
except:
print "ERROR: Unexpected error with user input."
ScriptHelp()
quit()
if validate_ip(second):
ScriptMode(first, second)
else:
print "ERROR: Invalid IPv4 address."
quit()
# Check if denyhosts is running and start it if needed.
if StatusDenyHosts():
quit()
else:
StartDenyHosts()
time.sleep(2)
if StatusDenyHosts() == True:
quit()
else:
print "ERROR: Denyhosts not running after script execution."
| gpl-3.0 | 5,685,310,551,976,530,000 | 26.429719 | 122 | 0.664275 | false |
sam-washington/requests-aws4auth | requests_aws4auth/aws4auth.py | 1 | 30545 | """
Provides AWS4Auth class for handling Amazon Web Services version 4
authentication with the Requests module.
"""
# Licensed under the MIT License:
# http://opensource.org/licenses/MIT
from __future__ import unicode_literals
import hmac
import hashlib
import posixpath
import re
import shlex
import datetime
try:
from urllib.parse import urlparse, parse_qs, quote, unquote
except ImportError:
from urlparse import urlparse, parse_qs
from urllib import quote, unquote
from requests.auth import AuthBase
from .six import PY2, text_type
from .aws4signingkey import AWS4SigningKey
from .exceptions import DateMismatchError, NoSecretKeyError, DateFormatError
class AWS4Auth(AuthBase):
"""
Requests authentication class providing AWS version 4 authentication for
HTTP requests. Implements header-based authentication only, GET URL
parameter and POST parameter authentication are not supported.
Provides authentication for regions and services listed at:
http://docs.aws.amazon.com/general/latest/gr/rande.html
The following services do not support AWS auth version 4 and are not usable
with this package:
    * Simple Email Service (SES) - AWS auth v3 only
* Simple Workflow Service - AWS auth v3 only
* Import/Export - AWS auth v2 only
* SimpleDB - AWS auth V2 only
* DevPay - AWS auth v1 only
* Mechanical Turk - has own signing mechanism
You can reuse AWS4Auth instances to sign as many requests as you need.
Basic usage
-----------
>>> import requests
>>> from requests_aws4auth import AWS4Auth
>>> auth = AWS4Auth('<ACCESS ID>', '<ACCESS KEY>', 'eu-west-1', 's3')
>>> endpoint = 'http://s3-eu-west-1.amazonaws.com'
>>> response = requests.get(endpoint, auth=auth)
>>> response.text
<?xml version="1.0" encoding="UTF-8"?>
<ListAllMyBucketsResult xmlns="http://s3.amazonaws.com/doc/2006-03-01">
<Owner>
<ID>bcaf1ffd86f461ca5fb16fd081034f</ID>
<DisplayName>webfile</DisplayName>
...
This example lists your buckets in the eu-west-1 region of the Amazon S3
service.
STS Temporary Credentials
-------------------------
>>> from requests_aws4auth import AWS4Auth
>>> auth = AWS4Auth('<ACCESS ID>', '<ACCESS KEY>', 'eu-west-1', 's3',
session_token='<SESSION TOKEN>')
...
This example shows how to construct an AWS4Auth object for use with STS
temporary credentials. The ``x-amz-security-token`` header is added with
the session token. Temporary credential timeouts are not managed -- in
case the temporary credentials expire, they need to be re-generated and
the AWS4Auth object re-constructed with the new credentials.
Date handling
-------------
If an HTTP request to be authenticated contains a Date or X-Amz-Date
header, AWS will only accept authorisation if the date in the header
matches the scope date of the signing key (see
http://docs.aws.amazon.com/general/latest/gr/sigv4-date-handling.html).
From version 0.8 of requests-aws4auth, if the header date does not match
the scope date, the AWS4Auth class will automatically regenerate its
signing key, using the same scope parameters as the previous key except for
the date, which will be changed to match the request date. (If a request
does not include a date, the current date is added to the request in an
X-Amz-Date header).
The new behaviour from version 0.8 has implications for thread safety and
secret key security, see the "Automatic key regeneration", "Secret key
storage" and "Multithreading" sections below.
This also means that AWS4Auth is now attempting to parse and extract dates
from the values in X-Amz-Date and Date headers. Supported date formats are:
* RFC 7231 (e.g. Mon, 09 Sep 2011 23:36:00 GMT)
* RFC 850 (e.g. Sunday, 06-Nov-94 08:49:37 GMT)
* C time (e.g. Wed Dec 4 00:00:00 2002)
* Amz-Date format (e.g. 20090325T010101Z)
* ISO 8601 / RFC 3339 (e.g. 2009-03-25T10:11:12.13-01:00)
If either header is present but AWS4Auth cannot extract a date because all
present date headers are in an unrecognisable format, AWS4Auth will delete
any X-Amz-Date and Date headers present and replace with a single
X-Amz-Date header containing the current date. This behaviour can be
modified using the 'raise_invalid_date' keyword argument of the AWS4Auth
constructor.
Automatic key regeneration
--------------------------
If you do not want the signing key to be automatically regenerated when a
mismatch between the request date and the scope date is encountered, use
the alternative StrictAWS4Auth class, which is identical to AWS4Auth except
that upon encountering a date mismatch it just raises a DateMismatchError.
You can also use the PassiveAWS4Auth class, which mimics the AWS4Auth
behaviour prior to version 0.8 and just signs and sends the request,
whether the date matches or not. In this case it is up to the calling code
to handle an authentication failure response from AWS caused by a date
mismatch.
Secret key storage
------------------
To allow automatic key regeneration, the secret key is stored in the
AWS4Auth instance, in the signing key object. If you do not want this to
occur, instantiate the instance using an AWS4Signing key which was created
with the store_secret_key parameter set to False:
>>> sig_key = AWS4SigningKey(secret_key, region, service, date, False)
>>> auth = StrictAWS4Auth(access_id, sig_key)
The AWS4Auth class will then raise a NoSecretKeyError when it attempts to
regenerate its key. A slightly more conceptually elegant way to handle this
is to use the alternative StrictAWS4Auth class, again instantiating it with
an AWS4SigningKey instance created with store_secret_key = False.
Multithreading
--------------
If you share AWS4Auth (or even StrictAWS4Auth) instances between threads
you are likely to encounter problems. Because AWS4Auth instances may
unpredictably regenerate their signing key as part of signing a request,
threads using the same instance may find the key changed by another thread
halfway through the signing process, which may result in undefined
behaviour.
It may be possible to rig up a workable instance sharing mechanism using
locking primitives and the StrictAWS4Auth class, however this poor author
can't think of a scenario which works safely yet doesn't suffer from at
some point blocking all threads for at least the duration of an HTTP
request, which could be several seconds. If several requests come in in
close succession which all require key regenerations then the system could
be forced into serial operation for quite a length of time.
In short, it's best to create a thread-local instance of AWS4Auth for each
thread that needs to do authentication.
Class attributes
----------------
AWS4Auth.access_id -- the access ID supplied to the instance
AWS4Auth.region -- the AWS region for the instance
AWS4Auth.service -- the endpoint code for the service for this instance
AWS4Auth.date -- the date the instance is valid for
AWS4Auth.signing_key -- instance of AWS4SigningKey used for this instance,
either generated from the supplied parameters or
supplied directly on the command line
"""
default_include_headers = ['host', 'content-type', 'date', 'x-amz-*']
def __init__(self, *args, **kwargs):
"""
AWS4Auth instances can be created by supplying key scope parameters
directly or by using an AWS4SigningKey instance:
>>> auth = AWS4Auth(access_id, secret_key, region, service
... [, date][, raise_invalid_date=False][, session_token=None])
or
>>> auth = AWS4Auth(access_id, signing_key[, raise_invalid_date=False])
access_id -- This is your AWS access ID
secret_key -- This is your AWS secret access key
region -- The region you're connecting to, as per the list at
http://docs.aws.amazon.com/general/latest/gr/rande.html#s3_region
e.g. us-east-1. For services which don't require a region
(e.g. IAM), use us-east-1.
service -- The name of the service you're connecting to, as per
endpoints at:
http://docs.aws.amazon.com/general/latest/gr/rande.html
e.g. elasticbeanstalk.
date -- Date this instance is valid for. 8-digit date as str of the
form YYYYMMDD. Key is only valid for requests with a
Date or X-Amz-Date header matching this date. If date is
not supplied the current date is used.
signing_key -- An AWS4SigningKey instance.
raise_invalid_date
-- Must be supplied as keyword argument. AWS4Auth tries to
parse a date from the X-Amz-Date and Date headers of the
request, first trying X-Amz-Date, and then Date if
X-Amz-Date is not present or is in an unrecognised
format. If one or both of the two headers are present
yet neither are in a format which AWS4Auth recognises
then it will remove both headers and replace with a new
X-Amz-Date header using the current date.
If this behaviour is not wanted, set the
raise_invalid_date keyword argument to True, and
instead an InvalidDateError will be raised when neither
date is recognised. If neither header is present at all
then an X-Amz-Date header will still be added containing
the current date.
See the AWS4Auth class docstring for supported date
formats.
session_token
-- Must be supplied as keyword argument. If session_token
is set, then it is used for the x-amz-security-token
header, for use with STS temporary credentials.
"""
l = len(args)
if l not in [2, 4, 5]:
msg = 'AWS4Auth() takes 2, 4 or 5 arguments, {} given'.format(l)
raise TypeError(msg)
self.access_id = args[0]
if isinstance(args[1], AWS4SigningKey) and l == 2:
# instantiate from signing key
self.signing_key = args[1]
self.region = self.signing_key.region
self.service = self.signing_key.service
self.date = self.signing_key.date
elif l in [4, 5]:
# instantiate from args
secret_key = args[1]
self.region = args[2]
self.service = args[3]
self.date = args[4] if l == 5 else None
self.signing_key = None
self.regenerate_signing_key(secret_key=secret_key)
else:
raise TypeError()
raise_invalid_date = kwargs.get('raise_invalid_date', False)
if raise_invalid_date in [True, False]:
self.raise_invalid_date = raise_invalid_date
else:
raise ValueError('raise_invalid_date must be True or False in AWS4Auth.__init__()')
self.session_token = kwargs.get('session_token')
if self.session_token:
self.default_include_headers.append('x-amz-security-token')
self.include_hdrs = kwargs.get('include_hdrs',
self.default_include_headers)
AuthBase.__init__(self)
def regenerate_signing_key(self, secret_key=None, region=None,
service=None, date=None):
"""
Regenerate the signing key for this instance. Store the new key in
signing_key property.
Take scope elements of the new key from the equivalent properties
(region, service, date) of the current AWS4Auth instance. Scope
elements can be overridden for the new key by supplying arguments to
this function. If overrides are supplied update the current AWS4Auth
instance's equivalent properties to match the new values.
If secret_key is not specified use the value of the secret_key property
of the current AWS4Auth instance's signing key. If the existing signing
key is not storing its secret key (i.e. store_secret_key was set to
False at instantiation) then raise a NoSecretKeyError and do not
regenerate the key. In order to regenerate a key which is not storing
its secret key, secret_key must be supplied to this function.
Use the value of the existing key's store_secret_key property when
generating the new key. If there is no existing key, then default
to setting store_secret_key to True for new key.
"""
if secret_key is None and (self.signing_key is None or
self.signing_key.secret_key is None):
raise NoSecretKeyError
secret_key = secret_key or self.signing_key.secret_key
region = region or self.region
service = service or self.service
date = date or self.date
if self.signing_key is None:
store_secret_key = True
else:
store_secret_key = self.signing_key.store_secret_key
self.signing_key = AWS4SigningKey(secret_key, region, service, date,
store_secret_key)
self.region = region
self.service = service
self.date = self.signing_key.date
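    # Illustrative use (the date value is assumed): calling
    # auth.regenerate_signing_key(date='20160101') rebuilds the key using the
    # instance's existing region and service, and its stored secret key
    # (provided the current signing key kept it), scoped to the new date; it
    # also updates self.date to match.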
def __call__(self, req):
"""
Interface used by Requests module to apply authentication to HTTP
requests.
Add x-amz-content-sha256 and Authorization headers to the request. Add
x-amz-date header to request if not already present and req does not
contain a Date header.
Check request date matches date in the current signing key. If not,
regenerate signing key to match request date.
If request body is not already encoded to bytes, encode to charset
specified in Content-Type header, or UTF-8 if not specified.
req -- Requests PreparedRequest object
"""
# check request date matches scope date
req_date = self.get_request_date(req)
if req_date is None:
# no date headers or none in recognisable format
# replace them with x-amz-header with current date and time
if 'date' in req.headers: del req.headers['date']
if 'x-amz-date' in req.headers: del req.headers['x-amz-date']
now = datetime.datetime.utcnow()
req_date = now.date()
req.headers['x-amz-date'] = now.strftime('%Y%m%dT%H%M%SZ')
req_scope_date = req_date.strftime('%Y%m%d')
if req_scope_date != self.date:
self.handle_date_mismatch(req)
# encode body and generate body hash
if hasattr(req, 'body') and req.body is not None:
self.encode_body(req)
content_hash = hashlib.sha256(req.body)
else:
content_hash = hashlib.sha256(b'')
req.headers['x-amz-content-sha256'] = content_hash.hexdigest()
if self.session_token:
req.headers['x-amz-security-token'] = self.session_token
# generate signature
result = self.get_canonical_headers(req, self.include_hdrs)
cano_headers, signed_headers = result
cano_req = self.get_canonical_request(req, cano_headers,
signed_headers)
sig_string = self.get_sig_string(req, cano_req, self.signing_key.scope)
sig_string = sig_string.encode('utf-8')
hsh = hmac.new(self.signing_key.key, sig_string, hashlib.sha256)
sig = hsh.hexdigest()
auth_str = 'AWS4-HMAC-SHA256 '
auth_str += 'Credential={}/{}, '.format(self.access_id,
self.signing_key.scope)
auth_str += 'SignedHeaders={}, '.format(signed_headers)
auth_str += 'Signature={}'.format(sig)
req.headers['Authorization'] = auth_str
return req
@classmethod
def get_request_date(cls, req):
"""
Try to pull a date from the request by looking first at the
x-amz-date header, and if that's not present then the Date header.
Return a datetime.date object, or None if neither date header
is found or is in a recognisable format.
req -- a requests PreparedRequest object
"""
date = None
for header in ['x-amz-date', 'date']:
if header not in req.headers:
continue
try:
date_str = cls.parse_date(req.headers[header])
except DateFormatError:
continue
try:
date = datetime.datetime.strptime(date_str, '%Y-%m-%d').date()
except ValueError:
continue
else:
break
return date
@staticmethod
def parse_date(date_str):
"""
Check if date_str is in a recognised format and return an ISO
yyyy-mm-dd format version if so. Raise DateFormatError if not.
Recognised formats are:
* RFC 7231 (e.g. Mon, 09 Sep 2011 23:36:00 GMT)
* RFC 850 (e.g. Sunday, 06-Nov-94 08:49:37 GMT)
* C time (e.g. Wed Dec 4 00:00:00 2002)
* Amz-Date format (e.g. 20090325T010101Z)
* ISO 8601 / RFC 3339 (e.g. 2009-03-25T10:11:12.13-01:00)
date_str -- Str containing a date and optional time
"""
months = ['jan', 'feb', 'mar', 'apr', 'may', 'jun', 'jul', 'aug',
'sep', 'oct', 'nov', 'dec']
formats = {
# RFC 7231, e.g. 'Mon, 09 Sep 2011 23:36:00 GMT'
r'^(?:\w{3}, )?(\d{2}) (\w{3}) (\d{4})\D.*$':
lambda m: '{}-{:02d}-{}'.format(
m.group(3),
months.index(m.group(2).lower())+1,
m.group(1)),
# RFC 850 (e.g. Sunday, 06-Nov-94 08:49:37 GMT)
# assumes current century
r'^\w+day, (\d{2})-(\w{3})-(\d{2})\D.*$':
lambda m: '{}{}-{:02d}-{}'.format(
str(datetime.date.today().year)[:2],
m.group(3),
months.index(m.group(2).lower())+1,
m.group(1)),
# C time, e.g. 'Wed Dec 4 00:00:00 2002'
r'^\w{3} (\w{3}) (\d{1,2}) \d{2}:\d{2}:\d{2} (\d{4})$':
lambda m: '{}-{:02d}-{:02d}'.format(
m.group(3),
months.index(m.group(1).lower())+1,
int(m.group(2))),
# x-amz-date format dates, e.g. 20100325T010101Z
r'^(\d{4})(\d{2})(\d{2})T\d{6}Z$':
lambda m: '{}-{}-{}'.format(*m.groups()),
# ISO 8601 / RFC 3339, e.g. '2009-03-25T10:11:12.13-01:00'
r'^(\d{4}-\d{2}-\d{2})(?:[Tt].*)?$':
lambda m: m.group(1),
}
out_date = None
for regex, xform in formats.items():
m = re.search(regex, date_str)
if m:
out_date = xform(m)
break
if out_date is None:
raise DateFormatError
else:
return out_date
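    # Illustrative conversions (examples assumed, checked against the formats
    # above): parse_date('Mon, 09 Sep 2011 23:36:00 GMT') returns '2011-09-09',
    # parse_date('20090325T010101Z') returns '2009-03-25', and an unrecognised
    # string such as '25/03/2009' raises DateFormatError.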
def handle_date_mismatch(self, req):
"""
Handle a request whose date doesn't match the signing key scope date.
This AWS4Auth class implementation regenerates the signing key. See
StrictAWS4Auth class if you would prefer an exception to be raised.
req -- a requests prepared request object
"""
req_datetime = self.get_request_date(req)
new_key_date = req_datetime.strftime('%Y%m%d')
self.regenerate_signing_key(date=new_key_date)
@staticmethod
def encode_body(req):
"""
Encode body of request to bytes and update content-type if required.
If the body of req is Unicode then encode to the charset found in
content-type header if present, otherwise UTF-8, or ASCII if
content-type is application/x-www-form-urlencoded. If encoding to UTF-8
then add charset to content-type. Modifies req directly, does not
return a modified copy.
req -- Requests PreparedRequest object
"""
if isinstance(req.body, text_type):
split = req.headers.get('content-type', 'text/plain').split(';')
if len(split) == 2:
ct, cs = split
cs = cs.split('=')[1]
req.body = req.body.encode(cs)
else:
ct = split[0]
if (ct == 'application/x-www-form-urlencoded' or
'x-amz-' in ct):
req.body = req.body.encode()
else:
req.body = req.body.encode('utf-8')
req.headers['content-type'] = ct + '; charset=utf-8'
def get_canonical_request(self, req, cano_headers, signed_headers):
"""
Create the AWS authentication Canonical Request string.
req -- Requests PreparedRequest object. Should already
include an x-amz-content-sha256 header
cano_headers -- Canonical Headers section of Canonical Request, as
returned by get_canonical_headers()
signed_headers -- Signed Headers, as returned by
get_canonical_headers()
"""
url = urlparse(req.url)
path = self.amz_cano_path(url.path)
# AWS handles "extreme" querystrings differently to urlparse
# (see post-vanilla-query-nonunreserved test in aws_testsuite)
split = req.url.split('?', 1)
qs = split[1] if len(split) == 2 else ''
qs = self.amz_cano_querystring(qs)
payload_hash = req.headers['x-amz-content-sha256']
req_parts = [req.method.upper(), path, qs, cano_headers,
signed_headers, payload_hash]
cano_req = '\n'.join(req_parts)
return cano_req
@classmethod
def get_canonical_headers(cls, req, include=None):
"""
Generate the Canonical Headers section of the Canonical Request.
Return the Canonical Headers and the Signed Headers strs as a tuple
(canonical_headers, signed_headers).
req -- Requests PreparedRequest object
include -- List of headers to include in the canonical and signed
headers. It's primarily included to allow testing against
specific examples from Amazon. If omitted or None it
includes host, content-type and any header starting 'x-amz-'
except for x-amz-client context, which appears to break
mobile analytics auth if included. Except for the
x-amz-client-context exclusion these defaults are per the
AWS documentation.
"""
if include is None:
include = cls.default_include_headers
include = [x.lower() for x in include]
headers = req.headers.copy()
# Temporarily include the host header - AWS requires it to be included
# in the signed headers, but Requests doesn't include it in a
# PreparedRequest
if 'host' not in headers:
headers['host'] = urlparse(req.url).netloc.split(':')[0]
# Aggregate for upper/lowercase header name collisions in header names,
# AMZ requires values of colliding headers be concatenated into a
# single header with lowercase name. Although this is not possible with
# Requests, since it uses a case-insensitive dict to hold headers, this
# is here just in case you duck type with a regular dict
cano_headers_dict = {}
for hdr, val in headers.items():
hdr = hdr.strip().lower()
val = cls.amz_norm_whitespace(val).strip()
if (hdr in include or '*' in include or
('x-amz-*' in include and hdr.startswith('x-amz-') and not
hdr == 'x-amz-client-context')):
vals = cano_headers_dict.setdefault(hdr, [])
vals.append(val)
# Flatten cano_headers dict to string and generate signed_headers
cano_headers = ''
signed_headers_list = []
for hdr in sorted(cano_headers_dict):
vals = cano_headers_dict[hdr]
val = ','.join(sorted(vals))
cano_headers += '{}:{}\n'.format(hdr, val)
signed_headers_list.append(hdr)
signed_headers = ';'.join(signed_headers_list)
return (cano_headers, signed_headers)
@staticmethod
def get_sig_string(req, cano_req, scope):
"""
Generate the AWS4 auth string to sign for the request.
req -- Requests PreparedRequest object. This should already
include an x-amz-date header.
cano_req -- The Canonical Request, as returned by
get_canonical_request()
"""
amz_date = req.headers['x-amz-date']
hsh = hashlib.sha256(cano_req.encode())
sig_items = ['AWS4-HMAC-SHA256', amz_date, scope, hsh.hexdigest()]
sig_string = '\n'.join(sig_items)
return sig_string
def amz_cano_path(self, path):
"""
Generate the canonical path as per AWS4 auth requirements.
Not documented anywhere, determined from aws4_testsuite examples,
problem reports and testing against the live services.
path -- request path
"""
safe_chars = '/~'
qs = ''
fixed_path = path
if '?' in fixed_path:
fixed_path, qs = fixed_path.split('?', 1)
fixed_path = posixpath.normpath(fixed_path)
fixed_path = re.sub('/+', '/', fixed_path)
if path.endswith('/') and not fixed_path.endswith('/'):
fixed_path += '/'
full_path = fixed_path
# If Python 2, switch to working entirely in str as quote() has problems
# with Unicode
if PY2:
full_path = full_path.encode('utf-8')
safe_chars = safe_chars.encode('utf-8')
qs = qs.encode('utf-8')
# S3 seems to require unquoting first. 'host' service is used in
# amz_testsuite tests
if self.service in ['s3', 'host']:
full_path = unquote(full_path)
full_path = quote(full_path, safe=safe_chars)
if qs:
qm = b'?' if PY2 else '?'
full_path = qm.join((full_path, qs))
if PY2:
full_path = unicode(full_path)
return full_path
@staticmethod
def amz_cano_querystring(qs):
"""
Parse and format querystring as per AWS4 auth requirements.
Perform percent quoting as needed.
qs -- querystring
"""
safe_qs_amz_chars = '&=+'
safe_qs_unresvd = '-_.~'
# If Python 2, switch to working entirely in str
# as quote() has problems with Unicode
if PY2:
qs = qs.encode('utf-8')
safe_qs_amz_chars = safe_qs_amz_chars.encode()
safe_qs_unresvd = safe_qs_unresvd.encode()
qs = unquote(qs)
space = b' ' if PY2 else ' '
qs = qs.split(space)[0]
qs = quote(qs, safe=safe_qs_amz_chars)
qs_items = {}
for name, vals in parse_qs(qs, keep_blank_values=True).items():
name = quote(name, safe=safe_qs_unresvd)
vals = [quote(val, safe=safe_qs_unresvd) for val in vals]
qs_items[name] = vals
qs_strings = []
for name, vals in qs_items.items():
for val in vals:
qs_strings.append('='.join([name, val]))
qs = '&'.join(sorted(qs_strings))
if PY2:
qs = unicode(qs)
return qs
@staticmethod
def amz_norm_whitespace(text):
"""
Replace runs of whitespace with a single space.
Ignore text enclosed in quotes.
"""
return ' '.join(shlex.split(text, posix=False))
class StrictAWS4Auth(AWS4Auth):
"""
Instances of this subclass will not automatically regenerate their signing
keys when asked to sign a request whose date does not match the scope date
of the signing key. Instances will instead raise a DateMismatchError.
Keys of StrictAWSAuth instances can be regenerated manually by calling the
regenerate_signing_key() method.
Keys will still store the secret key by default. If this is not desired
then create the instance by passing an AWS4SigningKey created with
    store_secret_key set to False to the StrictAWS4Auth constructor:
>>> sig_key = AWS4SigningKey(secret_key, region, service, date, False)
>>> auth = StrictAWS4Auth(access_id, sig_key)
"""
def handle_date_mismatch(self, req):
"""
        Handle a request whose date doesn't match the signing key scope date by
raising a DateMismatchError.
Overrides the default behaviour of AWS4Auth where the signing key
        is automatically regenerated to match the request date.
To update the signing key if this is hit, call
StrictAWS4Auth.regenerate_signing_key().
"""
raise DateMismatchError
class PassiveAWS4Auth(AWS4Auth):
"""
This subclass does not perform any special handling of a mismatched request
and scope date, it signs the request and allows Requests to send it. It is
up to the calling code to handle a failed authentication response from AWS.
This behaviour mimics the behaviour of AWS4Auth for versions 0.7 and
earlier.
"""
def handle_date_mismatch(self, req):
pass
| mit | -730,816,101,825,409,900 | 41.306094 | 95 | 0.598854 | false |
zzrcxb/Pugoo | GoGame/circle.py | 1 | 1081 | class Circle:
def __init__(self, members, enclosed, color, key, range=None, type='normal'): # type = 'normal', 'internal', 'dual'
self.members = set(members)
self.enclosed = set(enclosed)
self.color = color
self.range = range
self.type = type
self.key = key
def __str__(self):
return 'Members:' + repr(self.members) + '\nEnclosed:' + repr(self.enclosed)
def __repr__(self):
return self.__str__()
class Circles:
def __init__(self):
self.circles = {}
def append(self, other):
if other.key in self.circles:
return None
else:
self.circles[other.key] = other
def extend(self, other):
for key in other.circles:
self.append(other.circles[key])
def print(self):
for key in self.circles:
print(self.circles[key])
def __str__(self):
res = ''
for circle in self.circles:
res += repr(circle) + '\n'
return res
def __repr__(self):
return self.__str__() | gpl-3.0 | -1,687,486,913,983,174,400 | 24.761905 | 120 | 0.531915 | false |
whereaswhile/DLSR | convnet-folk_master/shownet_bbx_bcfp.py | 1 | 23553 | # Copyright (c) 2011, Alex Krizhevsky ([email protected])
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# - Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# - Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
# EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import numpy
import sys
import getopt as opt
from util import *
from math import sqrt, ceil, floor
import os
import scipy.io as sio
import scipy.spatial.distance as ssd
import cPickle as cpickle
from gpumodel import IGPUModel
import random as r
import numpy.random as nr
from convnet import ConvNet
from options import *
from data import DataProvider, dp_types
from w_util import readLines
try:
import pylab as pl
except:
print "This script requires the matplotlib python library (Ubuntu/Fedora package name python-matplotlib). Please install it."
# sys.exit(1)
import matplotlib.cm as cm
class ShowNetError(Exception):
pass
class ShowConvNet(ConvNet):
def __init__(self, op, load_dic):
ConvNet.__init__(self, op, load_dic)
def get_gpus(self):
self.need_gpu = self.op.get_value('show_preds') or self.op.get_value('write_features') or self.op.get_value('write_pixel_proj')
if self.need_gpu:
ConvNet.get_gpus(self)
def init_data_providers(self):
class Dummy:
def advance_batch(self):
pass
if self.need_gpu:
ConvNet.init_data_providers(self)
else:
self.train_data_provider = self.test_data_provider = Dummy()
def import_model(self):
if self.need_gpu:
ConvNet.import_model(self)
def init_data_providers(self):
self.dp_params['convnet'] = self
self.dp_params['imgprovider'] = self.img_provider_file
try:
if self.need_gpu:
self.test_data_provider = DataProvider.get_instance(self.data_path_test, self.test_batch_range,
type=self.dp_type_test, dp_params=self.dp_params, test=True)
self.test_batch_range = self.test_data_provider.batch_range
except Exception, e:
print "Unable to create data provider: %s" % e
self.print_data_providers()
sys.exit()
def init_model_state(self):
#ConvNet.init_model_state(self)
if self.op.get_value('show_preds'):
self.sotmax_idx = self.get_layer_idx(self.op.get_value('show_preds'), check_type='softmax')
if self.op.get_value('write_features'):
self.ftr_layer_idx = self.get_layer_idx(self.op.get_value('write_features'))
if self.op.get_value('write_pixel_proj'):
tmp = self.op.get_value('write_pixel_proj')
            tmp = tmp.split(',')
            self.ftr_layer_idx = self.get_layer_idx(tmp[0])
self.ftr_res_idx = int(tmp[1])
def init_model_lib(self):
if self.need_gpu:
if self.op.get_value('write_pixel_proj'):
# in pixel projection model, activation matrix cannot be shared
for l in self.model_state['layers']:
l['usesActs'] = True
ConvNet.init_model_lib(self)
def plot_cost(self):
if self.show_cost not in self.train_outputs[0][0]:
raise ShowNetError("Cost function with name '%s' not defined by given convnet." % self.show_cost)
train_errors = [o[0][self.show_cost][self.cost_idx] for o in self.train_outputs]
test_errors = [o[0][self.show_cost][self.cost_idx] for o in self.test_outputs]
# numbatches = len(self.train_batch_range)
numbatches = self.model_state['batchnum']
test_errors = numpy.row_stack(test_errors)
test_errors = numpy.tile(test_errors, (1, self.testing_freq))
test_errors = list(test_errors.flatten())
test_errors += [test_errors[-1]] * max(0,len(train_errors) - len(test_errors))
test_errors = test_errors[:len(train_errors)]
numepochs = len(train_errors) / float(numbatches)
print numepochs, numbatches
pl.figure(1)
x = range(0, len(train_errors))
pl.plot(x, train_errors, 'k-', label='Training set')
pl.plot(x, test_errors, 'r-', label='Test set')
pl.legend()
ticklocs = range(numbatches, len(train_errors) - len(train_errors) % numbatches + 1, numbatches)
epoch_label_gran = int(ceil(numepochs / 20.)) # aim for about 20 labels
epoch_label_gran = int(ceil(float(epoch_label_gran) / 10) * 10) # but round to nearest 10
ticklabels = map(lambda x: str((x[1] / numbatches)) if x[0] % epoch_label_gran == epoch_label_gran-1 else '', enumerate(ticklocs))
pl.xticks(ticklocs, ticklabels)
pl.xlabel('Epoch')
# pl.ylabel(self.show_cost)
pl.title(self.show_cost)
def make_filter_fig(self, filters, filter_start, fignum, _title, num_filters, combine_chans):
FILTERS_PER_ROW = 16
MAX_ROWS = 16
MAX_FILTERS = FILTERS_PER_ROW * MAX_ROWS
num_colors = filters.shape[0]
f_per_row = int(ceil(FILTERS_PER_ROW / float(1 if combine_chans else num_colors)))
filter_end = min(filter_start+MAX_FILTERS, num_filters)
filter_rows = int(ceil(float(filter_end - filter_start) / f_per_row))
filter_size = int(sqrt(filters.shape[1]))
fig = pl.figure(fignum)
fig.text(.5, .95, '%s %dx%d filters %d-%d' % (_title, filter_size, filter_size, filter_start, filter_end-1), horizontalalignment='center')
num_filters = filter_end - filter_start
if not combine_chans:
bigpic = n.zeros((filter_size * filter_rows + filter_rows + 1, filter_size*num_colors * f_per_row + f_per_row + 1), dtype=n.single)
else:
bigpic = n.zeros((3, filter_size * filter_rows + filter_rows + 1, filter_size * f_per_row + f_per_row + 1), dtype=n.single)
for m in xrange(filter_start,filter_end ):
filter = filters[:,:,m]
y, x = (m - filter_start) / f_per_row, (m - filter_start) % f_per_row
if not combine_chans:
for c in xrange(num_colors):
filter_pic = filter[c,:].reshape((filter_size,filter_size))
bigpic[1 + (1 + filter_size) * y:1 + (1 + filter_size) * y + filter_size,
1 + (1 + filter_size*num_colors) * x + filter_size*c:1 + (1 + filter_size*num_colors) * x + filter_size*(c+1)] = filter_pic
else:
filter_pic = filter.reshape((3, filter_size,filter_size))
bigpic[:,
1 + (1 + filter_size) * y:1 + (1 + filter_size) * y + filter_size,
1 + (1 + filter_size) * x:1 + (1 + filter_size) * x + filter_size] = filter_pic
pl.xticks([])
pl.yticks([])
if not combine_chans:
pl.imshow(bigpic, cmap=pl.cm.gray, interpolation='nearest')
else:
bigpic = bigpic.swapaxes(0,2).swapaxes(0,1)
pl.imshow(bigpic, interpolation='nearest')
def plot_filters(self):
filter_start = 0 # First filter to show
layer_names = [l['name'] for l in self.layers]
if self.show_filters not in layer_names:
raise ShowNetError("Layer with name '%s' not defined by given convnet." % self.show_filters)
layer = self.layers[layer_names.index(self.show_filters)]
filters = layer['weights'][self.input_idx]
if layer['type'] == 'fc': # Fully-connected layer
num_filters = layer['outputs']
channels = self.channels
elif layer['type'] in ('conv', 'local'): # Conv layer
num_filters = layer['filters']
channels = layer['filterChannels'][self.input_idx]
if layer['type'] == 'local':
filters = filters.reshape((layer['modules'], layer['filterPixels'][self.input_idx] * channels, num_filters))
filter_start = r.randint(0, layer['modules']-1)*num_filters # pick out some random modules
filters = filters.swapaxes(0,1).reshape(channels * layer['filterPixels'][self.input_idx], num_filters * layer['modules'])
num_filters *= layer['modules']
filters = filters.reshape(channels, filters.shape[0]/channels, filters.shape[1])
# Convert YUV filters to RGB
if self.yuv_to_rgb and channels == 3:
R = filters[0,:,:] + 1.28033 * filters[2,:,:]
G = filters[0,:,:] + -0.21482 * filters[1,:,:] + -0.38059 * filters[2,:,:]
B = filters[0,:,:] + 2.12798 * filters[1,:,:]
filters[0,:,:], filters[1,:,:], filters[2,:,:] = R, G, B
combine_chans = not self.no_rgb and channels == 3
# Make sure you don't modify the backing array itself here -- so no -= or /=
filters = filters - filters.min()
filters = filters / filters.max()
self.make_filter_fig(filters, filter_start, 2, 'Layer %s' % self.show_filters, num_filters, combine_chans)
def plot_predictions(self):
data = self.get_next_batch(train=False)[2] # get a test batch
num_classes = self.test_data_provider.get_num_classes()
NUM_ROWS = 2
NUM_COLS = 4
NUM_IMGS = NUM_ROWS * NUM_COLS
NUM_TOP_CLASSES = min(num_classes, 4) # show this many top labels
label_names = self.test_data_provider.batch_meta['label_names']
if self.only_errors:
preds = n.zeros((data[0].shape[1], num_classes), dtype=n.single)
else:
preds = n.zeros((NUM_IMGS, num_classes), dtype=n.single)
rand_idx = nr.randint(0, data[0].shape[1], NUM_IMGS)
data[0] = n.require(data[0][:,rand_idx], requirements='C')
data[1] = n.require(data[1][:,rand_idx], requirements='C')
data += [preds]
# Run the model
self.libmodel.startFeatureWriter(data, self.sotmax_idx)
self.finish_batch()
fig = pl.figure(3)
fig.text(.4, .95, '%s test case predictions' % ('Mistaken' if self.only_errors else 'Random'))
if self.only_errors:
err_idx = nr.permutation(n.where(preds.argmax(axis=1) != data[1][0,:])[0])[:NUM_IMGS] # what the net got wrong
data[0], data[1], preds = data[0][:,err_idx], data[1][:,err_idx], preds[err_idx,:]
data[0] = self.test_data_provider.get_plottable_data(data[0])
for r in xrange(NUM_ROWS):
for c in xrange(NUM_COLS):
img_idx = r * NUM_COLS + c
if data[0].shape[0] <= img_idx:
break
pl.subplot(NUM_ROWS*2, NUM_COLS, r * 2 * NUM_COLS + c + 1)
pl.xticks([])
pl.yticks([])
greyscale = False
try:
img = data[0][img_idx,:,:,:]
except IndexError:
# maybe greyscale?
greyscale = True
img = data[0][img_idx,:,:]
if len(img.shape) == 3 and img.shape[2]==1:
img = img.reshape(img.shape[:2])
greyscale = True
if not greyscale:
pl.imshow(img, interpolation='nearest')
else:
pl.imshow(img, interpolation='nearest', cmap=cm.Greys_r)
true_label = int(data[1][0,img_idx])
img_labels = sorted(zip(preds[img_idx,:], label_names), key=lambda x: x[0])[-NUM_TOP_CLASSES:]
pl.subplot(NUM_ROWS*2, NUM_COLS, (r * 2 + 1) * NUM_COLS + c + 1, aspect='equal')
ylocs = n.array(range(NUM_TOP_CLASSES)) + 0.5
height = 0.5
width = max(ylocs)
pl.barh(ylocs, [l[0]*width for l in img_labels], height=height, \
color=['r' if l[1] == label_names[true_label] else 'b' for l in img_labels])
pl.title(label_names[true_label])
pl.yticks(ylocs + height/2, [l[1] for l in img_labels])
pl.xticks([width/2.0, width], ['50%', ''])
pl.ylim(0, ylocs[-1] + height*2)
def rect_overlap(self, bbgt, bb):
bi=[max(bb[0],bbgt[0]), max(bb[1],bbgt[1]), min(bb[2],bbgt[2]), min(bb[3],bbgt[3])];
iw=bi[2]-bi[0]+1;
ih=bi[3]-bi[1]+1;
ov=0
if iw>0 and ih>0:
ua=(bb[2]-bb[0]+1)*(bb[3]-bb[1]+1) + (bbgt[2]-bbgt[0]+1)*(bbgt[3]-bbgt[1]+1) - iw*ih
ov=iw*ih*1.0/ua
return ov
def increase_acc_count(self, pred, nacc, ncnt):
if ncnt==0:
self.remain_pred=pred[0:0]
pred=numpy.concatenate((self.remain_pred, pred), axis=0)
base_view=int(numpy.sqrt(self.mult_view))
idx=range(0, len(pred), self.mult_view)
if len(pred)%self.mult_view!=0:
idx=idx[:-1]
for i in idx:
imid=self.imgList[ncnt]
ncnt+=1
sz=self.imgSize[imid-1]
l=int(self.scale_view*min(sz[0], sz[1]))
ll=224.0
b=[1, 1, sz[1], sz[0]] #global bbx
#b=self.bbx[imid-1] #gt bbx
bx = (b[1]+b[3])/2.0
by = (b[0]+b[2])/2.0
x0 = max(0, bx-l)
x1 = min(sz[0]-l, bx)
y0 = max(0, by-l)
y1 = min(sz[1]-l, by)
bpred = numpy.array([0, 0, 0, 0])
for j in range(self.mult_view):
cidx=j%base_view
ridx=j/base_view
dx = int(x0+(x1-x0)/(base_view-1)*cidx)
dy = int(y0+(y1-y0)/(base_view-1)*ridx)
bpred += pred[i+j]*l/ll + numpy.array([dy, dx, dy, dx]) + 1
bpred=numpy.round(bpred/self.mult_view).astype(int)
ov = self.rect_overlap(self.bbx[imid-1], bpred)
if ov > 0.5:
nacc+=1;
self.remain_pred=pred[len(idx)*self.mult_view:]
#print 'remain: ', self.remain_label
return (nacc, ncnt)
def do_write_features(self):
if len(self.cluster_file)>0: #clustering only
print "clustering mode, no feature will be saved"
clst=cpickle.load(open(self.cluster_file, 'rb'))
clst=clst['centers']
print "%d cluster centers found" % clst.shape[0]
cfid=open('cluster_label.txt', 'w')
elif len(self.feature_path)==0: #evaluate acc only
print "evaluation mode, no feature will be saved"
nacc = 0
ncnt = 0
self.mult_view=max(1, self.mult_view)
elif not os.path.exists(self.feature_path):
os.makedirs(self.feature_path)
next_data = self.get_next_batch(train=False)
b1 = next_data[1]
num_ftrs = self.layers[self.ftr_layer_idx]['outputs']
data_dims = [_.shape[0] for _ in next_data[2]]
print "input data dimensions: {}".format(data_dims)
assert(data_dims.count(4)==1)
label_idx = data_dims.index(4) # regression data
bb=cpickle.load(open(self.bbx_file, 'rb'))
self.bbx=bb['bbx']
self.imgSize=bb['imsize'] #[w, h]
self.bcfList=bb['bcfidx']
if self.img_list!='-1':
self.imgList=readLines(self.img_list)
self.imgList=[int(_.rstrip()) for _ in self.imgList]
self.imgList=[numpy.where(self.bcfList==_)[0] for _ in self.imgList] #index in bbx, from 1
mask=numpy.array([len(_) for _ in self.imgList])
self.imgList=numpy.array(self.imgList)[mask==1]
self.imgList=[_[0]+1 for _ in self.imgList]
else:
self.imgList=range(1, 1+self.bbx.shape[0])
print "%d images found" % len(self.imgList)
print "%d bbxes loaded" % self.bbx.shape[0]
print "writing features: layer idx={}, {} fitlers, label_idx={}".format(self.ftr_layer_idx, num_ftrs, label_idx)
print "starting from batch: {}".format(b1)
while True:
batch = next_data[1]
data = next_data[2]
ftrs = n.zeros((data[0].shape[1], num_ftrs), dtype=n.single)
self.libmodel.startFeatureWriter(data + [ftrs], self.ftr_layer_idx)
# load the next batch while the current one is computing
next_data = self.get_next_batch(train=False)
self.finish_batch()
            # ftrs=ftrs*100 #predicted, zero data input, bbx multiplier
output = {'source_model':self.load_file, 'num_vis':num_ftrs, 'data': ftrs}
if len(self.cluster_file)>0: #clustering only
d = ssd.cdist(ftrs, clst, 'euclidean')
#print 'dist:', d[100, 3], numpy.linalg.norm(ftrs[100]-clst[3])
di = numpy.argmin(d, axis=1)
#print 'di:', di.shape, di
cfid.write(' '.join(str(_) for _ in di.tolist())+'\n')
if batch%10==0:
print "Batch %d evaluated" % batch
elif len(self.feature_path)==0: #evaluate only
nacc, ncnt=self.increase_acc_count(ftrs, nacc, ncnt)
if ncnt>0:
print "Batch %d evaluated: %.2f" % (batch, 1.0*nacc/ncnt*100)
else:
path_out = os.path.join(self.feature_path, 'data_batch_%d' % batch)
pickle(path_out,output)
print "Wrote feature file %s" % path_out
sys.stdout.flush()
if next_data[1] == b1:
break
if len(self.cluster_file)>0: #clustering only
cfid.close()
print "clustering done!"
elif len(self.feature_path)==0: #evaluate only
print "overall accuracy: %.3f%%" % (1.0*nacc/ncnt*100)
def do_write_pixel_proj(self):
if not os.path.exists(self.feature_path):
os.makedirs(self.feature_path)
next_data = self.get_next_batch(train=False)
b1 = next_data[1]
num_ftrs = self.layers[self.ftr_layer_idx]['outputs']
while True:
batch = next_data[1]
data = next_data[2]
ftrs = n.zeros((data[0].shape[1], num_ftrs), dtype=n.single)
projs = n.zeros(data[0].shape, dtype=n.single)
print data[0].shape
self.libmodel.startProjWriter(data + [projs], self.ftr_layer_idx, self.ftr_res_idx)
# load the next batch while the current one is computing
next_data = self.get_next_batch(train=False)
self.finish_batch()
path_out = os.path.join(self.feature_path, 'data_batch_%d' % batch)
            output = {'source_model':self.load_file, 'ftridx':self.ftr_layer_idx, 'data': projs, 'labels':data[1]}
try:
output['aux'] = self.test_data_provider.getftraux()
except AttributeError:
pass
pickle(path_out,output)
print "Wrote res file %s" % path_out
if next_data[1] == b1:
break
# pickle(os.path.join(self.feature_path, 'batches.meta'), output)
def start(self):
if self.verbose:
self.op.print_values()
if self.show_cost:
self.plot_cost()
if self.show_filters:
self.plot_filters()
if self.show_preds:
self.plot_predictions()
if self.write_features:
self.do_write_features()
if self.write_pixel_proj:
self.do_write_pixel_proj()
pl.show()
sys.exit(0)
@classmethod
def get_options_parser(cls):
op = ConvNet.get_options_parser()
for option in list(op.options):
if option not in ('data_path_train', 'data_path_test', 'dp_type_train', 'dp_type_test', 'gpu', 'rnorm_const', 'img_provider_file', 'load_file', 'train_batch_range', 'test_batch_range', 'verbose'):
op.delete_option(option)
op.add_option("test-only", "test_only", BooleanOptionParser, "Test and quit?", default=1)
op.add_option("show-cost", "show_cost", StringOptionParser, "Show specified objective function", default="")
op.add_option("show-filters", "show_filters", StringOptionParser, "Show learned filters in specified layer", default="")
op.add_option("input-idx", "input_idx", IntegerOptionParser, "Input index for layer given to --show-filters", default=0)
op.add_option("cost-idx", "cost_idx", IntegerOptionParser, "Cost function return value index for --show-cost", default=0)
op.add_option("no-rgb", "no_rgb", BooleanOptionParser, "Don't combine filter channels into RGB in layer given to --show-filters", default=False)
op.add_option("yuv-to-rgb", "yuv_to_rgb", BooleanOptionParser, "Convert RGB filters to YUV in layer given to --show-filters", default=False)
op.add_option("channels", "channels", IntegerOptionParser, "Number of channels in layer given to --show-filters (fully-connected layers only)", default=0)
op.add_option("show-preds", "show_preds", StringOptionParser, "Show predictions made by given softmax on test set", default="")
op.add_option("only-errors", "only_errors", BooleanOptionParser, "Show only mistaken predictions (to be used with --show-preds)", default=False, requires=['show_preds'])
op.add_option("write-features", "write_features", StringOptionParser, "Write test data features from given layer", default="", requires=['feature-path'])
op.add_option("feature-path", "feature_path", StringOptionParser, "Write test data features to this path (to be used with --write-features)", default="")
op.add_option("write-pixel-proj", "write_pixel_proj", StringOptionParser, "Write the projection of some response on pixel space", default = "", requires=['response_idx'])
op.add_option("multiview", "mult_view", IntegerOptionParser, "Number of views for multi-view testing", default=1)
op.add_option("scaleview", "scale_view", FloatOptionParser, "Scaling factor of the views in multi-view testing", default=1.0)
op.add_option("bbxfile", "bbx_file", StringOptionParser, "Contains ground truth bounding box for each image", default="")
op.add_option("imglist", "img_list", StringOptionParser, "Image list file", default="")
op.add_option("clusterfile", "cluster_file", StringOptionParser, "Cluster center saved in pickle format", default="")
op.options['load_file'].default = None
return op
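# --- Editor's illustrative sketch; not part of the original script. A
# standalone restatement of the intersection-over-union test used by
# ShowConvNet.rect_overlap() above, for boxes given as inclusive pixel
# coordinates in the same [y0, x0, y1, x1] layout the script appears to use.
# The sample boxes are made-up values.
def _example_rect_overlap():
    def iou(bbgt, bb):
        bi = [max(bb[0], bbgt[0]), max(bb[1], bbgt[1]),
              min(bb[2], bbgt[2]), min(bb[3], bbgt[3])]
        iw = bi[2] - bi[0] + 1
        ih = bi[3] - bi[1] + 1
        if iw <= 0 or ih <= 0:
            return 0.0
        ua = ((bb[2] - bb[0] + 1) * (bb[3] - bb[1] + 1) +
              (bbgt[2] - bbgt[0] + 1) * (bbgt[3] - bbgt[1] + 1) - iw * ih)
        return iw * ih * 1.0 / ua
    # Two 100x100 boxes overlapping on a 50x50 patch -> IoU = 2500/17500 ~ 0.143
    return iou([1, 1, 100, 100], [51, 51, 150, 150])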
if __name__ == "__main__":
try:
op = ShowConvNet.get_options_parser()
op, load_dic = IGPUModel.parse_options(op)
model = ShowConvNet(op, load_dic)
model.start()
except (UnpickleError, ShowNetError, opt.GetoptError), e:
print "----------------"
print "Error:"
print e
| gpl-2.0 | 6,005,349,874,969,878,000 | 46.774848 | 208 | 0.587399 | false |
puttarajubr/commcare-hq | corehq/apps/twilio/models.py | 1 | 2143 | import logging
from corehq.apps.sms.mixin import SMSBackend, SMSLoadBalancingMixin
from corehq.apps.sms.util import clean_phone_number
from corehq.apps.twilio.forms import TwilioBackendForm
from dimagi.ext.couchdbkit import *
from twilio.rest import TwilioRestClient
from django.conf import settings
class TwilioBackend(SMSBackend, SMSLoadBalancingMixin):
account_sid = StringProperty()
auth_token = StringProperty()
@classmethod
def get_api_id(cls):
return "TWILIO"
@classmethod
def get_generic_name(cls):
return "Twilio"
@classmethod
def get_template(cls):
return "twilio/backend.html"
@classmethod
def get_form_class(cls):
return TwilioBackendForm
@classmethod
def get_opt_in_keywords(cls):
return ["START", "YES"]
@classmethod
def get_opt_out_keywords(cls):
return ["STOP", "STOPALL", "UNSUBSCRIBE", "CANCEL", "END", "QUIT"]
def get_load_balancing_interval(self):
# Twilio automatically rate limits at 1 sms/sec, but we'll also
# balance the sms load evenly between the phone numbers used by
# this backend over the last 60 seconds.
return 60
@property
def phone_numbers(self):
"""
Prior to introducing load balancing, the Twilio backend only had
one phone number, so need to handle old Twilio backends which don't
have the x_phone_numbers property set.
"""
if self.x_phone_numbers:
return self.x_phone_numbers
else:
return [self.phone_number]
def send(self, msg, *args, **kwargs):
orig_phone_number = kwargs.get("orig_phone_number")
client = TwilioRestClient(self.account_sid, self.auth_token,
timeout=settings.SMS_GATEWAY_TIMEOUT)
to = msg.phone_number
from_ = orig_phone_number or self.phone_numbers[0]
body = msg.text
message = client.messages.create(
body=body,
to=to,
from_=from_
)
msg.system_phone_number = from_
msg.backend_message_id = message.sid
msg.save()
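# --- Editor's illustrative sketch; not part of CommCare HQ. send() above
# expects an orig_phone_number chosen by the SMSLoadBalancingMixin; the cycle
# below is only a stand-in for that selection logic, not HQ's actual
# implementation, and the phone numbers in the usage note are placeholders.
def _example_round_robin(phone_numbers):
    import itertools
    pool = itertools.cycle(phone_numbers)
    # e.g. ['+15550001', '+15550002'] -> '+15550001', '+15550002', '+15550001', ...
    return [next(pool) for _ in range(4)]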
| bsd-3-clause | -5,999,389,994,968,135,000 | 29.614286 | 75 | 0.642091 | false |
GabMus/lithium-projman | server/lithium/settings.py | 1 | 4701 | # This file is part of Lithium-Projman.
#
# Lithium-Projman is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Lithium-Projman is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Lithium-Projman. If not, see <http://www.gnu.org/licenses/>.
"""
Django settings for lithium project.
Generated by 'django-admin startproject' using Django 1.9.1.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.9/ref/settings/
"""
import os
from . import email
#Before starting the project, create a file called email.py inside the
#lithium folder and add your SMTP credentials there so the project can send email.
#The structure of the email.py file is the following:
#EMAIL_HOST="smtp.server.com"
#EMAIL_PORT=465
#EMAIL_HOST_USER="[email protected]"
#EMAIL_HOST_PASSWORD="my password"
#EMAIL_USE_SSL=True
EMAIL_HOST=email.EMAIL_HOST
EMAIL_PORT=email.EMAIL_PORT
EMAIL_HOST_USER=email.EMAIL_HOST_USER
EMAIL_HOST_PASSWORD=email.EMAIL_HOST_PASSWORD
EMAIL_USE_SSL=email.EMAIL_USE_SSL
EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
STATIC_ROOT = os.path.normpath(os.path.dirname(__file__))+'/../static'
MEDIA_ROOT = os.path.normpath(os.path.dirname(__file__))+'/../media'
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.9/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '3)ii17s&4=1wwk034prfk5-f02dc^184gw7-ek^a&xe4qwj4u%'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'projman',
]
MIDDLEWARE_CLASSES = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'lithium.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'lithium.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.9/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'database.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.9/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.9/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'Europe/Rome'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.9/howto/static-files/
STATIC_URL = '/static/'
MEDIA_URL= '/media/'
MEDIA_INCOMPLETE_URL= '/'
| gpl-3.0 | -6,743,733,827,809,564,000 | 28.198758 | 91 | 0.707722 | false |
MikaelSchultz/dofiloop-sentinel | sentinel/sensor/models.py | 1 | 6540 | from django.db import models
from django.utils.translation import ugettext as _
from device.models import SnmpDevice
from device.choices import *
# Create your models here.
class PartitionSensor(models.Model):
snmp_device = models.ForeignKey(SnmpDevice, on_delete=models.CASCADE)
enabled = models.BooleanField(
null=False,
default=True,
verbose_name=_('Sensor Enabled')
)
index = models.PositiveSmallIntegerField(
null=False,
default=0,
verbose_name=_('Sensor Index')
)
description = models.CharField(
null=False,
default='',
max_length=256,
verbose_name=_('Description')
)
type = models.SmallIntegerField(
choices=storage_type_choices,
null=False,
default=1,
verbose_name=_('Device')
)
allocation_units = models.IntegerField(
null=False,
default=0,
verbose_name=_('Allocation Units')
)
total = models.BigIntegerField(
null=False,
default=0,
verbose_name=_('Total (kB)')
)
available = models.BigIntegerField(
null=False,
default=0,
verbose_name=_('Available (kB)')
)
used = models.BigIntegerField(
null=False,
default=0,
verbose_name=_('Used (kB)')
)
percent_used = models.IntegerField(
null=False,
default=0,
verbose_name=_('Percent Used')
)
threshold_warning = models.IntegerField(
null=False,
default=80,
verbose_name=_('Warning Threshold')
)
threshold_critical = models.IntegerField(
null=False,
default=90,
verbose_name=_('Critical Threshold')
)
timestamp_created = models.DateTimeField(
auto_now_add=True,
verbose_name=_('Timestamp Created')
)
timestamp_modified = models.DateTimeField(
auto_now=True,
verbose_name=_('Timestamp Modified')
)
class MemorySensor(models.Model):
snmp_device = models.ForeignKey(SnmpDevice, on_delete=models.CASCADE)
enabled = models.BooleanField(
null=False,
default=True,
verbose_name=_('Sensor Enabled')
)
index = models.PositiveSmallIntegerField(
null=False,
default=0,
verbose_name=_('Sensor Index')
)
description = models.CharField(
null=False,
default='',
max_length=256,
verbose_name=_('Description')
)
type = models.SmallIntegerField(
choices=storage_type_choices,
null=False,
default=1,
verbose_name=_('Type')
)
allocation_units = models.IntegerField(
null=False,
default=0,
verbose_name=_('Allocation Units')
)
total = models.BigIntegerField(
null=False,
default=0,
verbose_name=_('Total (kB)')
)
available = models.BigIntegerField(
null=False,
default=0,
verbose_name=_('Available (kB)')
)
used = models.BigIntegerField(
null=False,
default=0,
verbose_name=_('Used (kB)')
)
percent_used = models.IntegerField(
null=False,
default=0,
verbose_name=_('Percent Used')
)
threshold_warning = models.IntegerField(
null=False,
default=80,
verbose_name=_('Warning Threshold')
)
threshold_critical = models.IntegerField(
null=False,
default=90,
verbose_name=_('Critical Threshold')
)
timestamp_created = models.DateTimeField(
auto_now_add=True,
verbose_name=_('Timestamp Created')
)
timestamp_modified = models.DateTimeField(
auto_now=True,
verbose_name=_('Timestamp Modified')
)
class NetworkInterfaceSensor(models.Model):
snmp_device = models.ForeignKey(SnmpDevice, on_delete=models.CASCADE)
enabled = models.BooleanField(
null=False,
default=True,
verbose_name=_('Sensor Enabled')
)
index = models.PositiveSmallIntegerField(
null=False,
default=0,
verbose_name=_('Sensor Index')
)
description = models.CharField(
null=False,
default='',
max_length=256,
verbose_name=_('Description')
)
type = models.SmallIntegerField(
choices=snmp_interface_type_choices,
null=False,
default=0,
verbose_name=_('Type')
)
speed = models.BigIntegerField(
null=True,
default=None,
verbose_name=_('Speed')
)
physical_address = models.CharField(
null=False,
default='',
max_length=256,
verbose_name=_('Physical Address')
)
admin_status = models.CharField(
null=False,
default='',
max_length=256,
verbose_name=_('Admin Status')
)
operational_status = models.CharField(
null=False,
default='',
max_length=256,
verbose_name=_('Operational Status')
)
in_octets = models.BigIntegerField(
null=False,
default=0,
verbose_name=_('In Octets')
)
in_bps = models.IntegerField(
null=False,
default=0,
verbose_name=_('In Bits/Second')
)
in_bps_threshold_warning_unit = models.SmallIntegerField(
choices=threshold_choices_bit,
null=False,
default=1,
verbose_name=_('In Bps Warning Threshold Unit')
)
in_bps_threshold_warning_value = models.PositiveSmallIntegerField(
null=False,
default=1,
verbose_name=_('In Bps Warning Threshold Value')
)
out_octets = models.BigIntegerField(
null=False,
default=0,
verbose_name=_('Out Octets')
)
out_bps = models.IntegerField(
null=False,
default=0,
verbose_name=_('Out Bits/Second')
)
out_bps_threshold_warning_unit = models.SmallIntegerField(
choices=threshold_choices_bit,
null=False,
default=1,
verbose_name=_('Out Bps Warning Threshold Unit')
)
out_bps_threshold_warning_value = models.PositiveSmallIntegerField(
null=False,
default=1,
verbose_name=_('Out Bps Warning Threshold Value')
)
timestamp_created = models.DateTimeField(
auto_now_add=True,
verbose_name=_('Timestamp Created')
)
timestamp_modified = models.DateTimeField(
auto_now=True,
verbose_name=_('Timestamp Modified')
)
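# --- Editor's illustrative note; not part of the original app. The in_bps and
# out_bps fields above are presumably derived from two successive octet
# counter readings; a minimal sketch of that conversion (assuming no counter
# wrap between polls) could look like this.
def _example_octets_to_bps(previous_octets, current_octets, interval_seconds):
    # 1 octet = 8 bits; the rate is the delta divided by the polling interval.
    return int((current_octets - previous_octets) * 8 / interval_seconds)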
| mit | 1,307,033,748,098,659,800 | 21.629758 | 73 | 0.583639 | false |
smartsheet-platform/smartsheet-python-sdk | smartsheet/models/workspace.py | 1 | 3824 | # pylint: disable=C0111,R0902,R0904,R0912,R0913,R0915,E1101
# Smartsheet Python SDK.
#
# Copyright 2018 Smartsheet.com, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"): you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import absolute_import
from .enums import AccessLevel
from .folder import Folder
from .report import Report
from .sheet import Sheet
from .sight import Sight
from .template import Template
from ..types import *
from ..util import serialize
from ..util import deserialize
class Workspace(object):
"""Smartsheet Workspace data model."""
def __init__(self, props=None, base_obj=None):
"""Initialize the Workspace model."""
self._base = None
if base_obj is not None:
self._base = base_obj
self._access_level = EnumeratedValue(AccessLevel)
self._favorite = Boolean()
self._folders = TypedList(Folder)
self._id_ = Number()
self._name = String()
self._permalink = String()
self._reports = TypedList(Report)
self._sheets = TypedList(Sheet)
self._sights = TypedList(Sight)
self._templates = TypedList(Template)
if props:
deserialize(self, props)
# requests package Response object
self.request_response = None
self.__initialized = True
def __getattr__(self, key):
if key == 'id':
return self.id_
else:
raise AttributeError(key)
def __setattr__(self, key, value):
if key == 'id':
self.id_ = value
else:
super(Workspace, self).__setattr__(key, value)
@property
def access_level(self):
return self._access_level
@access_level.setter
def access_level(self, value):
self._access_level.set(value)
@property
def favorite(self):
return self._favorite.value
@favorite.setter
def favorite(self, value):
self._favorite.value = value
@property
def folders(self):
return self._folders
@folders.setter
def folders(self, value):
self._folders.load(value)
@property
def id_(self):
return self._id_.value
@id_.setter
def id_(self, value):
self._id_.value = value
@property
def name(self):
return self._name.value
@name.setter
def name(self, value):
self._name.value = value
@property
def permalink(self):
return self._permalink.value
@permalink.setter
def permalink(self, value):
self._permalink.value = value
@property
def reports(self):
return self._reports
@reports.setter
def reports(self, value):
self._reports.load(value)
@property
def sheets(self):
return self._sheets
@sheets.setter
def sheets(self, value):
self._sheets.load(value)
@property
def sights(self):
return self._sights
@sights.setter
def sights(self, value):
self._sights.load(value)
@property
def templates(self):
return self._templates
@templates.setter
def templates(self, value):
self._templates.load(value)
def to_dict(self):
return serialize(self)
def to_json(self):
return json.dumps(self.to_dict())
def __str__(self):
return self.to_json()
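# --- Editor's illustrative sketch; not part of the SDK. It assumes, as
# __init__ and __setattr__ above suggest, that deserialize() maps the raw dict
# keys (including 'id') onto the typed fields, so a Workspace can be built
# from plain data and serialized back. The field values are made up.
def _example_workspace_roundtrip():
    ws = Workspace({'id': 123, 'name': 'Ops dashboards', 'favorite': True})
    # ws.id_ -> 123, ws.name -> 'Ops dashboards'
    return ws.to_dict()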
| apache-2.0 | 7,016,440,368,249,686,000 | 23.202532 | 75 | 0.624477 | false |
huqa/pyfibot | pyfibot/modules/module_kivatietaa.py | 1 | 2467 | # -*- coding: utf-8 -*-
'''
Created on 17.8.2013
@author: Hukka
'''
import time
from math import ceil
#from datetime import timedelta
#from datetime import datetime
from PIL import Image
from PIL import ImageFont
from PIL import ImageDraw
def generate_kivatietaa(nick,text):
#print len(text)
if not text:
return False
if len(text) < 10 or len(text) > 133:
return False
IMAGE_LOCATION = "images/kivat_med_template.png"
    import os
    OUTPUT_LOCATION = os.path.expanduser("~/public_html/kiva_tietaa/")  # expand ~ so PIL can save here
#MDX = 120
font_size = 14
nick_size = 9
rivit = ceil(len(text) / 36.0)
#print rivit
img = Image.open(IMAGE_LOCATION)
#img = img.convert("RGBA")
txt_img = Image.new("RGBA", (300, 255))
font = ImageFont.truetype('fonts/arial.ttf', font_size)
font2 = ImageFont.truetype('fonts/arial.ttf', nick_size)
draw = ImageDraw.Draw(img)
draw_txt = ImageDraw.Draw(txt_img)
start_m = 0
end_m = 0
merkit = 36
x_pos = 6
y_pos = 6
#text = "Aku-setä! " + text
for rivi in range(1,int(rivit)+1):
end_m = merkit * rivi
teksti = text[start_m:end_m]
start_m = end_m
draw.text((x_pos,y_pos), teksti.strip(), (2,2,2), font=font)
y_pos = y_pos + 18
#draw_txt.text((4,188), str(nick), (2,2,2), font=font2)
draw_txt.text((192,245), str(nick), (2,2,2), font=font2)
txt_img = txt_img.rotate(270)
img.paste(txt_img, None, txt_img)
#draw.text((x_pos,88), text, (2,2,2), font=font)
stamp = int(time.time())
filename = "kt_" + str(stamp) + ".png"
img.save(OUTPUT_LOCATION + filename)
#set scheduler
return filename
#return url
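# --- Editor's note: the fixed 36-character slicing above can split words in
# the middle. If word-aware wrapping were wanted, the standard library offers
# it; this sketch is an alternative, not what generate_kivatietaa currently
# does.
def _example_word_wrap(text, width=36):
    import textwrap
    return textwrap.wrap(text, width=width)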
def destroy_image(image):
import os
    folder = os.path.expanduser('~/public_html/kiva_tietaa')  # expand ~ so the path check works
file_path = os.path.join(folder, image)
try:
if os.path.isfile(file_path):
os.unlink(file_path)
except:
pass
def handle_kivatietaa(bot, user, channel, args):
if not args:
return
nick = getNick(user)
file = generate_kivatietaa(nick,args)
if file is False:
return bot.say(channel, "MATHO FUCKIN ERROR")
#aika = datetime.now()+timedelta(minutes=20)
#bot.scheduler.add_date_job(destroy_image, aika, [file])
return bot.say(channel, "http://server.tld/kiva_tietaa/%s" % file)
#if __name__ == "__main__":
# generate_kivatietaa("mcherwanta","Nyt kyllä näin niin ison hirven että rupes oikeen hirvittään! Tämä on tarinani!") | bsd-3-clause | -4,403,296,499,761,643,500 | 27.367816 | 120 | 0.611269 | false |
offlinehacker/flumotion | flumotion/monitor/nagios/component.py | 1 | 8063 | # -*- Mode: Python -*-
# vi:si:et:sw=4:sts=4:ts=4
#
# Flumotion - a streaming media server
# Copyright (C) 2004,2005,2006,2007 Fluendo, S.L. (www.fluendo.com).
# All rights reserved.
# This file may be distributed and/or modified under the terms of
# the GNU General Public License version 2 as published by
# the Free Software Foundation.
# This file is distributed without any warranty; without even the implied
# warranty of merchantability or fitness for a particular purpose.
# See "LICENSE.GPL" in the source distribution for more information.
# Licensees having purchased or holding a valid Flumotion Advanced
# Streaming Server license may use this file in accordance with the
# Flumotion Advanced Streaming Server Commercial License Agreement.
# See "LICENSE.Flumotion" in the source distribution for more information.
# Headers in this file shall remain intact.
from twisted.internet import reactor, defer
from flumotion.common import planet, log
from flumotion.monitor.nagios import util
class Mood(util.LogCommand):
description = "Check the mood of a component."
usage = "[mood options] [component id]"
def addOptions(self):
default = "hungry"
self.parser.add_option('-w', '--warning',
action="store", dest="warning",
help="moods to give a warning for (defaults to %s)" % (default),
default=default)
default = "sleeping,lost,sad"
self.parser.add_option('-c', '--critical',
action="store", dest="critical",
help="moods to give a critical for (defaults to %s)" % (default),
default=default)
def handleOptions(self, options):
self._warning = options.warning.split(',')
self._critical = options.critical.split(',')
def do(self, args):
if not args:
self.stderr.write(
                'Please specify a component to check the mood of.\n')
return 3
self._component = args[0]
# call our callback after connecting
self.parentCommand.managerDeferred.addCallback(self._callback)
def _callback(self, result):
d = self.parentCommand.adminModel.callRemote('getPlanetState')
def gotPlanetStateCb(result):
self.debug('gotPlanetStateCb')
c = util.findComponent(result, self._component)
if not c:
return util.unknown('Could not find component %s' %
self._component)
moodValue = c.get('mood')
moodName = planet.moods.get(moodValue).name
if moodName in self._critical:
return util.critical('Component %s is %s' % (self._component,
moodName))
if moodName in self._warning:
return util.warning('Component %s is %s' % (self._component,
moodName))
return util.ok('Component %s is %s' % (self._component,
moodName))
d.addCallback(gotPlanetStateCb)
d.addCallback(lambda e: setattr(reactor, 'exitStatus', e))
return d
class FlipFlopDetector(object):
def __init__(self, timeout, flipflops, mood_a, mood_b, state):
self.timeout = timeout
self.flipflops = flipflops
self.mood_a = mood_a
self.mood_b = mood_b
self.state = state
self.cancel = None
self.flip_count = 0
if state.get('mood') == self.mood_a:
self.current_state = self.mood_a
else:
self.current_state = None
self.waiting_d = defer.Deferred()
def wait(self):
return self.waiting_d
def start(self):
self.state.addListener(self, set_=self.state_set)
self.cancel = reactor.callLater(self.timeout,
self.success)
def state_set(self, cs, key, value):
if key != 'mood':
return
# the first time it goes to mood_a is not treated as a flip
if value == self.mood_a and self.current_state is None:
self.current_state = value
return
        # mood_a -> mood_b and mood_b -> mood_a transitions are flips
if (self.current_state, value) in ((self.mood_a, self.mood_b),
(self.mood_b, self.mood_a)):
self.current_state = value
self.flip_count += 1
if self.flip_count >= self.flipflops:
self.failure()
def success(self):
self.state.removeListener(self)
s = ''
if self.flip_count != 1:
s = 's'
self.waiting_d.callback("%d mood change%s detected" %
(self.flip_count, s))
def failure(self):
self.state.removeListener(self)
if self.cancel:
self.cancel.cancel()
s = ''
if self.flip_count != 1:
s = 's'
self.waiting_d.errback(Exception("%d mood change%s detected" %
(self.flip_count, s)))
class FlipFlop(util.LogCommand):
"""
This check connects to the manager and watches the state of a component for
    a given amount of time. Raises a critical if the mood alternates between two
extremes (by default: happy and hungry) more than the given number of
times.
"""
description = "Check if the mood of a component is flipflopping."
def addOptions(self):
self.parser.add_option('-i', '--component-id',
action="store",
help="component id of the component")
self.parser.add_option('-t', '--timeout', type="int",
action="store", default=15,
help="how long to test for flopflops")
self.parser.add_option('-f', '--flipflops', type="int",
action="store", default=2,
help=("how many mood changes should "
"be considered a flipflop"))
self.parser.add_option('-a', '--mood-a',
action="store", default="happy",
help=("the initial mood of the flipflop"))
self.parser.add_option('-b', '--mood-b',
action="store", default="hungry",
help=("the final mood of the flipflop"))
def handleOptions(self, options):
if not options.component_id:
raise util.NagiosUnknown("Please specify a component id "
"with '-i [component-id]'")
try:
self.mood_a = getattr(planet.moods, options.mood_a).value
except AttributeError:
raise util.NagiosUnknown("Invalid mood name '%s'" % options.mood_a)
try:
self.mood_b = getattr(planet.moods, options.mood_b).value
except AttributeError:
raise util.NagiosUnknown("Invalid mood name '%s'" % options.mood_b)
self.component_id = options.component_id
self.timeout = options.timeout
self.flipflops = options.flipflops
def do(self, args):
self.parentCommand.managerDeferred.addCallback(self._get_planet_state)
self.parentCommand.managerDeferred.addCallback(self._got_planet_state)
def _get_planet_state(self, _):
return self.parentCommand.adminModel.callRemote('getPlanetState')
def _got_planet_state(self, planet_state):
c = util.findComponent(planet_state, self.component_id)
if not c:
return util.unknown('Could not find component %s' %
self.component_id)
return self._detect_flipflops(c)
def _detect_flipflops(self, component_state):
f = FlipFlopDetector(self.timeout, self.flipflops, self.mood_a,
self.mood_b, component_state)
f.start()
d = f.wait()
return d.addCallbacks(util.ok, lambda f:
util.critical(f.getErrorMessage()))
| gpl-2.0 | 1,263,489,664,059,242,500 | 36.328704 | 79 | 0.575344 | false |
programmdesign/blitzdb | blitzdb/tests/test_documents.py | 1 | 3805 | import pytest
import copy
from blitzdb import Document
import six
@pytest.fixture(scope="function")
def mockup_backend():
class Backend(object):
def __init__(self):
self.attributes = {'foo': 'bar', 'baz': 123}
def get(self, DocumentClass, pk):
return DocumentClass(copy.deepcopy(self.attributes))
return Backend()
def test_unicode():
doc = Document({'pk' : 'foo'})
if six.PY2:
assert unicode(str(doc)) == unicode(doc)
else:
assert doc.__unicode__ == doc.__str__
def test_basic_attributes():
attributes = {'foo': 'bar', 'baz': 1243, 'd': {1: 3, 4: 5}, 'l': [1, 2, 3, 4]}
doc = Document(attributes)
assert doc.foo == 'bar'
assert doc.baz == 1243
assert doc.d == {1: 3, 4: 5}
assert doc.l == [1, 2, 3, 4]
assert doc.foo == doc['foo']
assert doc.baz == doc['baz']
assert doc.d == doc['d']
assert doc.attributes == attributes
def test_iteration():
attributes = {'foo': 'bar', 'baz': 1243, 'd': {1: 3, 4: 5}, 'l': [1, 2, 3, 4]}
doc = Document(attributes)
for key in doc:
assert key in attributes
for key,value in doc.items():
assert key in attributes
assert attributes[key] == value
for value in doc.values():
assert value in attributes.values()
def test_attribute_deletion():
attributes = {'foo': 'bar', 'baz': 1243, 'd': {1: 3, 4: 5}, 'l': [1, 2, 3, 4]}
doc = Document(attributes)
del doc.foo
with pytest.raises(AttributeError):
doc.foo
with pytest.raises(KeyError):
doc['foo']
with pytest.raises(KeyError):
del doc['foo']
with pytest.raises(AttributeError):
del doc.foo
def test_lazy_attributes(mockup_backend):
def get_lazy_doc():
return Document({'pk': 1}, lazy=True, default_backend=mockup_backend)
    # Fetching of attribute by class attribute
doc = get_lazy_doc()
assert doc._lazy == True
assert doc.foo == 'bar'
assert doc._lazy == False
# Fetching of attribute by dict
doc = get_lazy_doc()
assert doc._lazy == True
assert doc['foo'] == 'bar'
assert doc._lazy == False
# Getting all attributes
doc = get_lazy_doc()
assert doc._lazy == True
attributes = doc.attributes
del attributes['pk']
assert attributes == mockup_backend.attributes
assert doc._lazy == False
# Deletion by dict
doc = get_lazy_doc()
assert doc._lazy == True
del doc['foo']
with pytest.raises(KeyError):
doc['foo']
assert doc._lazy == False
# Deletion by attribute
doc = get_lazy_doc()
assert doc._lazy == True
del doc.foo
with pytest.raises(AttributeError):
doc.foo
assert doc._lazy == False
# Update by dict
doc = get_lazy_doc()
assert doc._lazy == True
doc['foo'] = 'faz'
assert doc._lazy == False
assert doc['foo'] == 'faz'
# Update by attribute
doc = get_lazy_doc()
assert doc._lazy == True
doc.foo = 'faz'
assert doc._lazy == False
assert doc.foo == 'faz'
def test_container_operations():
attributes = {'foo': 'bar', 'baz': 1243, 'd': {1: 3, 4: 5}, 'l': [1, 2, 3, 4]}
doc = Document(attributes)
with pytest.raises(KeyError):
doc['fooz']
assert ('foo' in doc) == True
assert ('fooz' in doc) == False
assert list(doc.keys()) == list(attributes.keys())
assert list(doc.values()) == list(attributes.values())
assert doc.items() == attributes.items()
def test_different_primary_key_names():
class MyDocument(Document):
class Meta:
primary_key = 'foobar'
doc = MyDocument({'foo': 'bar', 'foobar': 1})
assert doc.pk == 1
doc.pk = 2
assert doc.attributes['foobar'] == 2
| mit | -5,519,629,868,456,842,000 | 20.256983 | 82 | 0.578187 | false |
mozaik-association/mozaik | odoo_addons/mozaik_mandate/ext_mandate.py | 1 | 18650 | # -*- coding: utf-8 -*-
##############################################################################
#
# This file is part of mozaik_mandate, an Odoo module.
#
# Copyright (c) 2015 ACSONE SA/NV (<http://acsone.eu>)
#
# mozaik_mandate is free software:
# you can redistribute it and/or
# modify it under the terms of the GNU Affero General Public License
# as published by the Free Software Foundation, either version 3 of
# the License, or (at your option) any later version.
#
# mozaik_mandate is distributed in the hope that it will
# be useful but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the
# GNU Affero General Public License
# along with mozaik_mandate.
# If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import orm, fields
from openerp.addons.mozaik_mandate.abstract_mandate import abstract_candidature
from openerp.addons.mozaik_mandate.mandate import mandate_category
CANDIDATURE_AVAILABLE_STATES = [
('draft', 'Draft'),
('declared', 'Declared'),
('rejected', 'Rejected'),
('suggested', 'Suggested'),
('elected', 'Elected'),
]
candidature_available_states = dict(CANDIDATURE_AVAILABLE_STATES)
class ext_selection_committee(orm.Model):
_name = 'ext.selection.committee'
_description = 'Selection Committee'
_inherit = ['abstract.selection.committee']
_candidature_model = 'ext.candidature'
_assembly_model = 'ext.assembly'
_assembly_category_model = 'ext.assembly.category'
_mandate_category_foreign_key = 'ext_assembly_category_id'
_form_view = 'ext_selection_committee_form_view'
_parameters_key = 'ext_candidature_invalidation_delay'
def _get_suggested_candidatures(self, cr, uid, ids, context=None):
"""
==============================
_get_suggested_candidatures
==============================
Return list of candidature ids in suggested state
        :rparam: candidature ids
:rtype: list of ids
"""
return super(ext_selection_committee,
self)._get_suggested_candidatures(cr,
uid,
ids,
context=context)
_columns = {
'mandate_category_id': fields.many2one('mandate.category',
string='Mandate Category',
required=True,
track_visibility='onchange',
domain=[('type', '=', 'ext')]),
'is_virtual': fields.boolean('Is Virtual'),
'assembly_id': fields.many2one(_assembly_model,
string='External Assembly',
track_visibility='onchange'),
'candidature_ids': fields.one2many(_candidature_model,
'selection_committee_id',
'External Candidatures',
domain=[('active', '<=', True)],
context={'force_recompute': True}),
'assembly_category_id': fields.related(
'mandate_category_id',
_mandate_category_foreign_key,
string='External Assembly Category',
type='many2one',
relation=_assembly_category_model,
store=False),
'partner_ids': fields.many2many(
'res.partner', 'ext_selection_committee_res_partner_rel',
'committee_id', 'partner_id',
string='Members', domain=[('is_company', '=', False)]),
}
_defaults = {
'is_virtual': True,
}
_order = 'assembly_id, mandate_start_date, mandate_category_id, name'
# constraints
_unicity_keys = 'assembly_id, mandate_start_date, mandate_category_id, \
name'
# view methods: onchange, button
def action_copy(self, cr, uid, ids, context=None):
"""
==========================
action_copy
==========================
Duplicate committee and keep rejected external candidatures
:rparam: True
:rtype: boolean
"""
return super(ext_selection_committee, self).action_copy(
cr,
uid,
ids,
context=context)
def button_accept_candidatures(self, cr, uid, ids, context=None):
"""
==========================
button_accept_candidatures
==========================
This method calls the candidature workflow for each candidature_id in
order to update their state
:rparam: True
:rtype: boolean
:raise: Error if all candidatures are not in suggested state
"""
return super(ext_selection_committee,
self).button_accept_candidatures(cr,
uid,
ids,
context=context)
def button_refuse_candidatures(self, cr, uid, ids, context=None):
"""
==========================
button_refuse_candidatures
==========================
This method calls the candidature workflow for each candidature_id in
order to update their state
:rparam: True
:rtype: boolean
:raise: Error if all candidatures are not in suggested state
"""
return super(ext_selection_committee,
self).button_refuse_candidatures(cr,
uid,
ids,
context=context)
def onchange_assembly_id(self, cr, uid, ids, assembly_id, context=None):
return super(ext_selection_committee,
self).onchange_assembly_id(cr,
uid,
ids,
assembly_id,
context=None)
def process_invalidate_candidatures_after_delay(self, cr, uid,
context=None):
"""
===========================================
process_invalidate_candidatures_after_delay
===========================================
This method is used to invalidate candidatures after a defined
elapsed time
:rparam: True
:rtype: boolean
"""
return super(ext_selection_committee,
self).process_invalidate_candidatures_after_delay(
cr,
uid,
context=context)
class ext_candidature(orm.Model):
_name = 'ext.candidature'
_description = "External Candidature"
_inherit = ['abstract.candidature']
_mandate_model = 'ext.mandate'
_selection_committee_model = 'ext.selection.committee'
_init_mandate_columns = list(abstract_candidature._init_mandate_columns)
_init_mandate_columns.extend(['ext_assembly_id',
'months_before_end_of_mandate'])
_allowed_inactive_link_models = [_selection_committee_model]
_mandate_form_view = 'ext_mandate_form_view'
_unique_id_sequence = 400000000
_mandate_category_store_trigger = {
'ext.candidature': (lambda self, cr, uid, ids, context=None:
ids, ['selection_committee_id'], 20),
_selection_committee_model: (lambda self, cr, uid, ids, context=None:
self.pool.get('ext.candidature').search(
cr,
uid,
[('selection_committee_id',
'in', ids)],
context=context),
['mandate_category_id'], 20),
}
_ext_assembly_store_trigger = {
'ext.candidature': (lambda self, cr, uid, ids, context=None:
ids, ['selection_committee_id'], 20),
_selection_committee_model: (lambda self, cr, uid, ids, context=None:
self.pool.get('ext.candidature').search(
cr,
uid,
[('selection_committee_id',
'in', ids)],
context=context),
['ext_assembly_id'], 20),
}
_designation_assembly_store_trigger = {
'ext.candidature': (lambda self, cr, uid, ids, context=None:
ids, ['selection_committee_id'], 20),
_selection_committee_model: (lambda self, cr, uid, ids, context=None:
self.pool.get('ext.candidature').search(
cr,
uid,
[('selection_committee_id',
'in', ids)],
context=context),
['designation_int_assembly_id'], 20),
}
_mandate_start_date_store_trigger = {
'ext.candidature': (lambda self, cr, uid, ids, context=None:
ids, ['selection_committee_id'], 20),
_selection_committee_model: (lambda self, cr, uid, ids, context=None:
self.pool.get('ext.candidature').search(
cr,
uid,
[('selection_committee_id',
'in', ids)],
context=context),
['mandate_start_date'], 20),
}
_columns = {
'state': fields.selection(CANDIDATURE_AVAILABLE_STATES,
'Status',
readonly=True,
track_visibility='onchange',),
'selection_committee_id': fields.many2one(_selection_committee_model,
string='Selection Committee',
required=True,
select=True,
track_visibility='onchange'),
'mandate_category_id': fields.related(
'selection_committee_id',
'mandate_category_id',
string='Mandate Category',
type='many2one',
relation="mandate.category",
store=_mandate_category_store_trigger,
domain=[('type', '=', 'ext')]),
'mandate_start_date': fields.related(
'selection_committee_id',
'mandate_start_date',
string='Mandate Start Date',
type='date',
store=_mandate_start_date_store_trigger),
'ext_assembly_id': fields.related('selection_committee_id',
'assembly_id',
string='External Assembly',
type='many2one',
relation="ext.assembly",
store=_ext_assembly_store_trigger),
'designation_int_assembly_id': fields.related(
'selection_committee_id',
'designation_int_assembly_id',
string='Designation Assembly',
type='many2one',
relation="int.assembly",
store=_designation_assembly_store_trigger),
'months_before_end_of_mandate': fields.related(
'ext_assembly_id',
'months_before_end_of_mandate',
string='Alert Delay (#Months)',
type='integer',
relation="ext.assembly",
store=False),
'mandate_ids': fields.one2many(_mandate_model,
'candidature_id',
'External Mandates',
domain=[('active', '<=', True)]),
}
_order = 'ext_assembly_id, mandate_start_date, mandate_category_id, \
partner_name'
# view methods: onchange, button
def onchange_selection_committee_id(self, cr, uid, ids,
selection_committee_id, context=None):
res = {}
selection_committee = self.pool.get(
self._selection_committee_model).browse(
cr,
uid,
selection_committee_id,
context)
assembly = selection_committee.designation_int_assembly_id.id
res['value'] = dict(
ext_assembly_id=selection_committee.assembly_id.id,
designation_int_assembly_id=assembly,
mandate_category_id=selection_committee.mandate_category_id.id)
return res
def button_create_mandate(self, cr, uid, ids, context=None):
return super(ext_candidature,
self).button_create_mandate(cr, uid, ids, context=context)
class ext_mandate(orm.Model):
_name = 'ext.mandate'
_description = "External Mandate"
_inherit = ['abstract.mandate']
_allowed_inactive_link_models = ['ext.candidature']
_undo_redirect_action = 'mozaik_mandate.ext_mandate_action'
_unique_id_sequence = 400000000
_unique_id_store_trigger = {
'ext.mandate': (lambda self, cr, uid, ids, context=None:
ids, ['partner_id'], 20),
}
def _compute_unique_id(self, cr, uid, ids, fname, arg, context=None):
return super(ext_mandate,
self)._compute_unique_id(cr,
uid,
ids,
fname,
arg,
context=context)
_columns = {
'unique_id': fields.function(_compute_unique_id,
type="integer",
string="Unique id",
store=_unique_id_store_trigger),
'mandate_category_id': fields.many2one('mandate.category',
string='Mandate Category',
select=True,
required=True,
track_visibility='onchange',
domain=[('type', '=', 'ext')]),
'ext_assembly_id': fields.many2one('ext.assembly',
'External Assembly',
select=True,
required=True),
'ext_assembly_category_id': fields.related(
'mandate_category_id',
'ext_assembly_category_id',
string='External Assembly Category',
type='many2one',
relation="ext.assembly.category",
store=False),
'candidature_id': fields.many2one('ext.candidature',
'Candidature'),
'is_submission_mandate': fields.boolean(
string='With Wages Declaration',
help='Submission to a Mandates and Wages Declaration'),
'is_submission_assets': fields.boolean(
string='With Assets Declaration',
help='Submission to a Mandates and Assets Declaration'),
'competencies_m2m_ids': fields.many2many(
'thesaurus.term',
'ext_mandate_term_competencies_rel',
id1='ext_mandate_id',
id2='thesaurus_term_id',
string='Remits'),
'months_before_end_of_mandate': fields.integer(
'Alert Delay (#Months)',
track_visibility='onchange', group_operator='max'),
}
_order = 'partner_id, ext_assembly_id, start_date, mandate_category_id'
# constraints
_unicity_keys = 'partner_id, ext_assembly_id, start_date, \
mandate_category_id'
# view methods: onchange, button
def onchange_mandate_category_id(self, cr, uid, ids, mandate_category_id,
context=None):
ext_assembly_category_id = False
if mandate_category_id:
category_data = self.pool.get('mandate.category').read(
cr,
uid,
mandate_category_id,
['ext_assembly_category_id'],
context)
ext_assembly_category_id =\
category_data['ext_assembly_category_id'] or False
res = {
'ext_assembly_category_id': ext_assembly_category_id,
'ext_assembly_id': False,
}
return {
'value': res,
}
def onchange_ext_assembly_id(self, cr, uid, ids, ext_assembly_id,
context=None):
res = {}
res['value'] = dict(months_before_end_of_mandate=False,
designation_int_assembly_id=False)
if ext_assembly_id:
assembly = self.pool.get('ext.assembly').browse(cr,
uid,
ext_assembly_id)
months_before_end_of_mandate = \
assembly.months_before_end_of_mandate
designation_int_assembly_id = \
assembly.designation_int_assembly_id.id
res['value'] = dict(
months_before_end_of_mandate=months_before_end_of_mandate,
designation_int_assembly_id=designation_int_assembly_id)
return res
| agpl-3.0 | -946,782,091,890,759,300 | 41.099323 | 79 | 0.467239 | false |
jaeilepp/mne-python | mne/time_frequency/_stockwell.py | 2 | 10181 | # Authors : Denis A. Engemann <[email protected]>
# Alexandre Gramfort <[email protected]>
#
# License : BSD 3-clause
from copy import deepcopy
import math
import numpy as np
from scipy import fftpack
# XXX explore cuda optimization at some point.
from ..io.pick import pick_types, pick_info
from ..utils import verbose, warn
from ..parallel import parallel_func, check_n_jobs
from .tfr import AverageTFR, _get_data
def _check_input_st(x_in, n_fft):
"""Aux function."""
# flatten to 2 D and memorize original shape
n_times = x_in.shape[-1]
def _is_power_of_two(n):
return not (n > 0 and ((n & (n - 1))))
if n_fft is None or (not _is_power_of_two(n_fft) and n_times > n_fft):
# Compute next power of 2
n_fft = 2 ** int(math.ceil(math.log(n_times, 2)))
elif n_fft < n_times:
raise ValueError("n_fft cannot be smaller than signal size. "
"Got %s < %s." % (n_fft, n_times))
if n_times < n_fft:
warn('The input signal is shorter ({0}) than "n_fft" ({1}). '
'Applying zero padding.'.format(x_in.shape[-1], n_fft))
zero_pad = n_fft - n_times
pad_array = np.zeros(x_in.shape[:-1] + (zero_pad,), x_in.dtype)
x_in = np.concatenate((x_in, pad_array), axis=-1)
else:
zero_pad = 0
return x_in, n_fft, zero_pad
def _precompute_st_windows(n_samp, start_f, stop_f, sfreq, width):
"""Precompute stockwell gausian windows (in the freq domain)."""
tw = fftpack.fftfreq(n_samp, 1. / sfreq) / n_samp
tw = np.r_[tw[:1], tw[1:][::-1]]
    k = width  # 1 for classical Stockwell transform
f_range = np.arange(start_f, stop_f, 1)
windows = np.empty((len(f_range), len(tw)), dtype=np.complex)
for i_f, f in enumerate(f_range):
if f == 0.:
window = np.ones(len(tw))
else:
window = ((f / (np.sqrt(2. * np.pi) * k)) *
np.exp(-0.5 * (1. / k ** 2.) * (f ** 2.) * tw ** 2.))
window /= window.sum() # normalisation
windows[i_f] = fftpack.fft(window)
return windows
def _st(x, start_f, windows):
"""Compute ST based on Ali Moukadem MATLAB code (used in tests)."""
n_samp = x.shape[-1]
ST = np.empty(x.shape[:-1] + (len(windows), n_samp), dtype=np.complex)
# do the work
Fx = fftpack.fft(x)
XF = np.concatenate([Fx, Fx], axis=-1)
for i_f, window in enumerate(windows):
f = start_f + i_f
ST[..., i_f, :] = fftpack.ifft(XF[..., f:f + n_samp] * window)
return ST
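# --- Illustrative sketch (not part of the original module) -----------------
# Shows how the two private helpers above fit together on a synthetic signal.
# The sampling rate, test-tone frequency and bin range are arbitrary
# assumptions chosen only for demonstration; nothing here is called by the
# library code itself.
def _demo_st_helpers():
    sfreq = 100.
    t = np.arange(256) / sfreq
    x = np.sin(2 * np.pi * 10. * t)  # 10 Hz test tone
    start_f, stop_f = 1, 30          # frequency bins to analyse (assumed)
    windows = _precompute_st_windows(x.shape[-1], start_f, stop_f, sfreq, 1.0)
    st = _st(x, start_f, windows)    # complex, shape (n_freqs, n_times)
    return np.abs(st)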
def _st_power_itc(x, start_f, compute_itc, zero_pad, decim, W):
"""Aux function."""
n_samp = x.shape[-1]
n_out = (n_samp - zero_pad)
n_out = n_out // decim + bool(n_out % decim)
psd = np.empty((len(W), n_out))
itc = np.empty_like(psd) if compute_itc else None
X = fftpack.fft(x)
XX = np.concatenate([X, X], axis=-1)
for i_f, window in enumerate(W):
f = start_f + i_f
ST = fftpack.ifft(XX[:, f:f + n_samp] * window)
if zero_pad > 0:
TFR = ST[:, :-zero_pad:decim]
else:
TFR = ST[:, ::decim]
TFR_abs = np.abs(TFR)
TFR_abs[TFR_abs == 0] = 1.
if compute_itc:
TFR /= TFR_abs
itc[i_f] = np.abs(np.mean(TFR, axis=0))
TFR_abs *= TFR_abs
psd[i_f] = np.mean(TFR_abs, axis=0)
return psd, itc
def tfr_array_stockwell(data, sfreq, fmin=None, fmax=None, n_fft=None,
width=1.0, decim=1, return_itc=False, n_jobs=1):
"""Compute power and intertrial coherence using Stockwell (S) transform.
See [1]_, [2]_, [3]_, [4]_ for more information.
Parameters
----------
data : ndarray
The signal to transform. Any dimensionality supported as long
as the last dimension is time.
sfreq : float
The sampling frequency.
fmin : None, float
The minimum frequency to include. If None defaults to the minimum fft
frequency greater than zero.
fmax : None, float
The maximum frequency to include. If None defaults to the maximum fft.
n_fft : int | None
The length of the windows used for FFT. If None, it defaults to the
next power of 2 larger than the signal length.
width : float
The width of the Gaussian window. If < 1, increased temporal
resolution, if > 1, increased frequency resolution. Defaults to 1.
(classical S-Transform).
decim : int
The decimation factor on the time axis. To reduce memory usage.
return_itc : bool
Return intertrial coherence (ITC) as well as averaged power.
n_jobs : int
Number of parallel jobs to use.
Returns
-------
st_power : ndarray
The multitaper power of the Stockwell transformed data.
The last two dimensions are frequency and time.
itc : ndarray
The intertrial coherence. Only returned if return_itc is True.
freqs : ndarray
The frequencies.
References
----------
.. [1] Stockwell, R. G. "Why use the S-transform." AMS Pseudo-differential
operators: Partial differential equations and time-frequency
analysis 52 (2007): 279-309.
.. [2] Moukadem, A., Bouguila, Z., Abdeslam, D. O, and Dieterlen, A.
Stockwell transform optimization applied on the detection of split in
heart sounds (2014). Signal Processing Conference (EUSIPCO), 2013
Proceedings of the 22nd European, pages 2015--2019.
.. [3] Wheat, K., Cornelissen, P. L., Frost, S.J, and Peter C. Hansen
(2010). During Visual Word Recognition, Phonology Is Accessed
within 100 ms and May Be Mediated by a Speech Production
Code: Evidence from Magnetoencephalography. The Journal of
Neuroscience, 30 (15), 5229-5233.
.. [4] K. A. Jones and B. Porjesz and D. Chorlian and M. Rangaswamy and C.
Kamarajan and A. Padmanabhapillai and A. Stimus and H. Begleiter
(2006). S-transform time-frequency analysis of P300 reveals deficits in
individuals diagnosed with alcoholism.
Clinical Neurophysiology 117 2128--2143
See Also
--------
mne.time_frequency.tfr_stockwell
mne.time_frequency.tfr_multitaper
mne.time_frequency.tfr_array_multitaper
mne.time_frequency.tfr_morlet
mne.time_frequency.tfr_array_morlet
"""
n_epochs, n_channels = data.shape[:2]
n_out = data.shape[2] // decim + bool(data.shape[2] % decim)
data, n_fft_, zero_pad = _check_input_st(data, n_fft)
freqs = fftpack.fftfreq(n_fft_, 1. / sfreq)
if fmin is None:
fmin = freqs[freqs > 0][0]
if fmax is None:
fmax = freqs.max()
start_f = np.abs(freqs - fmin).argmin()
stop_f = np.abs(freqs - fmax).argmin()
freqs = freqs[start_f:stop_f]
W = _precompute_st_windows(data.shape[-1], start_f, stop_f, sfreq, width)
n_freq = stop_f - start_f
psd = np.empty((n_channels, n_freq, n_out))
itc = np.empty((n_channels, n_freq, n_out)) if return_itc else None
parallel, my_st, _ = parallel_func(_st_power_itc, n_jobs)
tfrs = parallel(my_st(data[:, c, :], start_f, return_itc, zero_pad,
decim, W)
for c in range(n_channels))
for c, (this_psd, this_itc) in enumerate(iter(tfrs)):
psd[c] = this_psd
if this_itc is not None:
itc[c] = this_itc
return psd, itc, freqs
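# --- Illustrative sketch (not part of the original module) -----------------
# Minimal usage example for tfr_array_stockwell on random data. The epoch,
# channel and sample counts and the 200 Hz sampling rate are made-up values;
# any (n_epochs, n_channels, n_times) array is handled the same way.
def _demo_tfr_array_stockwell():
    rng = np.random.RandomState(42)
    data = rng.randn(5, 2, 300)
    psd, itc, freqs = tfr_array_stockwell(data, sfreq=200., fmin=2., fmax=40.,
                                          return_itc=True)
    # psd and itc are (n_channels, n_freqs, n_times); freqs lists the bins
    return psd.shape, itc.shape, freqs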
@verbose
def tfr_stockwell(inst, fmin=None, fmax=None, n_fft=None,
width=1.0, decim=1, return_itc=False, n_jobs=1,
verbose=None):
"""Time-Frequency Representation (TFR) using Stockwell Transform.
Parameters
----------
inst : Epochs | Evoked
The epochs or evoked object.
fmin : None, float
The minimum frequency to include. If None defaults to the minimum fft
frequency greater than zero.
fmax : None, float
The maximum frequency to include. If None defaults to the maximum fft.
n_fft : int | None
The length of the windows used for FFT. If None, it defaults to the
next power of 2 larger than the signal length.
width : float
The width of the Gaussian window. If < 1, increased temporal
resolution, if > 1, increased frequency resolution. Defaults to 1.
(classical S-Transform).
decim : int
The decimation factor on the time axis. To reduce memory usage.
return_itc : bool
Return intertrial coherence (ITC) as well as averaged power.
n_jobs : int
The number of jobs to run in parallel (over channels).
verbose : bool, str, int, or None
If not None, override default verbose level (see :func:`mne.verbose`
and :ref:`Logging documentation <tut_logging>` for more).
Returns
-------
power : AverageTFR
The averaged power.
itc : AverageTFR
The intertrial coherence. Only returned if return_itc is True.
See Also
--------
mne.time_frequency.tfr_array_stockwell
mne.time_frequency.tfr_multitaper
mne.time_frequency.tfr_array_multitaper
mne.time_frequency.tfr_morlet
mne.time_frequency.tfr_array_morlet
Notes
-----
.. versionadded:: 0.9.0
"""
# verbose dec is used b/c subfunctions are verbose
data = _get_data(inst, return_itc)
picks = pick_types(inst.info, meg=True, eeg=True)
info = pick_info(inst.info, picks)
data = data[:, picks, :]
n_jobs = check_n_jobs(n_jobs)
power, itc, freqs = tfr_array_stockwell(data, sfreq=info['sfreq'],
fmin=fmin, fmax=fmax, n_fft=n_fft,
width=width, decim=decim,
return_itc=return_itc,
n_jobs=n_jobs)
times = inst.times[::decim].copy()
nave = len(data)
out = AverageTFR(info, power, times, freqs, nave, method='stockwell-power')
if return_itc:
out = (out, AverageTFR(deepcopy(info), itc, times.copy(),
freqs.copy(), nave, method='stockwell-itc'))
return out
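# --- Illustrative sketch (not part of the original module) -----------------
# End-to-end example on a tiny synthetic EpochsArray. Channel names, sampling
# rate and the frequency band are arbitrary assumptions for the demo; with a
# real recording you would pass your own Epochs or Evoked object instead.
def _demo_tfr_stockwell():
    import mne
    rng = np.random.RandomState(0)
    info = mne.create_info(['EEG 001', 'EEG 002'], sfreq=200., ch_types='eeg')
    events = np.c_[np.arange(4), np.zeros(4, int), np.ones(4, int)]
    epochs = mne.EpochsArray(rng.randn(4, 2, 400), info, events)
    power = tfr_stockwell(epochs, fmin=4., fmax=40., return_itc=False)
    return power  # AverageTFR; use power.plot(...) for a quick look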
| bsd-3-clause | 8,823,945,369,712,957,000 | 36.707407 | 79 | 0.59611 | false |
PHSCRC/boxbot | hardware/melexis.py | 1 | 1398 | #!/usr/bin/env python3
from adafruit.i2c import Adafruit_I2C
try:
from .component import *
except SystemError as err:
from component import *
class Melexis(LoopedComponent, I2CComponent):
_mswait = 50
_FN = "temp"
def init(self, fahrenheit=False):
super().init()
self._i2c = Adafruit_I2C(0x5A)
self.mode = fahrenheit
self._set_init()
def readAmbient(self):
return self._readTemp(0x06)
def readObject(self):
return self._readTemp(0x07)
def readObject2(self):
return self._readTemp(0x08)
def getDifference(self):
"""Returns how much warmer the object is than the ambient
temperature."""
return self.readObject() - self.readAmbient()
def _readTemp(self, reg):
temp = self._i2c.readS16(reg)
temp = temp * .02 - 273.15
if self.mode:
return (temp * 9 / 5) + 32
else:
return temp
def tick(self):
self.writedata((self.readObject(),
self.readAmbient()))
if __name__ == "__main__":
sensor = Melexis(fahrenheit=True)
import time
with sensor:
while True:
print("Object: {}ºF ({}ºF warmer than ambient)".format(
round(sensor.readObject(), 3),
round(sensor.getDifference(), 3)))
time.sleep(0.5)
| mit | -4,064,561,164,616,934,000 | 24.851852 | 67 | 0.563754 | false |
naturali/tensorflow | tensorflow/contrib/rnn/python/ops/gru_ops.py | 1 | 5673 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Python wrapper for the Block GRU Op."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.framework import common_shapes
from tensorflow.python.framework import load_library
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import nn_ops
from tensorflow.python.ops import rnn_cell
from tensorflow.python.ops import variable_scope as vs
from tensorflow.python.platform import resource_loader
_gru_ops_so = load_library.load_op_library(
resource_loader.get_path_to_datafile("_gru_ops.so"))
assert _gru_ops_so, "Could not load _gru_ops.so."
ops.RegisterShape("GRUBlockCellGrad")(common_shapes.call_cpp_shape_fn)
@ops.RegisterGradient("GRUBlockCell")
def _GRUBlockCellGrad(op, *grad):
r"""Gradient for GRUBlockCell.
Args:
op: Op for which the gradient is defined.
*grad: Gradients of the optimization function wrt output
for the Op.
Returns:
d_x: Gradients wrt to x
d_h: Gradients wrt to h
d_w_ru: Gradients wrt to w_ru
d_w_c: Gradients wrt to w_c
d_b_ru: Gradients wrt to b_ru
d_b_c: Gradients wrt to b_c
Mathematics behind the Gradients below:
```
d_c_bar = d_h \circ (1-u) \circ (1-c \circ c)
d_u_bar = d_h \circ (h-c) \circ u \circ (1-u)
d_r_bar_u_bar = [d_r_bar d_u_bar]
[d_x_component_1 d_h_prev_component_1] = d_r_bar_u_bar * w_ru^T
[d_x_component_2 d_h_prevr] = d_c_bar * w_c^T
d_x = d_x_component_1 + d_x_component_2
  d_h_prev = d_h_prev_component_1 + d_h_prevr \circ r + d_h \circ u
```
Below calculation is performed in the python wrapper for the Gradients
(not in the gradient kernel.)
```
d_w_ru = x_h_prevr^T * d_c_bar
d_w_c = x_h_prev^T * d_r_bar_u_bar
d_b_ru = sum of d_r_bar_u_bar along axis = 0
d_b_c = sum of d_c_bar along axis = 0
```
"""
x, h_prev, w_ru, w_c, b_ru, b_c = op.inputs
r, u, c, _ = op.outputs
_, _, _, d_h = grad
d_x, d_h_prev, d_c_bar, d_r_bar_u_bar = _gru_ops_so.gru_block_cell_grad(
x, h_prev, w_ru, w_c, b_ru, b_c, r, u, c, d_h)
x_h_prev = array_ops.concat(1, [x, h_prev])
d_w_ru = math_ops.matmul(x_h_prev, d_r_bar_u_bar, transpose_a=True)
d_b_ru = nn_ops.bias_add_grad(d_r_bar_u_bar)
x_h_prevr = array_ops.concat(1, [x, h_prev * r])
d_w_c = math_ops.matmul(x_h_prevr, d_c_bar, transpose_a=True)
d_b_c = nn_ops.bias_add_grad(d_c_bar)
return d_x, d_h_prev, d_w_ru, d_w_c, d_b_ru, d_b_c
ops.RegisterShape("GRUBlockCell")(common_shapes.call_cpp_shape_fn)
class GRUBlockCell(rnn_cell.RNNCell):
r"""Block GRU cell implementation.
The implementation is based on: http://arxiv.org/abs/1406.1078
  Computes the GRU cell forward propagation for 1 time step.
This kernel op implements the following mathematical equations:
  Biases are initialized with:
`b_ru` - constant_initializer(1.0)
`b_c` - constant_initializer(0.0)
```
x_h_prev = [x, h_prev]
[r_bar u_bar] = x_h_prev * w_ru + b_ru
r = sigmoid(r_bar)
u = sigmoid(u_bar)
h_prevr = h_prev \circ r
x_h_prevr = [x h_prevr]
c_bar = x_h_prevr * w_c + b_c
c = tanh(c_bar)
h = (1-u) \circ c + u \circ h_prev
```
"""
def __init__(self, cell_size):
"""Initialize the Block GRU cell.
Args:
cell_size: int, GRU cell size.
"""
self._cell_size = cell_size
@property
def state_size(self):
return self._cell_size
@property
def output_size(self):
return self._cell_size
def __call__(self, x, h_prev, scope=None):
"""GRU cell."""
with vs.variable_scope(scope or type(self).__name__):
input_size = x.get_shape().with_rank(2)[1]
# Check if the input size exist.
if input_size is None:
raise ValueError("Expecting input_size to be set.")
# Check cell_size == state_size from h_prev.
cell_size = h_prev.get_shape().with_rank(2)[1]
if cell_size != self._cell_size:
raise ValueError("Shape of h_prev[1] incorrect: cell_size %i vs %s" %
(self._cell_size, cell_size))
if cell_size is None:
raise ValueError("cell_size from `h_prev` should not be None.")
w_ru = vs.get_variable("w_ru", [input_size + self._cell_size,
self._cell_size * 2])
b_ru = vs.get_variable(
"b_ru", [self._cell_size * 2],
initializer=init_ops.constant_initializer(1.0))
w_c = vs.get_variable("w_c",
[input_size + self._cell_size, self._cell_size])
b_c = vs.get_variable(
"b_c", [self._cell_size],
initializer=init_ops.constant_initializer(0.0))
_gru_block_cell = _gru_ops_so.gru_block_cell # pylint: disable=invalid-name
_, _, _, new_h = _gru_block_cell(
x=x, h_prev=h_prev, w_ru=w_ru, w_c=w_c, b_ru=b_ru, b_c=b_c)
return new_h, new_h
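# --- Reference sketch (added for illustration, not used by the op) ----------
# The forward equations from the GRUBlockCell docstring written out with
# NumPy, so the math above can be checked independently of the compiled
# _gru_ops_so kernel. Shapes follow the docstring: x is (batch, input),
# h_prev is (batch, cell), w_ru is (input + cell, 2 * cell) and w_c is
# (input + cell, cell). The sigmoid helper is an assumption of this sketch.
def _gru_block_cell_reference(x, h_prev, w_ru, w_c, b_ru, b_c):
  import numpy as np

  def sigmoid(a):
    return 1. / (1. + np.exp(-a))

  x_h_prev = np.concatenate([x, h_prev], axis=1)
  r, u = np.split(sigmoid(np.dot(x_h_prev, w_ru) + b_ru), 2, axis=1)
  x_h_prevr = np.concatenate([x, h_prev * r], axis=1)
  c = np.tanh(np.dot(x_h_prevr, w_c) + b_c)
  h = (1. - u) * c + u * h_prev
  return r, u, c, h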
| apache-2.0 | -2,168,683,449,094,958,300 | 29.831522 | 82 | 0.628415 | false |
gclenaghan/SIEVE | data/generate_data_files.py | 1 | 2231 | """
Generate data files and TOC file for the visualization.
Depends on vtn_sieve and pysieve (vtn_sieve is a private repository that cannot be shared)
See file specification at file_spec.md
"""
from pysieve import substbased
from seqdistance.matrices import binarySubst, addGapScores, binGapScores
from vtn_sieve import *
import pandas as pd
def generateData(studyClasses, analysisClasses, analysisParams, nperms=10000):
for sc in studyClasses:
"""For each study, loop over all analyses and produce files for each."""
s = sc()
for va in s.validAnalyses:
s.loadData(**va)
s.to_fasta()
s.to_treatment_file()
for ac in analysisClasses:
a = ac(sievedata=s.data)
a.initialize(params=analysisParams)
a.computeDistance(params=analysisParams)
a.computeObserved(distFilter=None)
a.permutationTest(nperms, clusterClient=None)
a.computePvalues()
a.to_distance_csv()
a.to_csv()
def generateTOC(studyClasses, analysisClasses):
tocColumns = ['study','protein','reference','distance_method']
toc = {k:[] for k in tocColumns}
for sc in studyClasses:
"""For each study, loop over all analyses and produce files for each."""
s = sc()
for va in s.validAnalyses:
for ac in analysisClasses:
a = ac(None)
toc['study'].append(s.studyName)
toc['protein'].append(va['proteinName'])
toc['reference'].append(va['insertName'])
toc['distance_method'].append(a.methodName)
tocDf = pd.DataFrame(toc)[tocColumns]
return tocDf
if __name__ == '__main__':
#studyClasses = [sieveVTN502, sieveVTN503, sieveVTN505, sieveRV144]
studyClasses = [sieveVTN502, sieveRV144]
analysisClasses = [substbased.vxmatch_siteAnalysis]
analysisParams = dict(subst=addGapScores(binarySubst, binGapScores))
#generateData(studyClasses, analysisClasses, analysisParams)
tocDf = generateTOC(studyClasses, analysisClasses)
tocDf.to_csv('sieve_toc.csv', index=False)
| mit | 5,045,476,629,932,464,000 | 38.140351 | 90 | 0.645002 | false |
cheetah90/PokemonGoScraper | pogom/utils.py | 1 | 14592 | #!/usr/bin/python
# -*- coding: utf-8 -*-
import sys
import configargparse
import uuid
import os
import json
from datetime import datetime, timedelta
import logging
import shutil
import requests
import platform
from . import config
log = logging.getLogger(__name__)
def parse_unicode(bytestring):
decoded_string = bytestring.decode(sys.getfilesystemencoding())
return decoded_string
def verify_config_file_exists(filename):
fullpath = os.path.join(os.path.dirname(__file__), filename)
if not os.path.exists(fullpath):
log.info('Could not find %s, copying default', filename)
shutil.copy2(fullpath + '.example', fullpath)
def get_args():
# fuck PEP8
configpath = os.path.join(os.path.dirname(__file__), '../config/config.ini')
parser = configargparse.ArgParser(default_config_files=[configpath])
parser.add_argument('-minx', '--minx', type=float, help='Minimum X coordinate')
parser.add_argument('-miny', '--miny', type=float, help='Minimum Y coordinate')
parser.add_argument('-maxx', '--maxx', type=float, help='Maximum X coordinate')
parser.add_argument('-maxy', '--maxy', type=float, help='Maximum Y coordinate')
parser.add_argument('-a', '--auth-service', type=str.lower, action='append',
help='Auth Services, either one for all accounts or one per account. \
ptc or google. Defaults all to ptc.')
parser.add_argument('-u', '--username', action='append',
help='Usernames, one per account.')
parser.add_argument('-p', '--password', action='append',
help='Passwords, either single one for all accounts or one per account.')
parser.add_argument('-l', '--location', type=parse_unicode,
help='Location, can be an address or coordinates', default= "foo")
parser.add_argument('-st', '--step-limit', help='Steps', type=int,
default=12)
parser.add_argument('-sd', '--scan-delay',
help='Time delay between requests in scan threads',
type=float, default=10)
parser.add_argument('-ld', '--login-delay',
help='Time delay between each login attempt',
type=float, default=5)
parser.add_argument('-lr', '--login-retries',
help='Number of logins attempts before refreshing a thread',
type=int, default=3)
parser.add_argument('-sr', '--scan-retries',
help='Number of retries for a given scan cell',
type=int, default=5)
parser.add_argument('-dc', '--display-in-console',
help='Display Found Pokemon in Console',
action='store_true', default=False)
parser.add_argument('-H', '--host', help='Set web server listening host',
default='127.0.0.1')
parser.add_argument('-P', '--port', type=int,
help='Set web server listening port', default=5000)
parser.add_argument('-L', '--locale',
help='Locale for Pokemon names (default: {},\
check {} for more)'.
format(config['LOCALE'], config['LOCALES_DIR']), default='en')
parser.add_argument('-c', '--china',
help='Coordinates transformer for China',
action='store_true')
parser.add_argument('-d', '--debug', help='Debug Mode', action='store_true')
parser.add_argument('-m', '--mock',
help='Mock mode. Starts the web server but not the background thread.',
action='store_true', default=False)
parser.add_argument('-ns', '--no-server',
help='No-Server Mode. Starts the searcher but not the Webserver.',
action='store_true', default=False)
parser.add_argument('-os', '--only-server',
help='Server-Only Mode. Starts only the Webserver without the searcher.',
action='store_true', default=False)
parser.add_argument('-nsc', '--no-search-control',
help='Disables search control',
action='store_false', dest='search_control', default=True)
parser.add_argument('-fl', '--fixed-location',
help='Hides the search bar for use in shared maps.',
action='store_true', default=False)
parser.add_argument('-k', '--gmaps-key',
help='Google Maps Javascript API Key',
required=True)
parser.add_argument('-C', '--cors', help='Enable CORS on web server',
action='store_true', default=False)
parser.add_argument('-D', '--db', help='Database filename',
default='pogom.db')
parser.add_argument('-cd', '--clear-db',
help='Deletes the existing database before starting the Webserver.',
action='store_true', default=False)
parser.add_argument('-np', '--no-pokemon',
help='Disables Pokemon from the map (including parsing them into local db)',
action='store_true', default=False)
parser.add_argument('-ng', '--no-gyms',
help='Disables Gyms from the map (including parsing them into local db)',
action='store_true', default=False)
parser.add_argument('-nk', '--no-pokestops',
help='Disables PokeStops from the map (including parsing them into local db)',
action='store_true', default=False)
parser.add_argument('-pd', '--purge-data',
help='Clear pokemon from database this many hours after they disappear \
(0 to disable)', type=int, default=0)
parser.add_argument('-px', '--proxy', help='Proxy url (e.g. socks5://127.0.0.1:9050)')
parser.add_argument('--db-type', help='Type of database to be used (default: sqlite)',
default='sqlite')
parser.add_argument('--db-name', help='Name of the database to be used')
parser.add_argument('--db-user', help='Username for the database')
parser.add_argument('--db-pass', help='Password for the database')
parser.add_argument('--db-host', help='IP or hostname for the database')
parser.add_argument('--db-port', help='Port for the database', type=int, default=3306)
parser.add_argument('--db-max_connections', help='Max connections (per thread) for the database',
type=int, default=5)
parser.add_argument('-wh', '--webhook', help='Define URL(s) to POST webhook information to',
nargs='*', default=False, dest='webhooks')
parser.set_defaults(DEBUG=False)
args = parser.parse_args()
if args.only_server:
if args.location is None:
parser.print_usage()
            print(sys.argv[0] + ": error: argument -l/--location is required")
sys.exit(1)
else:
errors = []
num_auths = 1
num_usernames = 0
num_passwords = 0
if (args.username is None):
errors.append('Missing `username` either as -u/--username or in config')
else:
num_usernames = len(args.username)
if (args.location is None):
errors.append('Missing `location` either as -l/--location or in config')
if (args.password is None):
errors.append('Missing `password` either as -p/--password or in config')
else:
num_passwords = len(args.password)
if (args.step_limit is None):
errors.append('Missing `step_limit` either as -st/--step-limit or in config')
if args.auth_service is None:
args.auth_service = ['ptc']
else:
num_auths = len(args.auth_service)
if num_usernames > 1:
if num_passwords > 1 and num_usernames != num_passwords:
errors.append('The number of provided passwords ({}) must match the username count ({})'.format(num_passwords, num_usernames))
if num_auths > 1 and num_usernames != num_auths:
errors.append('The number of provided auth ({}) must match the username count ({})'.format(num_auths, num_usernames))
if len(errors) > 0:
parser.print_usage()
print(sys.argv[0] + ": errors: \n - " + "\n - ".join(errors))
sys.exit(1)
# Fill the pass/auth if set to a single value
if num_passwords == 1:
args.password = [args.password[0]] * num_usernames
if num_auths == 1:
args.auth_service = [args.auth_service[0]] * num_usernames
# Make our accounts list
args.accounts = []
# Make the accounts list
for i, username in enumerate(args.username):
args.accounts.append({'username': username, 'password': args.password[i], 'auth_service': args.auth_service[i]})
return args
def insert_mock_data(position):
num_pokemon = 6
num_pokestop = 6
num_gym = 6
log.info('Creating fake: %d pokemon, %d pokestops, %d gyms',
num_pokemon, num_pokestop, num_gym)
from .models import Pokemon, Pokestop, Gym
from .search import generate_location_steps
latitude, longitude = float(position[0]), float(position[1])
locations = [l for l in generate_location_steps((latitude, longitude), num_pokemon)]
disappear_time = datetime.now() + timedelta(hours=1)
detect_time = datetime.now()
for i in range(1, num_pokemon):
Pokemon.create(encounter_id=uuid.uuid4(),
spawnpoint_id='sp{}'.format(i),
pokemon_id=(i + 1) % 150,
latitude=locations[i][0],
longitude=locations[i][1],
disappear_time=disappear_time,
detect_time=detect_time)
for i in range(1, num_pokestop):
Pokestop.create(pokestop_id=uuid.uuid4(),
enabled=True,
latitude=locations[i + num_pokemon][0],
longitude=locations[i + num_pokemon][1],
last_modified=datetime.now(),
# Every other pokestop be lured
lure_expiration=disappear_time if (i % 2 == 0) else None,
)
for i in range(1, num_gym):
Gym.create(gym_id=uuid.uuid4(),
team_id=i % 3,
guard_pokemon_id=(i + 1) % 150,
latitude=locations[i + num_pokemon + num_pokestop][0],
longitude=locations[i + num_pokemon + num_pokestop][1],
last_modified=datetime.now(),
enabled=True,
gym_points=1000
)
def i8ln(word):
if config['LOCALE'] == "en":
return word
if not hasattr(i8ln, 'dictionary'):
file_path = os.path.join(
config['ROOT_PATH'],
config['LOCALES_DIR'],
'{}.min.json'.format(config['LOCALE']))
if os.path.isfile(file_path):
with open(file_path, 'r') as f:
i8ln.dictionary = json.loads(f.read())
else:
log.warning('Skipping translations - Unable to find locale file: %s', file_path)
return word
if word in i8ln.dictionary:
return i8ln.dictionary[word]
else:
log.debug('Unable to find translation for "%s" in locale %s!', word, config['LOCALE'])
return word
def get_pokemon_data(pokemon_id):
if not hasattr(get_pokemon_data, 'pokemon'):
file_path = os.path.join(
config['ROOT_PATH'],
config['DATA_DIR'],
'pokemon.min.json')
with open(file_path, 'r') as f:
get_pokemon_data.pokemon = json.loads(f.read())
return get_pokemon_data.pokemon[str(pokemon_id)]
def get_pokemon_name(pokemon_id):
return i8ln(get_pokemon_data(pokemon_id)['name'])
def get_pokemon_rarity(pokemon_id):
return i8ln(get_pokemon_data(pokemon_id)['rarity'])
def get_pokemon_types(pokemon_id):
pokemon_types = get_pokemon_data(pokemon_id)['types']
return map(lambda x: {"type": i8ln(x['type']), "color": x['color']}, pokemon_types)
def send_to_webhook(message_type, message):
args = get_args()
data = {
'type': message_type,
'message': message
}
if args.webhooks:
webhooks = args.webhooks
for w in webhooks:
try:
requests.post(w, json=data, timeout=(None, 1))
except requests.exceptions.ReadTimeout:
log.debug('Response timeout on webhook endpoint %s', w)
except requests.exceptions.RequestException as e:
log.debug(e)
def get_encryption_lib_path():
    # win32 doesn't necessarily mean 32 bits
if sys.platform == "win32" or sys.platform == "cygwin":
if platform.architecture()[0] == '64bit':
lib_name = "encrypt64bit.dll"
else:
lib_name = "encrypt32bit.dll"
elif sys.platform == "darwin":
lib_name = "libencrypt-osx-64.so"
elif os.uname()[4].startswith("arm") and platform.architecture()[0] == '32bit':
lib_name = "libencrypt-linux-arm-32.so"
elif os.uname()[4].startswith("aarch64") and platform.architecture()[0] == '64bit':
lib_name = "libencrypt-linux-arm-64.so"
elif sys.platform.startswith('linux'):
if "centos" in platform.platform():
if platform.architecture()[0] == '64bit':
lib_name = "libencrypt-centos-x86-64.so"
else:
lib_name = "libencrypt-linux-x86-32.so"
else:
if platform.architecture()[0] == '64bit':
lib_name = "libencrypt-linux-x86-64.so"
else:
lib_name = "libencrypt-linux-x86-32.so"
elif sys.platform.startswith('freebsd'):
lib_name = "libencrypt-freebsd-64.so"
else:
err = "Unexpected/unsupported platform '{}'".format(sys.platform)
log.error(err)
raise Exception(err)
lib_path = os.path.join(os.path.dirname(__file__), "libencrypt", lib_name)
if not os.path.isfile(lib_path):
err = "Could not find {} encryption library {}".format(sys.platform, lib_path)
log.error(err)
raise Exception(err)
return lib_path
| agpl-3.0 | 1,059,105,654,188,054,000 | 40.691429 | 142 | 0.566269 | false |
RobotGame/rgserver | matchmaker/sandbox.py | 1 | 3717 | import ast
import imp
import limitexec as le
import os
import pkg_resources
import pwd
import random
import resource
import sys
import time
import traceback
from rgkit.settings import settings
def load_map():
map_filename = pkg_resources.resource_filename(
'rgkit', 'maps/default.py')
map_data = ast.literal_eval(open(map_filename).read())
settings.init_map(map_data)
def proxy_process_routine(user_code, queue_in, queue_out, queue_output):
start_time = time.time()
pid = os.getpid()
queue_output.put('Starting {} at {}.\n'.format(pid, start_time))
class Logger:
def write(self, data):
queue_output.put(data)
def flush(self):
pass
# Cannot use sys as drop_privileges will disable it
out = sys.stdout = sys.stderr = Logger()
trace_func = traceback.print_exc
exit_func = os._exit
try:
def limit_resources():
            MEM_LIMIT = (2 ** 20) * 1024  # 1024 MB, expressed in bytes
for rsrc in ('DATA', 'RSS', 'AS'):
resource.setrlimit(
getattr(
resource,
'RLIMIT_' + rsrc),
(MEM_LIMIT,
MEM_LIMIT))
resource.setrlimit(resource.RLIMIT_NPROC, (10, 10))
def disable_modules(*blacklist):
'''Always disable sys.'''
def disable_mod(mod):
sys.modules[mod] = None
globals()[mod] = None
pass
for mod_name in blacklist:
disable_mod(mod_name)
disable_mod('sys')
# counting on iptables to restrict network access for `nobody`
def drop_privileges(uid_name='nobody'):
uid = pwd.getpwnam(uid_name).pw_uid
# limit_resources()
os.chroot('jail')
os.chdir('jail')
os.setgroups([])
os.umask(0)
os.setuid(uid)
os.nice(5) # Lower priority
disable_modules(
'ctypes',
'imp',
'inspect',
'multiprocessing',
'os',
'pdb',
'posix',
'dbcon')
# No sleeping!
time.sleep = lambda s: 0
def make_user_robot(code, mod):
try:
exec code in mod.__dict__
except:
trace_func(file=out)
return None
finally:
cmp_time = time.time()
out.write(
'Compilation: {0:.4g}s\n'.format(cmp_time - start_time))
if 'Robot' in mod.__dict__:
bot = mod.__dict__['Robot']()
ini_time = time.time()
out.write(
'Initialization: {0:.4g}s\n'.format(
ini_time - cmp_time))
return bot
for data in iter(queue_in.get, None):
if 'query' in data:
load_map()
mod = imp.new_module('usercode')
drop_privileges()
robot = make_user_robot(user_code, mod)
if robot is not None:
queue_out.put({'result': 'ok'})
else:
queue_out.put({'result': 'failed'})
else:
robot.__dict__.update(data['properties'])
random.seed(data['game'].seed)
with le.time_limit(data['timelimit']):
action = robot.act(data['game'])
queue_out.put({'result': 'ok', 'ret': action})
except:
trace_func(file=out)
exit_func(0)
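# --- Illustrative parent-side sketch (not part of the original module) ------
# How a caller might drive proxy_process_routine through multiprocessing
# queues. The robot source string is a placeholder; actually running this
# needs the 'jail' directory and the privileges that drop_privileges() expects.
def _demo_drive_sandbox():
    import multiprocessing
    user_code = ("class Robot(object):\n"
                 "    def act(self, game):\n"
                 "        return ['guard']\n")
    q_in, q_out, q_log = [multiprocessing.Queue() for _ in range(3)]
    proc = multiprocessing.Process(target=proxy_process_routine,
                                   args=(user_code, q_in, q_out, q_log))
    proc.start()
    q_in.put({'query': True})   # ask the sandbox to compile the robot
    result = q_out.get()        # {'result': 'ok'} on success
    q_in.put(None)              # sentinel: iter(queue_in.get, None) stops
    proc.join()
    return result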
| mit | -2,026,913,588,068,066,000 | 28.736 | 76 | 0.470541 | false |
jschultz/nvivotools | NVPX2RQDA.py | 1 | 5482 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2016 Jonathan Schultz
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import print_function
import os
import subprocess
import re
import sys
# First set up environment for SQL Anywhere server and restart process if necessary
helperpath = os.path.dirname(os.path.realpath(__file__)) + os.path.sep + 'helpers' + os.path.sep
# Set environment variables for SQL Anywhere server
if not os.environ.get('_sqlanywhere'):
envlines = subprocess.check_output(helperpath + 'sqlanyenv.sh').splitlines()
for envline in envlines:
env = re.match(r"(?P<name>\w+)=(?P<quote>['\"]?)(?P<value>.+)(?P=quote)", envline).groupdict()
os.environ[env['name']] = env['value']
os.environ['_sqlanywhere'] = 'TRUE'
os.execv(sys.argv[0], sys.argv)
# Environment is now ready
import argparse
import NVivo
import RQDA
import shutil
import tempfile
parser = argparse.ArgumentParser(description='Convert an NVivo for Mac (.nvpx) file into an RQDA project.')
# --cmdline argument means retain full output file path name, otherwise strip directory,
# so that things work under Wooey.
parser.add_argument('--cmdline', action='store_true',
help=argparse.SUPPRESS)
parser.add_argument('-v', '--verbosity', type=int, default=1)
parser.add_argument('-nv', '--nvivoversion', choices=["10", "11"], default="10",
help='NVivo version (10 or 11)')
parser.add_argument('-u', '--users', choices=["skip", "overwrite"], default="overwrite",
help='User action.')
parser.add_argument('-p', '--project', choices=["skip", "overwrite"], default="overwrite",
help='Project action.')
parser.add_argument('-nc', '--node-categories', choices=["skip", "overwrite"], default="overwrite",
help='Node category action.')
parser.add_argument('-n', '--nodes', choices=["skip", "overwrite"], default="overwrite",
help='Node action.')
parser.add_argument('-c', '--cases', choices=["skip", "overwrite"], default="overwrite",
help='case action.')
parser.add_argument('-ca', '--node-attributes', choices=["skip", "overwrite"], default="overwrite",
help='Case attribute table action.')
parser.add_argument('-sc', '--source-categories', choices=["skip", "overwrite"], default="overwrite",
help='Source category action.')
parser.add_argument('-s', '--sources', choices=["skip", "overwrite"], default="overwrite",
help='Source action.')
parser.add_argument('-sa', '--source-attributes', choices=["skip", "overwrite"], default="overwrite",
help='Source attribute action.')
parser.add_argument('-t', '--taggings', choices=["skip", "overwrite"], default="overwrite",
help='Tagging action.')
parser.add_argument('-a', '--annotations', choices=["skip", "overwrite"], default="overwrite",
help='Annotation action.')
parser.add_argument('infile', type=argparse.FileType('rb'),
help="Input NVivo for Mac file (extension .nvpx)")
parser.add_argument('outfilename', type=str, nargs='?',
help="Output RQDA file")
args = parser.parse_args()
# Fill in extra arguments that NVivo module expects
args.mac = True
args.windows = False
tmpinfilename = tempfile.mktemp()
tmpinfileptr = file(tmpinfilename, 'wb')
tmpinfileptr.write(args.infile.read())
args.infile.close()
tmpinfileptr.close()
if args.outfilename is None:
args.outfilename = args.infile.name.rsplit('.',1)[0] + '.rqda'
tmpnormfilename = tempfile.mktemp()
# Find a free port for the SQL Anywhere server to bind to
import socket
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.bind(("",0))
freeport = str(s.getsockname()[1])
s.close()
DEVNULL = open(os.devnull, 'wb')
dbproc = subprocess.Popen(['sh', helperpath + 'sqlanysrv.sh', '-x TCPIP(port='+freeport+')', '-ga', '-xd', tmpinfilename, '-n', 'NVivo'+freeport], stdout=subprocess.PIPE, stdin=DEVNULL)
# Wait until SQL Anywhere engine starts...
while dbproc.poll() is None:
line = dbproc.stdout.readline()
if line == 'Now accepting requests\n':
break
if args.verbosity > 0:
print("Started database server on port " + freeport, file=sys.stderr)
args.indb = 'sqlalchemy_sqlany://wiwalisataob2aaf:iatvmoammgiivaam@localhost:' + freeport + '/NVivo' + freeport
args.outdb = 'sqlite:///' + tmpnormfilename
NVivo.Normalise(args)
os.remove(tmpinfilename)
tmpoutfilename = tempfile.mktemp()
args.indb = 'sqlite:///' + tmpnormfilename
args.outdb = 'sqlite:///' + tmpoutfilename
# Small hack
args.case_attributes = args.node_attributes
RQDA.Norm2RQDA(args)
if not args.cmdline:
args.outfilename = os.path.basename(args.outfilename)
shutil.move(tmpoutfilename, args.outfilename)
os.remove(tmpnormfilename)
| gpl-3.0 | -778,848,020,103,417,500 | 37.605634 | 186 | 0.673477 | false |
tanbro/exosip2ctypes | src/exosip2ctypes/message.py | 1 | 10258 | # -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from ctypes import POINTER, byref, string_at, create_string_buffer, c_void_p, c_char_p, c_int, c_size_t
from ._c import lib, osip_parser, osip_content_type, osip_from, osip_header, osip_content_length, osip_body
from .error import raise_if_osip_error
from .utils import to_str, to_bytes
__all__ = ['OsipMessage', 'ExosipMessage']
class OsipMessage(object):
def __init__(self, ptr):
"""class for osip2 message API
:param ctypes.c_void_p ptr: Pointer to the `osip_message_t` structure in C library
"""
if not ptr:
raise RuntimeError('Null pointer.')
self._ptr = ptr
def __str__(self):
"""Get a string representation of a osip_message_t element.
:rtype: str
"""
dest = c_char_p()
length = c_size_t()
error_code = osip_parser.FuncMessageToStr.c_func(
self._ptr, byref(dest), byref(length))
raise_if_osip_error(error_code)
if not dest:
return str(None)
result = string_at(dest, length.value)
result = to_str(result)
lib.free(dest)
return result
@property
def ptr(self):
"""Pointer to the `osip_message_t` C Structure
:rtype: ctypes.c_void_p
"""
return self._ptr
@property
def call_id(self):
"""Call-id header.
:rtype: str
"""
ret = osip_parser.FuncMessageGetCallId.c_func(self._ptr)
result = ret.contents.number
return to_str(result)
@call_id.setter
def call_id(self, val):
buf = create_string_buffer(to_bytes(val))
error_code = osip_parser.FuncMessageSetCallId.c_func(self._ptr, buf)
raise_if_osip_error(error_code)
@property
def content_type(self):
"""Content Type string of the SIP message
:rtype: str
"""
head_ptr = osip_parser.FuncMessageGetContentType.c_func(self._ptr)
if not head_ptr:
return None
dest = c_char_p()
err_code = osip_content_type.FuncContentTypeToStr.c_func(
head_ptr, byref(dest))
raise_if_osip_error(err_code)
if not dest:
return None
result = to_str(dest.value)
lib.free(dest)
return result.strip()
@content_type.setter
def content_type(self, val):
buf = create_string_buffer(to_bytes(val))
err_code = osip_parser.FuncMessageSetContentType.c_func(self._ptr, buf)
raise_if_osip_error(err_code)
@property
def content_length(self):
"""Content-length header.
:rtype: int
"""
pdest = osip_parser.FuncMessageGetContentLength.c_func(self._ptr)
if isinstance(pdest, (type(None), c_void_p)):
return None
return int(pdest.contents.value)
@content_length.setter
def content_length(self, val):
val = int(val)
if val < 0:
raise ValueError(
                'Content-Length header value must be greater than or equal to 0.')
buf = create_string_buffer(to_bytes(str(val)))
error_code = osip_parser.FuncMessageSetContentLength.c_func(
self._ptr, buf)
raise_if_osip_error(error_code)
@property
def from_(self):
"""From header
:rtype: str
"""
ptr = osip_parser.FuncMessageGetFrom.c_func(self._ptr)
dest = c_char_p()
error_code = osip_from.FuncFromToStr.c_func(ptr, byref(dest))
raise_if_osip_error(error_code)
if not dest:
return None
result = to_str(dest.value)
lib.free(dest)
return result.strip()
@from_.setter
def from_(self, val):
buf = create_string_buffer(to_bytes(val))
error_code = osip_parser.FuncMessageSetFrom.c_func(self._ptr, buf)
raise_if_osip_error(error_code)
@property
def to(self):
"""To header.
:rtype: str
"""
ptr = osip_parser.FuncMessageGetTo.c_func(self._ptr)
dest = c_char_p()
error_code = osip_from.FuncFromToStr.c_func(ptr, byref(dest))
raise_if_osip_error(error_code)
if not dest:
return None
result = to_str(dest.value)
lib.free(dest)
return result.strip()
@to.setter
def to(self, val):
buf = create_string_buffer(to_bytes(val))
error_code = osip_parser.FuncMessageSetTo.c_func(self._ptr, buf)
raise_if_osip_error(error_code)
@property
def contacts(self):
"""Get Contact header list.
:rtype: list
"""
result = []
pos = 0
while True:
dest = c_void_p()
found_pos = osip_parser.FuncMessageGetContact.c_func(
self._ptr, c_int(pos), byref(dest))
if int(found_pos) < 0:
break
pos = int(found_pos) + 1
pch_contact = c_char_p()
error_code = osip_from.FuncFromToStr.c_func(
dest, byref(pch_contact))
raise_if_osip_error(error_code)
contact = to_str(pch_contact.value)
lib.free(pch_contact)
result.append(contact.strip())
return result
def add_contact(self, val):
"""Set the Contact header.
:param str val: The string describing the element.
        .. attention:: This method will **ADD** a new `Contact` header
"""
buf = create_string_buffer(to_bytes(val))
error_code = osip_parser.FuncMessageSetContact.c_func(self._ptr, buf)
raise_if_osip_error(error_code)
@property
def allows(self):
"""Get Allow header list.
:rtype: list
"""
result = []
pos = 0
while True:
dest = POINTER(osip_content_length.Allow)()
found_pos = osip_parser.FuncMessageGetAllow.c_func(
self._ptr, c_int(pos), byref(dest))
if int(found_pos) < 0:
break
pos = int(found_pos) + 1
result.append(to_str(dest.contents.value))
return result
def add_allow(self, val):
"""Set the Allow header.
:param str val: The string describing the element.
        .. attention:: This method will **ADD** a new `ALLOW` header
"""
buf = create_string_buffer(to_bytes(val))
error_code = osip_parser.FuncMessageSetAllow.c_func(self._ptr, buf)
raise_if_osip_error(error_code)
def get_headers(self, name):
"""Find "unknown" header's list. (not defined in oSIP)
:param str name: The name of the header to find.
:return: Header's value string list.
:rtype: list
"""
result = []
pc_name = create_string_buffer(to_bytes(name))
pos = 0
while True:
p_header = POINTER(osip_header.Header)()
found_pos = osip_parser.FuncMessageHeaderGetByName.c_func(
self._ptr,
pc_name,
c_int(pos),
byref(p_header)
)
if int(found_pos) < 0:
break
pos = int(found_pos) + 1
val = p_header.contents.hvalue
result.append(to_str(val))
return result
def add_header(self, name, value):
"""Allocate and Add an "unknown" header (not defined in oSIP).
:param str name: The token name.
:param str value: The token value.
        .. attention:: This method will **ADD** a new header
"""
pc_name = create_string_buffer(to_bytes(name))
pc_value = create_string_buffer(to_bytes(value))
error_code = osip_parser.FuncMessageSetHeader.c_func(
self._ptr,
pc_name,
pc_value
)
raise_if_osip_error(error_code)
@property
def bodies(self):
"""Get body header list.
:rtype: list
"""
result = []
pos = 0
while True:
p_body = c_void_p()
found_pos = osip_parser.FuncMessageGetBody.c_func(
self._ptr, c_int(pos), byref(p_body))
if int(found_pos) < 0:
break
pos = int(found_pos) + 1
dest = c_char_p()
length = c_size_t()
ret = osip_body.FuncBodyToStr.c_func(
p_body, byref(dest), byref(length))
raise_if_osip_error(ret)
val = string_at(dest, length.value)
val = to_str(val)
lib.free(dest)
result.append(val)
return result
def add_body(self, val):
"""Fill the body of message.
:param str val: Body string.
        .. attention:: This method will **ADD** a new body
"""
buf = create_string_buffer(to_bytes(val))
err_code = osip_parser.FuncMessageSetBody.c_func(
self._ptr, buf, len(buf))
raise_if_osip_error(err_code)
class ExosipMessage(OsipMessage):
def __init__(self, ptr, context):
"""class for eXosip2 message API
:param ctypes.c_void_p ptr: Pointer to the `osip_message_t` structure in C library
:param Context context: eXosip context
        .. danger:: Do **NOT** construct or destruct the class yourself unless you know what you are doing.
.. attention::
In eXosip2, messages are managed inside the library,
so we should **NOT** free :class:`OsipMessage` object manually.
"""
if not context:
raise RuntimeError('No context.')
self._context = context
super(ExosipMessage, self).__init__(ptr)
def send(self):
self._context.send_message(self)
@property
def context(self):
"""eXosip context of the message
:rtype: Context
"""
return self._context
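# --- Usage sketch (illustration only) ---------------------------------------
# Typical read/modify access on a message obtained inside an eXosip event
# callback. ``evt.request_ptr`` and ``context`` stand for a hypothetical event
# pointer and an existing Context instance; neither name is defined in this
# module, so the lines below stay commented out.
#
#   msg = ExosipMessage(evt.request_ptr, context)
#   print(msg.from_, msg.to, msg.call_id)
#   msg.add_header('X-Demo', 'value')   # adds an "unknown" header
#   msg.add_body('hello')               # appends a body part
#   msg.send()                          # delegates to context.send_message(msg)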
| gpl-3.0 | -4,613,109,619,077,963,000 | 29.084848 | 107 | 0.538214 | false |
scikit-learn/enhancement_proposals | slep006/cases_opt1.py | 1 | 1986 | from defs import (accuracy_score, GroupKFold, make_scorer, SelectKBest,
LogisticRegressionCV, cross_validate, make_pipeline, X, y,
my_groups, my_weights, my_other_weights)
# %%
# Case A: weighted scoring and fitting
lr = LogisticRegressionCV(
cv=GroupKFold(),
scoring='accuracy',
)
cross_validate(lr, X, y, cv=GroupKFold(),
props={'sample_weight': my_weights, 'groups': my_groups},
scoring='accuracy')
# Error handling: if props={'sample_eight': my_weights, ...} was passed
# instead, the estimator would fit and score without weight, silently failing.
# %%
# Case B: weighted scoring and unweighted fitting
class MyLogisticRegressionCV(LogisticRegressionCV):
def fit(self, X, y, props=None):
props = props.copy()
props.pop('sample_weight', None)
super().fit(X, y, props=props)
# %%
# Case C: unweighted feature selection
# Currently feature selection does not handle sample_weight, and as long as
# that remains the case, it will simply ignore the prop passed to it. Hence:
lr = LogisticRegressionCV(
cv=GroupKFold(),
scoring='accuracy',
)
sel = SelectKBest()
pipe = make_pipeline(sel, lr)
cross_validate(pipe, X, y, cv=GroupKFold(),
props={'sample_weight': my_weights, 'groups': my_groups},
scoring='accuracy')
# %%
# Case D: different scoring and fitting weights
weighted_acc = make_scorer(accuracy_score)
def specially_weighted_acc(est, X, y, props):
props = props.copy()
props['sample_weight'] = 'scoring_weight'
return weighted_acc(est, X, y, props)
lr = LogisticRegressionCV(
cv=GroupKFold(),
scoring=specially_weighted_acc,
)
cross_validate(lr, X, y, cv=GroupKFold(),
props={
'scoring_weight': my_weights,
'sample_weight': my_other_weights,
'groups': my_groups,
},
scoring=specially_weighted_acc)
| bsd-3-clause | 4,641,983,158,123,055,000 | 28.205882 | 78 | 0.632931 | false |
sejros/The-Nature-of-Python-Examples | chp03_oscillation/1 Pendulum.py | 1 | 3277 | # coding=utf-8
from math import sin, cos, pi, atan2, sqrt
from tkinter import *
import numpy as np
WIDTH = 800
HEIGHT = 600
mousepos = np.array([WIDTH / 2, HEIGHT / 2])
is_mouse_down = False
is_rmouse_down = False
class Pendulum:
def __init__(self, anchor_x, anchor_y, lenght, radius):
# self.position = np.array([WIDTH / 2.0, HEIGHT / 2.0])
self.anchor = np.array([anchor_x, anchor_y])
self.lenght = lenght
self.raduis = radius
self.angle = pi / 4
self.ang_vel = 0.0
self.ang_acc = 0.0
self.bob = None
self.arm = None
self.mass = 1.0
self.dragging = False
self.bob_pos = self.anchor + np.array([self.lenght, 0])
# def apply(self, force):
# self.acceleration += force / self.mass
def run(self, canvas):
self.update()
if self.dragging:
self.drag()
self.draw(canvas)
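    # A note added for clarity: update() below integrates the pendulum ODE
    #     theta'' = -(g / L) * sin(theta)
    # with a semi-implicit Euler step, where the 0.5 factor plays the role of
    # the gravitational constant g (a tuning value) and L is the arm length in
    # pixels. Scaling the angular velocity by 0.999 each frame acts as light
    # damping.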
def update(self):
self.ang_acc = - 0.5 * sin(self.angle) / self.lenght
self.ang_vel += self.ang_acc
self.ang_vel *= 0.999
self.angle += self.ang_vel
pos = np.array([sin(self.angle), cos(self.angle)])
self.bob_pos = self.anchor + pos * self.lenght
def draw(self, canvas):
canvas.delete(self.arm)
color = "grey"
if self.dragging:
color = "red"
self.arm = canvas.create_line(self.bob_pos[0], self.bob_pos[1],
self.anchor[0], self.anchor[1],
width=self.raduis / 8, fill="grey")
canvas.delete(self.bob)
self.bob = canvas.create_oval(self.bob_pos[0] - self.raduis / 2,
self.bob_pos[1] - self.raduis / 2,
self.bob_pos[0] + self.raduis / 2,
self.bob_pos[1] + self.raduis / 2,
fill=color)
def clicked(self, pos):
dist = self.bob_pos - pos
dist = sqrt(sum(dist * dist))
print(dist)
if dist <= self.raduis:
self.dragging = True
def stop_drag(self):
self.ang_vel = 0.0
self.dragging = False
def drag(self):
diff = mousepos - self.anchor
self.angle = atan2(diff[0], diff[1])
def mousemove(event):
global mousepos
mousepos = np.array([event.x, event.y])
def mousedown(event):
global is_mouse_down
is_mouse_down = True
pen.clicked(np.array([event.x, event.y]))
def mouseup(event):
global is_mouse_down
is_mouse_down = False
pen.stop_drag()
def rmousedown(event):
global is_rmouse_down
is_rmouse_down = True
def rmouseup(event):
global is_rmouse_down
is_rmouse_down = False
def main():
pen.run(c)
gravity = np.array([0, 0.2]) * pen.mass
# pen.apply(gravity)
root.after(25, main) # 40 fps
root = Tk()
root.title("Tkinter demo")
c = Canvas(root, width=WIDTH, height=HEIGHT, background="#ffffff")
c.pack()
c.bind('<Motion>', mousemove)
c.bind('<Button-1>', mousedown)
c.bind('<ButtonRelease-1>', mouseup)
c.bind('<Button-2>', rmousedown)
c.bind('<ButtonRelease-3>', rmouseup)
pen = Pendulum(WIDTH / 2, 100, 300, 40)
main()
root.mainloop()
| mit | -2,486,442,822,304,618,500 | 22.407143 | 73 | 0.548062 | false |
jinzekid/codehub | python/py3_6venv/spider_mysqlpjt/spider_mysqlpjt/pipelines.py | 1 | 1037 | # -*- coding: utf-8 -*-
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://doc.scrapy.org/en/latest/topics/item-pipeline.html
import pymysql
class SpiderMysqlpjtPipeline(object):
def __init__(self):
print(">>>init_spider")
        # Connect to the target database as soon as the pipeline is created
self.conn = pymysql.connect(host='127.0.0.1', user='root', passwd='',
db='mypydb')
def process_item(self, item, spider):
name = item['name'][0]
keywd = item['keywd'][0]
        # Build the corresponding SQL statement
sql = "insert into mytb(title, keywd) " \
"values('"+name+"','"+keywd+"')"
print("=====执行sql语句=====")
print(sql)
        # Execute the SQL statement through the connection's query() method
affect_rows = self.conn.query(sql)
print("影响 "+affect_rows+' 行')
return item
def close_spider(self, spider):
print(">>>close_spider")
self.conn.close()
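# --- Note added for illustration --------------------------------------------
# The SQL above is built by string concatenation, which breaks on quotes in
# the scraped text and is open to SQL injection. A hedged alternative sketch
# using pymysql placeholders (table and column names assumed unchanged):
#
#     with self.conn.cursor() as cursor:
#         cursor.execute(
#             "insert into mytb(title, keywd) values(%s, %s)",
#             (name, keywd))
#     self.conn.commit()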
| gpl-3.0 | -2,523,029,409,844,316,700 | 26.514286 | 77 | 0.551402 | false |
zstackio/zstack-woodpecker | integrationtest/vm/multihosts/volumes/paths/path1.py | 1 | 1625 | import zstackwoodpecker.test_state as ts_header
TestAction = ts_header.TestAction
def path():
return dict(initial_formation="template1",
path_list=[[TestAction.stop_vm, "vm1"],
[TestAction.start_vm, "vm1"],
[TestAction.delete_volume, "vm1-volume1"],
[TestAction.create_image_from_volume, "vm1", "image1"],
[TestAction.resize_volume, "vm1", 5*1024*1024],
[TestAction.detach_volume, "vm1-volume2"],
[TestAction.stop_vm, "vm1"],
[TestAction.reinit_vm, "vm1"],
[TestAction.start_vm, "vm1"],
[TestAction.resize_volume, "vm1", 5*1024*1024],
[TestAction.create_volume_snapshot, "vm1-root","snapshot1-1"],
[TestAction.create_volume_snapshot, "vm1-root","snapshot1-2"],
[TestAction.create_volume_snapshot, "vm1-root","snapshot1-3"],
[TestAction.create_volume_snapshot, "vm1-root","snapshot1-4"],
[TestAction.batch_delete_volume_snapshot, ["snapshot1-2","snapshot1-4"]],
[TestAction.detach_volume, "vm1-volume3"],
[TestAction.stop_vm, "vm1"],
[TestAction.change_vm_image, "vm1"],
[TestAction.start_vm, "vm1"],
[TestAction.create_data_vol_template_from_volume, "vm1-volume2", "image2"],
[TestAction.reboot_vm, "vm1"]])
| apache-2.0 | 8,259,505,550,562,352,000 | 61.5 | 100 | 0.497231 | false |
rjdp/Easynginedemoplugin | ee/core/logging.py | 1 | 1064 | """EasyEngine log module"""
class Log:
"""
Logs messages with colors for different messages
according to functions
"""
HEADER = '\033[95m'
OKBLUE = '\033[94m'
OKGREEN = '\033[92m'
WARNING = '\033[93m'
FAIL = '\033[91m'
ENDC = '\033[0m'
BOLD = '\033[1m'
UNDERLINE = '\033[4m'
def error(self, msg):
"""
Logs error into log file
"""
print(Log.FAIL + msg + Log.ENDC)
self.app.log.error(Log.FAIL + msg + Log.ENDC)
self.app.close(1)
def info(self, msg, end='\n', log=True):
"""
Logs info messages into log file
"""
print(Log.OKBLUE + msg + Log.ENDC, end=end)
if log:
self.app.log.info(Log.OKBLUE + msg + Log.ENDC)
def warn(self, msg):
"""
Logs warning into log file
"""
self.app.log.warn(Log.BOLD + msg + Log.ENDC)
def debug(self, msg):
"""
Logs debug messages into log file
"""
self.app.log.debug(Log.HEADER + msg + Log.ENDC)
| mit | -3,833,725,850,991,164,000 | 22.644444 | 58 | 0.514098 | false |
jblupus/PyLoyaltyProject | old/project.backup/old/alters_percentage.py | 1 | 2460 | import numpy as np
from old.project import FILE_NAMES
from old.project import ROOT_PATH
from old.project import exists
from old.project import makedirs
def percetage_executer():
for file_name in FILE_NAMES:
generate_intervals(file_name, 5)
print file_name
break
def generate_intervals(filename, _n):
inpath = ROOT_PATH + "/Output/AltersPerInterval/" + filename + ".csv"
outdir = ROOT_PATH + "/Output/AltersPercentage/"
try:
if not exists(outdir):
makedirs(outdir)
except Exception as e:
return
# outpath = outdir + filename + ".csv"
with open(inpath, 'r') as infile: # , open(outpath, 'w') as outfile:
counters = np.arange(35).reshape(5, 7) * 0.0
counter = 0.0
for line in infile.readlines():
            counter += 1.0
            split_values = line.split(' ')
np_values = np.array(split_values)
total = np_values[1].astype(np.float)
np_values = np_values[2:].astype(np.float)
last = np_values[np_values.shape[0] - 1]
np_values = np_values[:np_values.shape[0] - 1] - np_values[1:]
np_values = np.append(np_values, [last])
print np_values
_c = np.array([])
for i in xrange(1, 7):
_l = (i - 1) * _n
if i < 6:
_u = i * _n
_c = np.append(_c, [np.sum(np_values[_l:_u])])
else:
_c = np.append(_c, [np.sum(np_values[_l:])])
# break
# _c = np.ceil(_c).astype(np.int)
# _c = _c - 1
# _c[_c > 5] = 5
_c = _c / total
_c = np.ceil(_c * 10)
_c = _c.astype(np.int)
# break
# np_values = np_values/total
# np_values = np.arange(26).reshape(1,26)
if _c[0] > 0:
counters[_c[0] - 1] = counters[_c[0] - 1] + 1
if _c[1] > 0:
counters[_c[1] - 1] = counters[_c[1] - 1] + 1
if _c[2] > 0:
counters[_c[2] - 1] = counters[_c[2] - 1] + 1
if _c[3] > 0:
counters[_c[3] - 1] = counters[_c[3] - 1] + 1
if _c[4] > 0:
counters[_c[4] - 1] = counters[_c[4] - 1] + 1
print (counters * 100) / counter
# break
#
# def real_values(np_array, i):
#
| bsd-2-clause | -165,076,046,223,359,360 | 32.243243 | 74 | 0.452846 | false |
Esri/arcgis-runtime-samples-dotnet | tools/metadata_tools/sample_metadata.py | 1 | 27810 | import json
import os
from distutils.dir_util import copy_tree
from shutil import copyfile, rmtree
import re
import requests
from datetime import datetime
from csproj_utils import *
from file_utils import *
class sample_metadata:
'''
This class represents a sample.
Use populate_from_* to populate from content.
    Use try_replace_with_common_readme to read external readme content and replace the sample's content if the common content is 'better'.
Use flush_to_* to write out the sample to disk.
Use emit_standalone_solution to write out the sample as a standalone Visual Studio solution.
'''
arcgis_runtime_latest = "100.11.0" # store latest Runtime version, for use with packages
local_server_latest = "100.9.0"
ar_toolkit_latest = "100.11.0"
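    # Illustrative usage sketch (not part of the original file); the paths are
    # assumptions. A typical flow turns a platform readme into metadata JSON:
    #
    #   sample = sample_metadata()
    #   sample.populate_from_readme("WPF", "src/WPF/DisplayMap/readme.md")
    #   sample.populate_snippets_from_folder("WPF", "src/WPF/DisplayMap/readme.md")
    #   sample.flush_to_json("metadata/DisplayMap.json")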
def reset_props(self):
self.formal_name = ""
self.friendly_name = ""
self.category = ""
self.nuget_packages = {}
self.keywords = []
self.relevant_api = []
self.since = ""
self.images = []
self.source_files = []
self.redirect_from = []
self.offline_data = []
self.description = ""
self.how_to_use = []
self.how_it_works = ""
self.use_case = ""
self.data_statement = ""
self.Additional_info = ""
self.ignore = False
def __init__(self):
self.reset_props()
def populate_from_json(self, path_to_json):
# formal name is the name of the folder containing the json
pathparts = sample_metadata.splitall(path_to_json)
self.formal_name = pathparts[-2]
# open json file
with open(path_to_json, 'r') as json_file:
data = json.load(json_file)
keys = data.keys()
for key in ["category", "keywords", "images", "redirect_from", "description", "ignore", "nuget_packages"]:
if key in keys:
setattr(self, key, data[key])
if "title" in keys:
self.friendly_name = data["title"]
if "relevant_apis" in keys:
self.relevant_api = data["relevant_apis"]
if "snippets" in keys:
self.source_files = data["snippets"]
# manually correct nuget package if needed
self.nuget_packages["Esri.ArcGISRuntime"] = self.arcgis_runtime_latest
if self.category == "Hydrography":
self.nuget_packages["Esri.ArcGISRuntime.Hydrography"] = self.arcgis_runtime_latest
if self.category in ["Local Server", "LocalServer"]:
self.nuget_packages["Esri.ArcGISRuntime.LocalServices"] = self.local_server_latest
if self.category in ["Augmented reality", "Augmented Reality"]:
self.nuget_packages["Esri.ArcGISRuntime.ARToolkit"] = self.ar_toolkit_latest
return
def resync_nuget_packages(self, platform):
'''
Updates this sample's nuget packages.
'''
# add base package
self.nuget_packages["Esri.ArcGISRuntime"] = self.arcgis_runtime_latest
# add platform-specific package
if platform == "Forms":
self.nuget_packages["Esri.ArcGISRuntime.Xamarin.Forms"] = self.arcgis_runtime_latest
self.nuget_packages["Esri.ArcGISRuntime.UWP"] = self.arcgis_runtime_latest
self.nuget_packages["Esri.ArcGISRuntime.Xamarin.iOS"] = self.arcgis_runtime_latest
self.nuget_packages["Esri.ArcGISRuntime.Xamarin.Android"] = self.arcgis_runtime_latest
elif platform == "iOS":
self.nuget_packages["Esri.ArcGISRuntime.Xamarin.iOS"] = self.arcgis_runtime_latest
elif platform == "Android":
self.nuget_packages["Esri.ArcGISRuntime.Xamarin.Android"] = self.arcgis_runtime_latest
elif platform == "UWP":
self.nuget_packages["Esri.ArcGISRuntime.UWP"] = self.arcgis_runtime_latest
elif platform == "WPF":
self.nuget_packages["Esri.ArcGISRuntime.WPF"] = self.arcgis_runtime_latest
return
def populate_from_readme(self, platform, path_to_readme):
# formal name is the name of the folder containing the json
pathparts = sample_metadata.splitall(path_to_readme)
self.formal_name = pathparts[-2]
# populate redirect_from; it is based on a pattern
real_platform = platform
if real_platform in ["XFI", "XFA", "XFU"]:
real_platform = "Forms"
redirect_string = f"/net/latest/{real_platform.lower()}/sample-code/{self.formal_name.lower()}.htm"
self.redirect_from.append(redirect_string)
# category is the name of the folder containing the sample folder
self.category = pathparts[-3]
# if category is 'Hydrography', add the hydrography package
if self.category == "Hydrography":
self.nuget_packages["Esri.ArcGISRuntime.Hydrography"] = self.arcgis_runtime_latest
elif self.category == "LocalServer" or self.category == "Local Server":
self.nuget_packages["Esri.ArcGISRuntime.LocalServices"] = self.local_server_latest
elif self.category in ["Augmented reality", "Augmented Reality"]:
self.nuget_packages["Esri.ArcGISRuntime.ARToolkit"] = self.ar_toolkit_latest
# add the ArcGIS Runtime package always
self.nuget_packages["Esri.ArcGISRuntime"] = self.arcgis_runtime_latest
# read the readme content into a string
readme_contents = ""
try:
readme_file = open(path_to_readme, "r")
readme_contents = readme_file.read()
readme_file.close()
except Exception as err:
# not a sample, skip
print(f"Error populating sample from readme - {path_to_readme} - {err}")
return
# break into sections
readme_parts = readme_contents.split("\n\n") # a blank line is two newlines
# extract human-readable name
title_line = readme_parts[0].strip()
if not title_line.startswith("#"):
title_line = title_line.split("#")[1]
self.friendly_name = title_line.strip("#").strip()
if len(readme_parts) < 3:
# can't handle this, return early
return
if len(readme_parts) < 5: # old style readme
# Take just the first description paragraph
self.description = readme_parts[1]
self.images.append(sample_metadata.extract_image_from_image_string(readme_parts[2]))
return
else:
self.description = readme_parts[1]
self.images.append(sample_metadata.extract_image_from_image_string(readme_parts[2]))
# Read through and add the rest of the sections
examined_readme_part_index = 2
current_heading = ""
para_part_accumulator = []
while examined_readme_part_index < len(readme_parts):
current_part = readme_parts[examined_readme_part_index]
examined_readme_part_index += 1
if not current_part.startswith("#"):
para_part_accumulator.append(current_part)
continue
else:
# process existing heading, skipping if nothing to add
if len(para_part_accumulator) != 0:
self.populate_heading(current_heading, para_part_accumulator)
# get started with new heading
current_heading = current_part
para_part_accumulator = []
# do the last segment
if current_heading != "" and len(para_part_accumulator) > 0:
self.populate_heading(current_heading, para_part_accumulator)
return
def try_replace_with_common_readme(self, platform, path_to_common_dir, path_to_net_readme):
'''
Will read the common readme and replace the sample's readme if found wanting
path_to_common_dir is the path to the samples design folder
Precondition: populate_from_readme already called
'''
# skip if the existing readme is good enough; it is assumed that any sample with tags already has a good readme
if len(self.keywords) > 0:
return
# determine if matching readme exists; if not, return early
match_name = None
dirs = os.listdir(path_to_common_dir)
for dir in dirs:
if dir.lower() == self.formal_name.lower():
match_name = dir
if match_name == None:
return
# create a new sample_metadata, call populate from readme on the design readme
readme_path = os.path.join(path_to_common_dir, match_name, "readme.md")
if not os.path.exists(readme_path):
return
compare_sample = sample_metadata()
compare_sample.populate_from_readme(platform, readme_path)
# fix the image content
compare_sample.images = [f"{compare_sample.formal_name}.jpg"]
# fix the category
compare_sample.category = self.category
# call flush_to_readme on the newly created sample object
compare_sample.flush_to_readme(path_to_net_readme)
# re-read to pick up any new info
self.reset_props()
self.populate_from_readme(platform, path_to_net_readme)
def flush_to_readme(self, path_to_readme):
template_text = f"# {self.friendly_name}\n\n"
# add the description
if self.description != "":
template_text += f"{self.description}\n\n"
# add the image
if len(self.images) > 0:
template_text += f"\n\n"
# add "Use case" - use_case
if self.use_case != "":
template_text += "## Use case\n\n"
template_text += f"{self.use_case}\n\n"
# add 'How to use the sample' - how_to_use
if self.how_to_use != "" and len(self.how_to_use) > 0:
template_text += "## How to use the sample\n\n"
template_text += f"{self.how_to_use}\n\n"
# add 'How it works' - how_it_works
if len(self.how_it_works) > 0:
template_text += "## How it works\n\n"
stepIndex = 1
for step in self.how_it_works:
if not step.startswith("***"): # numbered steps
template_text += f"{stepIndex}. {step}\n"
stepIndex += 1
else: # sub-bullets
template_text += f" * {step.strip('***')}\n"
template_text += "\n"
# add 'Relevant API' - relevant_api
if len(self.relevant_api) > 0:
template_text += "## Relevant API\n\n"
for api in self.relevant_api:
template_text += f"* {api}\n"
template_text += "\n"
# add 'Offline data' - offline_data
if len(self.offline_data) > 0:
template_text += "## Offline data\n\n"
template_text += "This sample downloads the following items from ArcGIS Online automatically:\n\n"
for item in self.offline_data:
# get the item's name from AGOL
request_url = f"https://www.arcgis.com/sharing/rest/content/items/{item}?f=json"
agol_result = requests.get(url=request_url)
data = agol_result.json()
name = data["name"]
# write out line
template_text += f"* [{name}](https://www.arcgis.com/home/item.html?id={item}) - {data['snippet']}\n"
template_text += "\n"
# add 'About the data' - data_statement
if self.data_statement != "":
template_text += "## About the data\n\n"
template_text += f"{self.data_statement}\n\n"
# add 'Additional information' - additional_info
if self.Additional_info != "":
template_text += "## Additional information\n\n"
template_text += f"{self.Additional_info}\n\n"
# add 'Tags' - keywords
template_text += "## Tags\n\n"
template_text += ", ".join(self.keywords)
template_text += "\n"
# write the output
with open(path_to_readme, 'w+') as file:
file.write(template_text)
return
def flush_to_json(self, path_to_json):
data = {}
data["title"] = self.friendly_name
data["category"] = self.category
data["keywords"] = self.keywords
data["relevant_apis"] = self.relevant_api
data["images"] = self.images
data["snippets"] = self.source_files
data["redirect_from"] = self.redirect_from
data["description"] = self.description
data["ignore"] = self.ignore
data["offline_data"] = self.offline_data
data["nuget_packages"] = self.nuget_packages
data["formal_name"] = self.formal_name
with open(path_to_json, 'w+') as json_file:
json.dump(data, json_file, indent=4, sort_keys=True)
return
def emit_standalone_solution(self, platform, sample_dir, output_root):
'''
Produces a standalone sample solution for the given sample
platform: one of: Android, iOS, UWP, WPF, XFA, XFI, XFU
output_root: output folder; should not be specific to the platform
sample_dir: path to the folder containing the sample's code
'''
# create output dir
output_dir = os.path.join(output_root, platform, self.formal_name)
if os.path.exists(output_dir):
rmtree(output_dir)
os.makedirs(output_dir)
# copy template files over - find files in template
script_dir = os.path.split(os.path.realpath(__file__))[0]
template_dir = os.path.join(script_dir, "templates", "solutions", platform)
copy_tree(template_dir, output_dir)
# copy sample files over
copy_tree(sample_dir, output_dir)
# copy any out-of-dir files over (e.g. Android layouts, download manager)
if len(self.source_files) > 0:
for file in self.source_files:
if ".." in file:
source_path = os.path.join(sample_dir, file)
dest_path = os.path.join(output_dir, "Resources", "layout", os.path.split(file)[1])
if 'Attrs.xml' in file: # todo: improve this
dest_path = os.path.join(output_dir, "Resources", "values", os.path.split(file)[1])
elif file.endswith('.cs'):
dest_path = os.path.join(output_dir, "Controls", os.path.split(file)[1])
copyfile(source_path, dest_path)
# Remove nuget packages for forms as needed
if platform == "XFA":
del self.nuget_packages["Esri.ArcGISRuntime.Xamarin.iOS"]
del self.nuget_packages["Esri.ArcGISRuntime.UWP"]
elif platform == "XFI":
del self.nuget_packages["Esri.ArcGISRuntime.Xamarin.Android"]
del self.nuget_packages["Esri.ArcGISRuntime.UWP"]
elif platform == "XFU":
del self.nuget_packages["Esri.ArcGISRuntime.Xamarin.Android"]
del self.nuget_packages["Esri.ArcGISRuntime.Xamarin.iOS"]
# accumulate list of source, xaml, axml, and resource files
all_source_files = self.source_files
# generate list of replacements
replacements = {}
replacements["$$project$$"] = self.formal_name
replacements[".slntemplate"] = ".sln" # replacement needed to prevent template solutions from appearing in Visual Studio git browser
replacements["$$embedded_resources$$"] = "" # TODO
replacements["$$nuget_packages$$"] = get_csproj_xml_for_nuget_packages(self.nuget_packages)
replacements["$$code_and_xaml$$"] = get_csproj_xml_for_code_files(all_source_files, platform)
replacements["$$axml_files$$"] = get_csproj_xml_for_android_layout(all_source_files)
replacements["$$current_year$$"] = str(datetime.now().year)
replacements["$$friendly_name$$"] = self.friendly_name
# rewrite files in output - replace template fields
sample_metadata.rewrite_files_in_place(output_dir, replacements)
# write out the sample file
self.emit_dot_sample_file(platform, output_dir)
return
def emit_dot_sample_file(self, platform, output_dir):
output_xml = "<ArcGISRuntimeSDKdotNetSample>\n"
# basic metadata
output_xml += f"\t<SampleName>{self.formal_name}</SampleName>\n"
output_xml += f"\t<SampleDescription>{self.description}</SampleDescription>\n"
output_xml += f"\t<ScreenShot>{self.images[0]}</ScreenShot>\n"
# code files, including XAML
output_xml += "\t<CodeFiles>\n"
for source_file in self.source_files:
output_xml += f"\t\t<CodeFile>{source_file}</CodeFile>\n"
output_xml += "\t</CodeFiles>\n"
# xaml files
output_xml += "\t<XAMLParseFiles>\n"
for source_file in self.source_files:
if source_file.endswith(".xaml"):
output_xml += f"\t\t<XAMLParseFile>{source_file}</XAMLParseFile>\n"
output_xml += "\t</XAMLParseFiles>\n"
# exe
if platform == "WPF":
output_xml += "\t<DllExeFile>bin\debug\ArcGISRuntime.exe</DllExeFile>\n"
elif platform == "UWP" or platform == "XFU":
output_xml += "\t<DllExeFile>obj\\x86\Debug\intermediatexaml\ArcGISRuntime.exe</DllExeFile>\n"
elif platform == "Android" or platform == "XFA":
output_xml += "\t<DllExeFile>bin\debug\ArcGISRuntime.dll</DllExeFile>\n"
elif platform == "iOS" or platform == "XFI":
output_xml += "\t<DllExeFile>bin\iPhone\debug\ArcGISRuntime.exe</DllExeFile>\n"
output_xml += "</ArcGISRuntimeSDKdotNetSample>\n"
filename = os.path.join(output_dir, f"{self.formal_name}.sample")
safe_write_contents(filename, output_xml)
def populate_snippets_from_folder(self, platform, path_to_readme):
'''
Take a path to a readme file
Populate the snippets from: any .xaml, .cs files in the directory;
any .axml files referenced from .cs files on android
'''
# populate files in the directory
sample_dir = os.path.split(path_to_readme)[0]
for file in os.listdir(sample_dir):
if os.path.splitext(file)[1] in [".axml", ".xaml", ".cs"]:
self.source_files.append(file)
# populate AXML layouts for Android
if platform == "Android" and os.path.splitext(file)[1] == ".cs":
# search for line matching SetContentView(Resource.Layout.
referencing_file_path = os.path.join(sample_dir, file)
referencing_file_contents = safe_read_contents(referencing_file_path)
for line in referencing_file_contents.split("\n"):
layout_name = None
if "SetContentView(Resource.Layout." in line:
# extract name of layout
layout_name = line.split("Layout.")[1].strip().strip(";").strip(")")
elif "SetContentView(ArcGISRuntime.Resource.Layout." in line:
# extract name of layout
layout_name = line.split("Layout.")[1].strip().strip(";").strip(")")
elif ".Inflate(Resource.Layout." in line:
# extract name of layout
layout_name = line.split("Layout.")[1].strip().strip(";").strip(", null)")
if layout_name is not None:
# determine if the file ending is .xml
if (os.path.exists(os.path.join("..", "..", "src", "Android", "Xamarin.Android", "Resources", "layout", f"{layout_name}.xml"))):
ending = ".xml"
else:
ending = ".axml"
# add the file path to the snippets list
self.source_files.append(f"../../../Resources/layout/{layout_name}{ending}")
# Manually add JoystickSeekBar control on Android for AR only
if platform == "Android" and self.formal_name in ["NavigateAR", "CollectDataAR", "ViewHiddenInfrastructureAR"]:
self.source_files.append("../../../Resources/values/Attrs.xml")
self.source_files.append("../../../Controls/JoystickSeekBar.cs")
self.source_files.sort()
def rewrite_files_in_place(source_dir, replacements_dict):
'''
Takes a dictionary of strings and replacements, applies the replacements to all the files in a directory.
Used when generating sample solutions.
'''
for r, d, f in os.walk(source_dir):
for sample_dir in d:
sample_metadata.rewrite_files_in_place(os.path.join(r, sample_dir), replacements_dict)
for sample_file_name in f:
sample_file_fullpath = os.path.join(r, sample_file_name)
extension = os.path.splitext(sample_file_fullpath)[1]
if extension in [".cs", ".xaml", ".sln", ".slntemplate", ".md", ".csproj", ".shproj", ".axml"]:
# open file, read into string
original_contents = safe_read_contents(sample_file_fullpath)
# make replacements
new_content = original_contents
for tag in replacements_dict.keys():
new_content = new_content.replace(tag, replacements_dict[tag])
# write out new file
if new_content != original_contents:
os.remove(sample_file_fullpath)
safe_write_contents(sample_file_fullpath, new_content)
# rename any files (e.g. $$project$$.sln becomes AccessLoadStatus.sln)
new_name = sample_file_fullpath
for tag in replacements_dict.keys():
if tag in sample_file_fullpath:
new_name = new_name.replace(tag, replacements_dict[tag])
if new_name != sample_file_fullpath:
os.rename(sample_file_fullpath, new_name)
def splitall(path):
## Credits: taken verbatim from https://www.oreilly.com/library/view/python-cookbook/0596001673/ch04s16.html
allparts = []
while 1:
parts = os.path.split(path)
if parts[0] == path: # sentinel for absolute paths
allparts.insert(0, parts[0])
break
elif parts[1] == path: # sentinel for relative paths
allparts.insert(0, parts[1])
break
else:
path = parts[0]
allparts.insert(0, parts[1])
return allparts
def extract_image_from_image_string(image_string) -> str:
'''
Takes an image string in the form of 
or <img src="path_toImage.jpg" width="350"/>
and returns 'path_toImage.jpg'
'''
image_string = image_string.strip()
if image_string.startswith("!"): # Markdown-style string
# find index of last )
close_char_index = image_string.rfind(")")
# find index of last (
open_char_index = image_string.rfind("(")
# return original string if it can't be processed further
if close_char_index == -1 or open_char_index == -1:
return image_string
# read between those chars
substring = image_string[open_char_index + 1:close_char_index]
return substring
else: # HTML-style string
# find index of src="
open_match_string = "src=\""
open_char_index = image_string.rfind(open_match_string)
# return original string if can't be processed further
if open_char_index == -1:
return image_string
# adjust open_char_index to account for search string
open_char_index += len(open_match_string)
# read from after " to next "
close_char_index = image_string.find("\"", open_char_index)
# read between those chars
substring = image_string[open_char_index:close_char_index]
return substring
def populate_heading(self, heading_part, body_parts):
'''
param: heading_part - string starting with ##, e.g. 'Use case'
param: body_parts - list of constituent strings
output: determines which field the content belongs in and adds appropriately
e.g. lists will be turned into python list instead of string
'''
# normalize string for easier decisions
heading_parts = heading_part.strip("#").strip().lower().split()
# use case
if "use" in heading_parts and "case" in heading_parts:
content = "\n\n".join(body_parts)
self.use_case = content
return
# how to use
if "use" in heading_parts and "how" in heading_parts:
content = "\n\n".join(body_parts)
self.how_to_use = content
return
# how it works
if "works" in heading_parts and "how" in heading_parts:
step_strings = []
lines = body_parts[0].split("\n")
cleaned_lines = []
for line in lines:
if not line.strip().startswith("*"): # numbered steps
line_parts = line.split('.')
cleaned_lines.append(".".join(line_parts[1:]).strip())
else: # sub-bullets
cleaned_line = line.strip().strip("*").strip()
cleaned_lines.append(f"***{cleaned_line}")
self.how_it_works = cleaned_lines
return
# relevant API
if "api" in heading_parts or "apis" in heading_parts:
lines = body_parts[0].split("\n")
cleaned_lines = []
for line in lines:
# removes nonsense formatting
cleaned_line = line.strip("*").strip("-").split("-")[0].strip("`").strip().strip("`").replace("::", ".")
cleaned_lines.append(cleaned_line)
self.relevant_api = list(dict.fromkeys(cleaned_lines))
self.relevant_api.sort()
return
# offline data
if "offline" in heading_parts:
content = "\n".join(body_parts)
# extract any guids - these are AGOL items
regex = re.compile('[0-9a-f]{8}[0-9a-f]{4}[1-5][0-9a-f]{3}[89ab][0-9a-f]{3}[0-9a-f]{12}', re.I)
matches = re.findall(regex, content)
self.offline_data = list(dict.fromkeys(matches))
return
# about the data
if "data" in heading_parts and "about" in heading_parts:
content = "\n\n".join(body_parts)
self.data_statement = content
return
# additional info
if "additional" in heading_parts:
content = "\n\n".join(body_parts)
self.Additional_info = content
return
# tags
if "tags" in heading_parts:
tags = body_parts[0].split(",")
cleaned_tags = []
for tag in tags:
cleaned_tags.append(tag.strip())
cleaned_tags.sort()
self.keywords = cleaned_tags
return | apache-2.0 | 7,528,575,467,615,126,000 | 43.355662 | 152 | 0.570514 | false |
mosen/commandment | commandment/runner.py | 1 | 1720 | # -*- coding: utf-8 -*-
"""
Copyright (c) 2015 Jesse Peterson
Licensed under the MIT license. See the included LICENSE.txt file for details.
Attributes:
runner_thread (threading.Timer):
runner_start (int): In seconds, time of first run
runner_time (int): In seconds, time of subsequent runs
Todo:
* Currently we start this thread after the database context and
      configuration has already been set up. We envision a day when this runner runs
standalone and thus we'll need to sort out separate configuration routines etc.
"""
import threading
import datetime
from .models import db, Device
runner_thread = None
runner_start = 5
runner_time = 5
def start_runner():
"""Start the runner thread"""
global runner_thread
start_time = runner_time if runner_thread else runner_start
runner_thread = threading.Timer(start_time, runner, ())
runner_thread.daemon = True
runner_thread.start()
def stop_runner():
"""Stop the runner thread"""
global runner_thread
    if isinstance(runner_thread, threading.Timer):
        runner_thread.cancel()
def runner():
"""Runner thread main procedure
Todo:
* Catch everything so we don't interrupt the thread (and it never reschedules)
* Certificate expiration warnings/emails
"""
# dep_configs = dep_configs_needing_updates()
# if dep_configs:
# print('runner() updating DEP configs', runner_time, datetime.datetime.now())
# update_dep_configs(dep_configs)
#
# dep_profiles = unsubmitted_dep_profiles()
# if dep_profiles:
# print('runner() submitting DEP profiles', runner_time, datetime.datetime.now())
# submit_dep_profiles(dep_profiles)
start_runner()
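# ---------------------------------------------------------------------------
# Illustrative usage sketch (not part of the original module). An application
# entry point would typically wrap its own main loop like this; serve_forever()
# is a placeholder, not a real function in this project.
#
#   from commandment.runner import start_runner, stop_runner
#
#   start_runner()        # first run fires after ~5 seconds, then every 5s
#   try:
#       serve_forever()   # the application's own blocking main loop
#   finally:
#       stop_runner()
# ---------------------------------------------------------------------------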
| mit | 7,778,323,826,943,350,000 | 28.152542 | 89 | 0.684884 | false |
jmckind/yubicrypto | server.py | 1 | 1613 |
import falcon
import logging
from GhettoYubiHSM import GhettoYubiHSM
FORMAT = '%(asctime)-15s %(levelname)s %(funcName)s(%(lineno)d): %(message)s'
logging.basicConfig(format=FORMAT, level=logging.DEBUG)
LOG = logging.getLogger(__name__)
class EncryptResource:
"""
Resource to encrypt plaintext and return the ciphertext.
"""
def on_post(self, req, resp):
"""
Handle HTTP POST requests.
Args:
req: the request object.
resp: the response object.
"""
pt = req.stream.read()
LOG.debug("plaintext: %s" % pt)
ct = self.encrypt(pt)
LOG.debug("ciphertext: %s" % ct)
resp.body = ct
def encrypt(self, pt):
"""
This method will "encrypt" the provided plaintext value.
"""
hsm = GhettoYubiHSM()
return hsm.encrypt(pt)
class DecryptResource:
"""
Resource to decrypt ciphertext and return the plaintext.
"""
def on_post(self, req, resp):
"""
Handle HTTP POST requests.
Args:
req: the request object.
resp: the response object.
"""
ct = req.stream.read()
LOG.debug("ciphertext: %s" % ct)
pt = self.decrypt(ct)
LOG.debug("plaintext: %s" % pt)
resp.body = pt
def decrypt(self, ct):
"""
This method will "decrypt" the provided ciphertext value.
"""
hsm = GhettoYubiHSM()
return hsm.decrypt(ct)
app = falcon.API()
app.add_route('/decrypt', DecryptResource())
app.add_route('/encrypt', EncryptResource())
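# ---------------------------------------------------------------------------
# Illustrative local-development entry point (not part of the original file).
# Any WSGI server can host the Falcon app above; wsgiref is used here only
# because it ships with the standard library, and the host/port are arbitrary.
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    from wsgiref.simple_server import make_server

    httpd = make_server("127.0.0.1", 8000, app)
    LOG.info("serving on http://127.0.0.1:8000 (POST /encrypt and /decrypt)")
    httpd.serve_forever()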
| mit | -4,547,307,971,003,035,600 | 22.376812 | 77 | 0.571606 | false |
RyanBalfanz/django-smsish | smsish/sms/backends/twilio.py | 1 | 1779 | from django.conf import settings
from django.core.mail.backends.base import BaseEmailBackend
from twilio.rest import TwilioRestClient
class SMSBackend(BaseEmailBackend):
def __init__(self, fail_silently=False, **kwargs):
super().__init__(fail_silently=fail_silently)
self.connection = None
def open(self):
"""
Ensures we have a connection to the SMS gateway. Returns whether or not a new connection was required (True or False).
"""
if self.connection:
# Nothing to do if the connection is already open.
return False
self.connection = self._get_twilio_client()
return True
    def close(self):
        """Closes the connection to the SMS gateway."""
        # Reset to None (rather than deleting the attribute) so open() can be
        # called again on this backend instance without an AttributeError.
        self.connection = None
def send_messages(self, email_messages):
"""
Sends one or more SMSMessage objects and returns the number of text messages sent.
"""
if not email_messages:
return
new_conn_created = self.open()
if not self.connection:
# We failed silently on open().
# Trying to send would be pointless.
return
num_sent = 0
for message in email_messages:
sent = self._send(message)
if sent:
num_sent += 1
if new_conn_created:
self.close()
return num_sent
def _send(self, email_message):
"""A helper method that does the actual sending."""
if not email_message.recipients():
return False
from_email = email_message.from_email
recipients = email_message.recipients()
try:
self.connection.messages.create(
to=recipients,
from_=from_email,
body=email_message.body
)
except Exception:
if not self.fail_silently:
raise
return False
return True
def _get_twilio_client(self):
account = settings.TWILIO_ACCOUNT_SID
token = settings.TWILIO_AUTH_TOKEN
client = TwilioRestClient(account, token)
return client
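# ---------------------------------------------------------------------------
# Illustrative usage sketch (not part of the original file). The backend is
# normally selected through Django settings and driven via django-smsish
# message objects; the minimal message class, credentials and phone numbers
# below are assumptions made only for this sketch.
#
#   # settings.py
#   TWILIO_ACCOUNT_SID = "ACxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
#   TWILIO_AUTH_TOKEN = "your-auth-token"
#
#   # application code
#   class DemoSMS:
#       from_email = "+15005550006"       # Twilio test "from" number
#       body = "Appointment reminder"
#       def recipients(self):
#           return ["+15551234567"]
#
#   backend = SMSBackend(fail_silently=False)
#   backend.send_messages([DemoSMS()])    # -> number of messages sent
# ---------------------------------------------------------------------------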
| mit | 7,131,390,264,221,991,000 | 24.782609 | 120 | 0.708263 | false |
orbnauticus/Pique | pique/network.py | 1 | 4328 | #!/usr/bin/env python
#
# Copyright (c) 2010, Ryan Marquardt
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. Neither the name of the project nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from common import *
from player import Error as PlayerError
import bgthread
import collections
import socket
import traceback
NetFormat = str
class ConnectionThread(bgthread.BgThread, PObject):
def main(self, commandmap, sock, address):
self.sock = sock
self.name = "Client %s:%i" % address
debug('connected')
for cmd, args in self.recv_delimited():
debug('called', cmd, args)
if cmd == 'close':
break
elif cmd == 'quit':
self.respond(None)
commandmap[cmd](*args) #quit()
break
try:
func = commandmap[cmd]
except KeyError:
self.respond('No such command')
continue
try:
debug(func, args)
result = func(*args)
except PlayerError, e:
debug(e)
self.respond(e)
except:
tb = traceback.format_exc()
debug('Error:', tb)
self.respond('Unknown Error', tb)
continue
else:
debug('Responding with result', repr(result))
self.respond(None, result)
self.quit()
debug('disconnected')
def recv_delimited(self):
delimiter = '\n\n'
buffer = self.sock.recv(BUFSIZE)
buff2 = ''
while buffer:
buff2 += buffer
while delimiter in buff2:
cmd, _, buff2 = buff2.partition(delimiter)
cmd = cmd.split('\n')
yield cmd[0], cmd[1:]
try:
buffer = self.sock.recv(BUFSIZE)
except socket.error:
buffer = ''
def respond(self, err=None, payload=None):
if payload is not None:
self.sock.send(NetFormat(payload))
if err is None:
self.sock.send('OK\n\n')
else:
self.sock.send('ERR: %s\n\n' % err)
def quit(self):
self.sock.close()
self.emit('connection-closed', self)
class NetThread(bgthread.BgThread, PObject):
name = "NetworkThread"
def __init__(self, *args, **kwargs):
bgthread.BgThread.__init__(self, *args, **kwargs)
self.dependencies = {'commandmap':self.on_set_commandmap}
self.commands = {
'ping': self.ping,
}
def main(self, confitems):
config = dict(confitems)
host = config.get('listen-host', 'localhost')
port = config.get('listen-port', 8145)
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
sock.bind((host, port))
sock.listen(5)
self.clients = set()
self.connect('new-connection', self.on_new_connection)
while True:
self.emit('new-connection', *sock.accept())
def on_new_connection(self, conn, addr):
c = ConnectionThread(self.commandmap, conn, addr)
c.connect('connection-closed', self.client_closed)
self.clients.add(c)
c.start()
def client_closed(self, client):
self.clients.discard(client)
def ping(self):
return None
def on_set_commandmap(self, commandmap):
self.commandmap = commandmap
def quit(self):
for c in list(self.clients):
c.quit()
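# ---------------------------------------------------------------------------
# Illustrative client-side sketch (not part of the original module). A command
# is sent as newline-separated fields terminated by a blank line ('\n\n'); the
# server replies with 'OK\n\n' on success or 'ERR: ...\n\n' on failure. The
# host and port below are the defaults read in NetThread.main().
#
#   import socket
#   s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
#   s.connect(('localhost', 8145))
#   s.send('ping\n\n')          # 'ping' is registered in NetThread.commands
#   print s.recv(4096)          # -> 'OK\n\n'   (Python 2, like this module)
#   s.send('close\n\n')         # ask the connection thread to hang up
#   s.close()
# ---------------------------------------------------------------------------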
| bsd-3-clause | 4,551,058,690,451,919,400 | 29.478873 | 75 | 0.703558 | false |
theJollySin/mazelib | mazelib/generate/Sidewinder.py | 1 | 2279 | from random import choice, random
import numpy as np
# If the code is not Cython-compiled, we need to add some imports.
from cython import compiled
if not compiled:
from mazelib.generate.MazeGenAlgo import MazeGenAlgo
class Sidewinder(MazeGenAlgo):
""" The Algorithm
1. Work through the grid row-wise, starting with the cell at 0,0.
2. Add the current cell to a "run" set.
3. For the current cell, randomly decide whether to carve East.
4. If a passage East was carved, make the new cell the current cell and repeat steps 2-4.
5. If a passage East was not carved, choose any one of the cells in the run set and carve
a passage North. Then empty the run set. Repeat steps 2-5.
6. Continue until all rows have been processed.
Optional Parameters
skew: Float [0.0, 1.0]
If the skew is set less than 0.5 the maze will be skewed East-West, if it set greater
than 0.5 it will be skewed North-South. (default 0.5)
"""
def __init__(self, h, w, skew=0.5):
super(Sidewinder, self).__init__(h, w)
self.skew = skew
def generate(self):
""" highest-level method that implements the maze-generating algorithm
Returns:
np.array: returned matrix
"""
# create empty grid
grid = np.empty((self.H, self.W), dtype=np.int8)
grid.fill(1)
# The first row is always empty, because you can't carve North
for col in range(1, self.W - 1):
grid[1][col] = 0
# loop through the remaining rows and columns
for row in range(3, self.H, 2):
# create a run of cells
run = []
for col in range(1, self.W, 2):
# remove the wall to the current cell
grid[row][col] = 0
# add the current cell to the run
run.append((row, col))
carve_east = random() > self.skew
                # carve East or North (can't carve East into the East wall)
if carve_east and col < (self.W - 2):
grid[row][col + 1] = 0
else:
north = choice(run)
grid[north[0] - 1][north[1]] = 0
run = []
return grid | gpl-3.0 | -3,060,804,768,750,042,600 | 34.076923 | 93 | 0.575691 | false |
ssdxiao/kimchi | src/kimchi/rollbackcontext.py | 1 | 2304 | #
# Project Kimchi
#
# Copyright IBM, Corp. 2013
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
import sys
class RollbackContext(object):
'''
A context manager for recording and playing rollback.
The first exception will be remembered and re-raised after rollback
Sample usage:
with RollbackContext() as rollback:
step1()
rollback.prependDefer(lambda: undo step1)
def undoStep2(arg): pass
step2()
rollback.prependDefer(undoStep2, arg)
'''
def __init__(self, *args):
self._finally = []
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, traceback):
"""
According to Python official doc. This function should only re-raise
the exception from undo functions. Python automatically re-raises the
original exception when this function does not return True.
http://docs.python.org/2/library/stdtypes.html#contextmanager.__exit__
"""
undoExcInfo = None
for undo, args, kwargs in self._finally:
try:
undo(*args, **kwargs)
except Exception:
# keep the earliest exception info
if undoExcInfo is None:
undoExcInfo = sys.exc_info()
if exc_type is None and undoExcInfo is not None:
raise undoExcInfo[0], undoExcInfo[1], undoExcInfo[2]
def defer(self, func, *args, **kwargs):
self._finally.append((func, args, kwargs))
def prependDefer(self, func, *args, **kwargs):
self._finally.insert(0, (func, args, kwargs))
| lgpl-2.1 | 4,841,209,536,425,208,000 | 33.909091 | 79 | 0.658854 | false |
googleapis/python-pubsublite | google/cloud/pubsublite/internal/wire/permanent_failable.py | 1 | 3266 | # Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import asyncio
from typing import Awaitable, TypeVar, Optional, Callable
from google.api_core.exceptions import GoogleAPICallError
from google.cloud.pubsublite.internal.wait_ignore_cancelled import wait_ignore_errors
T = TypeVar("T")
class _TaskWithCleanup:
def __init__(self, a: Awaitable):
self._task = asyncio.ensure_future(a)
async def __aenter__(self):
return self._task
async def __aexit__(self, exc_type, exc_val, exc_tb):
if not self._task.done():
self._task.cancel()
await wait_ignore_errors(self._task)
class PermanentFailable:
"""A class that can experience permanent failures, with helpers for forwarding these to client actions."""
_maybe_failure_task: Optional[asyncio.Future]
def __init__(self):
self._maybe_failure_task = None
@property
def _failure_task(self) -> asyncio.Future:
"""Get the failure task, initializing it lazily, since it needs to be initialized in the event loop."""
if self._maybe_failure_task is None:
self._maybe_failure_task = asyncio.Future()
return self._maybe_failure_task
async def await_unless_failed(self, awaitable: Awaitable[T]) -> T:
"""
Await the awaitable, unless fail() is called first.
Args:
awaitable: An awaitable
Returns: The result of the awaitable
Raises: The permanent error if fail() is called or the awaitable raises one.
"""
async with _TaskWithCleanup(awaitable) as task:
if self._failure_task.done():
raise self._failure_task.exception()
done, _ = await asyncio.wait(
[task, self._failure_task], return_when=asyncio.FIRST_COMPLETED
)
if task in done:
return await task
raise self._failure_task.exception()
async def run_poller(self, poll_action: Callable[[], Awaitable[None]]):
"""
Run a polling loop, which runs poll_action forever unless this is failed.
Args:
poll_action: A callable returning an awaitable to run in a loop. Note that async functions which return once
satisfy this.
"""
try:
while True:
await self.await_unless_failed(poll_action())
except GoogleAPICallError as e:
self.fail(e)
def fail(self, err: GoogleAPICallError):
if not self._failure_task.done():
self._failure_task.set_exception(err)
def error(self) -> Optional[GoogleAPICallError]:
if not self._failure_task.done():
return None
return self._failure_task.exception()
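# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the original module): a minimal worker that
# polls until a GoogleAPICallError makes it fail permanently. Every name other
# than PermanentFailable is an assumption made only for this sketch.
#
#   class DemoWorker(PermanentFailable):
#       async def _poll_once(self):
#           ...  # e.g. pull one message from an underlying stream
#
#       async def run(self):
#           # run_poller() calls _poll_once() in a loop and turns any
#           # GoogleAPICallError it raises into a permanent failure.
#           await self.run_poller(self._poll_once)
#
#   # Callers elsewhere wrap their awaits so they unblock on permanent failure:
#   #   result = await worker.await_unless_failed(some_awaitable)
# ---------------------------------------------------------------------------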
| apache-2.0 | -7,624,177,616,057,700,000 | 34.5 | 118 | 0.647581 | false |
thisismyrobot/pyxero | xero/auth.py | 1 | 13893 | from __future__ import unicode_literals
import datetime
import requests
from requests_oauthlib import OAuth1
from oauthlib.oauth1 import (SIGNATURE_RSA, SIGNATURE_TYPE_AUTH_HEADER,
SIGNATURE_HMAC)
from six.moves.urllib.parse import urlencode, parse_qs
from .constants import (XERO_BASE_URL, XERO_PARTNER_BASE_URL,
REQUEST_TOKEN_URL, AUTHORIZE_URL, ACCESS_TOKEN_URL)
from .exceptions import *
OAUTH_EXPIRY_SECONDS = 3600 # Default unless a response reports differently
class PrivateCredentials(object):
"""An object wrapping the 2-step OAuth process for Private Xero API access.
Usage:
1) Construct a PrivateCredentials() instance:
>>> from xero.auth import PrivateCredentials
>>> credentials = PrivateCredentials(<consumer_key>, <rsa_key>)
rsa_key should be a multi-line string, starting with:
-----BEGIN RSA PRIVATE KEY-----\n
2) Use the credentials:
>>> from xero import Xero
>>> xero = Xero(credentials)
>>> xero.contacts.all()
...
"""
def __init__(self, consumer_key, rsa_key):
self.consumer_key = consumer_key
self.rsa_key = rsa_key
self.base_url = XERO_BASE_URL
# Private API uses consumer key as the OAuth token.
self.oauth_token = consumer_key
self.oauth = OAuth1(
self.consumer_key,
resource_owner_key=self.oauth_token,
rsa_key=self.rsa_key,
signature_method=SIGNATURE_RSA,
signature_type=SIGNATURE_TYPE_AUTH_HEADER,
)
class PublicCredentials(object):
"""An object wrapping the 3-step OAuth process for Public Xero API access.
Usage:
1) Construct a PublicCredentials() instance:
>>> from xero import PublicCredentials
>>> credentials = PublicCredentials(<consumer_key>, <consumer_secret>)
2) Visit the authentication URL:
>>> credentials.url
If a callback URI was provided (e.g., https://example.com/oauth),
the user will be redirected to a URL of the form:
https://example.com/oauth?oauth_token=<token>&oauth_verifier=<verifier>&org=<organization ID>
from which the verifier can be extracted. If no callback URI is
provided, the verifier will be shown on the screen, and must be
manually entered by the user.
3) Verify the instance:
>>> credentials.verify(<verifier string>)
4) Use the credentials.
>>> from xero import Xero
>>> xero = Xero(credentials)
>>> xero.contacts.all()
...
"""
def __init__(self, consumer_key, consumer_secret,
callback_uri=None, verified=False,
oauth_token=None, oauth_token_secret=None,
oauth_expires_at=None, oauth_authorization_expires_at=None):
""" Construct the auth instance.
Must provide the consumer key and secret.
A callback URL may be provided as an option. If provided, the
        Xero verification process will redirect to that URL when the user has authorised access.
"""
self.consumer_key = consumer_key
self.consumer_secret = consumer_secret
self.callback_uri = callback_uri
self.verified = verified
self._oauth = None
self.oauth_expires_at = oauth_expires_at
self.oauth_authorization_expires_at = oauth_authorization_expires_at
self.base_url = XERO_BASE_URL
self._signature_method = SIGNATURE_HMAC
# These are not strictly used by Public Credentials, but
# are reserved for use by other credentials (i.e. Partner)
self.rsa_key = None
self.client_cert = None
self.oauth_session_handle = None
self.oauth_token = None
self.oauth_token_secret = None
self._init_credentials(oauth_token, oauth_token_secret)
def _init_credentials(self, oauth_token, oauth_token_secret):
""" Depending on the state passed in, get self._oauth up and running.
"""
if oauth_token and oauth_token_secret:
if self.verified:
# If provided, this is a fully verified set of
# credentials. Store the oauth_token and secret
# and initialize OAuth around those
self._init_oauth(oauth_token, oauth_token_secret)
else:
# If provided, we are reconstructing an initalized
# (but non-verified) set of public credentials.
self.oauth_token = oauth_token
self.oauth_token_secret = oauth_token_secret
else:
# This is a brand new set of credentials - we need to generate
# an oauth token so it's available for the url property.
oauth = OAuth1(
self.consumer_key,
client_secret=self.consumer_secret,
callback_uri=self.callback_uri,
rsa_key=self.rsa_key,
signature_method=self._signature_method
)
url = self.base_url + REQUEST_TOKEN_URL
response = requests.post(url=url, auth=oauth, cert=self.client_cert)
self._process_oauth_response(response)
def _init_oauth(self, oauth_token, oauth_token_secret):
""" Store and initialize a verified set of OAuth credentials.
"""
self.oauth_token = oauth_token
self.oauth_token_secret = oauth_token_secret
self._oauth = OAuth1(
self.consumer_key,
client_secret=self.consumer_secret,
resource_owner_key=self.oauth_token,
resource_owner_secret=self.oauth_token_secret,
rsa_key=self.rsa_key,
signature_method=self._signature_method
)
def _process_oauth_response(self, response):
""" Extracts the fields from an oauth response.
"""
if response.status_code == 200:
credentials = parse_qs(response.text)
# Initialize the oauth credentials
self._init_oauth(
credentials.get('oauth_token')[0],
credentials.get('oauth_token_secret')[0]
)
# If tokens are refreshable, we'll get a session handle
self.oauth_session_handle = credentials.get(
'oauth_session_handle', [None])[0]
# Calculate token/auth expiry
oauth_expires_in = credentials.get(
'oauth_expires_in',
[OAUTH_EXPIRY_SECONDS])[0]
oauth_authorisation_expires_in = credentials.get(
'oauth_authorization_expires_in',
[OAUTH_EXPIRY_SECONDS])[0]
self.oauth_expires_at = datetime.datetime.now() + \
datetime.timedelta(seconds=int(
oauth_expires_in))
self.oauth_authorization_expires_at = \
datetime.datetime.now() + \
datetime.timedelta(seconds=int(
oauth_authorisation_expires_in))
else:
self._handle_error_response(response)
def _handle_error_response(self, response):
""" Raise exceptions for error codes.
"""
if response.status_code == 400:
raise XeroBadRequest(response)
elif response.status_code == 401:
raise XeroUnauthorized(response)
elif response.status_code == 403:
raise XeroForbidden(response)
elif response.status_code == 404:
raise XeroNotFound(response)
elif response.status_code == 500:
raise XeroInternalError(response)
elif response.status_code == 501:
raise XeroNotImplemented(response)
elif response.status_code == 503:
# Two 503 responses are possible. Rate limit errors
# return encoded content; offline errors don't.
# If you parse the response text and there's nothing
# encoded, it must be a not-available error.
payload = parse_qs(response.text)
if payload:
raise XeroRateLimitExceeded(response, payload)
else:
raise XeroNotAvailable(response)
else:
raise XeroExceptionUnknown(response)
@property
def state(self):
""" Obtain the useful state of this credentials object so that we can
reconstruct it independently.
"""
return dict(
(attr, getattr(self, attr))
for attr in (
'consumer_key', 'consumer_secret', 'callback_uri',
'verified', 'oauth_token', 'oauth_token_secret',
'oauth_session_handle', 'oauth_expires_at',
'oauth_authorization_expires_at'
)
if getattr(self, attr) is not None
)
def verify(self, verifier):
""" Verify an OAuth token
"""
# Construct the credentials for the verification request
oauth = OAuth1(
self.consumer_key,
client_secret=self.consumer_secret,
resource_owner_key=self.oauth_token,
resource_owner_secret=self.oauth_token_secret,
verifier=verifier,
rsa_key=self.rsa_key,
signature_method=self._signature_method
)
        # Make the verification request, getting back an access token
url = self.base_url + ACCESS_TOKEN_URL
response = requests.post(url=url, auth=oauth, cert=self.client_cert)
self._process_oauth_response(response)
self.verified = True
@property
def url(self):
""" Returns the URL that can be visited to obtain a verifier code.
"""
# The authorize url is always api.xero.com
url = XERO_BASE_URL + AUTHORIZE_URL + '?' + \
urlencode({'oauth_token': self.oauth_token})
return url
@property
def oauth(self):
"Returns the requests-compatible OAuth object"
if self._oauth is None:
raise XeroNotVerified("OAuth credentials haven't been verified")
return self._oauth
def expired(self, now=None):
""" Check whether expired.
"""
if now is None:
now = datetime.datetime.now()
# Credentials states from older versions might not have
# oauth_expires_at available
if self.oauth_expires_at is None:
raise XeroException(None, "Expiry time is not available")
# Allow a bit of time for clock differences and round trip times
# to prevent false negatives. If users want the precise expiry,
# they can use self.oauth_expires_at
CONSERVATIVE_SECONDS = 30
return self.oauth_expires_at <= \
(now + datetime.timedelta(seconds=CONSERVATIVE_SECONDS))
class PartnerCredentials(PublicCredentials):
"""An object wrapping the 3-step OAuth process for Partner Xero API access.
Usage is very similar to Public Credentials with the following changes:
1) You'll need to pass the private key for your RSA certificate.
>>> rsa_key = "-----BEGIN RSA PRIVATE KEY----- ..."
    2) You'll need to pass a tuple with the paths to the Entrust certificate pair.
>>> client_cert = ('/path/to/entrust-cert.pem',
'/path/to/entrust-private-nopass.pem')
3) Once a token has expired, you can refresh it to get another 30 mins
>>> credentials = PartnerCredentials(**state)
>>> if credentials.expired():
credentials.refresh()
4) Authorization expiry and token expiry become different things.
oauth_expires_at tells when the current token expires (~30 min window)
oauth_authorization_expires_at tells when the overall access
permissions expire (~10 year window)
"""
def __init__(self, consumer_key, consumer_secret, rsa_key, client_cert,
callback_uri=None, verified=False,
oauth_token=None, oauth_token_secret=None,
oauth_expires_at=None, oauth_authorization_expires_at=None,
oauth_session_handle=None):
"""Construct the auth instance.
Must provide the consumer key and secret.
A callback URL may be provided as an option. If provided, the
        Xero verification process will redirect to that URL when the user has authorised access.
"""
self.consumer_key = consumer_key
self.consumer_secret = consumer_secret
self.callback_uri = callback_uri
self.verified = verified
self._oauth = None
self.oauth_expires_at = oauth_expires_at
self.oauth_authorization_expires_at = oauth_authorization_expires_at
self._signature_method = SIGNATURE_RSA
self.base_url = XERO_PARTNER_BASE_URL
self.rsa_key = rsa_key
self.client_cert = client_cert
self.oauth_session_handle = oauth_session_handle
self._init_credentials(oauth_token, oauth_token_secret)
def refresh(self):
"Refresh an expired token"
# Construct the credentials for the verification request
oauth = OAuth1(
self.consumer_key,
client_secret=self.consumer_secret,
resource_owner_key=self.oauth_token,
resource_owner_secret=self.oauth_token_secret,
rsa_key=self.rsa_key,
signature_method=self._signature_method
)
# Make the verification request, getting back an access token
params = {'oauth_session_handle': self.oauth_session_handle}
response = requests.post(url=self.base_url + ACCESS_TOKEN_URL,
params=params, auth=oauth, cert=self.client_cert)
self._process_oauth_response(response)
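# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the original module): persisting public
# credentials across the OAuth redirect by round-tripping the ``state`` dict.
# The session/redirect/request objects stand in for whatever web framework is
# used and are assumptions made only for this sketch.
#
#   credentials = PublicCredentials(CONSUMER_KEY, CONSUMER_SECRET,
#                                   callback_uri='https://example.com/oauth')
#   session['xero_creds'] = credentials.state   # stash before redirecting
#   redirect(credentials.url)                   # send the user to Xero
#
#   # ... later, in the callback view ...
#   credentials = PublicCredentials(**session['xero_creds'])
#   credentials.verify(request.GET['oauth_verifier'])
#   session['xero_creds'] = credentials.state   # verified tokens, keep them
# ---------------------------------------------------------------------------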
| bsd-3-clause | 7,559,847,955,323,640,000 | 35.560526 | 101 | 0.600158 | false |
meshulam/sly | deps/svgwrite/elementfactory.py | 1 | 2222 | #!/usr/bin/env python
#coding:utf-8
# Author: mozman --<[email protected]>
# Purpose: element factory
# Created: 15.10.2010
# Copyright (C) 2010, Manfred Moitzi
# License: MIT License
from svgwrite import container
from svgwrite import shapes
from svgwrite import path
from svgwrite import image
from svgwrite import text
from svgwrite import gradients
from svgwrite import pattern
from svgwrite import masking
from svgwrite import animate
from svgwrite import filters
factoryelements = {
'g': container.Group,
'svg': container.SVG,
'defs': container.Defs,
'symbol': container.Symbol,
'marker': container.Marker,
'use': container.Use,
'a': container.Hyperlink,
'script': container.Script,
'style': container.Style,
'line': shapes.Line,
'rect': shapes.Rect,
'circle': shapes.Circle,
'ellipse': shapes.Ellipse,
'polyline': shapes.Polyline,
'polygon': shapes.Polygon,
'path': path.Path,
'image': image.Image,
'text': text.Text,
'tspan': text.TSpan,
'tref': text.TRef,
'textPath': text.TextPath,
'textArea': text.TextArea,
'linearGradient': gradients.LinearGradient,
'radialGradient': gradients.RadialGradient,
'pattern': pattern.Pattern,
'clipPath': masking.ClipPath,
'mask': masking.Mask,
'animate': animate.Animate,
'set': animate.Set,
'animateColor': animate.AnimateColor,
'animateMotion': animate.AnimateMotion,
'animateTransform': animate.AnimateTransform,
'filter': filters.Filter,
}
class ElementBuilder(object):
def __init__(self, cls, factory):
self.cls = cls
self.factory = factory
def __call__(self, *args, **kwargs):
# inject creator object - inherit _parameter from factory
kwargs['factory'] = self.factory
# create an object of type 'cls'
return self.cls(*args, **kwargs)
class ElementFactory(object):
def __getattr__(self, name):
if name in factoryelements:
return ElementBuilder(factoryelements[name], self)
else:
raise AttributeError("'%s' has no attribute '%s'" % (self.__class__.__name__, name))
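# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the original module). ElementFactory is used
# as a mixin: svgwrite's Drawing class inherits from it, so every name in
# ``factoryelements`` becomes a builder method that injects the drawing as the
# element's ``factory`` argument.
#
#   import svgwrite
#   dwg = svgwrite.Drawing('example.svg', size=('4cm', '3cm'))
#   dwg.add(dwg.rect(insert=(5, 5), size=(20, 10), fill='blue'))
#   dwg.save()
# ---------------------------------------------------------------------------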
| mit | 6,687,915,123,606,840,000 | 28.027027 | 96 | 0.643114 | false |
thethythy/Mnemopwd | mnemopwd/server/util/Daemon.py | 1 | 7927 | # -*- coding: utf-8 -*-
# Copyright (c) 2015, Thierry Lemeunier <thierry at lemeunier dot net>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
# OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
# OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
Provides a simple Daemon class to ease the process of forking a
python application on POSIX systems.
"""
import errno
import logging
import socket
from logging.handlers import RotatingFileHandler
import os
import signal
import sys
import time
import datetime
from ...server.util.Configuration import Configuration
class Daemon(object):
"""Daemon base class"""
def run(self):
"""Override. We are in the daemon at this point."""
def main(self):
"""Read the command line and either start or stop the daemon"""
if Configuration.action == 'start':
self.start()
elif Configuration.action == 'stop':
self.stop()
elif Configuration.action == 'status':
self.status()
else:
raise ValueError(Configuration.action)
def on_sigterm(self, signalnum, frame):
"""Handle segterm by treating as a keyboard interrupt"""
raise KeyboardInterrupt('SIGTERM')
def add_signal_handlers(self):
"""Register the sigterm handler"""
signal.signal(signal.SIGTERM, self.on_sigterm)
def start(self):
"""Initialize and run the daemon"""
self.check_pid()
self.add_signal_handlers()
self.start_logging()
try:
self.check_pid_writable()
self.check_server_accessibility()
self.daemonize()
except:
logging.exception("failed to start due to an exception")
raise
self.write_pid()
try:
try:
self.run()
except (KeyboardInterrupt, SystemExit):
pass
except OSError as exc:
logging.exception(str(exc))
pass
except:
logging.exception("stopping with an exception")
raise
finally:
self.remove_pid()
def stop(self):
"""Stop the running process"""
if Configuration.pidfile and os.path.exists(Configuration.pidfile):
file = open(Configuration.pidfile)
pid = int(file.read())
file.close()
os.kill(pid, signal.SIGTERM)
for n in range(10):
time.sleep(0.25)
try:
os.kill(pid, 0)
except OSError as why:
if why.errno == errno.ESRCH:
break
else:
raise
else:
sys.exit("pid %d did not die" % pid)
else:
sys.exit("not running")
def status(self):
self.check_pid(True)
def start_logging(self):
"""Configure the logging module"""
handler = RotatingFileHandler(
Configuration.logfile,
maxBytes=Configuration.logmaxmb * 1024 * 1024,
backupCount=Configuration.logbackups)
log = logging.getLogger()
log.setLevel(Configuration.loglevel)
handler.setFormatter(
logging.Formatter("%(asctime)s %(levelname)s %(message)s"))
log.addHandler(handler)
def check_pid(self, status=False):
"""Check the pid file.
Stop using sys.exit() if another instance is already running.
If the pid file exists but no other instance is running,
delete the pid file.
"""
if not Configuration.pidfile:
return
if os.path.exists(Configuration.pidfile):
try:
pid = int(open(Configuration.pidfile, 'rb').read().decode('utf-8').strip())
except ValueError:
msg = 'pidfile %s contains a non-integer value' % Configuration.pidfile
sys.exit(msg)
try:
os.kill(pid, 0)
except OSError as err:
if err.errno == errno.ESRCH:
# The pid doesn't exist, so remove the stale pidfile.
os.remove(Configuration.pidfile)
else:
msg = ("failed to check status of process %s "
"from pidfile %s: %s" % (pid, Configuration.pidfile, err.strerror))
sys.exit(msg)
else:
mtime = os.stat(Configuration.pidfile).st_mtime
since = datetime.timedelta(seconds=(time.time() - mtime))
msg = 'instance [pid %s] seems to be running since %s [%s days]' % (pid, time.ctime(mtime), since.days)
sys.exit(msg)
elif status:
print('no instance seems to be running')
def check_pid_writable(self):
"""Verify the user has access to write to the pid file.
Note that the eventual process ID isn't known until after
daemonize(), so it's not possible to write the PID here.
"""
if not Configuration.pidfile:
return
if os.path.exists(Configuration.pidfile):
check = Configuration.pidfile
else:
check = os.path.dirname(Configuration.pidfile)
if not os.access(check, os.W_OK):
msg = 'unable to write to pidfile %s' % Configuration.pidfile
sys.exit(msg)
    def check_server_accessibility(self):
        """Verify the configured host and port can be bound before daemonizing."""
        try:
            s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            s.bind((Configuration.host, Configuration.port))
            s.close()
        except OSError as exc:
            if exc.errno == errno.EADDRINUSE:
                print("address [%s:%d] already in use" % (Configuration.host, Configuration.port))
                sys.exit(1)
def write_pid(self):
"""Write to the pid file"""
if Configuration.pidfile:
open(Configuration.pidfile, 'wb').write(str(os.getpid()).encode('utf-8'))
def remove_pid(self):
"""Delete the pid file"""
if Configuration.pidfile and os.path.exists(Configuration.pidfile):
os.remove(Configuration.pidfile)
def daemonize(self):
"""Detach from the terminal and continue as a daemon"""
if os.fork(): # launch child and...
os._exit(0) # kill off parent
os.setsid()
if os.fork(): # launch child and...
os._exit(0) # kill off parent again.
os.umask(63) # 077 in octal
null = os.open('/dev/null', os.O_RDWR)
for i in range(3):
try:
os.dup2(null, i)
except OSError as e:
if e.errno != errno.EBADF:
raise
os.close(null)
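# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the original module): a minimal concrete
# daemon. The MyServer name is an assumption, and Configuration.action,
# pidfile, logfile, host and port are expected to have been filled in from the
# command line before main() is called.
#
#   class MyServer(Daemon):
#       def run(self):
#           ...  # long-running server loop goes here
#
#   MyServer().main()   # honours Configuration.action: start | stop | status
# ---------------------------------------------------------------------------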
| bsd-2-clause | 4,598,217,630,481,537,000 | 35.196347 | 119 | 0.588873 | false |
googleapis/python-ndb | google/cloud/ndb/exceptions.py | 1 | 4045 | # Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Classes representing legacy Google App Engine exceptions.
Unless otherwise noted, these are meant to act as shims for the exception
types defined in the ``google.appengine.api.datastore_errors`` module in
legacy Google App Engine runtime.
"""
__all__ = [
"Error",
"ContextError",
"BadValueError",
"BadArgumentError",
"BadRequestError",
"Rollback",
"BadQueryError",
"BadFilterError",
]
class Error(Exception):
"""Base datastore error type."""
class ContextError(Error):
"""Indicates an NDB call being made without a context.
Raised whenever an NDB call is made outside of a context
established by :meth:`google.cloud.ndb.client.Client.context`.
"""
def __init__(self):
super(ContextError, self).__init__(
"No current context. NDB calls must be made in context "
"established by google.cloud.ndb.Client.context."
)
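# Illustrative usage sketch (not part of this module): NDB calls are expected
# to run inside a context established by the client, for example:
#   client = ndb.Client()
#   with client.context():
#       key.get()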
class BadValueError(Error):
"""Indicates a property value or filter value is invalid.
Raised by ``Entity.__setitem__()``, ``Query.__setitem__()``, ``Get()``,
and others.
"""
class BadArgumentError(Error):
"""Indicates an invalid argument was passed.
Raised by ``Query.Order()``, ``Iterator.Next()``, and others.
"""
class BadRequestError(Error):
"""Indicates a bad request was passed.
Raised by ``Model.non_transactional()`` and others.
"""
class Rollback(Error):
"""Allows a transaction to be rolled back instead of committed.
Note that *any* exception raised by a transaction function will cause a
rollback. Hence, this exception type is purely for convenience.
"""
class BadQueryError(Error):
"""Raised by Query when a query or query string is invalid."""
class BadFilterError(Error):
"""Indicates a filter value is invalid.
Raised by ``Query.__setitem__()`` and ``Query.Run()`` when a filter string
is invalid.
"""
def __init__(self, filter):
self.filter = filter
message = "invalid filter: {}.".format(self.filter).encode("utf-8")
super(BadFilterError, self).__init__(message)
class NoLongerImplementedError(NotImplementedError):
"""Indicates a legacy function that is intentionally left unimplemented.
In the vast majority of cases, this should only be raised by classes,
    functions, or methods that were only used internally in legacy NDB and
are no longer necessary because of refactoring. Legacy NDB did a poor job
of distinguishing between internal and public API. Where we have determined
that something is probably not a part of the public API, we've removed it
in order to keep the supported API as clean as possible. It's possible that
in some cases we've guessed wrong. Get in touch with the NDB development
team if you think this is the case.
"""
def __init__(self):
super(NoLongerImplementedError, self).__init__("No longer implemented")
class Cancelled(Error):
"""An operation has been cancelled by user request.
Raised when trying to get a result from a future that has been cancelled by
a call to ``Future.cancel`` (possibly on a future that depends on this
future).
"""
class NestedRetryException(Error):
"""A nested retry block raised an exception.
Raised when a nested retry block cannot complete due to an exception. This
allows the outer retry to get back control and retry the whole operation.
"""
| apache-2.0 | 2,430,306,938,058,011,600 | 29.877863 | 79 | 0.695179 | false |
hunter-cameron/Bioinformatics | python/build/lib/mypyli/kraken.py | 1 | 2295 |
import sys
class KrakenRecord(object):
@classmethod
def parse_kraken_file(cls, kraken_f, iterate=False):
"""
Class method that returns a list of all kraken entries in the output
Also has the option to work as a generator and iterate back out objects (to selectively keep some)
"""
        def _iterate():
            # generator variant: yields one record at a time
            with open(kraken_f) as IN:
                for line in IN:
                    yield cls(line)
        if iterate:
            return _iterate()
        with open(kraken_f) as IN:
            return [cls(line) for line in IN]
@staticmethod
def print_count_matrix(record_list, outfile="kraken_taxonomy_counts.txt"):
counts = {}
for record in record_list:
if record.classified:
                counts[record.taxid] = counts.get(record.taxid, {'count':0, 'length':0})
                counts[record.taxid]['count'] += 1
                counts[record.taxid]['length'] += int(record.length)
else:
counts['unclassified'] = counts.get('unclassified', {'count':0, 'length':0})
counts['unclassified']['count'] += 1
counts['unclassified']['length'] += int(record.length)
with open(outfile, 'w') as OUT:
            OUT.write("taxid\tcount\tavg_length\n")
for key, value in counts.items():
OUT.write("\t".join([str(key), str(value['count']), str(value['length'] / value['count'])]) + "\n")
def __init__(self, line):
# "chomp" the line
line = line.rstrip()
line_dict = {key: value for key, value in zip(["classified", "name", "tax_id", "length", "id_hits"], line.split("\t"))}
# set classified to true if C, false otherwise
self.classified = line_dict['classified'] == 'C'
self.name = line_dict['name']
self.taxid = line_dict['tax_id']
self.length = line_dict['length']
self.id_hits = [line_dict['id_hits'].split(" ")]
def __repr__(self):
return "{} classified as TID:{}".format(self.name, self.taxid) if self.classified else "{} not classified".format(self.name)
if __name__ == "__main__":
records = KrakenRecord.parse_kraken_file(sys.argv[1])
[print(record) for record in records]
KrakenRecord.print_count_matrix(records)
| mit | 2,762,964,342,759,067,000 | 34.859375 | 132 | 0.556427 | false |
DiplomadoACL/problemasenclase | dictionary_makers/pmi_v01.py | 1 | 2137 | import pickle
import math
import sys
PATH="diccionarios/"
soporte=int(sys.argv[1])
codigos_ISO=sys.argv[2:]
for codigo_ISO in codigos_ISO:
archivo=open(PATH+codigo_ISO+"wiki_frecuencias.pickle","r")
metadatos_palabras,dic_palabras=pickle.load(archivo)
print
print "Metadatos archivo de frecuencias de palabras:",metadatos_palabras
archivo.close()
archivo=open(PATH+codigo_ISO+"wiki_2grams.pickle","r")
metadatos_bigramas,dic_bigramas=pickle.load(archivo)
print
print "Metadatos archivo de frecuencias de bigramas:",metadatos_bigramas
archivo.close()
numero_palabras_corpus=metadatos_palabras["palabras_corpus"]
#lista_bigramas_ordenados=dic_bigramas.keys()
#lista_bigramas_ordenados.sort(key=lambda x:-dic_bigramas[x])
#for bigrama in lista_bigramas_ordenados[:20]:
# print bigrama, dic_bigramas[bigrama]
dic_pmi={}
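    # Added note: dic_pmi maps every bigram whose count reaches the `soporte`
    # threshold to its pointwise mutual information,
    #   PMI(w1, w2) = log( P(w1, w2) / (P(w1) * P(w2)) ),
    # with the probabilities estimated from the word and bigram counts above.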
for bigrama in dic_bigramas:
if dic_bigramas[bigrama]>=soporte:
palabra1=bigrama[:bigrama.find("|||")]
palabra2=bigrama[bigrama.find("|||")+3:]
#print "/t",bigrama,palabra1,palabra2
P_palabra1=float(dic_palabras[palabra1])/numero_palabras_corpus
P_palabra2=float(dic_palabras[palabra2])/numero_palabras_corpus
P_bigrama=float(dic_bigramas[bigrama])/(numero_palabras_corpus-1)
pmi=math.log(P_bigrama/(P_palabra1*P_palabra2))
dic_pmi[bigrama]=pmi
lista_bigramas_ordenados=dic_pmi.keys()
lista_bigramas_ordenados.sort(key=lambda x:-dic_pmi[x])
print "Palabras mas relacionadas:"
for bigrama in lista_bigramas_ordenados[:200]:
palabra1=bigrama[:bigrama.find("|||")]
palabra2=bigrama[bigrama.find("|||")+3:]
print bigrama, dic_pmi[bigrama],dic_palabras[palabra1],dic_palabras[palabra2],dic_bigramas[bigrama]
print "Palabras menos relacionadas:"
for bigrama in lista_bigramas_ordenados[-200:]:
palabra1=bigrama[:bigrama.find("|||")]
palabra2=bigrama[bigrama.find("|||")+3:]
print bigrama, dic_pmi[bigrama],dic_palabras[palabra1],dic_palabras[palabra2],dic_bigramas[bigrama]
| lgpl-3.0 | -6,301,402,933,782,339,000 | 38.574074 | 107 | 0.68554 | false |
grundprinzip/Impala | tests/query_test/test_aggregation.py | 1 | 4762 | #!/usr/bin/env python
# Copyright (c) 2012 Cloudera, Inc. All rights reserved.
# Validates all aggregate functions across all datatypes
#
import logging
import pytest
from tests.common.test_vector import *
from tests.common.impala_test_suite import ImpalaTestSuite
from tests.common.test_dimensions import create_exec_option_dimension
from tests.common.test_dimensions import create_uncompressed_text_dimension
from tests.util.test_file_parser import QueryTestSectionReader
agg_functions = ['sum', 'count', 'min', 'max', 'avg']
data_types = ['int', 'bool', 'double', 'bigint', 'tinyint',
'smallint', 'float', 'timestamp']
result_lut = {
# TODO: Add verification for other types
'sum-tinyint': 45000, 'avg-tinyint': 5, 'count-tinyint': 9000,
'min-tinyint': 1, 'max-tinyint': 9,
'sum-smallint': 495000, 'avg-smallint': 50, 'count-smallint': 9900,
'min-smallint': 1, 'max-smallint': 99,
'sum-int': 4995000, 'avg-int': 500, 'count-int': 9990,
'min-int': 1, 'max-int': 999,
'sum-bigint': 49950000, 'avg-bigint': 5000, 'count-bigint': 9990,
'min-bigint': 10, 'max-bigint': 9990,
}
class TestAggregation(ImpalaTestSuite):
@classmethod
def get_workload(self):
return 'functional-query'
@classmethod
def add_test_dimensions(cls):
super(TestAggregation, cls).add_test_dimensions()
# Add two more dimensions
cls.TestMatrix.add_dimension(TestDimension('agg_func', *agg_functions))
cls.TestMatrix.add_dimension(TestDimension('data_type', *data_types))
cls.TestMatrix.add_constraint(lambda v: cls.is_valid_vector(v))
@classmethod
def is_valid_vector(cls, vector):
data_type, agg_func = vector.get_value('data_type'), vector.get_value('agg_func')
file_format = vector.get_value('table_format').file_format
if file_format not in ['parquet']: return False
if cls.exploration_strategy() == 'core':
# Reduce execution time when exploration strategy is 'core'
if vector.get_value('exec_option')['batch_size'] != 0: return False
# Avro doesn't have timestamp type
if file_format == 'avro' and data_type == 'timestamp':
return False
elif agg_func not in ['min', 'max', 'count'] and data_type == 'bool':
return False
elif agg_func == 'sum' and data_type == 'timestamp':
return False
return True
def test_aggregation(self, vector):
data_type, agg_func = (vector.get_value('data_type'), vector.get_value('agg_func'))
query = 'select %s(%s_col) from alltypesagg where day is not null' % (agg_func,
data_type)
result = self.execute_scalar(query, vector.get_value('exec_option'),
table_format=vector.get_value('table_format'))
if 'int' in data_type:
assert result_lut['%s-%s' % (agg_func, data_type)] == int(result)
# AVG
if vector.get_value('data_type') == 'timestamp' and\
vector.get_value('agg_func') == 'avg':
return
query = 'select %s(DISTINCT(%s_col)) from alltypesagg where day is not null' % (
agg_func, data_type)
result = self.execute_scalar(query, vector.get_value('exec_option'))
class TestAggregationQueries(ImpalaTestSuite):
"""Run the aggregation test suite, with codegen enabled and disabled, to exercise our
non-codegen code"""
@classmethod
def get_workload(self):
return 'functional-query'
@classmethod
def add_test_dimensions(cls):
super(TestAggregationQueries, cls).add_test_dimensions()
cls.TestMatrix.add_dimension(
create_exec_option_dimension(disable_codegen_options=[False, True]))
if cls.exploration_strategy() == 'core':
cls.TestMatrix.add_dimension(create_uncompressed_text_dimension(cls.get_workload()))
@pytest.mark.execute_serially
def test_non_codegen_tinyint_grouping(self, vector):
# Regression for IMPALA-901. The test includes an INSERT statement, so can only be run
# on INSERT-able formats - text only in this case, since the bug doesn't depend on the
# file format.
if vector.get_value('table_format').file_format == 'text' \
and vector.get_value('table_format').compression_codec == 'none':
self.run_test_case('QueryTest/aggregation_no_codegen_only', vector)
def test_aggregation(self, vector):
if vector.get_value('table_format').file_format == 'hbase':
pytest.xfail(reason="IMPALA-283 - select count(*) produces inconsistent results")
self.run_test_case('QueryTest/aggregation', vector)
def test_distinct(self, vector):
if vector.get_value('table_format').file_format == 'hbase':
      pytest.xfail("HBase returns columns in alphabetical order for select distinct *, "
                   "making the result verification fail.")
self.run_test_case('QueryTest/distinct', vector)
| apache-2.0 | -7,782,414,313,389,860,000 | 40.408696 | 90 | 0.681016 | false |
regisf/yablog | capatcha/admin.py | 1 | 3730 | # -*- coding: UTF-8 -*-
# YaBlog
# (c) Regis FLORET 2012 and later
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the <organization> nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL Regis FLORET 2012 and later BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from django.contrib import admin
from django.contrib.admin.widgets import AdminFileWidget
from django.utils.safestring import mark_safe
from .models import Preference
class AdminImageWidget(AdminFileWidget):
    """ Display a thumbnail in the admin """
    def render(self, name, value, attrs=None):
        """ Render on demand """
output = []
if value:
output.append(u'<div>%s</div>' % (value))
output.append(super(AdminFileWidget, self).render(name, value, attrs))
if value and getattr(value, "url", None):
            # the image; a link could also be used instead
img = u'<div><img src="%s" width="128px"/></div>' % (value.url)
output.append(img)
return mark_safe(u''.join(output))
class AdminFontWidget(AdminFileWidget):
def render(self, name, value, attrs=None):
output = []
if value:
output.append(u'<div>%s</div>' % value)
output.append(super(AdminFileWidget, self).render(name, value, attrs))
if value and getattr(value, "url", None):
# Get the font
            # Display it
text = u'''<style type="text/css">
@font-face {
src: url("/site_media/%s");
font-family: sample;
}
</style>
<div style="font-family:sample; font-size: 48px;">Portez vieux ce whiskey au juge blond qui fume</div>
''' % value
output.append(text)
return mark_safe(u''.join(output))
class BaseAdmin(admin.ModelAdmin):
    """ Base class for all admin modules """
def formfield_for_dbfield(self, db_field, **kwargs):
if db_field.name == 'background':
kwargs['widget'] = AdminImageWidget
            kwargs.pop('request', None)  # otherwise an error occurs
return db_field.formfield(**kwargs)
elif db_field.name == 'font':
kwargs['widget'] = AdminFontWidget
kwargs.pop('request', None)
return db_field.formfield(**kwargs)
return super(BaseAdmin, self).formfield_for_dbfield(db_field, **kwargs)
admin.site.register(Preference, BaseAdmin)
| bsd-3-clause | 2,487,247,014,641,007,600 | 45.6125 | 114 | 0.666398 | false |
ducandu/aiopening | docs/conf.py | 1 | 4851 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# aiopening documentation build configuration file, created by
# sphinx-quickstart on Sun Jun 4 11:36:56 2017.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import sys
sys.path.insert(0, os.path.abspath('../'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = ['sphinx.ext.autodoc',
'sphinx.ext.todo',
'sphinx.ext.imgmath',
'sphinx.ext.viewcode',
'sphinx.ext.githubpages']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'aiopening'
copyright = '2017, Sven Mika'
author = 'Sven Mika'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '1.0.1'
# The full version, including alpha/beta/rc tags.
release = '1.0.1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = True
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'alabaster'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# -- Options for HTMLHelp output ------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'aiopenerdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'aiopening.tex', 'aiopening Documentation',
'Sven Mika', 'manual'),
]
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'aiopening', 'aiopening Documentation',
[author], 1)
]
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'aiopening', 'aiopening Documentation',
author, 'aiopening', 'One line description of project.',
'Miscellaneous'),
]
| mit | 6,609,301,012,459,507,000 | 29.130435 | 79 | 0.672645 | false |
tgalal/yowsup | yowsup/config/manager.py | 1 | 5552 | from yowsup.config.v1.config import Config
from yowsup.config.transforms.dict_keyval import DictKeyValTransform
from yowsup.config.transforms.dict_json import DictJsonTransform
from yowsup.config.v1.serialize import ConfigSerialize
from yowsup.common.tools import StorageTools
import logging
import os
logger = logging.getLogger(__name__)
class ConfigManager(object):
NAME_FILE_CONFIG = "config"
TYPE_KEYVAL = 1
TYPE_JSON = 2
TYPE_NAMES = {
TYPE_KEYVAL: "keyval",
TYPE_JSON: "json"
}
MAP_EXT = {
"yo": TYPE_KEYVAL,
"json": TYPE_JSON,
}
TYPES = {
TYPE_KEYVAL: DictKeyValTransform,
TYPE_JSON: DictJsonTransform
}
def load(self, path_or_profile_name, profile_only=False):
# type: (str, bool) -> Config
"""
        Will first try to interpret path_or_profile_name as a direct path to a
        config file and load from there. If that fails, it will be interpreted
        as a profile name and loaded from the profile dir.
        :param path_or_profile_name: path to a config file, or a profile name
        :param profile_only: only treat the argument as a profile name
        :return: Config instance, or None if no config could be found
"""
logger.debug("load(path_or_profile_name=%s, profile_only=%s)" % (path_or_profile_name, profile_only))
exhausted = []
if not profile_only:
config = self._load_path(path_or_profile_name)
else:
config = None
if config is not None:
return config
else:
logger.debug("path_or_profile_name is not a path, using it as profile name")
if not profile_only:
exhausted.append(path_or_profile_name)
profile_name = path_or_profile_name
config_dir = StorageTools.getStorageForProfile(profile_name)
logger.debug("Detecting config for profile=%s, dir=%s" % (profile_name, config_dir))
for ftype in self.MAP_EXT:
if len(ftype):
fname = (self.NAME_FILE_CONFIG + "." + ftype)
else:
fname = self.NAME_FILE_CONFIG
fpath = os.path.join(config_dir, fname)
logger.debug("Trying %s" % fpath)
if os.path.isfile(fpath):
return self._load_path(fpath)
exhausted.append(fpath)
logger.error("Could not find a config for profile=%s, paths checked: %s" % (profile_name, ":".join(exhausted)))
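    # Illustrative usage (path and profile name are made up):
    #   config = ConfigManager().load("/path/to/config.json")       # direct path
    #   config = ConfigManager().load("sample", profile_only=True)  # stored profile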
def _type_to_str(self, type):
"""
:param type:
:type type: int
:return:
:rtype:
"""
for key, val in self.TYPE_NAMES.items():
if key == type:
return val
def _load_path(self, path):
"""
:param path:
:type path:
:return:
:rtype:
"""
logger.debug("_load_path(path=%s)" % path)
if os.path.isfile(path):
configtype = self.guess_type(path)
logger.debug("Detected config type: %s" % self._type_to_str(configtype))
if configtype in self.TYPES:
logger.debug("Opening config for reading")
with open(path, 'r') as f:
data = f.read()
datadict = self.TYPES[configtype]().reverse(data)
return self.load_data(datadict)
else:
raise ValueError("Unsupported config type")
else:
logger.debug("_load_path couldn't find the path: %s" % path)
def load_data(self, datadict):
logger.debug("Loading config")
return ConfigSerialize(Config).deserialize(datadict)
def guess_type(self, config_path):
dissected = os.path.splitext(config_path)
if len(dissected) > 1:
ext = dissected[1][1:].lower()
config_type = self.MAP_EXT[ext] if ext in self.MAP_EXT else None
else:
config_type = None
if config_type is not None:
return config_type
else:
logger.debug("Trying auto detect config type by parsing")
with open(config_path, 'r') as f:
data = f.read()
for config_type, transform in self.TYPES.items():
config_type_str = self.TYPE_NAMES[config_type]
try:
logger.debug("Trying to parse as %s" % config_type_str)
if transform().reverse(data):
logger.debug("Successfully detected %s as config type for %s" % (config_type_str, config_path))
return config_type
except Exception as ex:
logger.debug("%s was not parseable as %s, reason: %s" % (config_path, config_type_str, ex))
def get_str_transform(self, serialize_type):
if serialize_type in self.TYPES:
return self.TYPES[serialize_type]()
def config_to_str(self, config, serialize_type=TYPE_JSON):
transform = self.get_str_transform(serialize_type)
if transform is not None:
return transform.transform(ConfigSerialize(config.__class__).serialize(config))
raise ValueError("unrecognized serialize_type=%d" % serialize_type)
def save(self, profile_name, config, serialize_type=TYPE_JSON, dest=None):
outputdata = self.config_to_str(config, serialize_type)
if dest is None:
StorageTools.writeProfileConfig(profile_name, outputdata)
else:
with open(dest, 'wb') as outputfile:
outputfile.write(outputdata)
| gpl-3.0 | -4,712,326,739,665,925,000 | 35.768212 | 123 | 0.573307 | false |
skuda/client-python | kubernetes/client/models/v2alpha1_job_template_spec.py | 1 | 4122 | # coding: utf-8
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.6.1
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class V2alpha1JobTemplateSpec(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
def __init__(self, metadata=None, spec=None):
"""
V2alpha1JobTemplateSpec - a model defined in Swagger
:param dict swaggerTypes: The key is attribute name
and the value is attribute type.
:param dict attributeMap: The key is attribute name
and the value is json key in definition.
"""
self.swagger_types = {
'metadata': 'V1ObjectMeta',
'spec': 'V1JobSpec'
}
self.attribute_map = {
'metadata': 'metadata',
'spec': 'spec'
}
self._metadata = metadata
self._spec = spec
@property
def metadata(self):
"""
Gets the metadata of this V2alpha1JobTemplateSpec.
Standard object's metadata of the jobs created from this template. More info: http://releases.k8s.io/HEAD/docs/devel/api-conventions.md#metadata
:return: The metadata of this V2alpha1JobTemplateSpec.
:rtype: V1ObjectMeta
"""
return self._metadata
@metadata.setter
def metadata(self, metadata):
"""
Sets the metadata of this V2alpha1JobTemplateSpec.
Standard object's metadata of the jobs created from this template. More info: http://releases.k8s.io/HEAD/docs/devel/api-conventions.md#metadata
:param metadata: The metadata of this V2alpha1JobTemplateSpec.
:type: V1ObjectMeta
"""
self._metadata = metadata
@property
def spec(self):
"""
Gets the spec of this V2alpha1JobTemplateSpec.
Specification of the desired behavior of the job. More info: http://releases.k8s.io/HEAD/docs/devel/api-conventions.md#spec-and-status
:return: The spec of this V2alpha1JobTemplateSpec.
:rtype: V1JobSpec
"""
return self._spec
@spec.setter
def spec(self, spec):
"""
Sets the spec of this V2alpha1JobTemplateSpec.
Specification of the desired behavior of the job. More info: http://releases.k8s.io/HEAD/docs/devel/api-conventions.md#spec-and-status
:param spec: The spec of this V2alpha1JobTemplateSpec.
:type: V1JobSpec
"""
self._spec = spec
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""
Returns the string representation of the model
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
return self.to_str()
def __eq__(self, other):
"""
Returns true if both objects are equal
"""
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""
Returns true if both objects are not equal
"""
return not self == other
| apache-2.0 | -3,516,323,169,450,627,600 | 28.442857 | 152 | 0.566715 | false |
rgayon/plaso | plaso/parsers/chrome_cache.py | 1 | 16321 | # -*- coding: utf-8 -*-
"""Parser for Google Chrome and Chromium Cache files."""
from __future__ import unicode_literals
import os
from dfdatetime import webkit_time as dfdatetime_webkit_time
from dfvfs.resolver import resolver as path_spec_resolver
from dfvfs.path import factory as path_spec_factory
from plaso.containers import events
from plaso.containers import time_events
from plaso.lib import errors
from plaso.lib import definitions
from plaso.parsers import dtfabric_parser
from plaso.parsers import interface
from plaso.parsers import manager
class CacheAddress(object):
"""Chrome cache address.
Attributes:
block_number (int): block data file number.
block_offset (int): offset within the block data file.
block_size (int): block size.
filename (str): name of the block data file.
value (int): cache address.
"""
FILE_TYPE_SEPARATE = 0
FILE_TYPE_BLOCK_RANKINGS = 1
FILE_TYPE_BLOCK_256 = 2
FILE_TYPE_BLOCK_1024 = 3
FILE_TYPE_BLOCK_4096 = 4
_BLOCK_DATA_FILE_TYPES = [
FILE_TYPE_BLOCK_RANKINGS,
FILE_TYPE_BLOCK_256,
FILE_TYPE_BLOCK_1024,
FILE_TYPE_BLOCK_4096]
_FILE_TYPE_BLOCK_SIZES = [0, 36, 256, 1024, 4096]
def __init__(self, cache_address):
"""Initializes a cache address.
Args:
cache_address (int): cache address.
"""
super(CacheAddress, self).__init__()
self.block_number = None
self.block_offset = None
self.block_size = None
self.filename = None
self.value = cache_address
if cache_address & 0x80000000:
self.is_initialized = 'True'
else:
self.is_initialized = 'False'
self.file_type = (cache_address & 0x70000000) >> 28
if not cache_address == 0x00000000:
if self.file_type == self.FILE_TYPE_SEPARATE:
file_selector = cache_address & 0x0fffffff
self.filename = 'f_{0:06x}'.format(file_selector)
elif self.file_type in self._BLOCK_DATA_FILE_TYPES:
file_selector = (cache_address & 0x00ff0000) >> 16
self.filename = 'data_{0:d}'.format(file_selector)
file_block_size = self._FILE_TYPE_BLOCK_SIZES[self.file_type]
self.block_number = cache_address & 0x0000ffff
self.block_size = (cache_address & 0x03000000) >> 24
self.block_size *= file_block_size
self.block_offset = 8192 + (self.block_number * file_block_size)
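  # Illustrative example (hypothetical value): CacheAddress(0xa1010003) is an
  # initialized address with file type 2 (block 256), file selector 1
  # ('data_1'), block number 3, block size 1 * 256 = 256 and block offset
  # 8192 + 3 * 256 = 8960.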
class CacheEntry(object):
"""Chrome cache entry.
Attributes:
    creation_time (int): creation time, in number of microseconds since
      January 1, 1601, 00:00:00 UTC.
hash (int): super fast hash of the key.
key (bytes): key.
next (int): cache address of the next cache entry.
original_url (str): original URL derived from the key.
rankings_node (int): cache address of the rankings node.
"""
def __init__(self):
"""Initializes a cache entry."""
super(CacheEntry, self).__init__()
self.creation_time = None
self.hash = None
self.key = None
self.next = None
self.original_url = None
self.rankings_node = None
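  # Note: cache entries form a singly linked chain through the 'next' cache
  # address; ChromeCacheParser._ParseCacheEntries below follows that chain and
  # stops after 64 links as a guard against corrupted cache files.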
class ChromeCacheIndexFileParser(dtfabric_parser.DtFabricBaseParser):
"""Chrome cache index file parser.
Attributes:
    creation_time (int): creation time, in number of microseconds
since January 1, 1601, 00:00:00 UTC.
index_table (list[CacheAddress]): the cache addresses which are stored in
the index file.
"""
_DEFINITION_FILE = 'chrome_cache.yaml'
def __init__(self):
"""Initializes an index file."""
super(ChromeCacheIndexFileParser, self).__init__()
self.creation_time = None
self.index_table = []
def _ParseFileHeader(self, file_object):
"""Parses the file header.
Args:
file_object (dfvfs.FileIO): a file-like object to parse.
Raises:
ParseError: if the file header cannot be read.
"""
file_header_map = self._GetDataTypeMap('chrome_cache_index_file_header')
try:
file_header, _ = self._ReadStructureFromFileObject(
file_object, 0, file_header_map)
except (ValueError, errors.ParseError) as exception:
raise errors.ParseError(
'Unable to parse index file header with error: {0!s}'.format(
exception))
format_version = '{0:d}.{1:d}'.format(
file_header.major_version, file_header.minor_version)
if format_version not in ('2.0', '2.1'):
raise errors.ParseError(
'Unsupported index file format version: {0:s}'.format(format_version))
self.creation_time = file_header.creation_time
def _ParseIndexTable(self, file_object):
"""Parses the index table.
Args:
file_object (dfvfs.FileIO): a file-like object to parse.
Raises:
ParseError: if the index table cannot be read.
"""
cache_address_map = self._GetDataTypeMap('uint32le')
file_offset = file_object.get_offset()
cache_address_data = file_object.read(4)
while len(cache_address_data) == 4:
try:
value = self._ReadStructureFromByteStream(
cache_address_data, file_offset, cache_address_map)
except (ValueError, errors.ParseError) as exception:
raise errors.ParseError((
'Unable to map cache address at offset: 0x{0:08x} with error: '
'{1!s}').format(file_offset, exception))
if value:
cache_address = CacheAddress(value)
self.index_table.append(cache_address)
file_offset += 4
cache_address_data = file_object.read(4)
def ParseFileObject(self, parser_mediator, file_object):
"""Parses a file-like object.
Args:
parser_mediator (ParserMediator): a parser mediator.
file_object (dfvfs.FileIO): a file-like object to parse.
Raises:
ParseError: when the file cannot be parsed.
"""
try:
self._ParseFileHeader(file_object)
except errors.ParseError as exception:
raise errors.ParseError(
'Unable to parse index file header with error: {0!s}'.format(
exception))
# Skip over the LRU data, which is 112 bytes in size.
file_object.seek(112, os.SEEK_CUR)
self._ParseIndexTable(file_object)
class ChromeCacheDataBlockFileParser(dtfabric_parser.DtFabricBaseParser):
"""Chrome cache data block file parser."""
_DEFINITION_FILE = 'chrome_cache.yaml'
def _ParseFileHeader(self, file_object):
"""Parses the file header.
Args:
file_object (dfvfs.FileIO): a file-like object to parse.
Raises:
ParseError: if the file header cannot be read.
"""
file_header_map = self._GetDataTypeMap(
'chrome_cache_data_block_file_header')
try:
file_header, _ = self._ReadStructureFromFileObject(
file_object, 0, file_header_map)
except (ValueError, errors.ParseError) as exception:
raise errors.ParseError(
'Unable to parse data block file header with error: {0!s}'.format(
exception))
format_version = '{0:d}.{1:d}'.format(
file_header.major_version, file_header.minor_version)
if format_version not in ('2.0', '2.1'):
raise errors.ParseError(
'Unsupported data block file format version: {0:s}'.format(
format_version))
if file_header.block_size not in (256, 1024, 4096):
raise errors.ParseError(
'Unsupported data block file block size: {0:d}'.format(
file_header.block_size))
def ParseCacheEntry(self, file_object, block_offset):
"""Parses a cache entry.
Args:
file_object (dfvfs.FileIO): a file-like object to read from.
block_offset (int): block offset of the cache entry.
Returns:
CacheEntry: cache entry.
Raises:
ParseError: if the cache entry cannot be read.
"""
cache_entry_map = self._GetDataTypeMap('chrome_cache_entry')
try:
cache_entry, _ = self._ReadStructureFromFileObject(
file_object, block_offset, cache_entry_map)
except (ValueError, errors.ParseError) as exception:
raise errors.ParseError((
'Unable to parse cache entry at offset: 0x{0:08x} with error: '
'{1!s}').format(block_offset, exception))
cache_entry_object = CacheEntry()
cache_entry_object.hash = cache_entry.hash
cache_entry_object.next = CacheAddress(cache_entry.next_address)
cache_entry_object.rankings_node = CacheAddress(
cache_entry.rankings_node_address)
cache_entry_object.creation_time = cache_entry.creation_time
byte_array = cache_entry.key
byte_string = bytes(bytearray(byte_array))
cache_entry_object.key, _, _ = byte_string.partition(b'\x00')
try:
cache_entry_object.original_url = cache_entry_object.key.decode('ascii')
except UnicodeDecodeError as exception:
raise errors.ParseError(
'Unable to decode original URL in key with error: {0!s}'.format(
exception))
return cache_entry_object
# pylint: disable=unused-argument
def ParseFileObject(self, parser_mediator, file_object):
"""Parses a file-like object.
Args:
parser_mediator (ParserMediator): a parser mediator.
file_object (dfvfs.FileIO): a file-like object to parse.
Raises:
ParseError: when the file cannot be parsed.
"""
self._ParseFileHeader(file_object)
class ChromeCacheEntryEventData(events.EventData):
"""Chrome Cache event data.
Attributes:
original_url (str): original URL.
"""
DATA_TYPE = 'chrome:cache:entry'
def __init__(self):
"""Initializes event data."""
super(ChromeCacheEntryEventData, self).__init__(data_type=self.DATA_TYPE)
self.original_url = None
class ChromeCacheParser(interface.FileEntryParser):
"""Parses Chrome Cache files."""
NAME = 'chrome_cache'
DATA_FORMAT = 'Google Chrome or Chromium Cache file'
def __init__(self):
"""Initializes a Chrome Cache files parser."""
super(ChromeCacheParser, self).__init__()
self._data_block_file_parser = ChromeCacheDataBlockFileParser()
def _ParseCacheEntries(self, parser_mediator, index_table, data_block_files):
"""Parses Chrome Cache file entries.
Args:
parser_mediator (ParserMediator): mediates interactions between parsers
and other components, such as storage and dfvfs.
index_table (list[CacheAddress]): the cache addresses which are stored in
the index file.
data_block_files (dict[str: file]): look up table for the data block
file-like object handles.
"""
# Parse the cache entries in the data block files.
for cache_address in index_table:
cache_address_chain_length = 0
while cache_address.value != 0:
if cache_address_chain_length >= 64:
parser_mediator.ProduceExtractionWarning(
'Maximum allowed cache address chain length reached.')
break
data_block_file_object = data_block_files.get(
cache_address.filename, None)
if not data_block_file_object:
message = 'Cache address: 0x{0:08x} missing data file.'.format(
cache_address.value)
parser_mediator.ProduceExtractionWarning(message)
break
try:
cache_entry = self._data_block_file_parser.ParseCacheEntry(
data_block_file_object, cache_address.block_offset)
except (IOError, errors.ParseError) as exception:
parser_mediator.ProduceExtractionWarning(
'Unable to parse cache entry with error: {0!s}'.format(
exception))
break
event_data = ChromeCacheEntryEventData()
event_data.original_url = cache_entry.original_url
date_time = dfdatetime_webkit_time.WebKitTime(
timestamp=cache_entry.creation_time)
event = time_events.DateTimeValuesEvent(
date_time, definitions.TIME_DESCRIPTION_LAST_VISITED)
parser_mediator.ProduceEventWithEventData(event, event_data)
cache_address = cache_entry.next
cache_address_chain_length += 1
def _ParseIndexTable(
self, parser_mediator, file_system, file_entry, index_table):
"""Parses a Chrome Cache index table.
Args:
parser_mediator (ParserMediator): mediates interactions between parsers
and other components, such as storage and dfvfs.
file_system (dfvfs.FileSystem): file system.
file_entry (dfvfs.FileEntry): file entry.
index_table (list[CacheAddress]): the cache addresses which are stored in
the index file.
"""
# Build a lookup table for the data block files.
path_segments = file_system.SplitPath(file_entry.path_spec.location)
data_block_files = {}
for cache_address in index_table:
if cache_address.filename not in data_block_files:
# Remove the previous filename from the path segments list and
# add one of the data block files.
path_segments.pop()
path_segments.append(cache_address.filename)
# We need to pass only used arguments to the path specification
# factory otherwise it will raise.
kwargs = {}
if file_entry.path_spec.parent:
kwargs['parent'] = file_entry.path_spec.parent
kwargs['location'] = file_system.JoinPath(path_segments)
data_block_file_path_spec = path_spec_factory.Factory.NewPathSpec(
file_entry.path_spec.TYPE_INDICATOR, **kwargs)
try:
data_block_file_entry = path_spec_resolver.Resolver.OpenFileEntry(
data_block_file_path_spec)
except RuntimeError as exception:
message = (
'Unable to open data block file: {0:s} with error: '
'{1!s}'.format(kwargs['location'], exception))
parser_mediator.ProduceExtractionWarning(message)
data_block_file_entry = None
if not data_block_file_entry:
message = 'Missing data block file: {0:s}'.format(
cache_address.filename)
parser_mediator.ProduceExtractionWarning(message)
data_block_file_object = None
else:
data_block_file_object = data_block_file_entry.GetFileObject()
try:
self._data_block_file_parser.ParseFileObject(
parser_mediator, data_block_file_object)
except (IOError, errors.ParseError) as exception:
message = (
'Unable to parse data block file: {0:s} with error: '
'{1!s}').format(cache_address.filename, exception)
parser_mediator.ProduceExtractionWarning(message)
data_block_file_object.close()
data_block_file_object = None
data_block_files[cache_address.filename] = data_block_file_object
try:
self._ParseCacheEntries(
parser_mediator, index_table, data_block_files)
finally:
for data_block_file_object in data_block_files.values():
if data_block_file_object:
data_block_file_object.close()
def ParseFileEntry(self, parser_mediator, file_entry):
"""Parses Chrome Cache files.
Args:
parser_mediator (ParserMediator): mediates interactions between parsers
and other components, such as storage and dfvfs.
file_entry (dfvfs.FileEntry): file entry.
Raises:
UnableToParseFile: when the file cannot be parsed.
"""
index_file_parser = ChromeCacheIndexFileParser()
file_object = file_entry.GetFileObject()
if not file_object:
display_name = parser_mediator.GetDisplayName()
raise errors.UnableToParseFile(
'[{0:s}] unable to parse index file {1:s}'.format(
self.NAME, display_name))
try:
index_file_parser.ParseFileObject(parser_mediator, file_object)
except (IOError, errors.ParseError) as exception:
file_object.close()
display_name = parser_mediator.GetDisplayName()
raise errors.UnableToParseFile(
'[{0:s}] unable to parse index file {1:s} with error: {2!s}'.format(
self.NAME, display_name, exception))
# TODO: create event based on index file creation time.
try:
file_system = file_entry.GetFileSystem()
self._ParseIndexTable(
parser_mediator, file_system, file_entry,
index_file_parser.index_table)
finally:
file_object.close()
manager.ParsersManager.RegisterParser(ChromeCacheParser)
| apache-2.0 | 8,817,148,244,868,467,000 | 32.651546 | 80 | 0.656026 | false |
mozillazg/chendian-plus | chendian/qq/migrations/0005_auto_20150308_1347.py | 1 | 1558 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import django.utils.timezone
class Migration(migrations.Migration):
dependencies = [
('qq', '0004_rawmessage_raw_item'),
]
operations = [
migrations.CreateModel(
name='UploadRecord',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('status', models.SmallIntegerField(default=1, choices=[(1, '\u5904\u7406\u4e2d'), (2, '\u5b8c\u6210'), (3, '\u9519\u8bef')])),
('count', models.IntegerField(default=0)),
('created_at', models.DateTimeField(default=django.utils.timezone.now)),
('update_at', models.DateTimeField(default=django.utils.timezone.now)),
],
options={
'verbose_name': '\u4e0a\u4f20\u8bb0\u5f55',
'verbose_name_plural': '\u4e0a\u4f20\u8bb0\u5f55',
},
bases=(models.Model,),
),
migrations.AlterField(
model_name='checkinrecord',
name='book_name',
field=models.CharField(max_length=100, verbose_name='\u4e66\u540d', blank=True),
preserve_default=True,
),
migrations.AlterField(
model_name='checkinrecord',
name='think',
field=models.TextField(default='', verbose_name='\u8bfb\u540e\u611f', blank=True),
preserve_default=True,
),
]
| mit | 2,021,768,505,801,848,000 | 36.095238 | 143 | 0.563543 | false |
joelagnel/lisa | libs/wlgen/wlgen/rta.py | 1 | 28387 | # SPDX-License-Identifier: Apache-2.0
#
# Copyright (C) 2015, ARM Limited and contributors.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import fileinput
import json
import os
import re
from collections import namedtuple
from wlgen import Workload
from devlib.utils.misc import ranges_to_list
import logging
_Phase = namedtuple('Phase', 'duration_s, period_ms, duty_cycle_pct')
class Phase(_Phase):
"""
Descriptor for an RT-App load phase
:param duration_s: the phase duration in [s].
:type duration_s: int
:param period_ms: the phase period in [ms].
:type period_ms: int
:param duty_cycle_pct: the generated load in [%].
:type duty_cycle_pct: int
"""
pass
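    # Illustrative example (values are made up): Phase(duration_s=1,
    # period_ms=100, duty_cycle_pct=30) describes one second in which every
    # 100 ms period contains 30 ms of run time followed by 70 ms of sleep.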
class RTA(Workload):
"""
Class for creating RT-App workloads
"""
def __init__(self,
target,
name,
calibration=None):
"""
:param target: Devlib target to run workload on.
:param name: Human-readable name for the workload.
:param calibration: CPU calibration specification. Can be obtained from
:meth:`calibrate`.
"""
# Setup logging
self._log = logging.getLogger('RTApp')
# rt-app calibration
self.pload = calibration
# TODO: Assume rt-app is pre-installed on target
# self.target.setup('rt-app')
super(RTA, self).__init__(target, name)
# rt-app executor
self.wtype = 'rtapp'
self.executor = 'rt-app'
# Default initialization
self.json = None
self.rta_profile = None
self.loadref = None
self.rta_cmd = None
self.rta_conf = None
self.test_label = None
# Setup RTA callbacks
self.setCallback('postrun', self.__postrun)
@staticmethod
def calibrate(target):
"""
Calibrate RT-App on each CPU in the system
:param target: Devlib target to run calibration on.
:returns: Dict mapping CPU numbers to RT-App calibration values.
"""
pload_regexp = re.compile(r'pLoad = ([0-9]+)ns')
pload = {}
# Setup logging
log = logging.getLogger('RTApp')
# Save previous governors
cpus = target.list_online_cpus()
old_governors = {}
for cpu in cpus:
domain = tuple(target.cpufreq.get_domain_cpus(cpu))
if domain not in old_governors:
old_governors[domain] = target.cpufreq.get_governor(cpu)
target.cpufreq.set_all_governors('performance')
for cpu in target.list_online_cpus():
log.info('CPU%d calibration...', cpu)
max_rtprio = int(target.execute('ulimit -Hr').split('\r')[0])
log.debug('Max RT prio: %d', max_rtprio)
if max_rtprio > 10:
max_rtprio = 10
rta = RTA(target, 'rta_calib')
rta.conf(kind='profile',
params = {
'task1': Periodic(
period_ms=100,
duty_cycle_pct=50,
duration_s=1,
sched={
'policy': 'FIFO',
'prio' : max_rtprio
}
).get()
},
cpus=[cpu])
rta.run(as_root=True)
for line in rta.getOutput().split('\n'):
pload_match = re.search(pload_regexp, line)
if pload_match is None:
continue
pload[cpu] = int(pload_match.group(1))
log.debug('>>> cpu%d: %d', cpu, pload[cpu])
# Restore previous governors
# Setting a governor for a cpu will set it for all cpus in the same
# clock domain, so only restoring the governor of one cpu per domain
# is enough to restore all of the previous governors
for domain, governor in old_governors.iteritems():
target.cpufreq.set_governor(domain[0], governor)
log.info('Target RT-App calibration:')
log.info("{" + ", ".join('"%r": %r' % (key, pload[key])
for key in pload) + "}")
# Sanity check calibration values for big.LITTLE systems
if 'bl' in target.modules:
bcpu = target.bl.bigs_online[0]
lcpu = target.bl.littles_online[0]
if pload[bcpu] > pload[lcpu]:
                log.warning('Calibration values report big cores as less '
'capable than LITTLE cores')
raise RuntimeError('Calibration failed: try again or file a bug')
bigs_speedup = ((float(pload[lcpu]) / pload[bcpu]) - 1) * 100
log.info('big cores are ~%.0f%% more capable than LITTLE cores',
bigs_speedup)
return pload
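    # Illustrative usage (returned values are hypothetical):
    #   calib = RTA.calibrate(target)   # e.g. {0: 360000, 1: 360000, 2: 138000, 3: 138000}
    #   wl = RTA(target, 'wl_calibrated', calibration=calib)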
def __postrun(self, params):
destdir = params['destdir']
if destdir is None:
return
self._log.debug('Pulling logfiles to [%s]...', destdir)
for task in self.tasks.keys():
logfile = self.target.path.join(self.run_dir,
'*{}*.log'.format(task))
self.target.pull(logfile, destdir)
self._log.debug('Pulling JSON to [%s]...', destdir)
self.target.pull(self.target.path.join(self.run_dir, self.json),
destdir)
logfile = self.target.path.join(destdir, 'output.log')
self._log.debug('Saving output on [%s]...', logfile)
with open(logfile, 'w') as ofile:
for line in self.output['executor'].split('\n'):
ofile.write(line+'\n')
def _getFirstBiggest(self, cpus):
# Non big.LITTLE system:
if 'bl' not in self.target.modules:
# return the first CPU of the last cluster
platform = self.target.platform
cluster_last = list(set(platform.core_clusters))[-1]
cluster_cpus = [cpu_id
for cpu_id, cluster_id in enumerate(platform.core_clusters)
if cluster_id == cluster_last]
            # If CPUs have been specified: return the first in the last cluster
if cpus:
for cpu_id in cpus:
if cpu_id in cluster_cpus:
return cpu_id
# Otherwise just return the first cpu of the last cluster
return cluster_cpus[0]
# big.LITTLE system:
for c in cpus:
if c not in self.target.bl.bigs:
continue
return c
# Only LITTLE CPUs, thus:
# return the first possible cpu
return cpus[0]
def _getFirstBig(self, cpus=None):
# Non big.LITTLE system:
if 'bl' not in self.target.modules:
return self._getFirstBiggest(cpus)
if cpus:
for c in cpus:
if c not in self.target.bl.bigs:
continue
return c
# Only LITTLE CPUs, thus:
# return the first big core of the system
if self.target.big_core:
# Big.LITTLE system
return self.target.bl.bigs[0]
return 0
def _getFirstLittle(self, cpus=None):
# Non big.LITTLE system:
if 'bl' not in self.target.modules:
# return the first CPU of the first cluster
platform = self.target.platform
cluster_first = list(set(platform.core_clusters))[0]
cluster_cpus = [cpu_id
for cpu_id, cluster_id in enumerate(platform.core_clusters)
if cluster_id == cluster_first]
            # If CPUs have been specified: return the first in the first cluster
if cpus:
for cpu_id in cpus:
if cpu_id in cluster_cpus:
return cpu_id
# Otherwise just return the first cpu of the first cluster
return cluster_cpus[0]
# Try to return one LITTLE CPUs among the specified ones
if cpus:
for c in cpus:
if c not in self.target.bl.littles:
continue
return c
# Only big CPUs, thus:
# return the first LITTLE core of the system
if self.target.little_core:
# Big.LITTLE system
return self.target.bl.littles[0]
return 0
def getTargetCpu(self, loadref):
# Select CPU for task calibration, which is the first little
# of big depending on the loadref tag
if self.pload is not None:
if loadref and loadref.upper() == 'LITTLE':
target_cpu = self._getFirstLittle()
self._log.debug('ref on LITTLE cpu: %d', target_cpu)
else:
target_cpu = self._getFirstBig()
self._log.debug('ref on big cpu: %d', target_cpu)
return target_cpu
# These options are selected only when RTApp has not been
# already calibrated
if self.cpus is None:
target_cpu = self._getFirstBig()
self._log.debug('ref on cpu: %d', target_cpu)
else:
target_cpu = self._getFirstBiggest(self.cpus)
self._log.debug('ref on (possible) biggest cpu: %d', target_cpu)
return target_cpu
def getCalibrationConf(self, target_cpu=0):
if self.pload is None:
return 'CPU{0:d}'.format(target_cpu)
return self.pload[target_cpu]
def _confCustom(self):
rtapp_conf = self.params['custom']
# Sanity check params being a valid file path
if not isinstance(rtapp_conf, str) or \
not os.path.isfile(rtapp_conf):
self._log.debug('Checking for %s', rtapp_conf)
raise ValueError('value specified for \'params\' is not '
'a valid rt-app JSON configuration file')
if self.duration is None:
raise ValueError('Workload duration not specified')
target_cpu = self.getTargetCpu(self.loadref)
calibration = self.getCalibrationConf(target_cpu)
self._log.info('Loading custom configuration:')
self._log.info(' %s', rtapp_conf)
self.json = '{0:s}_{1:02d}.json'.format(self.name, self.exc_id)
ofile = open(self.json, 'w')
ifile = open(rtapp_conf, 'r')
replacements = {
'__DURATION__' : str(self.duration),
'__PVALUE__' : str(calibration),
'__LOGDIR__' : str(self.run_dir),
'__WORKDIR__' : '"'+self.target.working_directory+'"',
}
for line in ifile:
for src, target in replacements.iteritems():
line = line.replace(src, target)
ofile.write(line)
ifile.close()
ofile.close()
with open(self.json) as f:
conf = json.load(f)
for tid in conf['tasks']:
self.tasks[tid] = {'pid': -1}
return self.json
def _confProfile(self):
# Sanity check for task names
for task in self.params.keys():
if len(task) > 15:
# rt-app uses pthread_setname_np(3) which limits the task name
# to 16 characters including the terminal '\0'.
msg = ('Task name "{}" too long, please configure your tasks '
'with names shorter than 16 characters').format(task)
raise ValueError(msg)
# Task configuration
target_cpu = self.getTargetCpu(self.loadref)
self.rta_profile = {
'tasks': {},
'global': {}
}
# Initialize global configuration
global_conf = {
'default_policy': 'SCHED_OTHER',
'duration': -1,
'calibration': 'CPU'+str(target_cpu),
'logdir': self.run_dir,
}
# Setup calibration data
calibration = self.getCalibrationConf(target_cpu)
global_conf['calibration'] = calibration
if self.duration is not None:
global_conf['duration'] = self.duration
self._log.warn('Limiting workload duration to %d [s]',
global_conf['duration'])
else:
self._log.info('Workload duration defined by longest task')
# Setup default scheduling class
if 'policy' in self.sched:
policy = self.sched['policy'].upper()
if policy not in ['OTHER', 'FIFO', 'RR', 'DEADLINE']:
raise ValueError('scheduling class {} not supported'\
.format(policy))
global_conf['default_policy'] = 'SCHED_' + self.sched['policy']
self._log.info('Default policy: %s', global_conf['default_policy'])
# Setup global configuration
self.rta_profile['global'] = global_conf
# Setup tasks parameters
for tid in sorted(self.params['profile'].keys()):
task = self.params['profile'][tid]
# Initialize task configuration
task_conf = {}
if 'sched' not in task:
policy = 'DEFAULT'
else:
policy = task['sched']['policy'].upper()
if policy == 'DEFAULT':
task_conf['policy'] = global_conf['default_policy']
sched_descr = 'sched: using default policy'
elif policy not in ['OTHER', 'FIFO', 'RR', 'DEADLINE']:
raise ValueError('scheduling class {} not supported'\
                                 .format(policy))
else:
task_conf.update(task['sched'])
task_conf['policy'] = 'SCHED_' + policy
sched_descr = 'sched: {0:s}'.format(task['sched'])
# Initialize task phases
task_conf['phases'] = {}
self._log.info('------------------------')
self._log.info('task [%s], %s', tid, sched_descr)
if 'delay' in task.keys():
if task['delay'] > 0:
task_conf['phases']['p000000'] = {}
task_conf['phases']['p000000']['delay'] = int(task['delay'] * 1e6)
self._log.info(' | start delay: %.6f [s]',
task['delay'])
self._log.info(' | calibration CPU: %d', target_cpu)
if 'loops' not in task.keys():
task['loops'] = 1
task_conf['loop'] = task['loops']
self._log.info(' | loops count: %d', task['loops'])
# Setup task affinity
if 'cpus' in task and task['cpus']:
self._log.info(' | CPUs affinity: %s', task['cpus'])
if isinstance(task['cpus'], str):
task_conf['cpus'] = ranges_to_list(task['cpus'])
elif isinstance(task['cpus'], list):
task_conf['cpus'] = task['cpus']
else:
raise ValueError('cpus must be a list or string')
# Setup task configuration
self.rta_profile['tasks'][tid] = task_conf
# Getting task phase descriptor
pid=1
for phase in task['phases']:
# Convert time parameters to integer [us] units
duration = int(phase.duration_s * 1e6)
period = int(phase.period_ms * 1e3)
# A duty-cycle of 0[%] translates on a 'sleep' phase
if phase.duty_cycle_pct == 0:
self._log.info(' + phase_%06d: sleep %.6f [s]',
pid, duration/1e6)
task_phase = {
'loop': 1,
'sleep': duration,
}
# A duty-cycle of 100[%] translates on a 'run-only' phase
elif phase.duty_cycle_pct == 100:
self._log.info(' + phase_%06d: batch %.6f [s]',
pid, duration/1e6)
task_phase = {
'loop': 1,
'run': duration,
}
# A certain number of loops is requires to generate the
# proper load
else:
cloops = -1
if duration >= 0:
cloops = int(duration / period)
sleep_time = period * (100 - phase.duty_cycle_pct) / 100
running_time = period - sleep_time
self._log.info('+ phase_%06d: duration %.6f [s] (%d loops)',
pid, duration/1e6, cloops)
self._log.info('| period %6d [us], duty_cycle %3d %%',
period, phase.duty_cycle_pct)
self._log.info('| run_time %6d [us], sleep_time %6d [us]',
running_time, sleep_time)
task_phase = {
'loop': cloops,
'run': running_time,
'timer': {'ref': tid, 'period': period},
}
self.rta_profile['tasks'][tid]['phases']\
['p'+str(pid).zfill(6)] = task_phase
pid+=1
# Append task name to the list of this workload tasks
self.tasks[tid] = {'pid': -1}
# Generate JSON configuration on local file
self.json = '{0:s}_{1:02d}.json'.format(self.name, self.exc_id)
with open(self.json, 'w') as outfile:
json.dump(self.rta_profile, outfile,
sort_keys=True, indent=4, separators=(',', ': '))
return self.json
def conf(self,
kind,
params,
duration=None,
cpus=None,
sched=None,
run_dir=None,
exc_id=0,
loadref='big'):
"""
Configure a workload of a specified kind.
The rt-app based workload allows defining different classes of
workloads. The classes supported so far are detailed hereafter.
Custom workloads
When 'kind' is 'custom' the tasks generated by this workload are the
ones defined in a provided rt-app JSON configuration file.
In this case the 'params' parameter must be used to specify the
complete path of the rt-app JSON configuration file to use.
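For example (the path below is purely illustrative)::
wl = RTA(...)
wl.conf(kind='custom', params='/path/to/my_tasks.json')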
Profile based workloads
When ``kind`` is "profile", ``params`` is a dictionary mapping task
names to task specifications. The easiest way to create these task
specifications is to use :meth:`RTATask.get`.
For example, the following configures an RTA workload with a single
task, named 't1', using the default parameters for a Periodic RTATask:
::
wl = RTA(...)
wl.conf(kind='profile', params={'t1': Periodic().get()})
:param kind: Either 'custom' or 'profile' - see above.
:param params: RT-App parameters - see above.
:param duration: Maximum duration of the workload in seconds. Any
remaining tasks are killed by rt-app when this time has
elapsed.
:param cpus: CPUs to restrict this workload to, using ``taskset``.
:type cpus: list(int)
:param sched: Global RT-App scheduler configuration. Dict with fields:
policy
The default scheduler policy. Choose from 'OTHER', 'FIFO', 'RR',
and 'DEADLINE'.
:param run_dir: Target dir to store output and config files in.
.. TODO: document or remove loadref
"""
if not sched:
sched = {'policy' : 'OTHER'}
super(RTA, self).conf(kind, params, duration,
cpus, sched, run_dir, exc_id)
self.loadref = loadref
# Setup class-specific configuration
if kind == 'custom':
self._confCustom()
elif kind == 'profile':
self._confProfile()
# Move configuration file to target
self.target.push(self.json, self.run_dir)
self.rta_cmd = self.target.executables_directory + '/rt-app'
self.rta_conf = self.run_dir + '/' + self.json
self.command = '{0:s} {1:s} 2>&1'.format(self.rta_cmd, self.rta_conf)
# Set and return the test label
self.test_label = '{0:s}_{1:02d}'.format(self.name, self.exc_id)
return self.test_label
class RTATask(object):
"""
Base class for conveniently constructing params to :meth:`RTA.conf`
This class represents an RT-App task which may contain multiple phases. It
implements ``__add__`` so that using ``+`` on two tasks concatenates their
phases. For example ``Ramp() + Periodic()`` would yield an ``RTATask`` that
executes the default phases for ``Ramp`` followed by the default phases for
``Periodic``.
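For example, assuming a workload object ``wl`` as in :meth:`RTA.conf`, two
tasks can be combined and used like this (illustrative)::
profile = Ramp() + Periodic()
wl.conf(kind='profile', params={'t1': profile.get()})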
"""
def __init__(self):
self._task = {}
def get(self):
"""
Return a dict that can be passed as an element of the ``params`` field
to :meth:`RTA.conf`.
"""
return self._task
def __add__(self, next_phases):
self._task['phases'].extend(next_phases._task['phases'])
return self
class Ramp(RTATask):
"""
Configure a ramp load.
This class defines a task whose load is a ramp with a configured number
of steps according to the input parameters.
:param start_pct: the initial load percentage.
:param end_pct: the final load percentage.
:param delta_pct: the load increase/decrease at each step, in percentage
points.
:param time_s: the duration in seconds of each load step.
:param period_ms: the period used to define the load in [ms].
:param delay_s: the delay in seconds before ramp start.
:param loops: number of times to repeat the ramp, with the specified delay in
between.
:param sched: the scheduler configuration for this task.
:type sched: dict
:param cpus: the list of CPUs on which the task can run.
:type cpus: list(int)
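For example, a ramp going from 10% to 60% load in 10% steps of 1 second
each could be described as (values are illustrative)::
Ramp(start_pct=10, end_pct=60, delta_pct=10, time_s=1)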
"""
def __init__(self, start_pct=0, end_pct=100, delta_pct=10, time_s=1,
period_ms=100, delay_s=0, loops=1, sched=None, cpus=None):
super(Ramp, self).__init__()
self._task['cpus'] = cpus
if not sched:
sched = {'policy' : 'DEFAULT'}
self._task['sched'] = sched
self._task['delay'] = delay_s
self._task['loops'] = loops
if start_pct not in range(0,101) or end_pct not in range(0,101):
raise ValueError('start_pct and end_pct must be in [0..100] range')
if start_pct >= end_pct:
if delta_pct > 0:
delta_pct = -delta_pct
delta_adj = -1
if start_pct <= end_pct:
if delta_pct < 0:
delta_pct = -delta_pct
delta_adj = +1
phases = []
steps = range(start_pct, end_pct+delta_adj, delta_pct)
for load in steps:
if load == 0:
phase = Phase(time_s, 0, 0)
else:
phase = Phase(time_s, period_ms, load)
phases.append(phase)
self._task['phases'] = phases
class Step(Ramp):
"""
Configure a step load.
This class defines a task whose load is a step with a configured initial and
final load. Using the ``loops`` param, this can be used to create a workload
that alternates between two load values.
:param start_pct: the initial load percentage.
:param end_pct: the final load percentage.
:param time_s: the duration in seconds of each load step.
:param period_ms: the period used to define the load in [ms].
:param delay_s: the delay in seconds before ramp start.
:param loops: number of times to repeat the step, with the specified delay in
between.
:param sched: the scheduler configuration for this task.
:type sched: dict
:param cpus: the list of CPUs on which the task can run.
:type cpus: list(int)
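For example, a task alternating between idle and full load every second,
repeated five times, could be described as (values are illustrative)::
Step(start_pct=0, end_pct=100, time_s=1, loops=5)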
"""
def __init__(self, start_pct=0, end_pct=100, time_s=1, period_ms=100,
delay_s=0, loops=1, sched=None, cpus=None):
delta_pct = abs(end_pct - start_pct)
super(Step, self).__init__(start_pct, end_pct, delta_pct, time_s,
period_ms, delay_s, loops, sched, cpus)
class Pulse(RTATask):
"""
Configure a pulse load.
This class defines a task whose load is a pulse with a configured
initial and final load.
The main difference from the 'step' class is that a pulse workload is
by definition a 'step down', i.e. the workload switches from an initial
load to a final one which is always lower than the initial one.
Moreover, a pulse load does not generate a sleep phase in case of 0[%]
load, i.e. the task ends as soon as the non-null initial load has
completed.
:param start_pct: the initial load percentage.
:param end_pct: the final load percentage. Must be lower than ``start_pct``
value. If end_pct is 0, the task ends after the ``start_pct``
period has completed.
:param time_s: the duration in seconds of each load step.
:param period_ms: the period used to define the load in [ms].
:param delay_s: the delay in seconds before ramp start.
:param loops: number of times to repeat the pulse, with the specified delay
in between.
:param sched: the scheduler configuration for this task.
:type sched: dict
:param cpus: the list of CPUs on which the task can run.
:type cpus: list(int)
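For example, a pulse stepping down from 80% to 20% load, 1 second each,
could be described as (values are illustrative)::
Pulse(start_pct=80, end_pct=20, time_s=1)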
"""
def __init__(self, start_pct=100, end_pct=0, time_s=1, period_ms=100,
delay_s=0, loops=1, sched=None, cpus=None):
super(Pulse, self).__init__()
if end_pct >= start_pct:
raise ValueError('end_pct must be lower than start_pct')
if end_pct not in range(0,101) or start_pct not in range(0,101):
raise ValueError('end_pct and start_pct must be in [0..100] range')
self._task['cpus'] = cpus
if not sched:
sched = {'policy' : 'DEFAULT'}
self._task['sched'] = sched
self._task['delay'] = delay_s
self._task['loops'] = loops
phases = []
for load in [start_pct, end_pct]:
if load == 0:
continue
phase = Phase(time_s, period_ms, load)
phases.append(phase)
self._task['phases'] = phases
class Periodic(Pulse):
"""
Configure a periodic load. This is the simplest type of RTA task.
This class defines a task whose load is periodic with a configured
period and duty-cycle.
:param duty_cycle_pct: the load percentage.
:param duration_s: the total duration in seconds of the task.
:param period_ms: the period used to define the load in milliseconds.
:param delay_s: the delay in seconds before starting the periodic phase.
:param sched: the scheduler configuration for this task.
:type sched: dict
:param cpus: the list of CPUs on which the task can run.
:type cpus: list(int)
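For example, a 10 second task running at 50% duty-cycle over a 100 ms
period could be described as (values are illustrative)::
Periodic(duty_cycle_pct=50, duration_s=10, period_ms=100)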
"""
def __init__(self, duty_cycle_pct=50, duration_s=1, period_ms=100,
delay_s=0, sched=None, cpus=None):
super(Periodic, self).__init__(duty_cycle_pct, 0, duration_s,
period_ms, delay_s, 1, sched, cpus)
| apache-2.0 | 8,448,739,575,015,911,000 | 35.39359 | 86 | 0.541903 | false |
nikdoof/test-auth | app/sso/services/miningbuddy/__init__.py | 1 | 3066 | import crypt
import random
import time
from django.db import transaction
from sso.services import BaseDBService
from django.conf import settings
class MiningBuddyService(BaseDBService):
"""
Mining Buddy Class, allows registration and sign-in
"""
settings = { 'require_user': False,
'require_password': False,
'provide_login': False,
'use_auth_username': False,
'database_name': 'dreddit_mining',
'password_salt': 's98ss7fsc7fd2rf62ctcrlwztstnzve9toezexcsdhfgviuinusxcdtsvbrg' }
SQL_ADD_USER = r"INSERT INTO users (username, password, email, emailvalid, confirmed, rank) VALUES (%s, %s, %s, 1, 1, 2)"
SQL_ADD_API = r"INSERT INTO api_keys (userid, time, apiID, apiKey, api_valid, charid) values (%s, %s, %s, %s, 1, %s)"
SQL_DIS_USER = r"UPDATE users SET canLogin = 0 WHERE username = %s"
SQL_ENABLE_USER = r"UPDATE users SET canLogin = 1, password = %s WHERE username = %s"
SQL_CHECK_USER = r"SELECT username from users WHERE username = %s and deleted = 0"
SQL_DEL_USER = r"UPDATE users set deleted = 1 WHERE username = %s"
def _gen_salt(self):
return self.settings['password_salt']
def _gen_mb_hash(self, password, salt=None):
if not salt:
salt = self._gen_salt()
return crypt.crypt(password, salt)
def _clean_username(self, username):
username = username.strip()
return username
def add_user(self, username, password, **kwargs):
""" Add a user """
pwhash = self._gen_mb_hash(password)
if 'user' in kwargs:
email = kwargs['user'].email
else:
email = ''
self.dbcursor.execute(self.SQL_ADD_USER, [self._clean_username(username), pwhash, email])
userid = self.dbcursor.lastrowid
api = kwargs['character'].eveaccount_set.all()[0]
self.dbcursor.execute(self.SQL_ADD_API, [userid, int(time.time()), api.api_user_id, api.api_key, kwargs['character'].id])
return { 'username': self._clean_username(username), 'password': password }
def check_user(self, username):
""" Check if the username exists """
self.dbcursor.execute(self.SQL_CHECK_USER, [self._clean_username(username)])
row = self.dbcursor.fetchone()
if row:
return True
return False
def delete_user(self, uid):
""" Delete a user """
self.dbcursor.execute(self.SQL_DEL_USER, [uid])
return True
def disable_user(self, uid):
""" Disable a user """
self.dbcursor.execute(self.SQL_DIS_USER, [uid])
return True
def enable_user(self, uid, password):
""" Enable a user """
pwhash = self._gen_mb_hash(password)
self.dbcursor.execute(self.SQL_ENABLE_USER, [pwhash, uid])
return True
def reset_password(self, uid, password):
""" Reset the user's password """
return self.enable_user(uid, password)
ServiceClass = 'MiningBuddyService'
| bsd-3-clause | -8,489,552,789,618,893,000 | 34.651163 | 129 | 0.612524 | false |
praekelt/rapidpro | temba/api/models.py | 1 | 22048 | from __future__ import unicode_literals
import hmac
import json
from django.db.models import Q
import requests
import uuid
from datetime import timedelta
from django.conf import settings
from django.core.urlresolvers import reverse
from django.contrib.auth.models import User, Group
from django.db import models
from django.utils import timezone
from hashlib import sha1
from smartmin.models import SmartModel
from temba.contacts.models import TEL_SCHEME
from temba.orgs.models import Org
from temba.channels.models import Channel, TEMBA_HEADERS
from temba.msgs.models import Call
from temba.utils import datetime_to_str, prepped_request_to_str
from temba.utils.cache import get_cacheable_attr
from urllib import urlencode
PENDING = 'P'
COMPLETE = 'C'
FAILED = 'F'
ERRORED = 'E'
STATUS_CHOICES = ((PENDING, "Pending"),
(COMPLETE, "Complete"),
(ERRORED, "Errored"),
(FAILED, "Failed"))
SMS_RECEIVED = 'mo_sms'
SMS_SENT = 'mt_sent'
SMS_DELIVERED = 'mt_dlvd'
SMS_FAIL = 'mt_fail'
RELAYER_ALARM = 'alarm'
FLOW = 'flow'
CATEGORIZE = 'categorize'
EVENT_CHOICES = ((SMS_RECEIVED, "Incoming SMS Message"),
(SMS_SENT, "Outgoing SMS Sent"),
(SMS_DELIVERED, "Outgoing SMS Delivered to Recipient"),
(Call.TYPE_OUT, "Outgoing Call"),
(Call.TYPE_OUT_MISSED, "Missed Outgoing Call"),
(Call.TYPE_IN, "Incoming Call"),
(Call.TYPE_IN_MISSED, "Missed Incoming Call"),
(RELAYER_ALARM, "Channel Alarm"),
(FLOW, "Flow Step Reached"),
(CATEGORIZE, "Flow Categorization"))
class WebHookEvent(SmartModel):
"""
Represents an event that needs to be sent to the web hook for a channel.
"""
org = models.ForeignKey(Org,
help_text="The organization that this event was triggered for")
status = models.CharField(max_length=1, choices=STATUS_CHOICES, default='P',
help_text="The state this event is currently in")
channel = models.ForeignKey(Channel, null=True, blank=True,
help_text="The channel that this event is relating to")
event = models.CharField(max_length=16, choices=EVENT_CHOICES,
help_text="The event type for this event")
data = models.TextField(help_text="The JSON encoded data that will be POSTED to the web hook")
try_count = models.IntegerField(default=0,
help_text="The number of times this event has been tried")
next_attempt = models.DateTimeField(null=True, blank=True,
help_text="When this event will be retried")
action = models.CharField(max_length=8, default='POST', help_text='What type of HTTP event is it')
def fire(self):
# start our task with this event id
from .tasks import deliver_event_task
deliver_event_task.delay(self.id)
@classmethod
def trigger_flow_event(cls, webhook_url, flow, run, node_uuid, contact, event, action='POST'):
org = flow.org
api_user = get_api_user()
# no-op if no webhook configured
if not webhook_url:
return
json_time = datetime_to_str(timezone.now())
# get the results for this contact
results = flow.get_results(contact)
values = []
if results and results[0]:
values = results[0]['values']
for value in values:
value['time'] = datetime_to_str(value['time'])
value['value'] = unicode(value['value'])
# if the action is on the first node
# we might not have an sms (or channel) yet
channel = None
text = None
if event:
text = event.text
channel = event.channel
if channel:
channel_id = channel.pk
else:
channel_id = -1
steps = []
for step in run.steps.all().order_by('arrived_on'):
steps.append(dict(type=step.step_type,
node=step.step_uuid,
arrived_on=datetime_to_str(step.arrived_on),
left_on=datetime_to_str(step.left_on),
text=step.get_text(),
value=step.rule_value))
data = dict(channel=channel_id,
relayer=channel_id,
flow=flow.id,
run=run.id,
text=text,
step=unicode(node_uuid),
phone=contact.get_urn_display(org=org, scheme=TEL_SCHEME, full=True),
values=json.dumps(values),
steps=json.dumps(steps),
time=json_time)
if not action:
action = 'POST'
webhook_event = WebHookEvent.objects.create(org=org,
event=FLOW,
channel=channel,
data=json.dumps(data),
try_count=1,
action=action,
created_by=api_user,
modified_by=api_user)
status_code = -1
message = "None"
body = None
# webhook events fire immediately since we need the results back
try:
# only send webhooks when we are configured to, otherwise fail
if not settings.SEND_WEBHOOKS:
raise Exception("!! Skipping WebHook send, SEND_WEBHOOKS set to False")
# some hosts deny generic user agents, use Temba as our user agent
if action == 'GET':
response = requests.get(webhook_url, headers=TEMBA_HEADERS, timeout=10)
else:
response = requests.post(webhook_url, data=data, headers=TEMBA_HEADERS, timeout=10)
response_text = response.text
body = response.text
status_code = response.status_code
if response.status_code == 200 or response.status_code == 201:
try:
response_json = json.loads(response_text)
# only update if we got a valid JSON dictionary
if not isinstance(response_json, dict):
raise ValueError("Response must be a JSON dictionary, ignoring response.")
run.update_fields(response_json)
message = "Webhook called successfully."
except ValueError as e:
message = "Response must be a JSON dictionary, ignoring response."
webhook_event.status = COMPLETE
else:
webhook_event.status = FAILED
message = "Got non 200 response (%d) from webhook." % response.status_code
raise Exception("Got non 200 response (%d) from webhook." % response.status_code)
except Exception as e:
import traceback
traceback.print_exc()
webhook_event.status = FAILED
message = "Error calling webhook: %s" % unicode(e)
finally:
webhook_event.save()
result = WebHookResult.objects.create(event=webhook_event,
url=webhook_url,
status_code=status_code,
body=body,
message=message,
data=urlencode(data, doseq=True),
created_by=api_user,
modified_by=api_user)
# if this is a test contact, add an entry to our action log
if run.contact.is_test:
from temba.flows.models import ActionLog
log_txt = "Triggered <a href='%s' target='_log'>webhook event</a> - %d" % (reverse('api.log_read', args=[webhook_event.pk]), status_code)
ActionLog.create(run, log_txt, safe=True)
return result
@classmethod
def trigger_sms_event(cls, event, msg, time):
if not msg.channel:
return
org = msg.org
# no-op if no webhook configured
if not org or not org.get_webhook_url():
return
# if the org doesn't care about this type of message, ignore it
if (event == 'mo_sms' and not org.is_notified_of_mo_sms()) or \
(event == 'mt_sent' and not org.is_notified_of_mt_sms()) or \
(event == 'mt_dlvd' and not org.is_notified_of_mt_sms()):
return
api_user = get_api_user()
json_time = time.strftime('%Y-%m-%dT%H:%M:%S.%f')
data = dict(sms=msg.pk,
phone=msg.contact.get_urn_display(org=org, scheme=TEL_SCHEME, full=True),
text=msg.text,
time=json_time,
status=msg.status,
direction=msg.direction)
hook_event = WebHookEvent.objects.create(org=org,
channel=msg.channel,
event=event,
data=json.dumps(data),
created_by=api_user,
modified_by=api_user)
hook_event.fire()
return hook_event
@classmethod
def trigger_call_event(cls, call):
if not call.channel:
return
org = call.channel.org
# no-op if no webhook configured
if not org or not org.get_webhook_url():
return
event = call.call_type
# if the org doesn't care about this type of message, ignore it
if (event == 'mt_call' and not org.is_notified_of_mt_call()) or \
(event == 'mt_miss' and not org.is_notified_of_mt_call()) or \
(event == 'mo_call' and not org.is_notified_of_mo_call()) or \
(event == 'mo_miss' and not org.is_notified_of_mo_call()):
return
api_user = get_api_user()
json_time = call.time.strftime('%Y-%m-%dT%H:%M:%S.%f')
data = dict(call=call.pk,
phone=call.contact.get_urn_display(org=org, scheme=TEL_SCHEME, full=True),
duration=call.duration,
time=json_time)
hook_event = WebHookEvent.objects.create(org=org,
channel=call.channel,
event=event,
data=json.dumps(data),
created_by=api_user,
modified_by=api_user)
hook_event.fire()
return hook_event
@classmethod
def trigger_channel_alarm(cls, sync_event):
channel = sync_event.channel
org = channel.org
# no-op if no webhook configured
if not org or not org.get_webhook_url(): # pragma: no cover
return
if not org.is_notified_of_alarms():
return
api_user = get_api_user()
json_time = channel.last_seen.strftime('%Y-%m-%dT%H:%M:%S.%f')
data = dict(channel=channel.pk,
power_source=sync_event.power_source,
power_status=sync_event.power_status,
power_level=sync_event.power_level,
network_type=sync_event.network_type,
pending_message_count=sync_event.pending_message_count,
retry_message_count=sync_event.retry_message_count,
last_seen=json_time)
hook_event = WebHookEvent.objects.create(org=org,
channel=channel,
event=RELAYER_ALARM,
data=json.dumps(data),
created_by=api_user,
modified_by=api_user)
hook_event.fire()
return hook_event
def deliver(self):
from .serializers import MsgCreateSerializer
# create our post parameters
post_data = json.loads(self.data)
post_data['event'] = self.event
post_data['relayer'] = self.channel.pk
post_data['channel'] = self.channel.pk
post_data['relayer_phone'] = self.channel.address
# look up the endpoint for this channel
result = dict(url=self.org.get_webhook_url(), data=urlencode(post_data, doseq=True))
if not self.org.get_webhook_url(): # pragma: no cover
result['status_code'] = 0
result['message'] = "No webhook registered for this org, ignoring event"
self.status = FAILED
self.next_attempt = None
return result
# get our org user
user = self.org.get_user()
# no user? we shouldn't be doing webhook stuff
if not user:
result['status_code'] = 0
result['message'] = "No active user for this org, ignoring event"
self.status = FAILED
self.next_attempt = None
return result
# make the request
try:
if not settings.SEND_WEBHOOKS:
raise Exception("!! Skipping WebHook send, SEND_WEBHOOKS set to False")
# some hosts deny generic user agents, use Temba as our user agent
headers = TEMBA_HEADERS.copy()
# also include any user-defined headers
headers.update(self.org.get_webhook_headers())
s = requests.Session()
prepped = requests.Request('POST', self.org.get_webhook_url(),
data=post_data,
headers=headers).prepare()
result['url'] = prepped.url
result['request'] = prepped_request_to_str(prepped)
r = s.send(prepped)
result['status_code'] = r.status_code
result['body'] = r.text.strip()
r.raise_for_status()
# any 200 code is ok by us
self.status = COMPLETE
result['message'] = "Event delivered successfully."
# read our body if we have one
if result['body']:
try:
data = r.json()
serializer = MsgCreateSerializer(data=data, user=user, org=self.org)
if serializer.is_valid():
result['serializer'] = serializer
obj = serializer.object
result['message'] = "SMS message to %d recipient(s) with text: '%s'" % (len(obj.contacts), obj.text)
else:
errors = serializer.errors
result['message'] = "Event delivered successfully, ignoring response body, wrong format: %s" % \
",".join("%s: %s" % (_, ",".join(errors[_])) for _ in errors.keys())
except Exception as e:
# we were unable to make anything of the body, that's ok though because
# we got a 200, so just save our error for posterity
result['message'] = "Event delivered successfully, ignoring response body, not JSON: %s" % unicode(e)
except Exception as e:
# we had an error, log it
self.status = ERRORED
result['message'] = "Error when delivering event - %s" % unicode(e)
# if we had an error of some kind, schedule a retry for five minutes from now
self.try_count += 1
if self.status == ERRORED:
if self.try_count < 3:
self.next_attempt = timezone.now() + timedelta(minutes=5)
else:
self.next_attempt = None
self.status = 'F'
else:
self.next_attempt = None
return result
def __unicode__(self):
return "WebHookEvent[%s:%d] %s" % (self.event, self.pk, self.data)
class WebHookResult(SmartModel):
"""
Represents the result of trying to deliver an event to a web hook
"""
event = models.ForeignKey(WebHookEvent,
help_text="The event that this result is tied to")
url = models.TextField(null=True, blank=True,
help_text="The URL the event was delivered to")
data = models.TextField(null=True, blank=True,
help_text="The data that was posted to the webhook")
request = models.TextField(null=True, blank=True,
help_text="The request that was posted to the webhook")
status_code = models.IntegerField(help_text="The HTTP status as returned by the web hook")
message = models.CharField(max_length=255,
help_text="A message describing the result, error messages go here")
body = models.TextField(null=True, blank=True,
help_text="The body of the HTTP response as returned by the web hook")
def stripped_body(self):
return self.body.strip() if self.body else ""
@classmethod
def record_result(cls, event, result):
# save our event
event.save()
# if our serializer was valid, save it, this will send the message out
serializer = result.get('serializer', None)
if serializer and serializer.is_valid():
serializer.save()
# little utility to trim a value by length
message = result['message']
if message:
message = message[:255]
api_user = get_api_user()
WebHookResult.objects.create(event=event,
url=result['url'],
# Flow webhooks won't have 'request'
request=result.get('request'),
data=result['data'],
message=message,
status_code=result.get('status_code', 503),
body=result.get('body', None),
created_by=api_user,
modified_by=api_user)
# keep only the most recent 100 events for each org
for old_event in WebHookEvent.objects.filter(org=event.org, status__in=['C', 'F']).order_by('-created_on')[100:]: # pragma: no cover
old_event.delete()
class APIToken(models.Model):
"""
Our API token, ties in orgs
"""
key = models.CharField(max_length=40, primary_key=True)
user = models.ForeignKey(User, related_name='api_tokens')
org = models.ForeignKey(Org, related_name='api_tokens')
created = models.DateTimeField(auto_now_add=True)
role = models.ForeignKey(Group)
@classmethod
def get_orgs_for_role(cls, user, role):
"""
Gets all the orgs the user can login to with the given role. Also
takes a single character role (A, E, S, etc) and maps it to a UserGroup.
"""
if role == 'A':
valid_orgs = Org.objects.filter(administrators__in=[user])
role = Group.objects.get(name='Administrators')
elif role == 'E':
# admins can authenticate as editors
valid_orgs = Org.objects.filter(Q(administrators__in=[user]) | Q(editors__in=[user]))
role = Group.objects.get(name='Editors')
elif role == 'S':
# admins and editors can authenticate as surveyors
valid_orgs = Org.objects.filter(Q(administrators__in=[user]) | Q(editors__in=[user]) | Q(surveyors__in=[user]))
role = Group.objects.get(name='Surveyors')
else:
# can't authenticate via the api as anything else
valid_orgs = []
role = None
return valid_orgs, role
def save(self, *args, **kwargs):
if not self.key:
self.key = self.generate_key()
return super(APIToken, self).save(*args, **kwargs)
def generate_key(self):
unique = uuid.uuid4()
return hmac.new(unique.bytes, digestmod=sha1).hexdigest()
def __unicode__(self):
return self.key
class Meta:
unique_together = ('user', 'org', 'role')
def get_or_create_api_token(user):
"""
Gets or (lazily creates) an API token for this user
"""
if not user.is_authenticated():
return None
org = user.get_org()
if not org:
org = Org.get_org(user)
role = user.get_role()
if org:
tokens = APIToken.objects.filter(user=user, org=org, role=role)
if tokens:
return str(tokens[0])
else:
token = APIToken.objects.create(user=user, org=org, role=role)
return str(token)
else:
return None
def api_token(user):
"""
Cached property access to a user's lazily-created API token
"""
return get_cacheable_attr(user, '__api_token', lambda: get_or_create_api_token(user))
User.api_token = property(api_token)
def get_api_user():
"""
Returns a user that can be used to associate events created by the API service
"""
user = User.objects.filter(username='api')
if user:
return user[0]
else:
user = User.objects.create_user('api', '[email protected]')
user.groups.add(Group.objects.get(name='Service Users'))
return user
| agpl-3.0 | 1,152,064,268,507,012,000 | 37.344348 | 153 | 0.527712 | false |
openstack/ironic | ironic/tests/unit/api/controllers/v1/test_allocation.py | 1 | 65654 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Tests for the API /allocations/ methods.
"""
import datetime
from http import client as http_client
import json
from unittest import mock
from urllib import parse as urlparse
import fixtures
from keystonemiddleware import auth_token
from oslo_config import cfg
from oslo_utils import timeutils
from oslo_utils import uuidutils
from ironic.api.controllers import base as api_base
from ironic.api.controllers import v1 as api_v1
from ironic.api.controllers.v1 import notification_utils
from ironic.common import exception
from ironic.common import policy
from ironic.conductor import rpcapi
from ironic import objects
from ironic.objects import fields as obj_fields
from ironic.tests.unit.api import base as test_api_base
from ironic.tests.unit.api import utils as apiutils
from ironic.tests.unit.objects import utils as obj_utils
class TestListAllocations(test_api_base.BaseApiTest):
headers = {api_base.Version.string: str(api_v1.max_version())}
def setUp(self):
super(TestListAllocations, self).setUp()
self.node = obj_utils.create_test_node(self.context, name='node-1')
def test_empty(self):
data = self.get_json('/allocations', headers=self.headers)
self.assertEqual([], data['allocations'])
def test_one(self):
allocation = obj_utils.create_test_allocation(self.context,
node_id=self.node.id)
data = self.get_json('/allocations', headers=self.headers)
self.assertEqual(allocation.uuid, data['allocations'][0]["uuid"])
self.assertEqual(allocation.name, data['allocations'][0]['name'])
self.assertEqual({}, data['allocations'][0]["extra"])
self.assertEqual(self.node.uuid, data['allocations'][0]["node_uuid"])
self.assertEqual(allocation.owner, data['allocations'][0]["owner"])
# never expose the node_id
self.assertNotIn('node_id', data['allocations'][0])
def test_get_one(self):
allocation = obj_utils.create_test_allocation(self.context,
node_id=self.node.id)
data = self.get_json('/allocations/%s' % allocation.uuid,
headers=self.headers)
self.assertEqual(allocation.uuid, data['uuid'])
self.assertEqual({}, data["extra"])
self.assertEqual(self.node.uuid, data["node_uuid"])
self.assertEqual(allocation.owner, data["owner"])
# never expose the node_id
self.assertNotIn('node_id', data)
def test_get_one_with_json(self):
allocation = obj_utils.create_test_allocation(self.context,
node_id=self.node.id)
data = self.get_json('/allocations/%s.json' % allocation.uuid,
headers=self.headers)
self.assertEqual(allocation.uuid, data['uuid'])
def test_get_one_with_json_in_name(self):
allocation = obj_utils.create_test_allocation(self.context,
name='pg.json',
node_id=self.node.id)
data = self.get_json('/allocations/%s' % allocation.uuid,
headers=self.headers)
self.assertEqual(allocation.uuid, data['uuid'])
def test_get_one_with_suffix(self):
allocation = obj_utils.create_test_allocation(self.context,
name='pg.1',
node_id=self.node.id)
data = self.get_json('/allocations/%s' % allocation.uuid,
headers=self.headers)
self.assertEqual(allocation.uuid, data['uuid'])
def test_get_one_custom_fields(self):
allocation = obj_utils.create_test_allocation(self.context,
node_id=self.node.id)
fields = 'resource_class,extra'
data = self.get_json(
'/allocations/%s?fields=%s' % (allocation.uuid, fields),
headers=self.headers)
# We always append "links"
self.assertCountEqual(['resource_class', 'extra', 'links'], data)
def test_get_collection_custom_fields(self):
fields = 'uuid,extra'
for i in range(3):
obj_utils.create_test_allocation(
self.context,
node_id=self.node.id,
uuid=uuidutils.generate_uuid(),
name='allocation%s' % i)
data = self.get_json(
'/allocations?fields=%s' % fields,
headers=self.headers)
self.assertEqual(3, len(data['allocations']))
for allocation in data['allocations']:
# We always append "links"
self.assertCountEqual(['uuid', 'extra', 'links'], allocation)
def test_get_custom_fields_invalid_fields(self):
allocation = obj_utils.create_test_allocation(self.context,
node_id=self.node.id)
fields = 'uuid,spongebob'
response = self.get_json(
'/allocations/%s?fields=%s' % (allocation.uuid, fields),
headers=self.headers, expect_errors=True)
self.assertEqual(http_client.BAD_REQUEST, response.status_int)
self.assertEqual('application/json', response.content_type)
self.assertIn('spongebob', response.json['error_message'])
def test_get_one_invalid_api_version(self):
allocation = obj_utils.create_test_allocation(self.context,
node_id=self.node.id)
response = self.get_json(
'/allocations/%s' % (allocation.uuid),
headers={api_base.Version.string: str(api_v1.min_version())},
expect_errors=True)
self.assertEqual(http_client.NOT_FOUND, response.status_int)
def test_get_one_invalid_api_version_without_check(self):
# Invalid name, but the check happens after the microversion check.
response = self.get_json(
'/allocations/ba!na!na!',
headers={api_base.Version.string: str(api_v1.min_version())},
expect_errors=True)
self.assertEqual(http_client.NOT_FOUND, response.status_int)
def test_many(self):
allocations = []
for id_ in range(5):
allocation = obj_utils.create_test_allocation(
self.context, node_id=self.node.id,
uuid=uuidutils.generate_uuid(),
name='allocation%s' % id_)
allocations.append(allocation.uuid)
data = self.get_json('/allocations', headers=self.headers)
self.assertEqual(len(allocations), len(data['allocations']))
uuids = [n['uuid'] for n in data['allocations']]
self.assertCountEqual(allocations, uuids)
def test_links(self):
uuid = uuidutils.generate_uuid()
obj_utils.create_test_allocation(self.context,
uuid=uuid,
node_id=self.node.id)
data = self.get_json('/allocations/%s' % uuid, headers=self.headers)
self.assertIn('links', data)
self.assertEqual(2, len(data['links']))
self.assertIn(uuid, data['links'][0]['href'])
for link in data['links']:
bookmark = link['rel'] == 'bookmark'
self.assertTrue(self.validate_link(link['href'], bookmark=bookmark,
headers=self.headers))
def test_collection_links(self):
allocations = []
for id_ in range(5):
allocation = obj_utils.create_test_allocation(
self.context,
uuid=uuidutils.generate_uuid(),
name='allocation%s' % id_)
allocations.append(allocation.uuid)
data = self.get_json('/allocations/?limit=3', headers=self.headers)
self.assertEqual(3, len(data['allocations']))
next_marker = data['allocations'][-1]['uuid']
self.assertIn(next_marker, data['next'])
def test_collection_links_default_limit(self):
cfg.CONF.set_override('max_limit', 3, 'api')
allocations = []
for id_ in range(5):
allocation = obj_utils.create_test_allocation(
self.context,
uuid=uuidutils.generate_uuid(),
name='allocation%s' % id_)
allocations.append(allocation.uuid)
data = self.get_json('/allocations', headers=self.headers)
self.assertEqual(3, len(data['allocations']))
next_marker = data['allocations'][-1]['uuid']
self.assertIn(next_marker, data['next'])
def test_collection_links_custom_fields(self):
cfg.CONF.set_override('max_limit', 3, 'api')
fields = 'uuid,extra'
allocations = []
for i in range(5):
allocation = obj_utils.create_test_allocation(
self.context,
node_id=self.node.id,
uuid=uuidutils.generate_uuid(),
name='allocation%s' % i)
allocations.append(allocation.uuid)
data = self.get_json(
'/allocations?fields=%s' % fields,
headers=self.headers)
self.assertEqual(3, len(data['allocations']))
next_marker = data['allocations'][-1]['uuid']
self.assertIn(next_marker, data['next'])
self.assertIn('fields', data['next'])
def test_get_collection_pagination_no_uuid(self):
fields = 'node_uuid'
limit = 2
allocations = []
for id_ in range(3):
allocation = obj_utils.create_test_allocation(
self.context,
node_id=self.node.id,
uuid=uuidutils.generate_uuid(),
name='allocation%s' % id_)
allocations.append(allocation)
data = self.get_json(
'/allocations?fields=%s&limit=%s' % (fields, limit),
headers=self.headers)
self.assertEqual(limit, len(data['allocations']))
self.assertIn('marker=%s' % allocations[limit - 1].uuid, data['next'])
def test_allocation_get_all_invalid_api_version(self):
obj_utils.create_test_allocation(
self.context, node_id=self.node.id, uuid=uuidutils.generate_uuid(),
name='allocation_1')
response = self.get_json('/allocations',
headers={api_base.Version.string: '1.14'},
expect_errors=True)
self.assertEqual(http_client.NOT_FOUND, response.status_int)
@mock.patch.object(policy, 'authorize', spec=True)
def test_allocation_get_all_forbidden(self, mock_authorize):
def mock_authorize_function(rule, target, creds):
raise exception.HTTPForbidden(resource='fake')
mock_authorize.side_effect = mock_authorize_function
response = self.get_json('/allocations', expect_errors=True,
headers={
api_base.Version.string: '1.60',
'X-Project-Id': '12345'
})
self.assertEqual(http_client.FORBIDDEN, response.status_int)
@mock.patch.object(policy, 'authorize', spec=True)
def test_allocation_get_all_forbidden_no_project(self, mock_authorize):
def mock_authorize_function(rule, target, creds):
if rule == 'baremetal:allocation:list_all':
raise exception.HTTPForbidden(resource='fake')
return True
mock_authorize.side_effect = mock_authorize_function
response = self.get_json('/allocations', expect_errors=True,
headers={
api_base.Version.string: '1.59',
})
self.assertEqual(http_client.FORBIDDEN, response.status_int)
@mock.patch.object(policy, 'authorize', spec=True)
def test_allocation_get_all_forbid_owner_proj_mismatch(
self, mock_authorize):
def mock_authorize_function(rule, target, creds):
if rule == 'baremetal:allocation:list_all':
raise exception.HTTPForbidden(resource='fake')
return True
mock_authorize.side_effect = mock_authorize_function
response = self.get_json('/allocations?owner=54321',
expect_errors=True,
headers={
api_base.Version.string: '1.60',
'X-Project-Id': '12345'
})
self.assertEqual(http_client.FORBIDDEN, response.status_int)
@mock.patch.object(policy, 'authorize', spec=True)
def test_allocation_get_all_non_admin(self, mock_authorize):
def mock_authorize_function(rule, target, creds):
if rule == 'baremetal:allocation:list_all':
raise exception.HTTPForbidden(resource='fake')
return True
mock_authorize.side_effect = mock_authorize_function
allocations = []
for id in range(5):
allocation = obj_utils.create_test_allocation(
self.context,
uuid=uuidutils.generate_uuid(),
owner='12345')
allocations.append(allocation.uuid)
for id in range(2):
allocation = obj_utils.create_test_allocation(
self.context,
uuid=uuidutils.generate_uuid())
data = self.get_json('/allocations', headers={
api_base.Version.string: '1.60',
'X-Project-Id': '12345'})
self.assertEqual(len(allocations), len(data['allocations']))
uuids = [n['uuid'] for n in data['allocations']]
self.assertEqual(sorted(allocations), sorted(uuids))
def test_sort_key(self):
allocations = []
for id_ in range(3):
allocation = obj_utils.create_test_allocation(
self.context,
node_id=self.node.id,
uuid=uuidutils.generate_uuid(),
name='allocation%s' % id_)
allocations.append(allocation.uuid)
data = self.get_json('/allocations?sort_key=uuid',
headers=self.headers)
uuids = [n['uuid'] for n in data['allocations']]
self.assertEqual(sorted(allocations), uuids)
def test_sort_key_invalid(self):
invalid_keys_list = ['foo', 'extra', 'internal_info', 'properties']
for invalid_key in invalid_keys_list:
response = self.get_json('/allocations?sort_key=%s' % invalid_key,
expect_errors=True, headers=self.headers)
self.assertEqual(http_client.BAD_REQUEST, response.status_int)
self.assertEqual('application/json', response.content_type)
self.assertIn(invalid_key, response.json['error_message'])
def test_sort_key_allowed(self):
allocation_uuids = []
for id_ in range(3, 0, -1):
allocation = obj_utils.create_test_allocation(
self.context,
uuid=uuidutils.generate_uuid(),
name='allocation%s' % id_)
allocation_uuids.append(allocation.uuid)
allocation_uuids.reverse()
data = self.get_json('/allocations?sort_key=name',
headers=self.headers)
data_uuids = [p['uuid'] for p in data['allocations']]
self.assertEqual(allocation_uuids, data_uuids)
def test_get_all_by_state(self):
for i in range(5):
if i < 3:
state = 'allocating'
else:
state = 'active'
obj_utils.create_test_allocation(
self.context,
state=state,
uuid=uuidutils.generate_uuid(),
name='allocation%s' % i)
data = self.get_json("/allocations?state=allocating",
headers=self.headers)
self.assertEqual(3, len(data['allocations']))
def test_get_all_by_owner(self):
for i in range(5):
if i < 3:
owner = '12345'
else:
owner = '54321'
obj_utils.create_test_allocation(
self.context,
owner=owner,
uuid=uuidutils.generate_uuid(),
name='allocation%s' % i)
# NOTE(TheJulia): Force the cast of the action to a system
# scoped request. System scoped is allowed to view everything,
# whereas project scoped requests are actually filtered with the
# secure-rbac work. This was done in troubleshooting the code,
# so may not be necessary, but filtered views are checked in
# the RBAC testing.
headers = self.headers
headers['X-Roles'] = "member,reader"
headers['OpenStack-System-Scope'] = "all"
data = self.get_json("/allocations?owner=12345",
headers=self.headers)
self.assertEqual(3, len(data['allocations']))
def test_get_all_by_owner_not_allowed(self):
response = self.get_json("/allocations?owner=12345",
headers={api_base.Version.string: '1.59'},
expect_errors=True)
self.assertEqual('application/json', response.content_type)
self.assertEqual(http_client.NOT_ACCEPTABLE, response.status_code)
self.assertTrue(response.json['error_message'])
def test_get_all_by_node_name(self):
for i in range(5):
if i < 3:
node_id = self.node.id
else:
node_id = 100000 + i
obj_utils.create_test_allocation(
self.context,
node_id=node_id,
uuid=uuidutils.generate_uuid(),
name='allocation%s' % i)
data = self.get_json("/allocations?node=%s" % self.node.name,
headers=self.headers)
self.assertEqual(3, len(data['allocations']))
def test_get_all_by_node_uuid(self):
obj_utils.create_test_allocation(self.context, node_id=self.node.id)
data = self.get_json('/allocations?node=%s' % (self.node.uuid),
headers=self.headers)
self.assertEqual(1, len(data['allocations']))
def test_get_all_by_non_existing_node(self):
obj_utils.create_test_allocation(self.context, node_id=self.node.id)
response = self.get_json('/allocations?node=banana',
headers=self.headers, expect_errors=True)
self.assertEqual('application/json', response.content_type)
self.assertEqual(http_client.BAD_REQUEST, response.status_int)
def test_get_by_node_resource(self):
allocation = obj_utils.create_test_allocation(self.context,
node_id=self.node.id)
data = self.get_json('/nodes/%s/allocation' % self.node.uuid,
headers=self.headers)
self.assertEqual(allocation.uuid, data['uuid'])
self.assertEqual({}, data["extra"])
self.assertEqual(self.node.uuid, data["node_uuid"])
def test_get_by_node_resource_invalid_api_version(self):
obj_utils.create_test_allocation(self.context, node_id=self.node.id)
response = self.get_json(
'/nodes/%s/allocation' % self.node.uuid,
headers={api_base.Version.string: str(api_v1.min_version())},
expect_errors=True)
self.assertEqual(http_client.NOT_FOUND, response.status_int)
def test_get_by_node_resource_with_fields(self):
obj_utils.create_test_allocation(self.context, node_id=self.node.id)
data = self.get_json('/nodes/%s/allocation?fields=name,extra' %
self.node.uuid,
headers=self.headers)
self.assertNotIn('uuid', data)
self.assertIn('name', data)
self.assertEqual({}, data["extra"])
def test_get_by_node_resource_and_id(self):
allocation = obj_utils.create_test_allocation(self.context,
node_id=self.node.id)
response = self.get_json('/nodes/%s/allocation/%s' % (self.node.uuid,
allocation.uuid),
headers=self.headers, expect_errors=True)
self.assertEqual('application/json', response.content_type)
self.assertEqual(http_client.METHOD_NOT_ALLOWED, response.status_int)
def test_by_node_resource_not_existed(self):
node = obj_utils.create_test_node(self.context,
uuid=uuidutils.generate_uuid())
res = self.get_json('/node/%s/allocation' % node.uuid,
expect_errors=True, headers=self.headers)
self.assertEqual(http_client.NOT_FOUND, res.status_code)
def test_by_node_invalid_node(self):
res = self.get_json('/node/%s/allocation' % uuidutils.generate_uuid(),
expect_errors=True, headers=self.headers)
self.assertEqual(http_client.NOT_FOUND, res.status_code)
def test_allocation_owner_hidden_in_lower_version(self):
allocation = obj_utils.create_test_allocation(self.context,
node_id=self.node.id)
data = self.get_json(
'/allocations/%s' % allocation.uuid,
headers={api_base.Version.string: '1.59'})
self.assertNotIn('owner', data)
data = self.get_json(
'/allocations/%s' % allocation.uuid,
headers=self.headers)
self.assertIn('owner', data)
def test_allocation_owner_null_field(self):
allocation = obj_utils.create_test_allocation(self.context,
node_id=self.node.id,
owner=None)
data = self.get_json('/allocations/%s' % allocation.uuid,
headers=self.headers)
self.assertIsNone(data['owner'])
def test_allocation_owner_present(self):
allocation = obj_utils.create_test_allocation(self.context,
node_id=self.node.id,
owner='12345')
data = self.get_json('/allocations/%s' % allocation.uuid,
headers=self.headers)
self.assertEqual(data['owner'], '12345')
def test_get_owner_field(self):
allocation = obj_utils.create_test_allocation(self.context,
node_id=self.node.id,
owner='12345')
fields = 'owner'
response = self.get_json(
'/allocations/%s?fields=%s' % (allocation.uuid, fields),
headers=self.headers)
self.assertIn('owner', response)
class TestPatch(test_api_base.BaseApiTest):
headers = {api_base.Version.string: str(api_v1.max_version())}
def setUp(self):
super(TestPatch, self).setUp()
self.allocation = obj_utils.create_test_allocation(self.context)
def test_update_not_allowed(self):
response = self.patch_json('/allocations/%s' % self.allocation.uuid,
[{'path': '/extra/foo',
'value': 'bar',
'op': 'add'}],
expect_errors=True,
headers={api_base.Version.string: '1.56'})
self.assertEqual('application/json', response.content_type)
self.assertEqual(http_client.METHOD_NOT_ALLOWED, response.status_int)
def test_update_not_found(self):
uuid = uuidutils.generate_uuid()
response = self.patch_json('/allocations/%s' % uuid,
[{'path': '/name', 'value': 'b',
'op': 'replace'}],
expect_errors=True,
headers=self.headers)
self.assertEqual(http_client.NOT_FOUND, response.status_int)
self.assertEqual('application/json', response.content_type)
self.assertTrue(response.json['error_message'])
def test_add(self):
response = self.patch_json('/allocations/%s' % self.allocation.uuid,
[{'path': '/extra/foo', 'value': 'bar',
'op': 'add'}], headers=self.headers)
self.assertEqual('application/json', response.content_type)
self.assertEqual(http_client.OK, response.status_int)
def test_add_non_existent(self):
response = self.patch_json('/allocations/%s' % self.allocation.uuid,
[{'path': '/foo', 'value': 'bar',
'op': 'add'}],
expect_errors=True,
headers=self.headers)
self.assertEqual('application/json', response.content_type)
self.assertEqual(http_client.BAD_REQUEST, response.status_int)
self.assertTrue(response.json['error_message'])
def test_add_multi(self):
response = self.patch_json('/allocations/%s' % self.allocation.uuid,
[{'path': '/extra/foo1', 'value': 'bar1',
'op': 'add'},
{'path': '/extra/foo2', 'value': 'bar2',
'op': 'add'}], headers=self.headers)
self.assertEqual('application/json', response.content_type)
self.assertEqual(http_client.OK, response.status_code)
result = self.get_json('/allocations/%s' % self.allocation.uuid,
headers=self.headers)
expected = {"foo1": "bar1", "foo2": "bar2"}
self.assertEqual(expected, result['extra'])
def test_replace_invalid_name(self):
response = self.patch_json('/allocations/%s' % self.allocation.uuid,
[{'path': '/name', 'value': '[test]',
'op': 'replace'}],
expect_errors=True,
headers=self.headers)
self.assertEqual('application/json', response.content_type)
self.assertEqual(http_client.BAD_REQUEST, response.status_int)
self.assertTrue(response.json['error_message'])
@mock.patch.object(notification_utils, '_emit_api_notification',
autospec=True)
@mock.patch.object(timeutils, 'utcnow', autospec=True)
def test_replace_singular(self, mock_utcnow, mock_notify):
test_time = datetime.datetime(2000, 1, 1, 0, 0)
mock_utcnow.return_value = test_time
response = self.patch_json('/allocations/%s' % self.allocation.uuid,
[{'path': '/name',
'value': 'test', 'op': 'replace'}],
headers=self.headers)
self.assertEqual('application/json', response.content_type)
self.assertEqual(http_client.OK, response.status_code)
result = self.get_json('/allocations/%s' % self.allocation.uuid,
headers=self.headers)
self.assertEqual('test', result['name'])
return_updated_at = timeutils.parse_isotime(
result['updated_at']).replace(tzinfo=None)
self.assertEqual(test_time, return_updated_at)
mock_notify.assert_has_calls([mock.call(mock.ANY, mock.ANY, 'update',
obj_fields.NotificationLevel.INFO,
obj_fields.NotificationStatus.START),
mock.call(mock.ANY, mock.ANY, 'update',
obj_fields.NotificationLevel.INFO,
obj_fields.NotificationStatus.END)])
def test_replace_name_with_none(self):
response = self.patch_json('/allocations/%s' % self.allocation.uuid,
[{'path': '/name',
'value': None, 'op': 'replace'}],
headers=self.headers)
self.assertEqual('application/json', response.content_type)
self.assertEqual(http_client.OK, response.status_code)
result = self.get_json('/allocations/%s' % self.allocation.uuid,
headers=self.headers)
self.assertIsNone(result['name'])
@mock.patch.object(notification_utils, '_emit_api_notification',
autospec=True)
@mock.patch.object(objects.Allocation, 'save', autospec=True)
def test_update_error(self, mock_save, mock_notify):
mock_save.side_effect = Exception()
allocation = obj_utils.create_test_allocation(self.context)
self.patch_json('/allocations/%s' % allocation.uuid, [{'path': '/name',
'value': 'new', 'op': 'replace'}],
expect_errors=True, headers=self.headers)
mock_notify.assert_has_calls([mock.call(mock.ANY, mock.ANY, 'update',
obj_fields.NotificationLevel.INFO,
obj_fields.NotificationStatus.START),
mock.call(mock.ANY, mock.ANY, 'update',
obj_fields.NotificationLevel.ERROR,
obj_fields.NotificationStatus.ERROR)])
def test_replace_multi(self):
extra = {"foo1": "bar1", "foo2": "bar2", "foo3": "bar3"}
allocation = obj_utils.create_test_allocation(
self.context, extra=extra, uuid=uuidutils.generate_uuid())
new_value = 'new value'
response = self.patch_json('/allocations/%s' % allocation.uuid,
[{'path': '/extra/foo2',
'value': new_value, 'op': 'replace'}],
headers=self.headers)
self.assertEqual('application/json', response.content_type)
self.assertEqual(http_client.OK, response.status_code)
result = self.get_json('/allocations/%s' % allocation.uuid,
headers=self.headers)
extra["foo2"] = new_value
self.assertEqual(extra, result['extra'])
def test_remove_uuid(self):
response = self.patch_json('/allocations/%s' % self.allocation.uuid,
[{'path': '/uuid', 'op': 'remove'}],
expect_errors=True,
headers=self.headers)
self.assertEqual(http_client.BAD_REQUEST, response.status_int)
self.assertEqual('application/json', response.content_type)
self.assertTrue(response.json['error_message'])
def test_remove_singular(self):
allocation = obj_utils.create_test_allocation(
self.context, extra={'a': 'b'}, uuid=uuidutils.generate_uuid())
response = self.patch_json('/allocations/%s' % allocation.uuid,
[{'path': '/extra/a', 'op': 'remove'}],
headers=self.headers)
self.assertEqual('application/json', response.content_type)
self.assertEqual(http_client.OK, response.status_code)
result = self.get_json('/allocations/%s' % allocation.uuid,
headers=self.headers)
self.assertEqual(result['extra'], {})
# Assert nothing else was changed
self.assertEqual(allocation.uuid, result['uuid'])
def test_remove_multi(self):
extra = {"foo1": "bar1", "foo2": "bar2", "foo3": "bar3"}
allocation = obj_utils.create_test_allocation(
self.context, extra=extra, uuid=uuidutils.generate_uuid())
# Removing one item from the collection
response = self.patch_json('/allocations/%s' % allocation.uuid,
[{'path': '/extra/foo2', 'op': 'remove'}],
headers=self.headers)
self.assertEqual('application/json', response.content_type)
self.assertEqual(http_client.OK, response.status_code)
result = self.get_json('/allocations/%s' % allocation.uuid,
headers=self.headers)
extra.pop("foo2")
self.assertEqual(extra, result['extra'])
# Removing the collection
response = self.patch_json('/allocations/%s' % allocation.uuid,
[{'path': '/extra', 'op': 'remove'}],
headers=self.headers)
self.assertEqual('application/json', response.content_type)
self.assertEqual(http_client.OK, response.status_code)
result = self.get_json('/allocations/%s' % allocation.uuid,
headers=self.headers)
self.assertEqual({}, result['extra'])
# Assert nothing else was changed
self.assertEqual(allocation.uuid, result['uuid'])
def test_remove_non_existent_property_fail(self):
response = self.patch_json(
'/allocations/%s' % self.allocation.uuid,
[{'path': '/extra/non-existent', 'op': 'remove'}],
expect_errors=True, headers=self.headers)
self.assertEqual('application/json', response.content_type)
self.assertEqual(http_client.BAD_REQUEST, response.status_code)
self.assertTrue(response.json['error_message'])
def test_update_owner_not_acceptable(self):
allocation = obj_utils.create_test_allocation(
self.context, owner='12345', uuid=uuidutils.generate_uuid())
new_owner = '54321'
response = self.patch_json('/allocations/%s' % allocation.uuid,
[{'path': '/owner',
'value': new_owner,
'op': 'replace'}],
expect_errors=True, headers=self.headers)
self.assertEqual('application/json', response.content_type)
self.assertEqual(http_client.BAD_REQUEST, response.status_code)
def _create_locally(_api, _ctx, allocation, topic):
if 'node_id' in allocation and allocation.node_id:
assert topic == 'node-topic', topic
else:
assert topic == 'some-topic', topic
allocation.create()
return allocation
@mock.patch.object(rpcapi.ConductorAPI, 'create_allocation', _create_locally)
class TestPost(test_api_base.BaseApiTest):
headers = {api_base.Version.string: str(api_v1.max_version())}
def setUp(self):
super(TestPost, self).setUp()
self.mock_get_topic = self.useFixture(
fixtures.MockPatchObject(rpcapi.ConductorAPI, 'get_random_topic')
).mock
self.mock_get_topic.return_value = 'some-topic'
self.mock_get_topic_for_node = self.useFixture(
fixtures.MockPatchObject(rpcapi.ConductorAPI, 'get_topic_for')
).mock
self.mock_get_topic_for_node.return_value = 'node-topic'
@mock.patch.object(notification_utils, '_emit_api_notification',
autospec=True)
@mock.patch.object(timeutils, 'utcnow', autospec=True)
def test_create_allocation(self, mock_utcnow, mock_notify):
adict = apiutils.allocation_post_data()
test_time = datetime.datetime(2000, 1, 1, 0, 0)
mock_utcnow.return_value = test_time
response = self.post_json('/allocations', adict,
headers=self.headers)
self.assertEqual(http_client.CREATED, response.status_int)
self.assertEqual(adict['uuid'], response.json['uuid'])
self.assertEqual('allocating', response.json['state'])
self.assertIsNone(response.json['node_uuid'])
self.assertEqual([], response.json['candidate_nodes'])
self.assertEqual([], response.json['traits'])
self.assertNotIn('node', response.json)
result = self.get_json('/allocations/%s' % adict['uuid'],
headers=self.headers)
self.assertEqual(adict['uuid'], result['uuid'])
self.assertFalse(result['updated_at'])
self.assertIsNone(result['node_uuid'])
self.assertEqual([], result['candidate_nodes'])
self.assertEqual([], result['traits'])
self.assertIsNone(result['owner'])
self.assertNotIn('node', result)
return_created_at = timeutils.parse_isotime(
result['created_at']).replace(tzinfo=None)
self.assertEqual(test_time, return_created_at)
# Check location header
self.assertIsNotNone(response.location)
expected_location = '/v1/allocations/%s' % adict['uuid']
self.assertEqual(urlparse.urlparse(response.location).path,
expected_location)
mock_notify.assert_has_calls([
mock.call(mock.ANY, mock.ANY, 'create',
obj_fields.NotificationLevel.INFO,
obj_fields.NotificationStatus.START),
mock.call(mock.ANY, mock.ANY, 'create',
obj_fields.NotificationLevel.INFO,
obj_fields.NotificationStatus.END),
])
def test_create_allocation_invalid_api_version(self):
adict = apiutils.allocation_post_data()
response = self.post_json(
'/allocations', adict, headers={api_base.Version.string: '1.50'},
expect_errors=True)
self.assertEqual(http_client.METHOD_NOT_ALLOWED, response.status_int)
def test_create_allocation_doesnt_contain_id(self):
with mock.patch.object(self.dbapi, 'create_allocation',
wraps=self.dbapi.create_allocation) as cp_mock:
adict = apiutils.allocation_post_data(extra={'foo': 123})
self.post_json('/allocations', adict, headers=self.headers)
result = self.get_json('/allocations/%s' % adict['uuid'],
headers=self.headers)
self.assertEqual(adict['extra'], result['extra'])
cp_mock.assert_called_once_with(mock.ANY)
# Check that 'id' is not in first arg of positional args
self.assertNotIn('id', cp_mock.call_args[0][0])
@mock.patch.object(notification_utils.LOG, 'exception', autospec=True)
@mock.patch.object(notification_utils.LOG, 'warning', autospec=True)
def test_create_allocation_generate_uuid(self, mock_warn, mock_except):
adict = apiutils.allocation_post_data()
del adict['uuid']
response = self.post_json('/allocations', adict, headers=self.headers)
result = self.get_json('/allocations/%s' % response.json['uuid'],
headers=self.headers)
self.assertTrue(uuidutils.is_uuid_like(result['uuid']))
self.assertFalse(mock_warn.called)
self.assertFalse(mock_except.called)
@mock.patch.object(notification_utils, '_emit_api_notification',
autospec=True)
@mock.patch.object(objects.Allocation, 'create', autospec=True)
def test_create_allocation_error(self, mock_create, mock_notify):
mock_create.side_effect = Exception()
adict = apiutils.allocation_post_data()
self.post_json('/allocations', adict, headers=self.headers,
expect_errors=True)
mock_notify.assert_has_calls([
mock.call(mock.ANY, mock.ANY, 'create',
obj_fields.NotificationLevel.INFO,
obj_fields.NotificationStatus.START),
mock.call(mock.ANY, mock.ANY, 'create',
obj_fields.NotificationLevel.ERROR,
obj_fields.NotificationStatus.ERROR),
])
def test_create_allocation_with_candidate_nodes(self):
node1 = obj_utils.create_test_node(self.context,
name='node-1')
node2 = obj_utils.create_test_node(self.context,
uuid=uuidutils.generate_uuid())
adict = apiutils.allocation_post_data(
candidate_nodes=[node1.name, node2.uuid])
response = self.post_json('/allocations', adict,
headers=self.headers)
self.assertEqual(http_client.CREATED, response.status_int)
result = self.get_json('/allocations/%s' % adict['uuid'],
headers=self.headers)
self.assertEqual(adict['uuid'], result['uuid'])
self.assertEqual([node1.uuid, node2.uuid], result['candidate_nodes'])
def test_create_allocation_valid_extra(self):
adict = apiutils.allocation_post_data(
extra={'str': 'foo', 'int': 123, 'float': 0.1, 'bool': True,
'list': [1, 2], 'none': None, 'dict': {'cat': 'meow'}})
self.post_json('/allocations', adict, headers=self.headers)
result = self.get_json('/allocations/%s' % adict['uuid'],
headers=self.headers)
self.assertEqual(adict['extra'], result['extra'])
def test_create_allocation_with_no_extra(self):
adict = apiutils.allocation_post_data()
del adict['extra']
response = self.post_json('/allocations', adict, headers=self.headers)
self.assertEqual('application/json', response.content_type)
self.assertEqual(http_client.CREATED, response.status_int)
def test_create_allocation_no_mandatory_field_resource_class(self):
adict = apiutils.allocation_post_data()
del adict['resource_class']
response = self.post_json('/allocations', adict, expect_errors=True,
headers=self.headers)
self.assertEqual(http_client.BAD_REQUEST, response.status_int)
self.assertEqual('application/json', response.content_type)
self.assertIn('resource_class', response.json['error_message'])
def test_create_allocation_resource_class_too_long(self):
adict = apiutils.allocation_post_data()
adict['resource_class'] = 'f' * 81
response = self.post_json('/allocations', adict, expect_errors=True,
headers=self.headers)
self.assertEqual(http_client.BAD_REQUEST, response.status_int)
self.assertEqual('application/json', response.content_type)
self.assertTrue(response.json['error_message'])
def test_create_allocation_with_traits(self):
adict = apiutils.allocation_post_data()
adict['traits'] = ['CUSTOM_GPU', 'CUSTOM_FOO_BAR']
response = self.post_json('/allocations', adict, headers=self.headers)
self.assertEqual('application/json', response.content_type)
self.assertEqual(http_client.CREATED, response.status_int)
self.assertEqual(['CUSTOM_GPU', 'CUSTOM_FOO_BAR'],
response.json['traits'])
result = self.get_json('/allocations/%s' % adict['uuid'],
headers=self.headers)
self.assertEqual(adict['uuid'], result['uuid'])
self.assertEqual(['CUSTOM_GPU', 'CUSTOM_FOO_BAR'],
result['traits'])
def test_create_allocation_invalid_trait(self):
adict = apiutils.allocation_post_data()
adict['traits'] = ['CUSTOM_GPU', 'FOO_BAR']
response = self.post_json('/allocations', adict, expect_errors=True,
headers=self.headers)
self.assertEqual(http_client.BAD_REQUEST, response.status_int)
self.assertEqual('application/json', response.content_type)
self.assertTrue(response.json['error_message'])
def test_create_allocation_invalid_candidate_node_format(self):
adict = apiutils.allocation_post_data(
candidate_nodes=['invalid-format'])
response = self.post_json('/allocations', adict, expect_errors=True,
headers=self.headers)
self.assertEqual('application/json', response.content_type)
self.assertEqual(http_client.BAD_REQUEST, response.status_int)
self.assertTrue(response.json['error_message'])
def test_create_allocation_candidate_node_not_found(self):
adict = apiutils.allocation_post_data(
candidate_nodes=['1a1a1a1a-2b2b-3c3c-4d4d-5e5e5e5e5e5e'])
response = self.post_json('/allocations', adict, expect_errors=True,
headers=self.headers)
self.assertEqual('application/json', response.content_type)
self.assertEqual(http_client.BAD_REQUEST, response.status_int)
self.assertTrue(response.json['error_message'])
def test_create_allocation_candidate_node_invalid(self):
adict = apiutils.allocation_post_data(
candidate_nodes=['this/is/not a/node/name'])
response = self.post_json('/allocations', adict, expect_errors=True,
headers=self.headers)
self.assertEqual('application/json', response.content_type)
self.assertEqual(http_client.BAD_REQUEST, response.status_int)
self.assertTrue(response.json['error_message'])
def test_create_allocation_name_ok(self):
name = 'foo'
adict = apiutils.allocation_post_data(name=name)
self.post_json('/allocations', adict, headers=self.headers)
result = self.get_json('/allocations/%s' % adict['uuid'],
headers=self.headers)
self.assertEqual(name, result['name'])
def test_create_allocation_name_invalid(self):
name = 'aa:bb_cc'
adict = apiutils.allocation_post_data(name=name)
response = self.post_json('/allocations', adict, headers=self.headers,
expect_errors=True)
self.assertEqual(http_client.BAD_REQUEST, response.status_int)
def test_create_by_node_not_allowed(self):
node = obj_utils.create_test_node(self.context)
adict = apiutils.allocation_post_data()
response = self.post_json('/nodes/%s/allocation' % node.uuid,
adict, headers=self.headers,
expect_errors=True)
self.assertEqual('application/json', response.content_type)
self.assertEqual(http_client.METHOD_NOT_ALLOWED, response.status_int)
def test_create_node_uuid_not_allowed(self):
node = obj_utils.create_test_node(self.context)
adict = apiutils.allocation_post_data()
adict['node_uuid'] = node.uuid
response = self.post_json('/allocations', adict, expect_errors=True,
headers=self.headers)
self.assertEqual(http_client.BAD_REQUEST, response.status_int)
self.assertEqual('application/json', response.content_type)
self.assertTrue(response.json['error_message'])
def test_create_allocation_owner(self):
owner = '12345'
adict = apiutils.allocation_post_data(owner=owner)
self.post_json('/allocations', adict, headers=self.headers)
result = self.get_json('/allocations/%s' % adict['uuid'],
headers=self.headers)
self.assertEqual(owner, result['owner'])
def test_create_allocation_owner_not_allowed(self):
owner = '12345'
adict = apiutils.allocation_post_data(owner=owner)
response = self.post_json('/allocations', adict,
headers={api_base.Version.string: '1.59'},
expect_errors=True)
self.assertEqual('application/json', response.content_type)
self.assertEqual(http_client.NOT_ACCEPTABLE, response.status_int)
@mock.patch.object(auth_token.AuthProtocol, 'process_request',
autospec=True)
    def test_create_allocation_owner_not_my_project_id(self, mock_auth_req):
        # This is only enforced, test-wise, with the new oslo.policy RBAC
        # model and enforcement. It can likely be cleaned up after the Xena
        # cycle.
cfg.CONF.set_override('enforce_scope', True, group='oslo_policy')
cfg.CONF.set_override('enforce_new_defaults', True,
group='oslo_policy')
# Tests normally run in noauth, but we need policy
# enforcement to run completely here to ensure the logic is followed.
cfg.CONF.set_override('auth_strategy', 'keystone')
self.headers['X-Project-ID'] = '0987'
self.headers['X-Roles'] = 'admin,member,reader'
owner = '12345'
adict = apiutils.allocation_post_data(owner=owner)
response = self.post_json('/allocations', adict, expect_errors=True,
headers=self.headers)
self.assertEqual(http_client.FORBIDDEN, response.status_int)
expected_faultstring = ('Cannot create allocation with an owner '
'Project ID value 12345 not matching the '
'requestor Project ID 0987. Policy '
'baremetal:allocation:create_restricted '
'is required for this capability.')
error_body = json.loads(response.json['error_message'])
self.assertEqual(expected_faultstring,
error_body.get('faultstring'))
def test_create_allocation_owner_auto_filled(self):
cfg.CONF.set_override('enforce_new_defaults', True,
group='oslo_policy')
self.headers['X-Project-ID'] = '123456'
adict = apiutils.allocation_post_data()
self.post_json('/allocations', adict, headers=self.headers)
result = self.get_json('/allocations/%s' % adict['uuid'],
headers=self.headers)
self.assertEqual('123456', result['owner'])
def test_create_allocation_is_restricted_until_scope_enforcement(self):
cfg.CONF.set_override('enforce_new_defaults', False,
group='oslo_policy')
cfg.CONF.set_override('auth_strategy', 'keystone')
# We're setting ourselves to be a random ID and member
# which is allowed to create an allocation.
self.headers['X-Project-ID'] = '1135'
self.headers['X-Roles'] = 'admin, member, reader'
self.headers['X-Is-Admin-Project'] = 'False'
adict = apiutils.allocation_post_data()
response = self.post_json('/allocations', adict, expect_errors=True,
headers=self.headers)
self.assertEqual(http_client.FORBIDDEN, response.status_int)
def test_backfill(self):
node = obj_utils.create_test_node(self.context)
adict = apiutils.allocation_post_data(node=node.uuid)
response = self.post_json('/allocations', adict,
headers=self.headers)
self.assertEqual(http_client.CREATED, response.status_int)
self.assertNotIn('node', response.json)
result = self.get_json('/allocations/%s' % adict['uuid'],
headers=self.headers)
self.assertEqual(adict['uuid'], result['uuid'])
self.assertEqual(node.uuid, result['node_uuid'])
self.assertNotIn('node', result)
def test_backfill_with_name(self):
node = obj_utils.create_test_node(self.context, name='backfill-me')
adict = apiutils.allocation_post_data(node=node.name)
response = self.post_json('/allocations', adict,
headers=self.headers)
self.assertEqual(http_client.CREATED, response.status_int)
self.assertNotIn('node', response.json)
result = self.get_json('/allocations/%s' % adict['uuid'],
headers=self.headers)
self.assertEqual(adict['uuid'], result['uuid'])
self.assertEqual(node.uuid, result['node_uuid'])
self.assertNotIn('node', result)
def test_backfill_without_resource_class(self):
node = obj_utils.create_test_node(self.context,
resource_class='bm-super')
adict = {'node': node.uuid}
response = self.post_json('/allocations', adict,
headers=self.headers)
self.assertEqual(http_client.CREATED, response.status_int)
result = self.get_json('/allocations/%s' % response.json['uuid'],
headers=self.headers)
self.assertEqual(node.uuid, result['node_uuid'])
self.assertEqual('bm-super', result['resource_class'])
def test_backfill_copy_instance_uuid(self):
uuid = uuidutils.generate_uuid()
node = obj_utils.create_test_node(self.context,
instance_uuid=uuid,
resource_class='bm-super')
adict = {'node': node.uuid}
response = self.post_json('/allocations', adict,
headers=self.headers)
self.assertEqual(http_client.CREATED, response.status_int)
result = self.get_json('/allocations/%s' % response.json['uuid'],
headers=self.headers)
self.assertEqual(uuid, result['uuid'])
self.assertEqual(node.uuid, result['node_uuid'])
self.assertEqual('bm-super', result['resource_class'])
def test_backfill_node_not_found(self):
adict = apiutils.allocation_post_data(node=uuidutils.generate_uuid())
response = self.post_json('/allocations', adict, expect_errors=True,
headers=self.headers)
self.assertEqual(http_client.BAD_REQUEST, response.status_int)
self.assertEqual('application/json', response.content_type)
self.assertTrue(response.json['error_message'])
def test_backfill_not_allowed(self):
node = obj_utils.create_test_node(self.context)
headers = {api_base.Version.string: '1.57'}
adict = {'node': node.uuid}
response = self.post_json('/allocations', adict, expect_errors=True,
headers=headers)
self.assertEqual(http_client.BAD_REQUEST, response.status_int)
self.assertEqual('application/json', response.content_type)
self.assertTrue(response.json['error_message'])
def test_create_restricted_allocation_normal(self):
cfg.CONF.set_override('enforce_new_defaults', True,
group='oslo_policy')
owner = '12345'
adict = apiutils.allocation_post_data()
headers = {api_base.Version.string: '1.60',
'X-Roles': 'member,reader',
'X-Project-Id': owner}
response = self.post_json('/allocations', adict, headers=headers)
self.assertEqual('application/json', response.content_type)
self.assertEqual(http_client.CREATED, response.status_int)
self.assertEqual(owner, response.json['owner'])
result = self.get_json('/allocations/%s' % adict['uuid'],
headers=headers)
self.assertEqual(adict['uuid'], result['uuid'])
self.assertEqual(owner, result['owner'])
def test_create_restricted_allocation_older_version(self):
owner = '12345'
adict = apiutils.allocation_post_data()
del adict['owner']
headers = {api_base.Version.string: '1.59', 'X-Project-Id': owner}
response = self.post_json('/allocations', adict, headers=headers)
self.assertEqual('application/json', response.content_type)
self.assertEqual(http_client.CREATED, response.status_int)
result = self.get_json('/allocations/%s' % adict['uuid'],
headers=headers)
self.assertEqual(adict['uuid'], result['uuid'])
@mock.patch.object(policy, 'authorize', autospec=True)
def test_create_restricted_allocation_forbidden(self, mock_authorize):
def mock_authorize_function(rule, target, creds):
raise exception.HTTPForbidden(resource='fake')
mock_authorize.side_effect = mock_authorize_function
owner = '12345'
adict = apiutils.allocation_post_data()
headers = {api_base.Version.string: '1.60', 'X-Project-Id': owner}
response = self.post_json('/allocations', adict, expect_errors=True,
headers=headers)
self.assertEqual(http_client.FORBIDDEN, response.status_int)
self.assertEqual('application/json', response.content_type)
self.assertTrue(response.json['error_message'])
@mock.patch.object(policy, 'authorize', autospec=True)
def test_create_restricted_allocation_with_owner(self, mock_authorize):
def mock_authorize_function(rule, target, creds):
if rule == 'baremetal:allocation:create':
raise exception.HTTPForbidden(resource='fake')
return True
mock_authorize.side_effect = mock_authorize_function
owner = '12345'
adict = apiutils.allocation_post_data(owner=owner)
adict['owner'] = owner
headers = {api_base.Version.string: '1.60', 'X-Project-Id': owner}
response = self.post_json('/allocations', adict, headers=headers)
self.assertEqual('application/json', response.content_type)
self.assertEqual(http_client.CREATED, response.status_int)
self.assertEqual(owner, response.json['owner'])
result = self.get_json('/allocations/%s' % adict['uuid'],
headers=headers)
self.assertEqual(adict['uuid'], result['uuid'])
self.assertEqual(owner, result['owner'])
@mock.patch.object(policy, 'authorize', autospec=True)
def test_create_restricted_allocation_with_mismatch_owner(
self, mock_authorize):
def mock_authorize_function(rule, target, creds):
if rule == 'baremetal:allocation:create':
raise exception.HTTPForbidden(resource='fake')
return True
mock_authorize.side_effect = mock_authorize_function
owner = '12345'
adict = apiutils.allocation_post_data(owner=owner)
adict['owner'] = '54321'
headers = {api_base.Version.string: '1.60', 'X-Project-Id': owner}
response = self.post_json('/allocations', adict, expect_errors=True,
headers=headers)
self.assertEqual(http_client.FORBIDDEN, response.status_int)
self.assertEqual('application/json', response.content_type)
self.assertTrue(response.json['error_message'])
@mock.patch.object(rpcapi.ConductorAPI, 'destroy_allocation', autospec=True)
class TestDelete(test_api_base.BaseApiTest):
headers = {api_base.Version.string: str(api_v1.max_version())}
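    # The class-level destroy_allocation patch above applies to every test
    # method in this class, which is why each test receives a trailing
    # mock_destroy argument.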
def setUp(self):
super(TestDelete, self).setUp()
self.node = obj_utils.create_test_node(self.context)
self.allocation = obj_utils.create_test_allocation(
self.context, node_id=self.node.id, name='alloc1')
self.mock_get_topic = self.useFixture(
fixtures.MockPatchObject(rpcapi.ConductorAPI, 'get_random_topic')
).mock
@mock.patch.object(notification_utils, '_emit_api_notification',
autospec=True)
def test_delete_allocation_by_id(self, mock_notify, mock_destroy):
self.delete('/allocations/%s' % self.allocation.uuid,
headers=self.headers)
self.assertTrue(mock_destroy.called)
mock_notify.assert_has_calls([
mock.call(mock.ANY, mock.ANY, 'delete',
obj_fields.NotificationLevel.INFO,
obj_fields.NotificationStatus.START,
node_uuid=self.node.uuid),
mock.call(mock.ANY, mock.ANY, 'delete',
obj_fields.NotificationLevel.INFO,
obj_fields.NotificationStatus.END,
node_uuid=self.node.uuid),
])
@mock.patch.object(notification_utils, '_emit_api_notification',
autospec=True)
def test_delete_allocation_node_locked(self, mock_notify, mock_destroy):
self.node.reserve(self.context, 'fake', self.node.uuid)
mock_destroy.side_effect = exception.NodeLocked(node='fake-node',
host='fake-host')
ret = self.delete('/allocations/%s' % self.allocation.uuid,
expect_errors=True, headers=self.headers)
self.assertEqual(http_client.CONFLICT, ret.status_code)
self.assertTrue(ret.json['error_message'])
self.assertTrue(mock_destroy.called)
mock_notify.assert_has_calls([
mock.call(mock.ANY, mock.ANY, 'delete',
obj_fields.NotificationLevel.INFO,
obj_fields.NotificationStatus.START,
node_uuid=self.node.uuid),
mock.call(mock.ANY, mock.ANY, 'delete',
obj_fields.NotificationLevel.ERROR,
obj_fields.NotificationStatus.ERROR,
node_uuid=self.node.uuid),
])
def test_delete_allocation_invalid_api_version(self, mock_destroy):
response = self.delete('/allocations/%s' % self.allocation.uuid,
expect_errors=True,
headers={api_base.Version.string: '1.14'})
self.assertEqual(http_client.METHOD_NOT_ALLOWED, response.status_int)
def test_delete_allocation_invalid_api_version_without_check(self,
mock_destroy):
# Invalid name, but the check happens after the microversion check.
response = self.delete('/allocations/ba!na!na1',
expect_errors=True,
headers={api_base.Version.string: '1.14'})
self.assertEqual(http_client.METHOD_NOT_ALLOWED, response.status_int)
def test_delete_allocation_by_name(self, mock_destroy):
self.delete('/allocations/%s' % self.allocation.name,
headers=self.headers)
self.assertTrue(mock_destroy.called)
def test_delete_allocation_by_name_with_json(self, mock_destroy):
self.delete('/allocations/%s.json' % self.allocation.name,
headers=self.headers)
self.assertTrue(mock_destroy.called)
def test_delete_allocation_by_name_not_existed(self, mock_destroy):
res = self.delete('/allocations/%s' % 'blah', expect_errors=True,
headers=self.headers)
self.assertEqual(http_client.NOT_FOUND, res.status_code)
@mock.patch.object(notification_utils, '_emit_api_notification',
autospec=True)
def test_delete_allocation_by_node(self, mock_notify, mock_destroy):
self.delete('/nodes/%s/allocation' % self.node.uuid,
headers=self.headers)
self.assertTrue(mock_destroy.called)
mock_notify.assert_has_calls([
mock.call(mock.ANY, mock.ANY, 'delete',
obj_fields.NotificationLevel.INFO,
obj_fields.NotificationStatus.START,
node_uuid=self.node.uuid),
mock.call(mock.ANY, mock.ANY, 'delete',
obj_fields.NotificationLevel.INFO,
obj_fields.NotificationStatus.END,
node_uuid=self.node.uuid),
])
def test_delete_allocation_by_node_not_existed(self, mock_destroy):
node = obj_utils.create_test_node(self.context,
uuid=uuidutils.generate_uuid())
res = self.delete('/nodes/%s/allocation' % node.uuid,
expect_errors=True, headers=self.headers)
self.assertEqual(http_client.NOT_FOUND, res.status_code)
def test_delete_allocation_invalid_node(self, mock_destroy):
res = self.delete('/nodes/%s/allocation' % uuidutils.generate_uuid(),
expect_errors=True, headers=self.headers)
self.assertEqual(http_client.NOT_FOUND, res.status_code)
def test_delete_allocation_by_node_invalid_api_version(self, mock_destroy):
obj_utils.create_test_allocation(self.context, node_id=self.node.id)
response = self.delete(
'/nodes/%s/allocation' % self.node.uuid,
headers={api_base.Version.string: str(api_v1.min_version())},
expect_errors=True)
self.assertEqual(http_client.NOT_FOUND, response.status_int)
self.assertFalse(mock_destroy.called)
| apache-2.0 | -4,012,432,216,761,781,000 | 48.28979 | 79 | 0.578198 | false |
slideinc/notifier | notifier/service.py | 1 | 11364 | # -*- Mode: Python; tab-width: 4 -*-
# Copyright (c) 2005-2010 Slide, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of the author nor the names of other
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""service
An RPC service provider, to be executed within the coroutine framework.
"""
from gogreen import coro
from gogreen import corowork
import exceptions
import time
import sys
import os
import decorators
import error
import access
def statistics_filter(args, kwargs):
return args[1].get('command', 'none')
class SimpleWorker(corowork.Worker):
def __init__(self, *args, **kwargs):
super(SimpleWorker, self).__init__(*args, **kwargs)
self._notifier = kwargs['notifier']
self._waiter = coro.coroutine_cond()
self._objname = kwargs['object']
self._msgqueue = []
def execute(self, vid, call, seq, server, **kwargs):
cmd = call.get('command', None)
args = call.get('args', ())
kwargs = call.get('kwargs', {})
tlb, tval = call.get('tlb', ('tlb-%s' % self._objname, False))
slv, sval = call.get('slave', ('slave-read', False))
source = call.get('source')
self.debug(
'execute command %r id %r args %r kwargs %r tlb %s slv %s',
cmd, vid, args, kwargs, (tlb,tval), (slv,sval))
try:
coro.set_local(tlb, tval)
coro.set_local(slv, sval)
if source: coro.set_local(access.CORO_LOCAL_SOURCE, source)
try:
result = self._execute(vid, cmd, args, kwargs)
finally:
coro.pop_local(slv)
coro.pop_local(tlb)
coro.pop_local(access.CORO_LOCAL_SOURCE)
except error.AccessError, e:
self.warn('AccessError: %r %r' % (e, e.args,))
result = {
'rc': e.id,
'msg': e[0],
'args': e.args,
'envl': True}
self.clear()
except exceptions.Exception, e:
self.traceback()
t,v,tb = coro.traceback_info()
result = {
'rc': error.ServiceTraceback.id,
'tb': tb,
'msg': 'Traceback: [%s|%s]' % (t,v),
'args': getattr(e, 'args', str(e)),
'envl': True}
except:
self.traceback()
t,v,tb = coro.traceback_info()
result = {
'rc': error.ServiceTraceback.id,
'tb': tb,
'msg': 'Traceback: [%s|%s]' % (t,v),
'envl': True}
self.clear()
else:
self.flush()
server.rpc_response(seq, result)
def _execute(self, vid, cmd, args, kwargs):
handler = getattr(self, cmd, None)
if handler is None or not getattr(handler, 'command', 0):
return {
'rc': error.NoServiceHandler.id,
'msg': 'no handler: %s' % cmd,
'args': (cmd,),
'envl': True}
return self._call(handler, vid, *args, **kwargs)
def _call(self, handler, *args, **kwargs):
self.preemptable_set(True)
try:
result = handler(*args, **kwargs)
finally:
self.preemptable_set(False)
if getattr(handler, 'envelope', False):
result = {'rc': 0, 'result': result, 'envl': True}
return result
def complete(self):
super(SimpleWorker, self).complete()
self._notifier = None
#
# We act as an RPC proxy to ensure that messages are queued and only
# flushed once the transaction has completed.
#
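    # publish() only queues outgoing messages; execute() calls flush() after a
    # successful command and clear() when the command raises, so notifications
    # are never emitted for failed work.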
def rpcs(self, *args, **kwargs):
return self._notifier.rpcs(*args, **kwargs)
def rpc(self, *args, **kwargs):
return self._notifier.rpc(*args, **kwargs)
def publish(self, object, id, cmd, args):
self._msgqueue.append((object, id, cmd, args))
def clear(self):
self._msgqueue = []
def flush(self):
for object, id, cmd, args in self._msgqueue:
self._notifier.publish(object, id, cmd, args)
self.clear()
#
# common testing commands
#
@decorators.command
def sleep(self, vid, timeout):
self._waiter.wait(timeout)
return {'timeout': timeout}
@decorators.command
def ping(self, vid, args):
return args
class Worker(SimpleWorker):
def __init__(self, *args, **kwargs):
super(Worker, self).__init__(*args, **kwargs)
def _execute(self, vid, cmd, args, kwargs):
handler = getattr(self, cmd, None)
if handler is None or not getattr(handler, 'command', 0):
return {
'rc': error.NoServiceHandler.id,
'msg': 'no handler: %s' % cmd,
'args': (cmd,),
'envl': True}
if not getattr(handler, 'cursor', 0):
return self._call(handler, vid, *args, **kwargs)
slave = getattr(handler, 'readonly', False) and access.slave_read()
extra_dbc_hints = getattr(handler, 'extra_dbc_hints', [])
extra = {}
for hint in extra_dbc_hints:
extra[hint] = kwargs.get(hint)
if getattr(handler, 'nopartition', 0):
dbc = self._get_dbc(None, slave = slave, **extra)
else:
dbc = self._get_dbc(vid, slave = slave, **extra)
if dbc is None:
return {
'rc': error.DatabaseUnavailable.id,
'msg': 'DB currently offline',
'args': (),
'envl': True}
try:
result = self._call(handler, vid, dbc.cursor(), *args, **kwargs)
dbc.commit()
finally:
self._put_dbc(dbc, slave = slave)
return result
def complete(self):
super(Worker, self).complete()
class Server(object):
worker_class = Worker
subscription = 'base'
statistics = 'basestat'
def __init__(self, size, notifier, **kwargs):
self._clean = False
if isinstance(size, list):
kwargs['sizes'] = size
else:
kwargs['size'] = size
kwargs['object'] = self.subscription
kwargs['worker'] = self.worker_class
kwargs['notifier'] = notifier
kwargs['filter'] = lambda *a: '%s.%s' % (
self.__module__,
statistics_filter(*a))
self._server = corowork.Server(**kwargs)
self._server.name = self.__class__.__name__
self._server.start()
self._notifier = kwargs['notifier']
self._bounds = {}
self._active = False
if 'loglevel' in kwargs:
self._server.set_log_level(kwargs['loglevel'])
self._notifier.set_log_level(kwargs['loglevel'])
self.load_config(kwargs['bounds'])
def __del__(self):
if not self._clean:
self.drain()
def active(self):
return bool(self._bounds)
def drain(self, timeout = None, grace = 0.0):
# if not active ignore grace period and timeout
#
if not self._active:
timeout = None
grace = 0.0
self._notifier.rpc_unregister_all(self)
coro.sleep(grace)
shutdown_start = time.time()
join = self._server.shutdown(timeout = timeout)
return {
'join' : join,
'time' : time.time() - shutdown_start
}
def rpc_call(self, obj, id, cmd, args, seq, server):
if obj == self.statistics:
self._server.command_push(obj, id, cmd, args, seq, server)
else:
prio = getattr(
self, 'command_prios', {}).get(args.get('command'), 0)
            self._server.request(id, args, seq, server, prio = prio)
def load_config(self, bounds):
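        # `bounds` carries the worker pool size plus the RPC subscription
        # mask/value pairs read below, e.g. (illustrative values only):
        #     {'workers': 8, 'mask': 255, 'value': 1,
        #      'bmask': 255, 'bvalue': 1, 'weight': 1.0}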
#
# work queue (re)size
#
self._server.resize(bounds.get('workers'))
#
# old and new RPC service mask/value
#
oldsub = (self._bounds.get('mask'), self._bounds.get('value'))
oldbrd = (self._bounds.get('bmask'), self._bounds.get('bvalue'))
newsub = (bounds.get('mask'), bounds.get('value'))
newbrd = (bounds.get('bmask'), bounds.get('bvalue'))
#
# old and new service enabler
#
oldsrv = self._bounds.get('service', bool(None not in oldsub))
newsrv = bounds.get('service', bool(None not in newsub))
oweight = self._bounds.get('weight', 1.0)
nweight = bounds.get('weight', 1.0)
if (oldsub, oldbrd, oldsrv, oweight) == (newsub,newbrd,newsrv,nweight):
return None, None
unreg = nweight != oweight or oldsub != newsub or \
oldbrd != newbrd or not newsrv
if self._active and unreg:
self._notifier.rpc_unregister_all(self)
self._active = False
if not self._active and newsrv:
for command in self._server.command_list():
self._notifier.rpc_register(self.statistics, 0, command, self)
self._notifier.rpc_slice(
self.subscription, newsub, 'execute', self, weight = nweight)
if getattr(self, 'sub_broadcast', None):
self._notifier.rpc_register(
self.sub_broadcast, newbrd, 'execute', self)
self._active = True
self._bounds = bounds
        # JRM: I think it would be fine to add old and new brd values to
        # this return, but not sure how that'll affect the itemservs, so will
        # leave it to libor to decide
#
return (oldsub, oldsrv, oweight), (newsub, newsrv, nweight)
#
# statistics
#
def rate(self):
return self._server.stats.rate()
def details(self):
return self._server.stats.details()
def averages(self):
return self._server.stats.averages()
#
# end...
| bsd-3-clause | 426,281,300,781,720,400 | 31.284091 | 79 | 0.559662 | false |
chrishaines/pyusps | setup.py | 1 | 1305 | #!/usr/bin/python
from setuptools import setup, find_packages
import os
EXTRAS_REQUIRES = dict(
test=[
'fudge>=1.0.3',
'nose>=1.1.2',
],
dev=[
'ipython>=0.12.1',
],
)
# Pypi package documentation
root = os.path.dirname(__file__)
path = os.path.join(root, 'README.rst')
with open(path) as fp:
long_description = fp.read()
setup(
name='pyusps',
version='0.0.6',
description='pyusps -- Python bindings for the USPS Ecommerce APIs',
long_description=long_description,
author='Andres Buritica',
author_email='[email protected]',
maintainer='Andres Buritica',
maintainer_email='[email protected]',
url='https://github.com/thelinuxkid/pyusps',
license='MIT',
packages = find_packages(),
namespace_packages = ['pyusps'],
test_suite='nose.collector',
install_requires=[
'setuptools>=0.6c11',
'lxml>=2.3.3',
'ordereddict==1.1',
],
extras_require=EXTRAS_REQUIRES,
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Natural Language :: English',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7'
],
)
| mit | -6,469,447,712,948,329,000 | 25.632653 | 72 | 0.6 | false |
vadimadr/generator-djdj | generators/app/templates/django_project_template/settings/production.py | 1 | 2811 | """
Production settings
- DATABASE_URL and DJANGO_SECRET_KEY should be in env
"""
# noinspection PyUnresolvedReferences
from .common import * # noqa
DEBUG = env.bool('DJANGO_DEBUG', default=False)
# Raises ImproperlyConfigured exception if DJANGO_SECRET_KEY not in os.environ
SECRET_KEY = env('DJANGO_SECRET_KEY')
# Honor the 'X-Forwarded-Proto' header for request.is_secure()
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
ALLOWED_HOSTS = env.list('DJANGO_ALLOWED_HOSTS', default=['example.com'])
# Use template caching
TEMPLATES[0]['OPTIONS']['loaders'] = [
('django.template.loaders.cached.Loader', [
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader', ]),
]
DATABASES['default'] = env.db('DATABASE_URL')
# Static configuration
# ---------------------------------------
# Use whitenoise for satic: http://whitenoise.evans.io/en/stable/
MIDDLEWARE += ['whitenoise.middleware.WhiteNoiseMiddleware']
STATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage'
# EMAIL CONFIGURATION
# ---------------------------------------
# todo: add mailgun support
EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
DEFAULT_FROM_EMAIL = env('DJANGO_DEFAULT_FROM_EMAIL',
default='project_template <[email protected]>')
EMAIL_SUBJECT_PREFIX = env('DJANGO_EMAIL_SUBJECT_PREFIX',
default='[django] ')
# Logging config
# ----------------------------------------
LOGGING = {
'version': 1,
'disable_existing_loggers': True,
# 'root': {
# 'level': 'WARNING',
# 'handlers': ['sentry'],
# },
'formatters': {
'verbose': {
'format': '%(levelname)s %(asctime)s %(module)s '
'%(process)d %(thread)d %(message)s'
},
},
'handlers': {
# 'sentry': {
# 'level': 'ERROR',
# 'class':
# 'raven.contrib.django.raven_compat.handlers
# .SentryHandler',
# },
'console': {
'level': 'DEBUG',
'class': 'logging.StreamHandler',
'formatter': 'verbose'
}
},
'loggers': {
'django.db.backends': {
'level': 'ERROR',
'handlers': ['console'],
'propagate': False,
},
'django.security.DisallowedHost': {
'level': 'ERROR',
'handlers': ['console'],
'propagate': False,
},
# 'raven': {
# 'level': 'DEBUG',
# 'handlers': ['console'],
# 'propagate': False,
# },
# 'sentry.errors': {
# 'level': 'DEBUG',
# 'handlers': ['console'],
# 'propagate': False,
# },
},
}
| mit | 4,563,077,271,762,959,000 | 28.589474 | 79 | 0.535397 | false |
tensorflow/datasets | tensorflow_datasets/text/eraser_multi_rc_test.py | 1 | 1043 | # coding=utf-8
# Copyright 2021 The TensorFlow Datasets Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for eraser_multi_rc dataset."""
from tensorflow_datasets import testing
from tensorflow_datasets.text import eraser_multi_rc
class EraserMultiRcTest(testing.DatasetBuilderTestCase):
DATASET_CLASS = eraser_multi_rc.EraserMultiRc
SPLITS = {
"train": 3, # Number of fake train example
"test": 1, # Number of fake test example
"validation": 1,
}
if __name__ == "__main__":
testing.test_main()
| apache-2.0 | -7,384,162,994,259,214,000 | 31.59375 | 74 | 0.732502 | false |
ajtulloch/decisiontrees | ui/test/test.py | 1 | 1908 | from app import application as app
import app.protobufs.decisiontrees_pb2 as pb
from flask.ext.pymongo import PyMongo
from protobuf_to_dict import protobuf_to_dict
import fake_data
import json
import unittest
class FlaskrTestCase(unittest.TestCase):
def setUp(self):
app.config['TEST_DBNAME'] = 'ui_test'
try:
app.mongo = PyMongo(app, config_prefix='TEST')
except:
pass
self.client = app.test_client()
num_trees, height = (5, 5)
self.row = pb.TrainingRow(
forestConfig=fake_data.fake_config(num_trees),
forest=fake_data.fake_forest(height, num_trees),
)
with app.test_request_context():
self._id = str(app.mongo.db.decisiontrees.insert(
protobuf_to_dict(self.row))
)
def test_decision_tree_list(self):
rv = self.client.get('/api/decisiontrees/')
result = json.loads(rv.data)
self.assertEqual(len(result), 1)
self.assertDecisionTreeEqual(result[0])
def assertDecisionTreeEqual(self, response):
self.assertEqual(response["_id"], self._id)
self.assertEqual(
response["forestConfig"],
protobuf_to_dict(self.row.forestConfig)
)
self.assertEqual(response["forest"], protobuf_to_dict(self.row.forest))
def test_decision_tree_detail(self):
rv = self.client.get('/api/decisiontrees/{0}'.format(self._id))
result = json.loads(rv.data)
self.assertDecisionTreeEqual(result)
def test_decision_tree_nonexistent(self):
rv = self.client.get('/api/decisiontrees/{0}'.format(0))
result = json.loads(rv.data)
self.assertEqual(result['status'], 500)
def tearDown(self):
with app.test_request_context():
app.mongo.db.decisiontrees.remove()
if __name__ == '__main__':
unittest.main()
| mit | -3,745,424,822,609,319,400 | 31.338983 | 79 | 0.621593 | false |
mennis/oTTo | src/otto/lib/ethdrvstat.py | 1 | 10253 | #!/usr/bin/env python
#
# Copyright (c) 2014 Coraid, Inc.
# All rights reserved.
#
# $Coraid$
#
"""
Interface to read, digest and display information regarding
AoE Targets and their corresponding system information.
"""
from os import stat, listdir, path
from stat import S_ISBLK
from pprint import pformat
import re
from time import time
from json import dumps
ETHDRV_DEVICES_FILE = "/proc/ethdrv/devices"
ETHDRV_TARGETS_FILE = "/proc/ethdrv/targets"
ETHDRV_DEV_DIR = "/dev/ethdrv"
def int2bitmask(integer):
"""
    given an integer return a string that
    represents the bits::
    >>> int2bitmask(15903)
    '11111000011111'
"""
return integer >= 0 and str(bin(integer))[2:] or "-" + str(bin(integer))[3:]
def bitmask2index(bitmask):
"""
given a string representing a bitmask
return a list of positions that are not
zero::
>>> bitmask2index('11111000011111')
    [0, 1, 2, 3, 4, 9, 10, 11, 12, 13]
"""
rmask = reversed(str(bitmask))
return [bitmask.start() for bitmask in re.finditer('1', ''.join(rmask))]
def mk_portlist(intval):
"""
Take an integer representation of a bitmask and return a list form::
    >>> mk_portlist(3)
    [0, 1]
:type intval: int
:return: a list of ports in bitmask
:rtype: list
"""
return bitmask2index(int2bitmask(intval))
def is_block(fpath):
"""
given an absolute path determine if it's
a block device
"""
return path.exists(fpath) and S_ISBLK(stat(fpath).st_mode)
def mk_map(name):
"""
make a map of block devices to targets using listdir
by looking for softlinks and following the reference
to determine if it's a block device.
"""
device_map = dict()
if path.exists(name):
for fname in listdir(name):
pname = path.join(name, fname)
if path.islink(pname):
realpath = path.realpath(pname)
if is_block(realpath):
device_map[fname] = path.basename(realpath)
return device_map
class AoETarget(object):
"""
A class representing an AoE Target from the perspective of
an initiator.
"""
def __init__(self, bussaddress, aoeaddress, size, serial, naa):
self.scsiaddress = bussaddress
self.target = aoeaddress
self.file = "init"
self.size = size
self.ports = set()
self.macs = list()
self.targpath = dict()
self.serial = serial
self.naa = naa
def add_mac(self, mac):
"""
add a mac address to this target
"""
self.macs.append(mac)
def add_ports(self, ports):
"""
read a line that looked like::
185.0 002590c7671e 3 1
we convert 3 into [0,1] and extend self.ports with it
"""
portlist = mk_portlist(ports)
self.ports.update(portlist)
def add_path(self, port, mac):
"""
We read a line that looked like::
185.0 002590c7671e 3 1
we converted 3 into [0,1] and then sent
{0: '00259096645f'}
to this method, add_path, which adds
00259096645f
to self.targpath[0]['address']
"""
if not self.targpath.get(port):
self.targpath[port] = [mac]
else:
self.targpath[port].append(mac)
def __repr__(self):
state = self.file if self.file is not 'init' else "init"
return pformat({'target': self.target,
'file': self.file,
'devpath': "/dev/%s" % state,
'size': self.size,
'port': self.ports,
'macs': self.macs,
'targpath': self.targpath})
class AoEStat(object):
"""
    A class to manage the AoE status data. It is written as a class both to
    facilitate testing of itself and to be reusable in the automation library.
"""
def __init__(self, scantime=5):
self.devices_file = ETHDRV_DEVICES_FILE
self.targets_file = ETHDRV_TARGETS_FILE
self.dev_dir = ETHDRV_DEV_DIR
self.scantime = scantime
self.lastscan = None
self._devices = list()
self.debug = None
self.mk_map = mk_map
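        # Rebinding mk_map here (like the open_file/mk_map staticmethods below)
        # gives tests a seam to substitute fakes without touching the real
        # filesystem.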
@staticmethod
def open_file(name):
"""
        mockable interface to open
"""
return open(name)
@staticmethod
def mk_map(name):
"""
mockable interface to listdir related calls
"""
device_map = dict()
if path.exists(name):
for fname in listdir(name):
pname = path.join(name, fname)
if path.islink(pname):
realpath = path.realpath(pname)
if is_block(realpath):
device_map[fname] = path.basename(realpath)
return device_map
@staticmethod
def mk_portstr(ports):
"""
given a list of ports return a string
if the list is empty return "N/A"
"""
return ",".join([str(port) for port in ports]) or 'N/A'
@property
def devices(self):
"""
return a list of AoETargets seen and processed
"""
return self._devices
def get_devices(self):
"""
device entries look like::
3:0:185:0 185.0 480.103GB
"""
fhandle = self.open_file(self.devices_file)
lines = fhandle.read().strip()
for line in lines.splitlines():
serial, naa = None, None
busaddress, aoeaddress, size = line.split()[:3]
if len(line.split()) > 3:
serial, naa = line.split()[3:5]
self.devices.append(AoETarget(busaddress, aoeaddress, size, serial, naa))
def get_targets(self):
"""
target entries look like this::
185.0 002590c7671e 3 1
185.0 (string) AoE address
002590c7671e (string) mac address
3 (bitmask) of ports that can see that mac address
1 (bool) mac is active
add the 185.0 to self.devices
add 002590c7671e to self.targpaths[0] and self.targpaths[0]
we don't currently do anything with the 'active' information
"""
fhandle = self.open_file(self.targets_file)
lines = fhandle.read().strip()
for line in lines.splitlines():
aoeaddress, mac, ports = line.split()[:3]
ports = int(ports)
for device in self.devices:
if device.target == aoeaddress:
device.add_mac(mac)
device.add_ports(ports)
portlist = mk_portlist(ports)
for port in portlist:
device.add_path(port, mac)
break
def map_devices(self):
"""
determine which AoE target backs which scsi device and
add that to the device as 'file'
if the device is partitioned we skip everything but the
base device
"""
targmap = self.mk_map(self.dev_dir)
for targ, dev in targmap.iteritems():
canary = True
targ = targ[1:]
if len(targ.split('p')) > 1:
continue
for device in self.devices:
if device.target == targ:
device.file = dev
canary = False
break
if canary:
raise Exception("couldn't find target: %s %s" % (targ, dev))
def update(self):
"""
read and process information from the filesystem and
update properties
"""
self.get_devices()
self.get_targets()
self.map_devices()
self.lastscan = time()
def output(self, json=False, paths=False):
"""
format the current state information for output
"""
if json:
data = dict()
for entry in self.devices:
# can't use __repr__ for some json lib reason
data[entry.target] = {'target': entry.target,
'file': entry.file,
'devpath': "/dev/%s" % entry.file,
'size': entry.size,
'port': self.mk_portstr(entry.ports),
'macs': ",".join(entry.macs),
'paths': entry.targpath,
'serial': entry.serial,
'naa': entry.naa,
}
return dumps(data, sort_keys=True, indent=4, separators=(',', ': '))
else:
fmtstr = "e%(target)-10s%(file)-8s%(size)+13s %(port)s\n"
output = ""
for entry in self.devices:
output += fmtstr % {'target': entry.target,
'file': entry.file,
'path': "/dev/%s" % entry.file,
'size': entry.size,
'port': self.mk_portstr(entry.ports),
'macs': ",".join(entry.macs),
}
if paths:
for port, macaddrs in entry.targpath.iteritems():
macs = ", ".join(macaddrs)
output += '{0:>12} {1:<17}\n'.format(port, macs)
return output
if __name__ == '__main__':
from signal import signal, SIGPIPE, SIG_DFL
from optparse import OptionParser
signal(SIGPIPE, SIG_DFL)
parser = OptionParser()
parser.add_option("-j", "--json",
help="Output data as json",
action="store_true")
parser.add_option("-a", "--all",
help="Display all target paths",
action="store_true")
(options, args) = parser.parse_args()
aoestat = AoEStat()
try:
aoestat.update()
except IOError:
exit(1)
print aoestat.output(json=options.json, paths=options.all),
| bsd-3-clause | 1,185,178,689,856,153,900 | 27.639665 | 85 | 0.514874 | false |
tuna/telegram2irc | teleirc.py | 1 | 8987 | #! /usr/bin/env python3
import os
import sys
import re
import threading
import pickle
import ssl
import time
import irc.client
from telegram import Telegram
from photostore import Imgur, VimCN
from config import config
help_txt = {
    'all' : 'currently available commands are: .nick, .help, .join, .list',
'help' : '.help [command] => show help message (for `command`).',
'nick' : '.nick <new_nick> => change your nick to `new_nick`, no space allowed.',
    'join' : '.join <channel> [channel [channel [...]]] => join `channel`(s). Use `.list` to list available channels.',
    'list' : '.list => list all available chats.',
}
msg_format = '[{nick}] {msg}'
tele_conn = None
irc_conn = None
bindings = tuple()
usernicks = {}
irc_channels = []
tele_me = None
irc_blacklist = []
usernicks_path = os.path.join(
os.path.dirname(os.path.abspath(__file__)),
"usernicks"
)
def on_pong(connection, event):
connection.last_pong = time.time()
print('[irc] PONG from: ', event.source)
def on_connect(connection, event):
for c in irc_channels:
if irc.client.is_channel(c):
connection.join(c)
def on_join(connection, event):
print('[irc] ', event.source + ' ' + event.target)
def on_privmsg(connection, event):
print('[irc] ', event.source + ' ' + event.target + ' ' + event.arguments[0])
tele_target = get_tele_binding(event.target)
irc_nick = event.source[:event.source.index('!')]
msg = event.arguments[0]
if tele_target is not None and irc_nick not in irc_blacklist:
tele_conn.send_msg(
tele_target,
msg_format.format(
nick=irc_nick,
msg=msg
)
)
def on_nickinuse(connection, event):
connection.nick(connection.get_nickname() + '_')
def main_loop():
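    # Run the IRC reactor and the Telegram receive loop in two daemon threads,
    # relaying messages between them according to the configured bindings.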
def irc_thread():
reactor = irc_init()
reactor.process_forever(60)
def tele_thread():
tele_init()
while True:
msg = tele_conn.recv_one_msg()
if msg is None or msg.user_id == tele_me:
continue
print('[tel] {}, {}, {}, {}'.format(
msg.user_id, msg.username, msg.chat_id, msg.content
))
user_id, username, chat_id, content = (
str(msg.user_id), msg.username, str(msg.chat_id), msg.content
)
if chat_id is not None:
irc_target = get_irc_binding('chat#'+chat_id)
if irc_target is None:
if content.startswith('.'):
handle_command(msg)
irc_target = None
elif re.match(r'.?help\s*$', content):
# msg is from user and user needs help
send_help(user_id)
irc_target = None
else:
# msg is from user and is not a command
irc_target = get_irc_binding('user#'+user_id)
if irc_target is not None:
nick = get_usernick_from_id(user_id)
if not nick:
nick = username
if username:
change_usernick(user_id, nick)
lines = content.split('\n')
for line in lines:
irc_conn.privmsg(
irc_target,
msg_format.format(nick=nick, msg=line)
)
tasks = []
for i in (irc_thread, tele_thread):
t = threading.Thread(target=i, args=())
t.setDaemon(True)
t.start()
tasks.append(t)
for t in tasks:
t.join()
def get_irc_binding(tele_chat):
for ib, tb in bindings:
if tb == tele_chat:
return ib
return None
def get_tele_binding(irc_chan):
for ib, tb in bindings:
if ib == irc_chan:
return tb
return None
def get_usernick_from_id(userid):
userid = str(userid)
return usernicks.get(userid, None)
def change_usernick(userid, newnick):
userid = str(userid)
usernicks[userid] = newnick
save_usernicks()
def send_help(userid, help='all'):
try:
m = help_txt[help]
except KeyError:
m = help_txt['all']
tele_conn.send_user_msg(userid, m)
def invite_to_join(userid, chatlist):
for c in chatlist:
chat = get_tele_binding(c)
if chat is not None:
cmd = 'chat_add_user {chat} {user} 0'.format(
chat=chat,
user='user#' + userid
)
tele_conn.send_cmd(cmd)
else:
            tele_conn.send_user_msg(userid, '{0} is not available. Use `.list` to see available channels'.format(c))
def handle_command(msg):
if not msg.content.startswith('.'):
return
userid = str(msg.user_id)
try:
tmp = msg.content.split()
cmd = tmp[0][1:].lower()
args = tmp[1:]
except IndexError:
send_help(userid)
if cmd == 'nick':
try:
change_usernick(userid, args[0])
tele_conn.send_user_msg(userid, 'Your nick has changed to {0}'.format(args[0]))
except IndexError:
send_help(userid, 'nick')
elif cmd == 'help':
try:
send_help(userid, args[0])
except IndexError:
send_help(userid, 'help')
send_help(userid)
elif cmd == 'join':
if len(args) == 0:
send_help(userid, 'join')
invite_to_join(userid, args)
elif cmd == 'list':
chan = ', '.join([i[0] for i in bindings])
tele_conn.send_user_msg(userid, chan)
else:
send_help(userid)
def irc_init():
global irc_channels
global irc_conn
irc_channels = [i[0] for i in config['bindings']]
server = config['irc']['server']
port = config['irc']['port']
nickname = config['irc']['nick']
usessl = config['irc']['ssl']
# use a replacement character for unrecognized byte sequences
# see <https://pypi.python.org/pypi/irc>
irc.client.ServerConnection.buffer_class.errors = 'replace'
reactor = irc.client.Reactor()
irc_conn = reactor.server()
try:
if usessl:
ssl_factory = irc.connection.Factory(wrapper=ssl.wrap_socket)
irc_conn.connect(
server, port, nickname, connect_factory=ssl_factory)
else:
irc_conn.connect(server, port, nickname)
except irc.client.ServerConnectionError:
print(sys.exc_info()[1])
irc_conn.add_global_handler("welcome", on_connect)
irc_conn.add_global_handler("join", on_join)
irc_conn.add_global_handler("privmsg", on_privmsg)
irc_conn.add_global_handler("pubmsg", on_privmsg)
irc_conn.add_global_handler("action", on_privmsg)
irc_conn.add_global_handler("pong", on_pong)
irc_conn.add_global_handler("nicknameinuse", on_nickinuse)
irc_conn.last_pong = time.time()
def keep_alive_ping(connection):
try:
if time.time() - connection.last_pong > 360:
raise irc.client.ServerNotConnectedError('ping timeout!')
connection.last_pong = time.time()
connection.ping(connection.get_server_name())
except irc.client.ServerNotConnectedError:
print('[irc] Reconnecting...')
connection.reconnect()
connection.last_pong = time.time()
reactor.execute_every(60, keep_alive_ping, (irc_conn,))
return reactor
def tele_init():
global tele_conn
global tele_me
server = config['telegram']['server']
port = config['telegram']['port']
tele_me = int(config['telegram']['me'].replace('user#', ''))
photo_store = photo_store_init()
tele_conn = Telegram(server, port, photo_store)
def photo_store_init():
provider = config['photo_store']['provider']
if provider == "imgur":
options = config['photo_store']['options']
return Imgur(**options)
elif provider == "vim-cn":
return VimCN()
return None
def load_usernicks():
global usernicks
try:
with open(usernicks_path, 'rb') as f:
usernicks = pickle.load(f)
except Exception:
usernicks = {}
def save_usernicks():
global usernicks
try:
with open(usernicks_path, 'wb') as f:
pickle.dump(usernicks, f, pickle.HIGHEST_PROTOCOL)
except Exception:
pass
def main():
    global bindings
    global irc_blacklist
    global irc_conn, tele_conn
bindings = config['bindings']
irc_blacklist = config['irc']['blacklist']
load_usernicks()
try:
main_loop()
except (Exception, KeyboardInterrupt):
try:
irc_conn.quit('Bye')
irc_conn = None
tele_conn = None # to call __del__ method of Telegram to close connection
except Exception:
pass
finally:
print('Bye.')
if __name__ == '__main__':
main()
| mit | -1,034,934,347,828,009,700 | 26.996885 | 119 | 0.562702 | false |
sammy1881/CricketNotifier | CricketNotifier_mac.py | 1 | 10739 | '''
https://github.com/hasgar/CricLiveNotifier
'''
#!/usr/bin/python
import getpass
import os
from crontab import CronTab
from bs4 import BeautifulSoup
import urllib2,cookielib
from urllib2 import urlopen
from xml.dom.minidom import parse, parseString
import sys
from math import ceil
from sys import argv
import Foundation
import objc
import AppKit
import subprocess
import pickle
import time
NSUserNotification = objc.lookUpClass('NSUserNotification')
NSUserNotificationCenter = objc.lookUpClass('NSUserNotificationCenter')
tab = CronTab(user=getpass.getuser())
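# This script re-schedules itself through the user's crontab (one run per
# minute) for live updates; the jobs are tagged with the comment
# "CricLiveNotifier" so StopCricLive() can find and remove them later.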
def notify(clear, title, subtitle, info_text, url, delay=0, sound=False, userInfo={}):
#Notification Triggering Function
notification = NSUserNotification.alloc().init()
notification.setTitle_(title)
notification.setSubtitle_(subtitle)
notification.setInformativeText_(info_text)
notification.setHasActionButton_(True)
notification.setActionButtonTitle_("View")
#notification.setUserInfo_({"action":"open_url", "value":url})
if sound:
notification.setSoundName_("NSUserNotificationDefaultSoundName")
notification.setDeliveryDate_(Foundation.NSDate.dateWithTimeInterval_sinceDate_(delay, Foundation.NSDate.date()))
center = NSUserNotificationCenter.defaultUserNotificationCenter()
center.setDelegate_(notification)
if clear == True:
NSUserNotificationCenter.defaultUserNotificationCenter().removeAllDeliveredNotifications()
else:
NSUserNotificationCenter.defaultUserNotificationCenter().scheduleNotification_(notification)
def StopCricLive(stop):
    #Stop the notifier: remove the cron job and the saved state file
tab.remove_all(comment="CricLiveNotifier")
tab.write()
try:
os.remove('CricLiveNotifier.txt')
except OSError:
pass
if stop: sys.exit(0)
def ConnectionIssue():
    #If any network issue while fetching livematches.xml
print "Something went wrong.Please check your internet connection."
sys.exit(0)
if len(argv) == 1:
os.system('clear')
print "Please wait.."
try:
site = "http://synd.cricbuzz.com/j2me/1.0/livematches.xml"
hdr = {'User-Agent':'Mozilla/5.0'}
req = urllib2.Request(site,headers=hdr)
livematches,livematches1 = urllib2.urlopen(req),urllib2.urlopen(req)
except Exception:
ConnectionIssue()
if "<html" in livematches.read():
ConnectionIssue()
soup = BeautifulSoup(livematches1,"xml")
match_list = {}
os.system('clear')
sp_status = ""
#Pulling livematches data from cricbuzz xml using BeautifulSoup for first notification after setup
for idx,mtch in enumerate(soup.findAll('match')):
for sts in mtch.findAll('state'):
if sts.get('mchState') == 'tea' or sts.get('mchState') == 'lunch' or sts.get('mchState') == 'innings break' or sts.get('mchState') == 'inprogress':
if sts.get('mchState') == 'tea': sp_status = "Tea Break"
if sts.get('mchState') == 'lunch': sp_status = "Lunch Break"
if sts.get('mchState') == 'innings break': sp_status = "Innings Break"
match_list[idx+1] = mtch.get('datapath')
print '{0}: {1} - {2}'.format(idx+1,mtch.get('mchDesc'),mtch.get('mnum'))
    #Check whether any match is currently available for score updates
if any(match_list):
match_no = raw_input("Select your Match by Entering the Number > ")
sound_alert = raw_input("Notification with sound (Y/N) > ")
sound_alert = (sound_alert.lower() == 'y')
        auto_close = raw_input("Auto-close notification after seconds (0 - 15, 0 = manual close) > ")
match_link_com = match_list[int(match_no)] + "commentary.xml"
os.system('clear')
print "Please wait.."
try:
site1 = match_link_com
hdr1 = {'User-Agent':'Mozilla/5.0'}
req1 = urllib2.Request(site1,headers=hdr1)
commentary,commentary1 = urllib2.urlopen(req1),urllib2.urlopen(req1)
except Exception:
ConnectionIssue()
if "<html" in commentary.read():
ConnectionIssue()
soup = BeautifulSoup(commentary1,"xml")
bat_tm_id,last_ball,last_over,wickets,runs = 0,0.0,0,0,0
#Pulling selected match for first notification after setup
for btId in soup.findAll('btTm'):
bat_tm_id = btId.get('id')
bat_tm_name = btId.get('sName')
for Ov in btId.findAll('Inngs'):
last_ball = float(Ov.get('ovrs'))
last_over = int(round(float(Ov.get('ovrs'))))
wickets = Ov.get('wkts')
runs = Ov.get('r')
break
StopCricLive(False)
#Adding data into CricLiveNotifier.txt for update sync
data = {"last_ball_updated": last_ball,"last_over_updated": last_over,"batting_team_id": bat_tm_id,"autoclose":int(auto_close),"sound":sound_alert}
com_file = os.path.dirname(os.path.realpath(__file__))+'/CricLiveNotifier.txt'
cric_file = open(com_file, 'w+')
cric_file.truncate()
pickle.dump(data, cric_file)
cric_file.close()
com_file = 'python "'+os.path.realpath(__file__)+'" '+ match_list[int(match_no)]
cron_job = tab.new(command=com_file,comment="CricLiveNotifier")
cron_job.minute.every(1)
tab.write()
os.system('clear')
print "Done. Enjoy the match with CricLiveNotifier :)"
bat_tm_name = bat_tm_name+" "+runs+"/"+wickets
last_ball = str(last_ball) + " Overs"
notify(False,bat_tm_name, last_ball, sp_status,"", sound=True)
if int(auto_close) > 0:
time.sleep(int(auto_close))
notify(True,"", "", "","")
else:
print "There are currently no live cricket matches"
if len(argv) > 1:
#Call from crontab
if argv[1] == 'stop':
StopCricLive(False)
notify(False,"Bye Bye!", "CricLiveNotifier Turned Off!!", "","",sound=True)
else:
match_link_com = argv[1] + "commentary.xml"
try:
site1 = match_link_com
hdr1 = {'User-Agent':'Mozilla/5.0'}
req1 = urllib2.Request(site1,headers=hdr1)
commentary,commentary1 = urllib2.urlopen(req1),urllib2.urlopen(req1)
except Exception:
notify(False, "Something went wrong!", "CricLiveNotifier Turned Off", "Check your Internet Connection", "http://github.com/hasgar/CricLiveNotifier", sound=True)
StopCricLive(True)
if "<html" in commentary.read():
notify(False, "Something went wrong!", "CricLiveNotifier Turned Off", "Check your Internet Connection", "http://github.com/hasgar/CricLiveNotifier", sound=True)
StopCricLive(True)
#Pulling Updated match data for updates
soup = BeautifulSoup(commentary1,"xml")
for sts in soup.findAll('state'):
if sts.get('mchState') != 'inprogress':
sys.exit(0)
com_file = os.path.dirname(os.path.realpath(__file__))+'/CricLiveNotifier.txt'
last_updated = pickle.load( open( com_file, "rb" ) )
idx,balls_to_update,fours,sixes,wicket = 0,[],0,0,0
balls_update = {"wickets": [],"fours": [],"sixers": []}
for btId in soup.findAll('btTm'):
bat_tm_name = btId.get('sName')
bat_tm_id = btId.get('id')
for Ov in btId.findAll('Inngs'):
last_ball = Ov.get('ovrs')
last_ball1 = float(Ov.get('ovrs'))
wickets = Ov.get('wkts')
runs = Ov.get('r')
break
new_team_id = bat_tm_id
def check_ball(com):
		#Check whether this ball's commentary mentions a boundary or a wicket
com_txt = com.text.split(',')
if 'out' in com_txt[1].strip().lower():
notify(False,"WICKET!!!!!", com_txt[0], "","", sound=last_updated['sound'])
if 'six' in com_txt[1].strip().lower():
notify(False,"SIIIXXXXX!!!!!", com_txt[0], "","", sound=last_updated['sound'])
if 'four' in com_txt[1].strip().lower():
notify(False,"FOOURRRRR!!!!!", com_txt[0], "","", sound=last_updated['sound'])
	#Walk the commentary and notify for every new ball bowled since the last update
last_ball_to_update = 0
for com in soup.findAll('c'):
com_txt = com.text.split(' ')
if "." in com_txt[0]:
com_txt[0] = float(com_txt[0])
if last_updated['batting_team_id'] != new_team_id:
if com_txt[0] < 2.0:
check_ball(com)
if idx == 0:last_ball_to_update,idx = com_txt[0],1
else:
if com_txt[0] > last_updated['last_ball_updated']:
check_ball(com)
if idx == 0:last_ball_to_update,idx = com_txt[0],1
last_updated['batting_team_id'] = new_team_id
else:
if com_txt[0] > last_updated['last_ball_updated']:
check_ball(com)
if idx == 0:last_ball_to_update,idx = com_txt[0],1
if last_ball_to_update == 0:
		#No new balls since the previous update
last_updated['last_over_updated'] = int(last_ball1)
else:
		#New balls have been bowled since the previous update
if last_updated['last_over_updated'] != int(last_ball1):
bat_tm_name = bat_tm_name+" "+runs+"/"+wickets
last_ball = last_ball + " Overs"
notify(False,"Over Update", bat_tm_name, last_ball,"", sound=True)
last_updated['last_over_updated'] = int(last_ball1)
last_updated['last_ball_updated'] = last_ball_to_update
	#Write the last updated data into CricLiveNotifier.txt for update sync
com_file = os.path.dirname(os.path.realpath(__file__))+'/CricLiveNotifier.txt'
cric_file = open(com_file, 'w+')
cric_file.truncate()
pickle.dump(last_updated, cric_file)
cric_file.close()
for sts in soup.findAll('state'):
if sts.get('mchState') == 'stump' or sts.get('mchState') == 'complete':
if sts.get('mchState') == 'stump': title,subtitle = sts.get('addnStatus'),sts.get('status')
if sts.get('mchState') == 'complete': title,subtitle = "Match Over",sts.get('status')
notify(False,title, subtitle, "CricLiveNotifier Turned Off", "")
StopCricLive(True)
if last_updated['autoclose'] > 0:
time.sleep(last_updated['autoclose'])
notify(True,"", "", "","")
| mit | -4,073,888,684,243,864,600 | 46.728889 | 172 | 0.589161 | false |
CCI-MOC/GUI-Backend | api/test/views/mock_keystone.py | 1 | 35309 | import json
import re
from rest_framework import status
from rest_framework.views import APIView
from django.http import HttpResponse
from threepio import logger
from atmosphere import settings
from atmosphere.settings.local import AUTHENTICATION as auth_settings
from atmosphere.settings.local import TEST as test_settings
# Canned JSON strings returned by the mock Keystone views below
EXPECTED_UNSCOPED_TOKEN = "gAAAAABY2WAKlR6yxvn1mvkPcW0i4fB36y68N0mg2u1CUjPg7OJ-alV2CPPg207_zOTm8s3p3hHJWYVCw5Jh3PBYECkdplVgdzH7UgviSftNM8AJboFNXbypNNaELqgSztpGh5TnBbXI5RlzAc4IIBWd8wPL-RZuOA"
EXPECTED_SCOPED_TOKEN = "gAAAAABY2qmscjNU6DB-01NFpTAxSH35C07vC6SFc3enD4NiZfBYjzT_rFMDEM-Rt0GD0-n7T_K_ain-Z8_n6ESOBQk6-QfS8I8X_YLWYA2jHhHq_KsQEfoefU_XOmb46nJ_kl0lJWsQ4fplQSaudPLPs9eqgWn4E6qntp6sGoNppiv5kECZZfAoKa9STmjksBvksRBaB2Vp"
EXPECTED_USER_ID = "6705630a653f4300baf486a8df6072de"
JSON_401 = '{"error": {"message": "The request you have made requires authentication.", "code": 401, "title": "Unauthorized"}}'
JSON_403 = '{"error": {"message": "You are not authorized to perform the requested action: identity:list_user_projects", "code": 403, "title": "Forbidden"}}'
JSON_404 = '{"error": {"message": "Not Found.", "code": 404, "title": "Not Found"}}'
JSON_404A = '{"error": {"message": "This is not a recognized Fernet token ", "code": 404, "title": "Not Found"}}'
JSON_BASEURL = '''{"version": {"status": "stable", "updated": "2016-10-06T00:00:00Z", "media-types": [{"base": "application/json",
"type": "application/vnd.openstack.identity-v3+json"}], "id": "v3.7", "links": [{"href": "http://localhost:8082/api/test/keystone/v3/",
"rel": "self"}]}}
'''
JSON_USERSPROJECTS = ''' {
"links": {
"self": "http://localhost:8082/api/test/keystone/v3/users/6705630a653f4300baf486a8df6072de/projects",
"previous": null,
"next": null
},
"projects": [{
"is_domain": false,
"description": "First test",
"links": {
"self": "http://localhost:8082/api/test/kestone/v3/projects/190ce9f5a454493e9eaae608d54fe2d1"
},
"enabled": true,
"id": "190ce9f5a454493e9eaae608d54fe2d1",
"parent_id": "default",
"domain_id": "default",
"name": "MockeyMock I"
},{
"is_domain": false,
"description": "second test",
"links": {
"self": "http://localhost:8082/api/test/kestone/v3/projects/290ce9f5a454493e9eaae608d54fe2d1"
},
"enabled": true,
"id": "290ce9f5a454493e9eaae608d54fe2d1",
"parent_id": "default",
"domain_id": "default",
"name": "MockeyMock II"
}]
}'''
JSON_FULLCATALOG = '''{
"token": {
"is_domain": false,
"methods": ["token", "password"],
"roles": [{
"id": "9fe2ff9ee4384b1894a90878d3e92bab",
"name": "_member_"
}],
"expires_at": "2017-03-28T19:13:18.000000Z",
"project": {
"domain": {
"id": "default",
"name": "Default"
},
"id": "190ce9f5a454493e9eaae608d54fe2d1",
"name": "MockeyMock"
},
"catalog": [{
"endpoints": [{
"region_id": "Region1",
"url": "http://localhost:8082/api/test",
"region": "Region1",
"interface": "internal",
"id": "011b6db8eedd4e9d9422bdc5c29b31ed"
}, {
"region_id": "Region1",
"url": "http://localhost:8082/api/test",
"region": "Region1",
"interface": "public",
"id": "4a2c135c8ddd4b28be67cbf5ddfc4e2e"
}, {
"region_id": "Region1",
"url": "http://localhost:8082/api/test",
"region": "Region1",
"interface": "admin",
"id": "6fe436f7b1bb4e8c94501c262fb7a4ea"
}],
"type": "metering",
"id": "1b91f76fd41d4ec8856ef904c7d7de5f",
"name": "ceilometer"
}, {
"endpoints": [{
"region_id": "Region1",
"url": "http://localhost:8082/api/test/keystone/v3",
"region": "Region1",
"interface": "public",
"id": "0a34ebd757fc46318ee37552d7f58c57"
}, {
"region_id": "Region1",
"url": "http://localhost:8082/api/test/keystone/v3",
"region": "Region1",
"interface": "internal",
"id": "1c953770993546ab9bf3e5b829e8b8af"
}, {
"region_id": "Region1",
"url": "http://localhost:8082/api/test/keystone/v3",
"region": "Region1",
"interface": "admin",
"id": "bf70c0780b2d4904a3c1b5b526a51d7a"
}],
"type": "computev3",
"id": "1d995094e400496aa5eb54731aacf911",
"name": "novav3"
}, {
"endpoints": [{
"region_id": "Region1",
"url": "http://localhost:8082/api/test/keystone/v1.1/190ce9f5a454493e9eaae608d54fe2d1",
"region": "Region1",
"interface": "admin",
"id": "2f9c0cf634cd4244b8a2641434fb8492"
}, {
"region_id": "Region1",
"url": "http://localhost:8082/api/test/keystone/v1.1/190ce9f5a454493e9eaae608d54fe2d1",
"region": "Region1",
"interface": "internal",
"id": "352b35ef014f498a89de22ba20d32e5e"
}, {
"region_id": "Region1",
"url": "http://localhost:8082/api/test/keystone/v1.1/190ce9f5a454493e9eaae608d54fe2d1",
"region": "Region1",
"interface": "public",
"id": "f47cab31d98d4d76aded119fe702c4c6"
}],
"type": "data-processing",
"id": "3bff550b8b0f4e619650af43386dce58",
"name": "sahara"
}, {
"endpoints": [{
"region_id": "Region1",
"url": "http://localhost:8082/api/test/keystone",
"region": "Region1",
"interface": "admin",
"id": "1087e53e4add4e47b2b1ce9c8d781498"
}, {
"region_id": "Region1",
"url": "http://localhost:8082/api/test/keystone",
"region": "Region1",
"interface": "internal",
"id": "f6bf030cf3294e928cb17dd4edaeb3b6"
}, {
"region_id": "Region1",
"url": "http://localhost:8082/api/test/keystone",
"region": "Region1",
"interface": "public",
"id": "f88f2c020a1546a3ae6f6d8abf5c56f1"
}],
"type": "network",
"id": "3ea76ccc58d2414f868ac02c9281a872",
"name": "neutron"
}, {
"endpoints": [{
"region_id": "Region1",
"url": "http://localhost:8082/api/test/keystone/v1/190ce9f5a454493e9eaae608d54fe2d1",
"region": "Region1",
"interface": "admin",
"id": "10affb77591f495ea84c814a676b4ea7"
}, {
"region_id": "Region1",
"url": "http://localhost:8082/api/test/keystone/v1/190ce9f5a454493e9eaae608d54fe2d1",
"region": "Region1",
"interface": "internal",
"id": "14396392e51043cb9343bde94964033a"
}, {
"region_id": "Region1",
"url": "http://localhost:8082/api/test/keystone/v1/190ce9f5a454493e9eaae608d54fe2d1",
"region": "Region1",
"interface": "public",
"id": "ad5e7d2096cd46a29d3b4abd81457076"
}],
"type": "orchestration",
"id": "50a8ee48ebf1462e85c0504cd8fafa63",
"name": "heat"
}, {
"endpoints": [{
"region_id": "Region1",
"url": "http://localhost:8082/api/test/keystone/v2/190ce9f5a454493e9eaae608d54fe2d1",
"region": "Region1",
"interface": "internal",
"id": "2948061330f54fe2996d05d6467c8281"
}, {
"region_id": "Region1",
"url": "http://localhost:8082/api/test/keystone/v2/190ce9f5a454493e9eaae608d54fe2d1",
"region": "Region1",
"interface": "public",
"id": "611099d850aa4936af46ba5a5bdb36a7"
}, {
"region_id": "Region1",
"url": "http://localhost:8082/api/test/keystone/v2/190ce9f5a454493e9eaae608d54fe2d1",
"region": "Region1",
"interface": "admin",
"id": "a820e9700cde4fddacdbe9be12eff6a2"
}],
"type": "volumev2",
"id": "53b96359ef1944ed9790321cd05c7bda",
"name": "cinderv2"
}, {
"endpoints": [{
"region_id": "Region1",
"url": "http://localhost:8082/api/test/keystone/v1",
"region": "Region1",
"interface": "internal",
"id": "8c94e99c01ab46f2b05522a009d9c989"
}, {
"region_id": "Region1",
"url": "http://localhost:8082/api/test/keystone/v1",
"region": "Region1",
"interface": "admin",
"id": "a40ca45719df49feac3d85dbc09ccae8"
}, {
"region_id": "Region1",
"url": "http://localhost:8082/api/test/keystone/v1",
"region": "Region1",
"interface": "public",
"id": "fb5118d119f74a21a02e5c24fa8d8808"
}],
"type": "cloudformation",
"id": "6ac605cbb7584b7f9e51ce03eb5c1807",
"name": "heat-cfn"
}, {
"endpoints": [{
"region_id": "Region1",
"url": "http://localhost:8082/api/test/keystone/services/Cloud",
"region": "Region1",
"interface": "internal",
"id": "d08a6919e5b34a59986fd6c87a98edf1"
}, {
"region_id": "Region1",
"url": "http://localhost:8082/api/test/keystone/services/Admin",
"region": "Region1",
"interface": "admin",
"id": "db72521aa5e54bb2bae185d78b46667c"
}, {
"region_id": "Region1",
"url": "http://localhost:8082/api/test/keystone/services/Cloud",
"region": "Region1",
"interface": "public",
"id": "f453dd9ca73849ae8333c4d1fcdce3ee"
}],
"type": "ec2",
"id": "98f865505636418480ba8840d61be054",
"name": "nova_ec2"
}, {
"endpoints": [{
"region_id": "Region1",
"url": "http://localhost:8082/api/test/keystone/v2.0",
"region": "Region1",
"interface": "admin",
"id": "41873bb89289483380354bba7106255d"
}, {
"region_id": "Region1",
"url": "http://localhost:8082/api/test/keystone/v2.0",
"region": "Region1",
"interface": "public",
"id": "5cc6f2f9cd3f4b9bbb3fedca7a1c3644"
}, {
"region_id": "Region1",
"url": "http://localhost:8082/api/test/keystone/v2.0",
"region": "Region1",
"interface": "internal",
"id": "dab69dfe6065403ebe1a43e7cc9c6700"
}],
"type": "identity",
"id": "9ba7a078940141358c54dc614a048920",
"name": "keystone"
}, {
"endpoints": [{
"region_id": "Region1",
"url": "http://localhost:8082/api/test/swift/v1",
"region": "Region1",
"interface": "internal",
"id": "678e188feb004ff783c2efd2f9f6e274"
}, {
"region_id": "Region1",
"url": "http://localhost:8082/api/test/swift/v1",
"region": "Region1",
"interface": "admin",
"id": "b23e438672cd4cb9a9169bae2f5a8be9"
}, {
"region_id": "Region1",
"url": "http://localhost:8082/api/test/swift/v1",
"region": "Region1",
"interface": "public",
"id": "ef184b532a7e4b6296b028eff81e5070"
}],
"type": "object-store",
"id": "a0ece720c2c84733946872fcd6d2f42b",
"name": "swift"
}, {
"endpoints": [{
"region_id": "Region1",
"url": "http://localhost:8082/api/test/keystone/v2/190ce9f5a454493e9eaae608d54fe2d1",
"region": "Region1",
"interface": "public",
"id": "805b2401424a4d6f875d509d55f21dfa"
}, {
"region_id": "Region1",
"url": "http://localhost:8082/api/test/keystone/v2/190ce9f5a454493e9eaae608d54fe2d1",
"region": "Region1",
"interface": "internal",
"id": "885c88e0ac574fd79da9e8f058a05bc5"
}, {
"region_id": "Region1",
"url": "http://localhost:8082/api/test/keystone/v2/190ce9f5a454493e9eaae608d54fe2d1",
"region": "Region1",
"interface": "admin",
"id": "f40b80f59eb142d89a4d0e9748999bbf"
}],
"type": "compute",
"id": "a1fef68408714aa5bcbdcc87350a096a",
"name": "nova"
}, {
"endpoints": [{
"region_id": "Region1",
"url": "http://localhost:8082/api/test/keystone",
"region": "Region1",
"interface": "public",
"id": "04536110ce764b5797189be3a282b724"
}, {
"region_id": "Region1",
"url": "http://localhost:8082/api/test/keystone",
"region": "Region1",
"interface": "internal",
"id": "306fee2ba44b4df5a801822c325deb7c"
}, {
"region_id": "Region1",
"url": "http://localhost:8082/api/test/keystone",
"region": "Region1",
"interface": "admin",
"id": "613697a0715f471794a3d4398f8be668"
}],
"type": "image",
"id": "bc9ff0e118ee4119b25f472f0817b368",
"name": "glance"
}, {
"endpoints": [],
"type": "volume",
"id": "ce02b3f8b8e74d19b76732fc2046ae1a",
"name": "cinder"
}],
"user": {
"domain": {
"id": "default",
"name": "Default"
},
"id": "6705630a653f4300baf486a8df6072de",
"name": "MockeyMock"
},
"audit_ids": ["A60bVi9WQ76Blpd_g8QJEg", "aOdetpZZTBeI1NrT9m9YaQ"],
"issued_at": "2017-03-28T18:21:32.000000Z"
}
}'''
# JSON_SCOPED_CATALOG contains "mocked_project_id" / "mocked_project_name" placeholders
# that are substituted per request in MockKeystoneAuthTokens.post()
JSON_SCOPED_CATALOG = '''{
"token": {
"is_domain": false,
"methods": ["token", "password"],
"roles": [{
"id": "9fe2ff9ee4384b1894a90878d3e92bab",
"name": "_member_"
}],
"expires_at": "2017-04-10T21:11:33.000000Z",
"project": {
"domain": {
"id": "default",
"name": "Default"
},
"id": "mocked_project_id",
"name": "mocked_project_name"
},
"catalog": [{
"endpoints": [{
"region_id": "Region1",
"url": "http://localhost:8082/api/test/keystone",
"region": "Region1",
"interface": "internal",
"id": "011b6db8eedd4e9d9422bdc5c29b31ed"
}, {
"region_id": "Region1",
"url": "http://localhost:8082/api/test/keystone",
"region": "Region1",
"interface": "public",
"id": "4a2c135c8ddd4b28be67cbf5ddfc4e2e"
}, {
"region_id": "Region1",
"url": "http://localhost:8082/api/test/keystone",
"region": "Region1",
"interface": "admin",
"id": "6fe436f7b1bb4e8c94501c262fb7a4ea"
}],
"type": "metering",
"id": "1b91f76fd41d4ec8856ef904c7d7de5f",
"name": "ceilometer"
}, {
"endpoints": [{
"region_id": "Region1",
"url": "http://localhost:8082/api/test/keystone/v3",
"region": "Region1",
"interface": "public",
"id": "0a34ebd757fc46318ee37552d7f58c57"
}, {
"region_id": "Region1",
"url": "http://localhost:8082/api/test/keystone/v3",
"region": "Region1",
"interface": "internal",
"id": "1c953770993546ab9bf3e5b829e8b8af"
}, {
"region_id": "Region1",
"url": "http://localhost:8082/api/test/keystone/v3",
"region": "Region1",
"interface": "admin",
"id": "bf70c0780b2d4904a3c1b5b526a51d7a"
}],
"type": "computev3",
"id": "1d995094e400496aa5eb54731aacf911",
"name": "novav3"
}, {
"endpoints": [{
"region_id": "Region1",
"url": "http://localhost:8082/api/test/keystone/v1.1/c7294bb6bf4c4503bbdee9a08525d4aa",
"region": "Region1",
"interface": "admin",
"id": "2f9c0cf634cd4244b8a2641434fb8492"
}, {
"region_id": "Region1",
"url": "http://localhost:8082/api/test/keystone/v1.1/c7294bb6bf4c4503bbdee9a08525d4aa",
"region": "Region1",
"interface": "internal",
"id": "352b35ef014f498a89de22ba20d32e5e"
}, {
"region_id": "Region1",
"url": "http://localhost:8082/api/test/keystone/v1.1/c7294bb6bf4c4503bbdee9a08525d4aa",
"region": "Region1",
"interface": "public",
"id": "f47cab31d98d4d76aded119fe702c4c6"
}],
"type": "data-processing",
"id": "3bff550b8b0f4e619650af43386dce58",
"name": "sahara"
}, {
"endpoints": [{
"region_id": "Region1",
"url": "http://localhost:8082/api/test/keystone",
"region": "Region1",
"interface": "admin",
"id": "1087e53e4add4e47b2b1ce9c8d781498"
}, {
"region_id": "Region1",
"url": "http://localhost:8082/api/test/keystone",
"region": "Region1",
"interface": "internal",
"id": "f6bf030cf3294e928cb17dd4edaeb3b6"
}, {
"region_id": "Region1",
"url": "http://localhost:8082/api/test/keystone",
"region": "Region1",
"interface": "public",
"id": "f88f2c020a1546a3ae6f6d8abf5c56f1"
}],
"type": "network",
"id": "3ea76ccc58d2414f868ac02c9281a872",
"name": "neutron"
}, {
"endpoints": [{
"region_id": "Region1",
"url": "http://localhost:8082/api/test/keystone/v1/c7294bb6bf4c4503bbdee9a08525d4aa",
"region": "Region1",
"interface": "admin",
"id": "10affb77591f495ea84c814a676b4ea7"
}, {
"region_id": "Region1",
"url": "http://localhost:8082/api/test/keystone/v1/c7294bb6bf4c4503bbdee9a08525d4aa",
"region": "Region1",
"interface": "internal",
"id": "14396392e51043cb9343bde94964033a"
}, {
"region_id": "Region1",
"url": "http://localhost:8082/api/test/keystone/v1/c7294bb6bf4c4503bbdee9a08525d4aa",
"region": "Region1",
"interface": "public",
"id": "ad5e7d2096cd46a29d3b4abd81457076"
}],
"type": "orchestration",
"id": "50a8ee48ebf1462e85c0504cd8fafa63",
"name": "heat"
}, {
"endpoints": [{
"region_id": "Region1",
"url": "http://localhost:8082/api/test/keystone/v2/c7294bb6bf4c4503bbdee9a08525d4aa",
"region": "Region1",
"interface": "internal",
"id": "2948061330f54fe2996d05d6467c8281"
}, {
"region_id": "Region1",
"url": "http://localhost:8082/api/test/keystone/v2/c7294bb6bf4c4503bbdee9a08525d4aa",
"region": "Region1",
"interface": "public",
"id": "611099d850aa4936af46ba5a5bdb36a7"
}, {
"region_id": "Region1",
"url": "http://localhost:8082/api/test/keystone/v2/c7294bb6bf4c4503bbdee9a08525d4aa",
"region": "Region1",
"interface": "admin",
"id": "a820e9700cde4fddacdbe9be12eff6a2"
}],
"type": "volumev2",
"id": "53b96359ef1944ed9790321cd05c7bda",
"name": "cinderv2"
}, {
"endpoints": [{
"region_id": "Region1",
"url": "http://localhost:8082/api/test/keystone/v1",
"region": "Region1",
"interface": "internal",
"id": "8c94e99c01ab46f2b05522a009d9c989"
}, {
"region_id": "Region1",
"url": "http://localhost:8082/api/test/keystone/v1",
"region": "Region1",
"interface": "admin",
"id": "a40ca45719df49feac3d85dbc09ccae8"
}, {
"region_id": "Region1",
"url": "http://localhost:8082/api/test/keystone/v1",
"region": "Region1",
"interface": "public",
"id": "fb5118d119f74a21a02e5c24fa8d8808"
}],
"type": "cloudformation",
"id": "6ac605cbb7584b7f9e51ce03eb5c1807",
"name": "heat-cfn"
}, {
"endpoints": [{
"region_id": "Region1",
"url": "http://localhost:8082/api/test/keystone/services/Cloud",
"region": "Region1",
"interface": "internal",
"id": "d08a6919e5b34a59986fd6c87a98edf1"
}, {
"region_id": "Region1",
"url": "http://localhost:8082/api/test/keystone/services/Admin",
"region": "Region1",
"interface": "admin",
"id": "db72521aa5e54bb2bae185d78b46667c"
}, {
"region_id": "Region1",
"url": "http://localhost:8082/api/test/keystone/services/Cloud",
"region": "Region1",
"interface": "public",
"id": "f453dd9ca73849ae8333c4d1fcdce3ee"
}],
"type": "ec2",
"id": "98f865505636418480ba8840d61be054",
"name": "nova_ec2"
}, {
"endpoints": [{
"region_id": "Region1",
"url": "http://localhost:8082/api/test/keystone7/v2.0",
"region": "Region1",
"interface": "admin",
"id": "41873bb89289483380354bba7106255d"
}, {
"region_id": "Region1",
"url": "http://localhost:8082/api/test/keystone/v2.0",
"region": "Region1",
"interface": "public",
"id": "5cc6f2f9cd3f4b9bbb3fedca7a1c3644"
}, {
"region_id": "Region1",
"url": "http://localhost:8082/api/test/keystone/v2.0",
"region": "Region1",
"interface": "internal",
"id": "dab69dfe6065403ebe1a43e7cc9c6700"
}],
"type": "identity",
"id": "9ba7a078940141358c54dc614a048920",
"name": "keystone"
}, {
"endpoints": [{
"region_id": "Region1",
"url": "https://e1-swift.massopencloud.org/swift/v1",
"region": "Region1",
"interface": "internal",
"id": "3ffd688a4ade4414ad2302a6a8a5109f"
}, {
"region_id": "Region1",
"url": "https://e1-swift.massopencloud.org/swift/v1",
"region": "Region1",
"interface": "public",
"id": "9dd43cd14943473f970dd7ca5a0a3132"
}, {
"region_id": "Region1",
"url": "https://e1-swift.massopencloud.org/swift/v1",
"region": "Region1",
"interface": "admin",
"id": "a73cac46bee34cb88e2f8d554908d86f"
}],
"type": "object-store",
"id": "a0ece720c2c84733946872fcd6d2f42b",
"name": "swift"
}, {
"endpoints": [{
"region_id": "Region1",
"url": "http://localhost:8082/api/test/keystone/v2/c7294bb6bf4c4503bbdee9a08525d4aa",
"region": "Region1",
"interface": "public",
"id": "805b2401424a4d6f875d509d55f21dfa"
}, {
"region_id": "Region1",
"url": "http://localhost:8082/api/test/keystone/v2/c7294bb6bf4c4503bbdee9a08525d4aa",
"region": "Region1",
"interface": "internal",
"id": "885c88e0ac574fd79da9e8f058a05bc5"
}, {
"region_id": "Region1",
"url": "http://localhost:8082/api/test/keystone/v2/c7294bb6bf4c4503bbdee9a08525d4aa",
"region": "Region1",
"interface": "admin",
"id": "f40b80f59eb142d89a4d0e9748999bbf"
}],
"type": "compute",
"id": "a1fef68408714aa5bcbdcc87350a096a",
"name": "nova"
}, {
"endpoints": [{
"region_id": "Region1",
"url": "http://localhost:8082/api/test/keystone",
"region": "Region1",
"interface": "public",
"id": "04536110ce764b5797189be3a282b724"
}, {
"region_id": "Region1",
"url": "http://localhost:8082/api/test/keystone",
"region": "Region1",
"interface": "internal",
"id": "306fee2ba44b4df5a801822c325deb7c"
}, {
"region_id": "Region1",
"url": "http://localhost:8082/api/test/keystone",
"region": "Region1",
"interface": "admin",
"id": "613697a0715f471794a3d4398f8be668"
}],
"type": "image",
"id": "bc9ff0e118ee4119b25f472f0817b368",
"name": "glance"
}, {
"endpoints": [],
"type": "volume",
"id": "ce02b3f8b8e74d19b76732fc2046ae1a",
"name": "cinder"
}],
"user": {
"domain": {
"id": "default",
"name": "Default"
},
"id": "6705630a653f4300baf486a8df6072de",
"name": "MockeyMock"
},
"audit_ids": ["bTsT12C2Q8KHK-cWM2igKQ", "gMPj5yGtSymCq6PweJZUJA"],
"issued_at": "2017-04-10T20:11:36.000000Z"
}
}'''
JSON_AUTH = '''{"token": {"issued_at": "2017-03-27T14:33:34.000000Z", "audit_ids": ["xM40d8iLTGS-CaE70Xki7A"], "methods": ["password"], "expires_at": "2017-03-28T15:33:34.000000Z",
"user": {"domain": {"id": "default", "name": "Default"}, "id": "6705630a653f4300baf486a8df6072de", "name": "MockeyMock"}}}
'''
class MockKeystoneView(APIView):
"""
    This is the catch-all view. Use it to figure out which URLs, headers and JSON are being passed in.
"""
def get(self, request):
logger.info(" MockKeystoneView:get called")
for header in request.META:
logger.info(" header: %s value %s" % (header, request.META[header]))
return HttpResponse(JSON_404, status=status.HTTP_404_NOT_FOUND, content_type='application/json')
def post(self, request):
logger.info("MockKeystoneView:post called")
logger.info(request.body)
return HttpResponse(JSON_404, status=status.HTTP_404_NOT_FOUND, content_type='application/json')
class MockKeystoneUsersProjects(APIView):
"""
    Handles /v3/users/<user_id>/projects
"""
def get(self, request, user_id):
logger.info("MockKeystoneUserProjects:get called (user id: %s) " % (user_id))
# for header in request.META:
# logger.info(" header: %s value %s" % (header, request.META[header]) )
token = request.META.get('HTTP_X_AUTH_TOKEN')
logger.info(" token: %s" % token)
logger.info(" %s" % EXPECTED_UNSCOPED_TOKEN)
logger.info(" user: %s" % user_id)
logger.info(" %s" % EXPECTED_USER_ID)
if token == EXPECTED_UNSCOPED_TOKEN and user_id == EXPECTED_USER_ID:
response = HttpResponse(JSON_USERSPROJECTS, status=status.HTTP_200_OK, content_type="application/json")
response['x-openstack-request-id'] = "req-6a536bef-af21-46a9-94e3-31f47998644f"
elif token == EXPECTED_UNSCOPED_TOKEN:
logger.info("ERROR expected unscoped_token")
response = HttpResponse(JSON_403, status=status.HTTP_403_FORBIDDEN, content_type="application/json")
response['x-openstack-request-id'] = "req-fe6bb228-537a-41f1-8e57-0b3037c0e473"
else:
logger.info("ERROR: unknown userid")
response = HttpResponse(JSON_401, status=status.HTTP_403_FORBIDDEN, content_type="application/json")
response['x-openstack-request-id'] = "req-fe6bb228-537a-41f1-8e57-0b3037c0e473"
return response
def post(self, request):
logger.info("MockKeystoneView:post called")
logger.info(request.body)
return HttpResponse(JSON_404, status=status.HTTP_404_NOT_FOUND, content_type='application/json')
class MockKeystoneAuthTokens(APIView):
"""
    Handles /v3/auth/tokens
"""
def get(self, request):
logger.info("MockKeystoneAuthToken:get called")
for header in request.META:
logger.info(" header: %s value %s" % (header, request.META[header]))
if (request.META['HTTP_X_AUTH_TOKEN'] == EXPECTED_SCOPED_TOKEN
and request.META['HTTP_X_SUBJECT_TOKEN'] == EXPECTED_SCOPED_TOKEN):
response = HttpResponse(JSON_FULLCATALOG, status=status.HTTP_201_CREATED, content_type='application/json')
response['x-openstack-request-id'] = 'req-4227f7a0-4631-4014-8c70-d0bf42ff6553'
response['X-Subject-Token'] = EXPECTED_UNSCOPED_TOKEN
return response
return HttpResponse(JSON_404, status=status.HTTP_404_NOT_FOUND, content_type='application/json')
def post(self, request):
logger.info("MockKeystoneAuthToken:post called")
logger.info(request.body)
ks_request = json.loads(request.body)
token = ks_request.get('auth', {}).get('identity', {}).get('token', {}).get('id')
is_unscoped = 'unscoped' in ks_request.get('auth', {}).get('scope', {})
project_id = ks_request.get('auth', {}).get('scope', {}).get('project', {}).get('id')
        # Need to distinguish between token-based and password-based requests;
        # note that project-scoped requests arrive with a token
if token:
if token == EXPECTED_UNSCOPED_TOKEN:
if project_id == '190ce9f5a454493e9eaae608d54fe2d1':
json_scoped_catalog = re.sub("mock_project_id", project_id, JSON_SCOPED_CATALOG)
json_scoped_catalog = re.sub("mock_project_name", 'MockeyMock I', json_scoped_catalog)
response = HttpResponse(json_scoped_catalog, status=status.HTTP_201_CREATED, content_type='application/json')
elif project_id == '290ce9f5a454493e9eaae608d54fe2d1':
json_scoped_catalog = re.sub("mock_project_id", project_id, JSON_SCOPED_CATALOG)
json_scoped_catalog = re.sub("mock_project_name", 'MockeyMock II', json_scoped_catalog)
response = HttpResponse(json_scoped_catalog, status=status.HTTP_201_CREATED, content_type='application/json')
else:
response = HttpResponse(JSON_FULLCATALOG, status=status.HTTP_201_CREATED, content_type='application/json')
response['X-Subject-Token'] = EXPECTED_SCOPED_TOKEN
response['x-openstack-request-id'] = 'req-4227f7a0-4631-4014-8c70-d0bf42ff6553'
return response
elif token == EXPECTED_SCOPED_TOKEN and is_unscoped:
response = HttpResponse(JSON_AUTH, status=status.HTTP_201_CREATED, content_type='application/json')
response['x-openstack-request-id'] = 'req-4227f7a0-4631-4014-8c70-d0bf42ff6553'
response['X-Subject-Token'] = EXPECTED_UNSCOPED_TOKEN
return response
else:
response = HttpResponse(JSON_404A, status=status.HTTP_404_NOT_FOUND, content_type='application/json')
response['x-openstack-request-id'] = 'req-4227f7a0-4631-4014-8c70-d0bf42ff6553'
return response
if (ks_request['auth']['identity']['password']['user']['name'] == test_settings['username']
and ks_request['auth']['identity']['password']['user']['password'] == test_settings['password']):
logger.info("*** success ***")
response = HttpResponse(JSON_AUTH, status=status.HTTP_201_CREATED, content_type='application/json')
response['x-openstack-request-id'] = 'req-4227f7a0-4631-4014-8c70-d0bf42ff6553'
response['X-Subject-Token'] = EXPECTED_UNSCOPED_TOKEN
return response
else:
logger.info("*** failed ***")
return HttpResponse(JSON_401, status=status.HTTP_401_UNAUTHORIZED, content_type='application/json')
class MockKeystoneBaseUrl(APIView):
"""
    Passes back the Identity v3 version document for the base URL (/v3)
"""
def get(self, request):
logger.info("MockKeystoneBaseUrl:get called")
response = HttpResponse(JSON_BASEURL, status=status.HTTP_200_OK, content_type='application/json')
response['x-openstack-request-id'] = 'req-3ac5dc05-785a-4a30-ba18-e13bf619767f'
return response
def post(self, request):
logger.info("MockKeystoneBaseUrl:post called")
response = HttpResponse(JSON_BASEURL, status=status.HTTP_200_OK, content_type='application/json')
response['x-openstack-request-id'] = 'req-3ac5dc05-785a-4a30-ba18-e13bf619767f'
return response
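# Example usage (illustrative addition, not part of the original mock): a
# minimal sketch of how a Django test client could drive the password-auth
# path of MockKeystoneAuthTokens.  The '/api/test/keystone/v3/auth/tokens'
# path is an assumption based on the endpoint URLs in the canned catalogs
# above; adjust it to whatever URLconf these views are actually mounted on.
def _example_password_auth_request(client, username, password):
    """Post a password-based auth request to the mocked Keystone endpoint."""
    body = {
        'auth': {
            'identity': {
                'methods': ['password'],
                'password': {
                    'user': {'name': username, 'password': password},
                },
            },
        },
    }
    return client.post('/api/test/keystone/v3/auth/tokens',
                       data=json.dumps(body),
                       content_type='application/json')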
| apache-2.0 | 5,929,951,241,089,079,000 | 42.430504 | 230 | 0.508879 | false |
kmike/tornado-slacker | test_project/settings.py | 1 | 1318 | # Django settings for test project.
import os, sys
PROJECT_ROOT = os.path.dirname(os.path.abspath(__file__))
join = lambda p: os.path.abspath(os.path.join(PROJECT_ROOT, p))
sys.path.insert(0, join('..'))
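# Presumably makes the repository root importable so that the package under
# test (tornado-slacker) can be found when this test project runs.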
DEBUG = True
TEMPLATE_DEBUG = DEBUG
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': join('db.sqlite'),
# :memory: databases cause obscure bugs in multithreaded environment
# and django uses :memory: as TEST_NAME by default so it is necessary
# to make test database real file.
'TEST_NAME': join('db-test.sqlite'),
}
}
SECRET_KEY = '5mcs97ar-(nnxhfkx0%^+0^sr!e(ax=x$2-!8dqy25ff-l1*a='
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
# 'django.template.loaders.eggs.Loader',
)
TEMPLATE_DIRS = (
join('templates'),
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
)
ROOT_URLCONF = 'urls'
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'testapp',
)
| mit | 4,513,988,263,674,335,000 | 26.458333 | 77 | 0.68437 | false |
daveschaefer/checkloc | checkloc/localecodes.py | 1 | 3662 | #!/usr/bin/env python
#
# Copyright (C) 2014 Dave Schaefer
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, version 3 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
List of many Mozilla locale codes
Kept in a separate file so it doesn't take up space in the main module.
The base list was taken from
https://svn.mozilla.org/libs/product-details/json/languages.json
Other codes are added as extensions make use of them.
"""
MOZILLA_LOCALE_CODES = {
'ach': True,
'af': True,
'ak': True,
'am-et': True,
'an': True,
'ar': True,
'as': True,
'ast': True,
'az': True,
'be': True,
'bg': True,
'bg-BG': True,
'bn-BD': True,
'bn-IN': True,
'br': True,
'bs': True,
'ca': True,
'ca-valencia': True,
'cs': True,
'cs-CZ': True,
'csb': True,
'cy': True,
'da': True,
'dbg': True,
'de': True,
'de-AT': True,
'de-CH': True,
'de-DE': True,
'dsb': True,
'ee': True,
'el': True,
'en-AU': True,
'en-CA': True,
'en-GB': True,
'en-NZ': True,
'en-US': True,
'en-ZA': True,
'eo': True,
'es': True,
'es-AR': True,
'es-CL': True,
'es-ES': True,
'es-MX': True,
'et': True,
'et-EE': True,
'eu': True,
'fa': True,
'ff': True,
'fi': True,
'fj-FJ': True,
'fr': True,
'fur-IT': True,
'fy-NL': True,
'ga': True,
'ga-IE': True,
'gd': True,
'gl': True,
'gl-ES': True,
'gu': True,
'gu-IN': True,
'he': True,
'hi': True,
'hi-IN': True,
'hr': True,
'hr-HR': True,
'hsb': True,
'hu': True,
'hu-HU': True,
'hy-AM': True,
'id': True,
'is': True,
'it': True,
'ja': True,
'ja-JP': True,
'ja-JP-mac': True,
'ka': True,
'kk': True,
'km': True,
'kn': True,
'ko': True,
'ku': True,
'la': True,
'lg': True,
'lij': True,
'ln': True,
'lo': True,
'lt': True,
'lv': True,
'lv-LV': True,
'mai': True,
'mg': True,
'mi': True,
'mk': True,
'ml': True,
'mn': True,
'mr': True,
'ms': True,
'ms-MY': True,
'my': True,
'nb-NO': True,
'ne-NP': True,
'nl': True,
'nn-NO': True,
'nr': True,
'nso': True,
'oc': True,
'or': True,
'pa': True,
'pa-IN': True,
'pl': True,
'pt-BR': True,
'pt-PT': True,
'rm': True,
'ro': True,
'ru': True,
'rw': True,
'sa': True,
'sah': True,
'sat': True,
'si': True,
'sk': True,
'sk-SK': True,
'sl': True,
'sl-SI': True,
'son': True,
'sq': True,
'sr': True,
'sr-Cyrl': True,
'sr-Latn': True,
'ss': True,
'st': True,
'sv-SE': True,
'sw': True,
'ta': True,
'ta-IN': True,
'ta-LK': True,
'te': True,
'tl': True,
'th': True,
'tn': True,
'tr': True,
'ts': True,
'tt-RU': True,
'uk': True,
'uk-UA': True,
'ur': True,
'uz': True,
've': True,
'vi': True,
'wo': True,
'x-testing': True,
'xh': True,
'zh-CN': True,
'zh-TW': True,
'zu': True
}
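# Illustrative helper (not part of the original module): the table above is
# presumably consulted to decide whether a locale code found in an extension
# is a recognized Mozilla locale; a minimal membership check might look like:
def is_known_mozilla_locale(code):
    """Return True if `code` appears in MOZILLA_LOCALE_CODES above."""
    return code in MOZILLA_LOCALE_CODES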
if __name__ == "__main__":
pass
| gpl-3.0 | 3,032,949,822,129,747,000 | 18.794595 | 73 | 0.488804 | false |
nandub/pygale | pygale/prettypuff.py | 1 | 3972 | #!/uns/bin/python
import os, sys, string, re, time, string
import pygale, gale_env, userinfo
#------------------------------------------------------------
# Global configuration
#------------------------------------------------------------
def bold_location(text):
if sys.platform == 'win32':
return text
else:
i = string.rfind(text, '@')
local = text[:i]
domain = text[i:]
return "[1m" + local + "[0m" + domain
def bold_keyword(text):
if sys.platform == 'win32':
return text
else:
return '[1m' + text + '[0m'
def format(text, termwidth=80):
lines = string.split(text, '\r\n')
if lines and lines[-1] != '':
lines.append('')
out = []
while lines:
l = lines[0]
lines = lines[1:]
if len(l) > termwidth:
wrappoint = string.rfind(l, ' ', 0, termwidth)
if wrappoint == -1:
# No space, wrap at termwidth
while len(l) > termwidth:
out.append(l[:termwidth])
l = l[termwidth:]
out.append(l)
else:
out.append(l[:wrappoint])
lines.insert(0, l[wrappoint+1:])
else:
out.append(l)
return string.join(out, '\n')
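# Example (illustrative): format() splits on '\r\n', hard-wraps anything longer
# than termwidth -- preferring to break at the last space before the limit --
# and rejoins with '\n', e.g.:
#   format('one two three\r\nfour', termwidth=9) == 'one two\nthree\nfour\n'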
def show(puff, file=None, termwidth=80):
# If not otherwise specified, use stdout
if file is None:
file = sys.stdout
#--------------------------------------------------
# Catch all the message headers in the envvars
#--------------------------------------------------
# Sender: real name of person sending puff
sender = puff.get_text_first('message/sender', 'Unknown sender')
# Location, eg "[email protected]"
locations = puff.get_loc()
# Sign: Gale ID of sender, eg "[email protected]"
signer = puff.get_signer('Unknown sig')
if signer is None:
signer = '*unsigned*'
# Date message was sent, eg "1998-08-24 15:18:47"
date = puff.get_time_first('id/time', None)
if date is None:
date = time.strftime('%m-%d %H:%M:%S', time.localtime(time.time()))
else:
date = time.strftime('%m-%d %H:%M:%S', time.localtime(date))
	# Encryption recipients, list of ids
recipients = puff.get_recipients()
if '' in recipients:
# null key
recipients = []
# Status of recipient for a receipt, eg "in/present"
status = puff.get_text_first('notice/presence', None)
# Receipt (new-style only)
receipt = puff.get_text_first('answer.receipt', None)
# Client used to send puff
idclass = puff.get_text_first('id/class', 'Unknown client')
# Get the text of the message
text = puff.get_text_first('message/body', '')
if text:
text = format(text, termwidth)
# Receipts
if status is not None or receipt is not None:
s = '* %s' % date
if receipt is not None:
s = s + ' received:'
if status is not None:
s = s + ' %s' % status
if file.isatty():
s = s + ' %s (%s)\n' % (bold_location(signer), sender)
else:
s = s + ' %s (%s)\n' % (signer, sender)
file.write(s)
return
# Beep on "private" puff
# Private means it was encrypted but not to the signer
if file.isatty() and recipients and signer and signer not in recipients:
file.write('\007')
# separator bar
file.write('\r' + ('-' * termwidth) + '\n')
# Bold locations
locs = string.split(locations, None)
if file.isatty():
locs = map(bold_location, locs)
# Format message
header = 'To: %s' % string.join(locs, ' ')
keywords = puff.get_text('message.keyword')
if keywords:
if file.isatty():
keywords = map(bold_keyword, keywords)
keyw_text = map(lambda x: '/' + x, keywords)
header = header + ' %s' % string.join(keyw_text, ' ')
if puff.get_text('question.receipt'):
header = header + ' [rcpt]'
file.write(header + '\n')
if text:
file.write(text.encode('latin-1'))
bolded_sig = "-- %s (%s) at %s --" %\
(bold_location(signer), sender, date)
normal_sig = "-- %s (%s) at %s --" % (signer, sender, date)
bolded_sig = ' ' * (termwidth-len(normal_sig)) + bolded_sig
nonbolded_sig = ' ' * (termwidth-len(normal_sig)) + normal_sig
if file.isatty():
file.write(bolded_sig + '\n')
else:
file.write(nonbolded_sig + '\n')
if __name__ == '__main__':
main()
| lgpl-2.1 | -7,324,132,790,013,130,000 | 27.782609 | 73 | 0.596928 | false |
openstack/heat | heat/tests/db/test_sqlalchemy_api.py | 1 | 168791 | #
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import datetime
import json
import logging
import time
from unittest import mock
import uuid
import fixtures
from oslo_config import cfg
from oslo_db import exception as db_exception
from oslo_utils import timeutils
from sqlalchemy.orm import exc
from sqlalchemy.orm import session
from heat.common import context
from heat.common import exception
from heat.common import short_id
from heat.common import template_format
from heat.db.sqlalchemy import api as db_api
from heat.db.sqlalchemy import models
from heat.engine.clients.os import glance
from heat.engine.clients.os import nova
from heat.engine import environment
from heat.engine import resource as rsrc
from heat.engine import stack as parser
from heat.engine import template as tmpl
from heat.engine import template_files
from heat.tests import common
from heat.tests.openstack.nova import fakes as fakes_nova
from heat.tests import utils
wp_template = '''
{
"AWSTemplateFormatVersion" : "2010-09-09",
"Description" : "WordPress",
"Parameters" : {
"KeyName" : {
"Description" : "KeyName",
"Type" : "String",
"Default" : "test"
}
},
"Resources" : {
"WebServer": {
"Type": "AWS::EC2::Instance",
"Properties": {
"ImageId" : "F17-x86_64-gold",
"InstanceType" : "m1.large",
"KeyName" : "test",
"UserData" : "wordpress"
}
}
}
}
'''
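# Minimal single-instance WordPress-style template used by most stacks created
# in these tests; the WebServer resource is what _mock_create() stubs out via
# the fake Nova client.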
UUIDs = (UUID1, UUID2, UUID3) = sorted([str(uuid.uuid4())
for x in range(3)])
class SqlAlchemyTest(common.HeatTestCase):
def setUp(self):
super(SqlAlchemyTest, self).setUp()
self.fc = fakes_nova.FakeClient()
self.ctx = utils.dummy_context()
def _mock_get_image_id_success(self, imageId_input, imageId):
self.patchobject(glance.GlanceClientPlugin,
'find_image_by_name_or_id',
return_value=imageId)
def _setup_test_stack(self, stack_name, stack_id=None, owner_id=None,
stack_user_project_id=None, backup=False):
t = template_format.parse(wp_template)
template = tmpl.Template(
t, env=environment.Environment({'KeyName': 'test'}))
stack_id = stack_id or str(uuid.uuid4())
stack = parser.Stack(self.ctx, stack_name, template,
owner_id=owner_id,
stack_user_project_id=stack_user_project_id)
with utils.UUIDStub(stack_id):
stack.store(backup=backup)
return (template, stack)
def _mock_create(self):
self.patchobject(nova.NovaClientPlugin, 'client',
return_value=self.fc)
self._mock_get_image_id_success('F17-x86_64-gold', 744)
self.fc.servers.create = mock.Mock(
return_value=self.fc.servers.list()[4])
return self.fc
def _mock_delete(self):
self.patchobject(self.fc.servers, 'delete',
side_effect=fakes_nova.fake_exception())
@mock.patch.object(db_api, '_paginate_query')
def test_filter_and_page_query_paginates_query(self, mock_paginate_query):
query = mock.Mock()
db_api._filter_and_page_query(self.ctx, query)
self.assertTrue(mock_paginate_query.called)
@mock.patch.object(db_api, '_events_paginate_query')
def test_events_filter_and_page_query(self, mock_events_paginate_query):
query = mock.Mock()
db_api._events_filter_and_page_query(self.ctx, query)
self.assertTrue(mock_events_paginate_query.called)
@mock.patch.object(db_api.utils, 'paginate_query')
def test_events_filter_invalid_sort_key(self, mock_paginate_query):
query = mock.Mock()
class InvalidSortKey(db_api.utils.InvalidSortKey):
@property
def message(_):
self.fail("_events_paginate_query() should not have tried to "
"access .message attribute - it's deprecated in "
"oslo.db and removed from base Exception in Py3K.")
mock_paginate_query.side_effect = InvalidSortKey()
self.assertRaises(exception.Invalid,
db_api._events_filter_and_page_query,
self.ctx, query, sort_keys=['foo'])
@mock.patch.object(db_api.db_filters, 'exact_filter')
def test_filter_and_page_query_handles_no_filters(self, mock_db_filter):
query = mock.Mock()
db_api._filter_and_page_query(self.ctx, query)
mock_db_filter.assert_called_once_with(mock.ANY, mock.ANY, {})
@mock.patch.object(db_api.db_filters, 'exact_filter')
def test_events_filter_and_page_query_handles_no_filters(self,
mock_db_filter):
query = mock.Mock()
db_api._events_filter_and_page_query(self.ctx, query)
mock_db_filter.assert_called_once_with(mock.ANY, mock.ANY, {})
@mock.patch.object(db_api.db_filters, 'exact_filter')
def test_filter_and_page_query_applies_filters(self, mock_db_filter):
query = mock.Mock()
filters = {'foo': 'bar'}
db_api._filter_and_page_query(self.ctx, query, filters=filters)
self.assertTrue(mock_db_filter.called)
@mock.patch.object(db_api.db_filters, 'exact_filter')
def test_events_filter_and_page_query_applies_filters(self,
mock_db_filter):
query = mock.Mock()
filters = {'foo': 'bar'}
db_api._events_filter_and_page_query(self.ctx, query, filters=filters)
self.assertTrue(mock_db_filter.called)
@mock.patch.object(db_api, '_paginate_query')
def test_filter_and_page_query_allowed_sort_keys(self,
mock_paginate_query):
query = mock.Mock()
sort_keys = ['stack_name', 'foo']
db_api._filter_and_page_query(self.ctx, query, sort_keys=sort_keys)
args, _ = mock_paginate_query.call_args
self.assertIn(['name'], args)
@mock.patch.object(db_api, '_events_paginate_query')
def test_events_filter_and_page_query_allowed_sort_keys(
self, mock_paginate_query):
query = mock.Mock()
sort_keys = ['event_time', 'foo']
db_api._events_filter_and_page_query(self.ctx, query,
sort_keys=sort_keys)
args, _ = mock_paginate_query.call_args
self.assertIn(['created_at'], args)
@mock.patch.object(db_api.utils, 'paginate_query')
def test_paginate_query_default_sorts_by_created_at_and_id(
self, mock_paginate_query):
query = mock.Mock()
model = mock.Mock()
db_api._paginate_query(self.ctx, query, model, sort_keys=None)
args, _ = mock_paginate_query.call_args
self.assertIn(['created_at', 'id'], args)
@mock.patch.object(db_api.utils, 'paginate_query')
def test_paginate_query_default_sorts_dir_by_desc(self,
mock_paginate_query):
query = mock.Mock()
model = mock.Mock()
db_api._paginate_query(self.ctx, query, model, sort_dir=None)
args, _ = mock_paginate_query.call_args
self.assertIn('desc', args)
@mock.patch.object(db_api.utils, 'paginate_query')
def test_paginate_query_uses_given_sort_plus_id(self,
mock_paginate_query):
query = mock.Mock()
model = mock.Mock()
db_api._paginate_query(self.ctx, query, model, sort_keys=['name'])
args, _ = mock_paginate_query.call_args
self.assertIn(['name', 'id'], args)
@mock.patch.object(db_api.utils, 'paginate_query')
def test_paginate_query_gets_model_marker(self, mock_paginate_query):
query = mock.Mock()
model = mock.Mock()
marker = mock.Mock()
mock_query_object = mock.Mock()
mock_query_object.get.return_value = 'real_marker'
ctx = mock.MagicMock()
ctx.session.query.return_value = mock_query_object
db_api._paginate_query(ctx, query, model, marker=marker)
mock_query_object.get.assert_called_once_with(marker)
args, _ = mock_paginate_query.call_args
self.assertIn('real_marker', args)
@mock.patch.object(db_api.utils, 'paginate_query')
def test_paginate_query_raises_invalid_sort_key(self, mock_paginate_query):
query = mock.Mock()
model = mock.Mock()
class InvalidSortKey(db_api.utils.InvalidSortKey):
@property
def message(_):
self.fail("_paginate_query() should not have tried to access "
".message attribute - it's deprecated in oslo.db "
"and removed from base Exception class in Py3K.")
mock_paginate_query.side_effect = InvalidSortKey()
self.assertRaises(exception.Invalid, db_api._paginate_query,
self.ctx, query, model, sort_keys=['foo'])
def test_get_sort_keys_returns_empty_list_if_no_keys(self):
sort_keys = None
mapping = {}
filtered_keys = db_api._get_sort_keys(sort_keys, mapping)
self.assertEqual([], filtered_keys)
def test_get_sort_keys_allow_single_key(self):
sort_key = 'foo'
mapping = {'foo': 'Foo'}
filtered_keys = db_api._get_sort_keys(sort_key, mapping)
self.assertEqual(['Foo'], filtered_keys)
def test_get_sort_keys_allow_multiple_keys(self):
sort_keys = ['foo', 'bar', 'nope']
mapping = {'foo': 'Foo', 'bar': 'Bar'}
filtered_keys = db_api._get_sort_keys(sort_keys, mapping)
self.assertIn('Foo', filtered_keys)
self.assertIn('Bar', filtered_keys)
self.assertEqual(2, len(filtered_keys))
def test_encryption(self):
stack_name = 'test_encryption'
stack = self._setup_test_stack(stack_name)[1]
self._mock_create()
stack.create()
stack = parser.Stack.load(self.ctx, stack.id)
cs = stack['WebServer']
cs.data_set('my_secret', 'fake secret', True)
rs = db_api.resource_get_by_name_and_stack(self.ctx,
'WebServer',
stack.id)
encrypted_key = rs.data[0]['value']
self.assertNotEqual(encrypted_key, "fake secret")
# Test private_key property returns decrypted value
self.assertEqual("fake secret", db_api.resource_data_get(
self.ctx, cs.id, 'my_secret'))
# do this twice to verify that the orm does not commit the unencrypted
# value.
self.assertEqual("fake secret", db_api.resource_data_get(
self.ctx, cs.id, 'my_secret'))
self.fc.servers.create.assert_called_once_with(
image=744, flavor=3, key_name='test',
name=mock.ANY,
security_groups=None,
userdata=mock.ANY, scheduler_hints=None,
meta=None, nics=None,
availability_zone=None,
block_device_mapping=None
)
def test_resource_data_delete(self):
stack = self._setup_test_stack('res_data_delete', UUID1)[1]
self._mock_create()
stack.create()
stack = parser.Stack.load(self.ctx, stack.id)
resource = stack['WebServer']
resource.data_set('test', 'test_data')
self.assertEqual('test_data', db_api.resource_data_get(
self.ctx, resource.id, 'test'))
db_api.resource_data_delete(self.ctx, resource.id, 'test')
self.assertRaises(exception.NotFound,
db_api.resource_data_get, self.ctx,
resource.id, 'test')
self.fc.servers.create.assert_called_once_with(
image=744, flavor=3, key_name='test',
name=mock.ANY,
security_groups=None,
userdata=mock.ANY, scheduler_hints=None,
meta=None, nics=None,
availability_zone=None,
block_device_mapping=None
)
def test_stack_get_by_name(self):
name = 'stack_get_by_name'
stack = self._setup_test_stack(name, UUID1,
stack_user_project_id=UUID2)[1]
st = db_api.stack_get_by_name(self.ctx, name)
self.assertEqual(UUID1, st.id)
self.ctx.tenant = UUID3
st = db_api.stack_get_by_name(self.ctx, name)
self.assertIsNone(st)
self.ctx.tenant = UUID2
st = db_api.stack_get_by_name(self.ctx, name)
self.assertEqual(UUID1, st.id)
stack.delete()
st = db_api.stack_get_by_name(self.ctx, name)
self.assertIsNone(st)
def test_stack_create_multiple(self):
name = 'stack_race'
stack = self._setup_test_stack(name, UUID1,
stack_user_project_id=UUID2)[1]
self.assertRaises(exception.StackExists,
self._setup_test_stack,
name, UUID2, stack_user_project_id=UUID2)
st = db_api.stack_get_by_name(self.ctx, name)
self.assertEqual(UUID1, st.id)
stack.delete()
self.assertIsNone(db_api.stack_get_by_name(self.ctx, name))
def test_nested_stack_get_by_name(self):
stack1 = self._setup_test_stack('neststack1', UUID1)[1]
stack2 = self._setup_test_stack('neststack2', UUID2,
owner_id=stack1.id)[1]
result = db_api.stack_get_by_name(self.ctx, 'neststack2')
self.assertEqual(UUID2, result.id)
stack2.delete()
result = db_api.stack_get_by_name(self.ctx, 'neststack2')
self.assertIsNone(result)
def test_stack_get_by_name_and_owner_id(self):
stack1 = self._setup_test_stack('ownstack1', UUID1,
stack_user_project_id=UUID3)[1]
stack2 = self._setup_test_stack('ownstack2', UUID2,
owner_id=stack1.id,
stack_user_project_id=UUID3)[1]
result = db_api.stack_get_by_name_and_owner_id(self.ctx, 'ownstack2',
None)
self.assertIsNone(result)
result = db_api.stack_get_by_name_and_owner_id(self.ctx, 'ownstack2',
stack1.id)
self.assertEqual(UUID2, result.id)
self.ctx.tenant = str(uuid.uuid4())
result = db_api.stack_get_by_name_and_owner_id(self.ctx, 'ownstack2',
None)
self.assertIsNone(result)
self.ctx.tenant = UUID3
result = db_api.stack_get_by_name_and_owner_id(self.ctx, 'ownstack2',
stack1.id)
self.assertEqual(UUID2, result.id)
stack2.delete()
result = db_api.stack_get_by_name_and_owner_id(self.ctx, 'ownstack2',
stack1.id)
self.assertIsNone(result)
def test_stack_get(self):
stack = self._setup_test_stack('stack_get', UUID1)[1]
st = db_api.stack_get(self.ctx, UUID1, show_deleted=False)
self.assertEqual(UUID1, st.id)
stack.delete()
st = db_api.stack_get(self.ctx, UUID1, show_deleted=False)
self.assertIsNone(st)
st = db_api.stack_get(self.ctx, UUID1, show_deleted=True)
self.assertEqual(UUID1, st.id)
def test_stack_get_status(self):
stack = self._setup_test_stack('stack_get_status', UUID1)[1]
st = db_api.stack_get_status(self.ctx, UUID1)
self.assertEqual(('CREATE', 'IN_PROGRESS', '', None), st)
stack.delete()
st = db_api.stack_get_status(self.ctx, UUID1)
self.assertEqual(
('DELETE', 'COMPLETE',
'Stack DELETE completed successfully', None),
st)
self.assertRaises(exception.NotFound,
db_api.stack_get_status, self.ctx, UUID2)
def test_stack_get_show_deleted_context(self):
stack = self._setup_test_stack('stack_get_deleted', UUID1)[1]
self.assertFalse(self.ctx.show_deleted)
st = db_api.stack_get(self.ctx, UUID1)
self.assertEqual(UUID1, st.id)
stack.delete()
st = db_api.stack_get(self.ctx, UUID1)
self.assertIsNone(st)
self.ctx.show_deleted = True
st = db_api.stack_get(self.ctx, UUID1)
self.assertEqual(UUID1, st.id)
def test_stack_get_all(self):
stacks = [self._setup_test_stack('stack_get_all_%d' % i, x)[1]
for i, x in enumerate(UUIDs)]
st_db = db_api.stack_get_all(self.ctx)
self.assertEqual(3, len(st_db))
stacks[0].delete()
st_db = db_api.stack_get_all(self.ctx)
self.assertEqual(2, len(st_db))
stacks[1].delete()
st_db = db_api.stack_get_all(self.ctx)
self.assertEqual(1, len(st_db))
def test_stack_get_all_show_deleted(self):
stacks = [self._setup_test_stack('stack_get_all_deleted_%d' % i, x)[1]
for i, x in enumerate(UUIDs)]
st_db = db_api.stack_get_all(self.ctx)
self.assertEqual(3, len(st_db))
stacks[0].delete()
st_db = db_api.stack_get_all(self.ctx)
self.assertEqual(2, len(st_db))
st_db = db_api.stack_get_all(self.ctx, show_deleted=True)
self.assertEqual(3, len(st_db))
def test_stack_get_all_show_nested(self):
stack1 = self._setup_test_stack('neststack_get_all_1', UUID1)[1]
stack2 = self._setup_test_stack('neststack_get_all_2', UUID2,
owner_id=stack1.id)[1]
# Backup stack should not be returned
stack3 = self._setup_test_stack('neststack_get_all_1*', UUID3,
owner_id=stack1.id,
backup=True)[1]
st_db = db_api.stack_get_all(self.ctx)
self.assertEqual(1, len(st_db))
self.assertEqual(stack1.id, st_db[0].id)
st_db = db_api.stack_get_all(self.ctx, show_nested=True)
self.assertEqual(2, len(st_db))
st_ids = [s.id for s in st_db]
self.assertNotIn(stack3.id, st_ids)
self.assertIn(stack1.id, st_ids)
self.assertIn(stack2.id, st_ids)
def test_stack_get_all_with_filters(self):
self._setup_test_stack('foo', UUID1)
self._setup_test_stack('baz', UUID2)
filters = {'name': 'foo'}
results = db_api.stack_get_all(self.ctx,
filters=filters)
self.assertEqual(1, len(results))
self.assertEqual('foo', results[0]['name'])
def test_stack_get_all_filter_matches_in_list(self):
self._setup_test_stack('wibble', UUID1)
self._setup_test_stack('bar', UUID2)
filters = {'name': ['bar', 'quux']}
results = db_api.stack_get_all(self.ctx,
filters=filters)
self.assertEqual(1, len(results))
self.assertEqual('bar', results[0]['name'])
def test_stack_get_all_returns_all_if_no_filters(self):
self._setup_test_stack('stack_get_all_no_filter1', UUID1)
self._setup_test_stack('stack_get_all_no_filter2', UUID2)
filters = None
results = db_api.stack_get_all(self.ctx,
filters=filters)
self.assertEqual(2, len(results))
def test_stack_get_all_default_sort_keys_and_dir(self):
stacks = [self._setup_test_stack('stacks_def_sort_%d' % i, x)[1]
for i, x in enumerate(UUIDs)]
st_db = db_api.stack_get_all(self.ctx)
self.assertEqual(3, len(st_db))
self.assertEqual(stacks[2].id, st_db[0].id)
self.assertEqual(stacks[1].id, st_db[1].id)
self.assertEqual(stacks[0].id, st_db[2].id)
def test_stack_get_all_default_sort_dir(self):
stacks = [self._setup_test_stack('stacks_def_sort_dir_%d' % i, x)[1]
for i, x in enumerate(UUIDs)]
st_db = db_api.stack_get_all(self.ctx, sort_dir='asc')
self.assertEqual(3, len(st_db))
self.assertEqual(stacks[0].id, st_db[0].id)
self.assertEqual(stacks[1].id, st_db[1].id)
self.assertEqual(stacks[2].id, st_db[2].id)
def test_stack_get_all_str_sort_keys(self):
stacks = [self._setup_test_stack('stacks_str_sort_keys_%d' % i, x)[1]
for i, x in enumerate(UUIDs)]
st_db = db_api.stack_get_all(self.ctx,
sort_keys='creation_time')
self.assertEqual(3, len(st_db))
self.assertEqual(stacks[0].id, st_db[0].id)
self.assertEqual(stacks[1].id, st_db[1].id)
self.assertEqual(stacks[2].id, st_db[2].id)
@mock.patch.object(db_api.utils, 'paginate_query')
def test_stack_get_all_filters_sort_keys(self, mock_paginate):
sort_keys = ['stack_name', 'stack_status', 'creation_time',
'updated_time', 'stack_owner']
db_api.stack_get_all(self.ctx, sort_keys=sort_keys)
args = mock_paginate.call_args[0]
used_sort_keys = set(args[3])
expected_keys = set(['name', 'status', 'created_at',
'updated_at', 'id'])
self.assertEqual(expected_keys, used_sort_keys)
def test_stack_get_all_marker(self):
stacks = [self._setup_test_stack('stacks_marker_%d' % i, x)[1]
for i, x in enumerate(UUIDs)]
st_db = db_api.stack_get_all(self.ctx, marker=stacks[1].id)
self.assertEqual(1, len(st_db))
self.assertEqual(stacks[0].id, st_db[0].id)
def test_stack_get_all_non_existing_marker(self):
[self._setup_test_stack('stacks_nonex_marker_%d' % i, x)[1]
for i, x in enumerate(UUIDs)]
uuid = 'this stack doesn\'t exist'
st_db = db_api.stack_get_all(self.ctx, marker=uuid)
self.assertEqual(3, len(st_db))
def test_stack_get_all_doesnt_mutate_sort_keys(self):
[self._setup_test_stack('stacks_sort_nomutate_%d' % i, x)[1]
for i, x in enumerate(UUIDs)]
sort_keys = ['id']
db_api.stack_get_all(self.ctx, sort_keys=sort_keys)
self.assertEqual(['id'], sort_keys)
def test_stack_get_all_hidden_tags(self):
cfg.CONF.set_override('hidden_stack_tags', ['hidden'])
stacks = [self._setup_test_stack('stacks_hidden_tags_%d' % i, x)[1]
for i, x in enumerate(UUIDs)]
stacks[0].tags = ['hidden']
stacks[0].store()
stacks[1].tags = ['random']
stacks[1].store()
st_db = db_api.stack_get_all(self.ctx, show_hidden=True)
self.assertEqual(3, len(st_db))
st_db_visible = db_api.stack_get_all(self.ctx, show_hidden=False)
self.assertEqual(2, len(st_db_visible))
# Make sure the hidden stack isn't in the stacks returned by
# stack_get_all_visible()
for stack in st_db_visible:
self.assertNotEqual(stacks[0].id, stack.id)
def test_stack_get_all_by_tags(self):
stacks = [self._setup_test_stack('stacks_tags_%d' % i, x)[1]
for i, x in enumerate(UUIDs)]
stacks[0].tags = ['tag1']
stacks[0].store()
stacks[1].tags = ['tag1', 'tag2']
stacks[1].store()
stacks[2].tags = ['tag1', 'tag2', 'tag3']
stacks[2].store()
st_db = db_api.stack_get_all(self.ctx, tags=['tag2'])
self.assertEqual(2, len(st_db))
st_db = db_api.stack_get_all(self.ctx, tags=['tag1', 'tag2'])
self.assertEqual(2, len(st_db))
st_db = db_api.stack_get_all(self.ctx, tags=['tag1', 'tag2', 'tag3'])
self.assertEqual(1, len(st_db))
def test_stack_get_all_by_tags_any(self):
stacks = [self._setup_test_stack('stacks_tags_any_%d' % i, x)[1]
for i, x in enumerate(UUIDs)]
stacks[0].tags = ['tag2']
stacks[0].store()
stacks[1].tags = ['tag1', 'tag2']
stacks[1].store()
stacks[2].tags = ['tag1', 'tag3']
stacks[2].store()
st_db = db_api.stack_get_all(self.ctx, tags_any=['tag1'])
self.assertEqual(2, len(st_db))
st_db = db_api.stack_get_all(self.ctx, tags_any=['tag1', 'tag2',
'tag3'])
self.assertEqual(3, len(st_db))
def test_stack_get_all_by_not_tags(self):
stacks = [self._setup_test_stack('stacks_not_tags_%d' % i, x)[1]
for i, x in enumerate(UUIDs)]
stacks[0].tags = ['tag1']
stacks[0].store()
stacks[1].tags = ['tag1', 'tag2']
stacks[1].store()
stacks[2].tags = ['tag1', 'tag2', 'tag3']
stacks[2].store()
st_db = db_api.stack_get_all(self.ctx, not_tags=['tag2'])
self.assertEqual(1, len(st_db))
st_db = db_api.stack_get_all(self.ctx, not_tags=['tag1', 'tag2'])
self.assertEqual(1, len(st_db))
st_db = db_api.stack_get_all(self.ctx, not_tags=['tag1', 'tag2',
'tag3'])
self.assertEqual(2, len(st_db))
def test_stack_get_all_by_not_tags_any(self):
stacks = [self._setup_test_stack('stacks_not_tags_any_%d' % i, x)[1]
for i, x in enumerate(UUIDs)]
stacks[0].tags = ['tag2']
stacks[0].store()
stacks[1].tags = ['tag1', 'tag2']
stacks[1].store()
stacks[2].tags = ['tag1', 'tag3']
stacks[2].store()
st_db = db_api.stack_get_all(self.ctx, not_tags_any=['tag1'])
self.assertEqual(1, len(st_db))
st_db = db_api.stack_get_all(self.ctx, not_tags_any=['tag1', 'tag2',
'tag3'])
self.assertEqual(0, len(st_db))
def test_stack_get_all_by_tag_with_pagination(self):
stacks = [self._setup_test_stack('stacks_tag_page_%d' % i, x)[1]
for i, x in enumerate(UUIDs)]
stacks[0].tags = ['tag1']
stacks[0].store()
stacks[1].tags = ['tag2']
stacks[1].store()
stacks[2].tags = ['tag1']
stacks[2].store()
st_db = db_api.stack_get_all(self.ctx, tags=['tag1'])
self.assertEqual(2, len(st_db))
st_db = db_api.stack_get_all(self.ctx, tags=['tag1'], limit=1)
self.assertEqual(1, len(st_db))
self.assertEqual(stacks[2].id, st_db[0].id)
st_db = db_api.stack_get_all(self.ctx, tags=['tag1'], limit=1,
marker=stacks[2].id)
self.assertEqual(1, len(st_db))
self.assertEqual(stacks[0].id, st_db[0].id)
def test_stack_get_all_by_tag_with_show_hidden(self):
cfg.CONF.set_override('hidden_stack_tags', ['hidden'])
stacks = [self._setup_test_stack('stacks_tag_hidden_%d' % i, x)[1]
for i, x in enumerate(UUIDs)]
stacks[0].tags = ['tag1']
stacks[0].store()
stacks[1].tags = ['hidden', 'tag1']
stacks[1].store()
st_db = db_api.stack_get_all(self.ctx, tags=['tag1'],
show_hidden=True)
self.assertEqual(2, len(st_db))
st_db = db_api.stack_get_all(self.ctx, tags=['tag1'],
show_hidden=False)
self.assertEqual(1, len(st_db))
def test_stack_count_all(self):
stacks = [self._setup_test_stack('stacks_count_%d' % i, x)[1]
for i, x in enumerate(UUIDs)]
st_db = db_api.stack_count_all(self.ctx)
self.assertEqual(3, st_db)
stacks[0].delete()
st_db = db_api.stack_count_all(self.ctx)
self.assertEqual(2, st_db)
# show deleted
st_db = db_api.stack_count_all(self.ctx, show_deleted=True)
self.assertEqual(3, st_db)
stacks[1].delete()
st_db = db_api.stack_count_all(self.ctx)
self.assertEqual(1, st_db)
# show deleted
st_db = db_api.stack_count_all(self.ctx, show_deleted=True)
self.assertEqual(3, st_db)
def test_count_all_hidden_tags(self):
cfg.CONF.set_override('hidden_stack_tags', ['hidden'])
stacks = [self._setup_test_stack('stacks_count_hid_tag_%d' % i, x)[1]
for i, x in enumerate(UUIDs)]
stacks[0].tags = ['hidden']
stacks[0].store()
stacks[1].tags = ['random']
stacks[1].store()
st_db = db_api.stack_count_all(self.ctx, show_hidden=True)
self.assertEqual(3, st_db)
st_db_visible = db_api.stack_count_all(self.ctx, show_hidden=False)
self.assertEqual(2, st_db_visible)
def test_count_all_by_tags(self):
stacks = [self._setup_test_stack('stacks_count_all_tag_%d' % i, x)[1]
for i, x in enumerate(UUIDs)]
stacks[0].tags = ['tag1']
stacks[0].store()
stacks[1].tags = ['tag2']
stacks[1].store()
stacks[2].tags = ['tag2']
stacks[2].store()
st_db = db_api.stack_count_all(self.ctx, tags=['tag1'])
self.assertEqual(1, st_db)
st_db = db_api.stack_count_all(self.ctx, tags=['tag2'])
self.assertEqual(2, st_db)
def test_count_all_by_tag_with_show_hidden(self):
cfg.CONF.set_override('hidden_stack_tags', ['hidden'])
stacks = [self._setup_test_stack('stacks_count_all_tagsh_%d' % i, x)[1]
for i, x in enumerate(UUIDs)]
stacks[0].tags = ['tag1']
stacks[0].store()
stacks[1].tags = ['hidden', 'tag1']
stacks[1].store()
st_db = db_api.stack_count_all(self.ctx, tags=['tag1'],
show_hidden=True)
self.assertEqual(2, st_db)
st_db = db_api.stack_count_all(self.ctx, tags=['tag1'],
show_hidden=False)
self.assertEqual(1, st_db)
def test_stack_count_all_with_filters(self):
self._setup_test_stack('sca_foo', UUID1)
self._setup_test_stack('sca_bar', UUID2)
filters = {'name': 'sca_bar'}
st_db = db_api.stack_count_all(self.ctx, filters=filters)
self.assertEqual(1, st_db)
def test_stack_count_all_show_nested(self):
stack1 = self._setup_test_stack('stack1', UUID1)[1]
self._setup_test_stack('stack2', UUID2,
owner_id=stack1.id)
# Backup stack should not be counted
self._setup_test_stack('stack1*', UUID3,
owner_id=stack1.id,
backup=True)
st_db = db_api.stack_count_all(self.ctx)
self.assertEqual(1, st_db)
st_db = db_api.stack_count_all(self.ctx, show_nested=True)
self.assertEqual(2, st_db)
def test_event_get_all_by_stack(self):
stack = self._setup_test_stack('stack_events', UUID1)[1]
self._mock_create()
stack.create()
stack._persist_state()
events = db_api.event_get_all_by_stack(self.ctx, UUID1)
self.assertEqual(4, len(events))
# test filter by resource_status
filters = {'resource_status': 'COMPLETE'}
events = db_api.event_get_all_by_stack(self.ctx, UUID1,
filters=filters)
self.assertEqual(2, len(events))
self.assertEqual('COMPLETE', events[0].resource_status)
self.assertEqual('COMPLETE', events[1].resource_status)
# test filter by resource_action
filters = {'resource_action': 'CREATE'}
events = db_api.event_get_all_by_stack(self.ctx, UUID1,
filters=filters)
self.assertEqual(4, len(events))
self.assertEqual('CREATE', events[0].resource_action)
self.assertEqual('CREATE', events[1].resource_action)
self.assertEqual('CREATE', events[2].resource_action)
self.assertEqual('CREATE', events[3].resource_action)
# test filter by resource_type
filters = {'resource_type': 'AWS::EC2::Instance'}
events = db_api.event_get_all_by_stack(self.ctx, UUID1,
filters=filters)
self.assertEqual(2, len(events))
self.assertEqual('AWS::EC2::Instance', events[0].resource_type)
self.assertEqual('AWS::EC2::Instance', events[1].resource_type)
filters = {'resource_type': 'OS::Nova::Server'}
events = db_api.event_get_all_by_stack(self.ctx, UUID1,
filters=filters)
self.assertEqual(0, len(events))
# test limit and marker
events_all = db_api.event_get_all_by_stack(self.ctx, UUID1)
marker = events_all[0].uuid
expected = events_all[1].uuid
events = db_api.event_get_all_by_stack(self.ctx, UUID1,
limit=1, marker=marker)
self.assertEqual(1, len(events))
self.assertEqual(expected, events[0].uuid)
self._mock_delete()
stack.delete()
# test filter by resource_status
filters = {'resource_status': 'COMPLETE'}
events = db_api.event_get_all_by_stack(self.ctx, UUID1,
filters=filters)
self.assertEqual(4, len(events))
self.assertEqual('COMPLETE', events[0].resource_status)
self.assertEqual('COMPLETE', events[1].resource_status)
self.assertEqual('COMPLETE', events[2].resource_status)
self.assertEqual('COMPLETE', events[3].resource_status)
# test filter by resource_action
filters = {'resource_action': 'DELETE',
'resource_status': 'COMPLETE'}
events = db_api.event_get_all_by_stack(self.ctx, UUID1,
filters=filters)
self.assertEqual(2, len(events))
self.assertEqual('DELETE', events[0].resource_action)
self.assertEqual('COMPLETE', events[0].resource_status)
self.assertEqual('DELETE', events[1].resource_action)
self.assertEqual('COMPLETE', events[1].resource_status)
# test limit and marker
events_all = db_api.event_get_all_by_stack(self.ctx, UUID1)
self.assertEqual(8, len(events_all))
marker = events_all[1].uuid
events2_uuid = events_all[2].uuid
events3_uuid = events_all[3].uuid
events = db_api.event_get_all_by_stack(self.ctx, UUID1,
limit=1, marker=marker)
self.assertEqual(1, len(events))
self.assertEqual(events2_uuid, events[0].uuid)
events = db_api.event_get_all_by_stack(self.ctx, UUID1,
limit=2, marker=marker)
self.assertEqual(2, len(events))
self.assertEqual(events2_uuid, events[0].uuid)
self.assertEqual(events3_uuid, events[1].uuid)
self.fc.servers.create.assert_called_once_with(
image=744, flavor=3, key_name='test',
name=mock.ANY,
security_groups=None,
userdata=mock.ANY, scheduler_hints=None,
meta=None, nics=None,
availability_zone=None,
block_device_mapping=None
)
def test_event_count_all_by_stack(self):
stack = self._setup_test_stack('stack_event_count', UUID1)[1]
self._mock_create()
stack.create()
stack._persist_state()
num_events = db_api.event_count_all_by_stack(self.ctx, UUID1)
self.assertEqual(4, num_events)
self._mock_delete()
stack.delete()
num_events = db_api.event_count_all_by_stack(self.ctx, UUID1)
self.assertEqual(8, num_events)
self.fc.servers.create.assert_called_once_with(
image=744, flavor=3, key_name='test',
name=mock.ANY,
security_groups=None,
userdata=mock.ANY, scheduler_hints=None,
meta=None, nics=None,
availability_zone=None,
block_device_mapping=None
)
def test_event_get_all_by_tenant(self):
stacks = [self._setup_test_stack('stack_ev_ten_%d' % i, x)[1]
for i, x in enumerate(UUIDs)]
self._mock_create()
[s.create() for s in stacks]
[s._persist_state() for s in stacks]
events = db_api.event_get_all_by_tenant(self.ctx)
self.assertEqual(12, len(events))
self._mock_delete()
[s.delete() for s in stacks]
events = db_api.event_get_all_by_tenant(self.ctx)
self.assertEqual(0, len(events))
self.fc.servers.create.assert_called_with(
image=744, flavor=3, key_name='test',
name=mock.ANY,
security_groups=None,
userdata=mock.ANY, scheduler_hints=None,
meta=None, nics=None,
availability_zone=None,
block_device_mapping=None
)
self.assertEqual(len(stacks), self.fc.servers.create.call_count)
def test_user_creds_password(self):
self.ctx.password = 'password'
self.ctx.trust_id = None
self.ctx.region_name = 'RegionOne'
db_creds = db_api.user_creds_create(self.ctx)
load_creds = db_api.user_creds_get(self.ctx, db_creds['id'])
self.assertEqual('test_username', load_creds.get('username'))
self.assertEqual('password', load_creds.get('password'))
self.assertEqual('test_tenant', load_creds.get('tenant'))
self.assertEqual('test_tenant_id', load_creds.get('tenant_id'))
self.assertEqual('RegionOne', load_creds.get('region_name'))
self.assertIsNotNone(load_creds.get('created_at'))
self.assertIsNone(load_creds.get('updated_at'))
self.assertEqual('http://server.test:5000/v2.0',
load_creds.get('auth_url'))
self.assertIsNone(load_creds.get('trust_id'))
self.assertIsNone(load_creds.get('trustor_user_id'))
def test_user_creds_password_too_long(self):
self.ctx.trust_id = None
self.ctx.password = 'O123456789O1234567' * 20
error = self.assertRaises(exception.Error,
db_api.user_creds_create,
self.ctx)
self.assertIn('Length of OS_PASSWORD after encryption exceeds '
'Heat limit (255 chars)', str(error))
def test_user_creds_trust(self):
self.ctx.username = None
self.ctx.password = None
self.ctx.trust_id = 'atrust123'
self.ctx.trustor_user_id = 'atrustor123'
self.ctx.tenant = 'atenant123'
self.ctx.project_name = 'atenant'
self.ctx.auth_url = 'anauthurl'
self.ctx.region_name = 'aregion'
db_creds = db_api.user_creds_create(self.ctx)
load_creds = db_api.user_creds_get(self.ctx, db_creds['id'])
self.assertIsNone(load_creds.get('username'))
self.assertIsNone(load_creds.get('password'))
self.assertIsNotNone(load_creds.get('created_at'))
self.assertIsNone(load_creds.get('updated_at'))
self.assertEqual('anauthurl', load_creds.get('auth_url'))
self.assertEqual('aregion', load_creds.get('region_name'))
self.assertEqual('atenant123', load_creds.get('tenant_id'))
self.assertEqual('atenant', load_creds.get('tenant'))
self.assertEqual('atrust123', load_creds.get('trust_id'))
self.assertEqual('atrustor123', load_creds.get('trustor_user_id'))
def test_user_creds_none(self):
self.ctx.username = None
self.ctx.password = None
self.ctx.trust_id = None
self.ctx.region_name = None
db_creds = db_api.user_creds_create(self.ctx)
load_creds = db_api.user_creds_get(self.ctx, db_creds['id'])
self.assertIsNone(load_creds.get('username'))
self.assertIsNone(load_creds.get('password'))
self.assertIsNone(load_creds.get('trust_id'))
self.assertIsNone(load_creds.get('region_name'))
def test_software_config_create(self):
tenant_id = self.ctx.tenant_id
config = db_api.software_config_create(
self.ctx, {'name': 'config_mysql',
'tenant': tenant_id})
self.assertIsNotNone(config)
self.assertEqual('config_mysql', config.name)
self.assertEqual(tenant_id, config.tenant)
def test_software_config_get(self):
self.assertRaises(
exception.NotFound,
db_api.software_config_get,
self.ctx,
str(uuid.uuid4()))
conf = ('#!/bin/bash\n'
'echo "$bar and $foo"\n')
config = {
'inputs': [{'name': 'foo'}, {'name': 'bar'}],
'outputs': [{'name': 'result'}],
'config': conf,
'options': {}
}
tenant_id = self.ctx.tenant_id
values = {'name': 'config_mysql',
'tenant': tenant_id,
'group': 'Heat::Shell',
'config': config}
config = db_api.software_config_create(
self.ctx, values)
config_id = config.id
config = db_api.software_config_get(self.ctx, config_id)
self.assertIsNotNone(config)
self.assertEqual('config_mysql', config.name)
self.assertEqual(tenant_id, config.tenant)
self.assertEqual('Heat::Shell', config.group)
self.assertEqual(conf, config.config['config'])
self.ctx.tenant = None
self.assertRaises(
exception.NotFound,
db_api.software_config_get,
self.ctx,
config_id)
# admin can get the software_config
admin_ctx = utils.dummy_context(is_admin=True,
tenant_id='admin_tenant')
config = db_api.software_config_get(admin_ctx, config_id)
self.assertIsNotNone(config)
def _create_software_config_record(self):
tenant_id = self.ctx.tenant_id
software_config = db_api.software_config_create(
self.ctx, {'name': 'config_mysql',
'tenant': tenant_id})
self.assertIsNotNone(software_config)
return software_config.id
def _test_software_config_get_all(self, get_ctx=None):
self.assertEqual([], db_api.software_config_get_all(self.ctx))
scf_id = self._create_software_config_record()
software_configs = db_api.software_config_get_all(get_ctx or self.ctx)
self.assertEqual(1, len(software_configs))
self.assertEqual(scf_id, software_configs[0].id)
def test_software_config_get_all(self):
self._test_software_config_get_all()
def test_software_config_get_all_by_admin(self):
admin_ctx = utils.dummy_context(is_admin=True,
tenant_id='admin_tenant')
self._test_software_config_get_all(get_ctx=admin_ctx)
def test_software_config_delete(self):
scf_id = self._create_software_config_record()
cfg = db_api.software_config_get(self.ctx, scf_id)
self.assertIsNotNone(cfg)
db_api.software_config_delete(self.ctx, scf_id)
err = self.assertRaises(
exception.NotFound,
db_api.software_config_get,
self.ctx,
scf_id)
self.assertIn(scf_id, str(err))
err = self.assertRaises(
exception.NotFound, db_api.software_config_delete,
self.ctx, scf_id)
self.assertIn(scf_id, str(err))
def test_software_config_delete_by_admin(self):
scf_id = self._create_software_config_record()
cfg = db_api.software_config_get(self.ctx, scf_id)
self.assertIsNotNone(cfg)
admin_ctx = utils.dummy_context(is_admin=True,
tenant_id='admin_tenant')
db_api.software_config_delete(admin_ctx, scf_id)
def test_software_config_delete_not_allowed(self):
tenant_id = self.ctx.tenant_id
config = db_api.software_config_create(
self.ctx, {'name': 'config_mysql',
'tenant': tenant_id})
config_id = config.id
values = {
'tenant': tenant_id,
'stack_user_project_id': str(uuid.uuid4()),
'config_id': config_id,
'server_id': str(uuid.uuid4()),
}
db_api.software_deployment_create(self.ctx, values)
err = self.assertRaises(
exception.InvalidRestrictedAction, db_api.software_config_delete,
self.ctx, config_id)
msg = ("Software config with id %s can not be deleted as it is "
"referenced" % config_id)
self.assertIn(msg, str(err))
def _deployment_values(self):
tenant_id = self.ctx.tenant_id
stack_user_project_id = str(uuid.uuid4())
config_id = db_api.software_config_create(
self.ctx, {'name': 'config_mysql', 'tenant': tenant_id}).id
server_id = str(uuid.uuid4())
input_values = {'foo': 'fooooo', 'bar': 'baaaaa'}
values = {
'tenant': tenant_id,
'stack_user_project_id': stack_user_project_id,
'config_id': config_id,
'server_id': server_id,
'input_values': input_values
}
return values
def test_software_deployment_create(self):
values = self._deployment_values()
deployment = db_api.software_deployment_create(self.ctx, values)
self.assertIsNotNone(deployment)
self.assertEqual(values['tenant'], deployment.tenant)
def test_software_deployment_get(self):
self.assertRaises(
exception.NotFound,
db_api.software_deployment_get,
self.ctx,
str(uuid.uuid4()))
values = self._deployment_values()
deployment = db_api.software_deployment_create(self.ctx, values)
self.assertIsNotNone(deployment)
deployment_id = deployment.id
deployment = db_api.software_deployment_get(self.ctx, deployment_id)
self.assertIsNotNone(deployment)
self.assertEqual(values['tenant'], deployment.tenant)
self.assertEqual(values['config_id'], deployment.config_id)
self.assertEqual(values['server_id'], deployment.server_id)
self.assertEqual(values['input_values'], deployment.input_values)
self.assertEqual(
values['stack_user_project_id'], deployment.stack_user_project_id)
# assert not found with invalid context tenant
self.ctx.tenant = str(uuid.uuid4())
self.assertRaises(
exception.NotFound,
db_api.software_deployment_get,
self.ctx,
deployment_id)
# assert found with stack_user_project_id context tenant
self.ctx.tenant = deployment.stack_user_project_id
deployment = db_api.software_deployment_get(self.ctx, deployment_id)
self.assertIsNotNone(deployment)
self.assertEqual(values['tenant'], deployment.tenant)
# admin can get the deployments
admin_ctx = utils.dummy_context(is_admin=True,
tenant_id='admin_tenant')
deployment = db_api.software_deployment_get(admin_ctx, deployment_id)
self.assertIsNotNone(deployment)
def test_software_deployment_get_all(self):
self.assertEqual([], db_api.software_deployment_get_all(self.ctx))
values = self._deployment_values()
deployment = db_api.software_deployment_create(self.ctx, values)
self.assertIsNotNone(deployment)
deployments = db_api.software_deployment_get_all(self.ctx)
self.assertEqual(1, len(deployments))
self.assertEqual(deployment.id, deployments[0].id)
deployments = db_api.software_deployment_get_all(
self.ctx, server_id=values['server_id'])
self.assertEqual(1, len(deployments))
self.assertEqual(deployment.id, deployments[0].id)
deployments = db_api.software_deployment_get_all(
self.ctx, server_id=str(uuid.uuid4()))
self.assertEqual([], deployments)
# admin can get the deployments of other tenants
admin_ctx = utils.dummy_context(is_admin=True,
tenant_id='admin_tenant')
deployments = db_api.software_deployment_get_all(admin_ctx)
self.assertEqual(1, len(deployments))
def test_software_deployment_update(self):
deployment_id = str(uuid.uuid4())
err = self.assertRaises(exception.NotFound,
db_api.software_deployment_update,
self.ctx, deployment_id, values={})
self.assertIn(deployment_id, str(err))
values = self._deployment_values()
deployment = db_api.software_deployment_create(self.ctx, values)
deployment_id = deployment.id
values = {'status': 'COMPLETED'}
deployment = db_api.software_deployment_update(
self.ctx, deployment_id, values)
self.assertIsNotNone(deployment)
self.assertEqual(values['status'], deployment.status)
admin_ctx = utils.dummy_context(is_admin=True,
tenant_id='admin_tenant')
values = {'status': 'FAILED'}
deployment = db_api.software_deployment_update(
admin_ctx, deployment_id, values)
self.assertIsNotNone(deployment)
self.assertEqual(values['status'], deployment.status)
def _test_software_deployment_delete(self, test_ctx=None):
deployment_id = str(uuid.uuid4())
err = self.assertRaises(exception.NotFound,
db_api.software_deployment_delete,
self.ctx, deployment_id)
self.assertIn(deployment_id, str(err))
values = self._deployment_values()
deployment = db_api.software_deployment_create(self.ctx, values)
deployment_id = deployment.id
test_ctx = test_ctx or self.ctx
deployment = db_api.software_deployment_get(test_ctx, deployment_id)
self.assertIsNotNone(deployment)
db_api.software_deployment_delete(test_ctx, deployment_id)
err = self.assertRaises(
exception.NotFound,
db_api.software_deployment_get,
test_ctx,
deployment_id)
self.assertIn(deployment_id, str(err))
def test_software_deployment_delete(self):
self._test_software_deployment_delete()
def test_software_deployment_delete_by_admin(self):
admin_ctx = utils.dummy_context(is_admin=True,
tenant_id='admin_tenant')
self._test_software_deployment_delete(test_ctx=admin_ctx)
def test_snapshot_create(self):
template = create_raw_template(self.ctx)
user_creds = create_user_creds(self.ctx)
stack = create_stack(self.ctx, template, user_creds)
values = {'tenant': self.ctx.tenant_id, 'status': 'IN_PROGRESS',
'stack_id': stack.id}
snapshot = db_api.snapshot_create(self.ctx, values)
self.assertIsNotNone(snapshot)
self.assertEqual(values['tenant'], snapshot.tenant)
def test_snapshot_create_with_name(self):
template = create_raw_template(self.ctx)
user_creds = create_user_creds(self.ctx)
stack = create_stack(self.ctx, template, user_creds)
values = {'tenant': self.ctx.tenant_id, 'status': 'IN_PROGRESS',
'stack_id': stack.id, 'name': 'snap1'}
snapshot = db_api.snapshot_create(self.ctx, values)
self.assertIsNotNone(snapshot)
self.assertEqual(values['tenant'], snapshot.tenant)
self.assertEqual('snap1', snapshot.name)
def test_snapshot_get_not_found(self):
self.assertRaises(
exception.NotFound,
db_api.snapshot_get,
self.ctx,
str(uuid.uuid4()))
def test_snapshot_get(self):
template = create_raw_template(self.ctx)
user_creds = create_user_creds(self.ctx)
stack = create_stack(self.ctx, template, user_creds)
values = {'tenant': self.ctx.tenant_id, 'status': 'IN_PROGRESS',
'stack_id': stack.id}
snapshot = db_api.snapshot_create(self.ctx, values)
self.assertIsNotNone(snapshot)
snapshot_id = snapshot.id
snapshot = db_api.snapshot_get(self.ctx, snapshot_id)
self.assertIsNotNone(snapshot)
self.assertEqual(values['tenant'], snapshot.tenant)
self.assertEqual(values['status'], snapshot.status)
self.assertIsNotNone(snapshot.created_at)
def test_snapshot_get_by_another_stack(self):
template = create_raw_template(self.ctx)
user_creds = create_user_creds(self.ctx)
stack = create_stack(self.ctx, template, user_creds)
stack1 = create_stack(self.ctx, template, user_creds)
values = {'tenant': self.ctx.tenant_id, 'status': 'IN_PROGRESS',
'stack_id': stack.id}
snapshot = db_api.snapshot_create(self.ctx, values)
self.assertIsNotNone(snapshot)
snapshot_id = snapshot.id
self.assertRaises(exception.SnapshotNotFound,
db_api.snapshot_get_by_stack,
self.ctx, snapshot_id, stack1)
def test_snapshot_get_not_found_invalid_tenant(self):
template = create_raw_template(self.ctx)
user_creds = create_user_creds(self.ctx)
stack = create_stack(self.ctx, template, user_creds)
values = {'tenant': self.ctx.tenant_id, 'status': 'IN_PROGRESS',
'stack_id': stack.id}
snapshot = db_api.snapshot_create(self.ctx, values)
self.ctx.tenant = str(uuid.uuid4())
self.assertRaises(
exception.NotFound,
db_api.snapshot_get,
self.ctx,
snapshot.id)
def test_snapshot_update_not_found(self):
snapshot_id = str(uuid.uuid4())
err = self.assertRaises(exception.NotFound,
db_api.snapshot_update,
self.ctx, snapshot_id, values={})
self.assertIn(snapshot_id, str(err))
def test_snapshot_update(self):
template = create_raw_template(self.ctx)
user_creds = create_user_creds(self.ctx)
stack = create_stack(self.ctx, template, user_creds)
values = {'tenant': self.ctx.tenant_id, 'status': 'IN_PROGRESS',
'stack_id': stack.id}
snapshot = db_api.snapshot_create(self.ctx, values)
snapshot_id = snapshot.id
values = {'status': 'COMPLETED'}
snapshot = db_api.snapshot_update(self.ctx, snapshot_id, values)
self.assertIsNotNone(snapshot)
self.assertEqual(values['status'], snapshot.status)
def test_snapshot_delete_not_found(self):
snapshot_id = str(uuid.uuid4())
err = self.assertRaises(exception.NotFound,
db_api.snapshot_delete,
self.ctx, snapshot_id)
self.assertIn(snapshot_id, str(err))
def test_snapshot_delete(self):
template = create_raw_template(self.ctx)
user_creds = create_user_creds(self.ctx)
stack = create_stack(self.ctx, template, user_creds)
values = {'tenant': self.ctx.tenant_id, 'status': 'IN_PROGRESS',
'stack_id': stack.id}
snapshot = db_api.snapshot_create(self.ctx, values)
snapshot_id = snapshot.id
snapshot = db_api.snapshot_get(self.ctx, snapshot_id)
self.assertIsNotNone(snapshot)
db_api.snapshot_delete(self.ctx, snapshot_id)
err = self.assertRaises(
exception.NotFound,
db_api.snapshot_get,
self.ctx,
snapshot_id)
self.assertIn(snapshot_id, str(err))
def test_snapshot_get_all(self):
template = create_raw_template(self.ctx)
user_creds = create_user_creds(self.ctx)
stack = create_stack(self.ctx, template, user_creds)
values = {'tenant': self.ctx.tenant_id, 'status': 'IN_PROGRESS',
'stack_id': stack.id}
snapshot = db_api.snapshot_create(self.ctx, values)
self.assertIsNotNone(snapshot)
[snapshot] = db_api.snapshot_get_all(self.ctx, stack.id)
self.assertIsNotNone(snapshot)
self.assertEqual(values['tenant'], snapshot.tenant)
self.assertEqual(values['status'], snapshot.status)
self.assertIsNotNone(snapshot.created_at)
def create_raw_template(context, **kwargs):
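    """Create a raw_template DB record from the WordPress test template.

    A raw_template_files object is created and attached unless the caller
    passes its own 'files' or 'files_id' in kwargs.
    """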
t = template_format.parse(wp_template)
template = {
'template': t,
}
if 'files' not in kwargs and 'files_id' not in kwargs:
# modern raw_templates have associated raw_template_files DB obj
tf = template_files.TemplateFiles({'foo': 'bar'})
tf.store(context)
kwargs['files_id'] = tf.files_id
template.update(kwargs)
return db_api.raw_template_create(context, template)
def create_user_creds(ctx, **kwargs):
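    """Create a user_creds DB record from a copy of the given context.

    Any kwargs override the corresponding context fields before storing.
    """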
ctx_dict = ctx.to_dict()
ctx_dict.update(kwargs)
ctx = context.RequestContext.from_dict(ctx_dict)
return db_api.user_creds_create(ctx)
def create_stack(ctx, template, user_creds, **kwargs):
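    """Create a stack DB record pointing at the given template and creds.

    Default column values can be overridden through kwargs; passing a
    'tenant' also switches the context's tenant_id before creating.
    """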
values = {
'name': short_id.generate_id(),
'raw_template_id': template.id,
'username': ctx.username,
'tenant': ctx.tenant_id,
'action': 'create',
'status': 'complete',
'status_reason': 'create_complete',
'parameters': {},
'user_creds_id': user_creds['id'],
'owner_id': None,
'backup': False,
'timeout': '60',
'disable_rollback': 0,
'current_traversal': 'dummy-uuid',
'prev_raw_template': None
}
values.update(kwargs)
if 'tenant' in kwargs:
ctx.tenant_id = kwargs['tenant']
return db_api.stack_create(ctx, values)
def create_resource(ctx, stack, legacy_prop_data=False, **kwargs):
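    """Create a resource DB record belonging to the given stack.

    Properties data is stored in a resource_properties_data row unless
    legacy_prop_data is True, in which case the legacy properties_data
    column is populated instead.
    """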
phy_res_id = UUID1
if 'phys_res_id' in kwargs:
phy_res_id = kwargs.pop('phys_res_id')
if not legacy_prop_data:
rpd = db_api.resource_prop_data_create(ctx, {'data': {'foo1': 'bar1'},
'encrypted': False})
values = {
'name': 'test_resource_name',
'physical_resource_id': phy_res_id,
'action': 'create',
'status': 'complete',
'status_reason': 'create_complete',
'rsrc_metadata': json.loads('{"foo": "123"}'),
'stack_id': stack.id,
'atomic_key': 1,
}
if not legacy_prop_data:
values['rsrc_prop_data'] = rpd
else:
values['properties_data'] = {'foo1': 'bar1'}
values.update(kwargs)
return db_api.resource_create(ctx, values)
def create_resource_data(ctx, resource, **kwargs):
values = {
'key': 'test_resource_key',
'value': 'test_value',
'redact': 0,
}
values.update(kwargs)
return db_api.resource_data_set(ctx, resource.id, **values)
def create_resource_prop_data(ctx, **kwargs):
values = {
'data': {'foo1': 'bar1'},
'encrypted': False
}
values.update(kwargs)
return db_api.resource_prop_data_create(ctx, **values)
def create_event(ctx, legacy_prop_data=False, **kwargs):
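    """Create an event DB record, optionally using legacy properties data."""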
if not legacy_prop_data:
rpd = db_api.resource_prop_data_create(ctx,
{'data': {'foo2': 'ev_bar'},
'encrypted': False})
values = {
'stack_id': 'test_stack_id',
'resource_action': 'create',
'resource_status': 'complete',
'resource_name': 'res',
'physical_resource_id': UUID1,
'resource_status_reason': "create_complete",
}
if not legacy_prop_data:
values['rsrc_prop_data'] = rpd
else:
values['resource_properties'] = {'foo2': 'ev_bar'}
values.update(kwargs)
return db_api.event_create(ctx, values)
def create_service(ctx, **kwargs):
values = {
'id': '7079762f-c863-4954-ba61-9dccb68c57e2',
'engine_id': 'f9aff81e-bc1f-4119-941d-ad1ea7f31d19',
'host': 'engine-1',
'hostname': 'host1.devstack.org',
'binary': 'heat-engine',
'topic': 'engine',
'report_interval': 60}
values.update(kwargs)
return db_api.service_create(ctx, values)
def create_sync_point(ctx, **kwargs):
values = {'entity_id': '0782c463-064a-468d-98fd-442efb638e3a',
'is_update': True,
'traversal_id': '899ff81e-fc1f-41f9-f41d-ad1ea7f31d19',
'atomic_key': 0,
'stack_id': 'f6359498-764b-49e7-a515-ad31cbef885b',
'input_data': {}}
values.update(kwargs)
return db_api.sync_point_create(ctx, values)
class DBAPIRawTemplateTest(common.HeatTestCase):
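    """Tests for the raw_template db_api functions."""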
def setUp(self):
super(DBAPIRawTemplateTest, self).setUp()
self.ctx = utils.dummy_context()
def test_raw_template_create(self):
t = template_format.parse(wp_template)
tp = create_raw_template(self.ctx, template=t)
self.assertIsNotNone(tp.id)
self.assertEqual(t, tp.template)
def test_raw_template_get(self):
t = template_format.parse(wp_template)
tp = create_raw_template(self.ctx, template=t)
template = db_api.raw_template_get(self.ctx, tp.id)
self.assertEqual(tp.id, template.id)
self.assertEqual(tp.template, template.template)
def test_raw_template_update(self):
another_wp_template = '''
{
"AWSTemplateFormatVersion" : "2010-09-09",
"Description" : "WordPress",
"Parameters" : {
"KeyName" : {
"Description" : "KeyName",
"Type" : "String",
"Default" : "test"
}
},
"Resources" : {
"WebServer": {
"Type": "AWS::EC2::Instance",
"Properties": {
"ImageId" : "fedora-20.x86_64.qcow2",
"InstanceType" : "m1.xlarge",
"KeyName" : "test",
"UserData" : "wordpress"
}
}
}
}
'''
new_t = template_format.parse(another_wp_template)
new_files = {
'foo': 'bar',
'myfile': 'file:///home/somefile'
}
new_values = {
'template': new_t,
'files': new_files
}
orig_tp = create_raw_template(self.ctx)
updated_tp = db_api.raw_template_update(self.ctx,
orig_tp.id, new_values)
self.assertEqual(orig_tp.id, updated_tp.id)
self.assertEqual(new_t, updated_tp.template)
self.assertEqual(new_files, updated_tp.files)
def test_raw_template_delete(self):
t = template_format.parse(wp_template)
tp = create_raw_template(self.ctx, template=t)
db_api.raw_template_delete(self.ctx, tp.id)
self.assertRaises(exception.NotFound, db_api.raw_template_get,
self.ctx, tp.id)
class DBAPIUserCredsTest(common.HeatTestCase):
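    """Tests for the user_creds db_api functions."""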
def setUp(self):
super(DBAPIUserCredsTest, self).setUp()
self.ctx = utils.dummy_context()
def test_user_creds_create_trust(self):
user_creds = create_user_creds(self.ctx, trust_id='test_trust_id',
trustor_user_id='trustor_id')
self.assertIsNotNone(user_creds['id'])
self.assertEqual('test_trust_id', user_creds['trust_id'])
self.assertEqual('trustor_id', user_creds['trustor_user_id'])
self.assertIsNone(user_creds['username'])
self.assertIsNone(user_creds['password'])
self.assertEqual(self.ctx.project_name, user_creds['tenant'])
self.assertEqual(self.ctx.tenant_id, user_creds['tenant_id'])
def test_user_creds_create_password(self):
user_creds = create_user_creds(self.ctx)
self.assertIsNotNone(user_creds['id'])
self.assertEqual(self.ctx.password, user_creds['password'])
def test_user_creds_get(self):
user_creds = create_user_creds(self.ctx)
ret_user_creds = db_api.user_creds_get(self.ctx, user_creds['id'])
self.assertEqual(user_creds['password'],
ret_user_creds['password'])
def test_user_creds_get_noexist(self):
self.assertIsNone(db_api.user_creds_get(self.ctx, 123456))
def test_user_creds_delete(self):
user_creds = create_user_creds(self.ctx)
self.assertIsNotNone(user_creds['id'])
db_api.user_creds_delete(self.ctx, user_creds['id'])
creds = db_api.user_creds_get(self.ctx, user_creds['id'])
self.assertIsNone(creds)
mock_delete = self.patchobject(session.Session, 'delete')
err = self.assertRaises(
exception.NotFound, db_api.user_creds_delete,
self.ctx, user_creds['id'])
exp_msg = ('Attempt to delete user creds with id '
'%s that does not exist' % user_creds['id'])
self.assertIn(exp_msg, str(err))
self.assertEqual(0, mock_delete.call_count)
def test_user_creds_delete_retries(self):
mock_delete = self.patchobject(session.Session, 'delete')
        # the first two calls raise StaleDataError, so the delete is retried
        # and succeeds on the third attempt
mock_delete.side_effect = [exc.StaleDataError,
exc.StaleDataError,
None]
user_creds = create_user_creds(self.ctx)
self.assertIsNotNone(user_creds['id'])
self.assertIsNone(
db_api.user_creds_delete(self.ctx, user_creds['id']))
self.assertEqual(3, mock_delete.call_count)
        # other errors are not retried, so only one more delete is attempted
mock_delete.side_effect = [exc.UnmappedError]
self.assertRaises(exc.UnmappedError, db_api.user_creds_delete,
self.ctx, user_creds['id'])
self.assertEqual(4, mock_delete.call_count)
class DBAPIStackTagTest(common.HeatTestCase):
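    """Tests for the stack tag db_api functions."""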
def setUp(self):
super(DBAPIStackTagTest, self).setUp()
self.ctx = utils.dummy_context()
self.template = create_raw_template(self.ctx)
self.user_creds = create_user_creds(self.ctx)
self.stack = create_stack(self.ctx, self.template, self.user_creds)
def test_stack_tags_set(self):
tags = db_api.stack_tags_set(self.ctx, self.stack.id, ['tag1', 'tag2'])
self.assertEqual(self.stack.id, tags[0].stack_id)
self.assertEqual('tag1', tags[0].tag)
tags = db_api.stack_tags_set(self.ctx, self.stack.id, [])
self.assertIsNone(tags)
def test_stack_tags_get(self):
db_api.stack_tags_set(self.ctx, self.stack.id, ['tag1', 'tag2'])
tags = db_api.stack_tags_get(self.ctx, self.stack.id)
self.assertEqual(self.stack.id, tags[0].stack_id)
self.assertEqual('tag1', tags[0].tag)
tags = db_api.stack_tags_get(self.ctx, UUID1)
self.assertIsNone(tags)
def test_stack_tags_delete(self):
db_api.stack_tags_set(self.ctx, self.stack.id, ['tag1', 'tag2'])
db_api.stack_tags_delete(self.ctx, self.stack.id)
tags = db_api.stack_tags_get(self.ctx, self.stack.id)
self.assertIsNone(tags)
class DBAPIStackTest(common.HeatTestCase):
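    """Tests for the stack db_api functions, including purge_deleted."""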
def setUp(self):
super(DBAPIStackTest, self).setUp()
self.ctx = utils.dummy_context()
self.template = create_raw_template(self.ctx)
self.user_creds = create_user_creds(self.ctx)
def test_stack_create(self):
stack = create_stack(self.ctx, self.template, self.user_creds)
self.assertIsNotNone(stack.id)
self.assertEqual(12, len(stack.name))
self.assertEqual(self.template.id, stack.raw_template_id)
self.assertEqual(self.ctx.username, stack.username)
self.assertEqual(self.ctx.tenant_id, stack.tenant)
self.assertEqual('create', stack.action)
self.assertEqual('complete', stack.status)
self.assertEqual('create_complete', stack.status_reason)
self.assertEqual({}, stack.parameters)
self.assertEqual(self.user_creds['id'], stack.user_creds_id)
self.assertIsNone(stack.owner_id)
self.assertEqual('60', stack.timeout)
self.assertFalse(stack.disable_rollback)
def test_stack_delete(self):
stack = create_stack(self.ctx, self.template, self.user_creds)
stack_id = stack.id
resource = create_resource(self.ctx, stack)
db_api.stack_delete(self.ctx, stack_id)
self.assertIsNone(db_api.stack_get(self.ctx, stack_id,
show_deleted=False))
self.assertRaises(exception.NotFound, db_api.resource_get,
self.ctx, resource.id)
self.assertRaises(exception.NotFound, db_api.stack_delete,
self.ctx, stack_id)
# Testing soft delete
ret_stack = db_api.stack_get(self.ctx, stack_id, show_deleted=True)
self.assertIsNotNone(ret_stack)
self.assertEqual(stack_id, ret_stack.id)
self.assertEqual(12, len(ret_stack.name))
# Testing child resources deletion
self.assertRaises(exception.NotFound, db_api.resource_get,
self.ctx, resource.id)
def test_stack_update(self):
stack = create_stack(self.ctx, self.template, self.user_creds)
values = {
'name': 'db_test_stack_name2',
'action': 'update',
'status': 'failed',
'status_reason': "update_failed",
'timeout': '90',
'current_traversal': 'another-dummy-uuid',
}
db_api.stack_update(self.ctx, stack.id, values)
stack = db_api.stack_get(self.ctx, stack.id)
self.assertEqual('db_test_stack_name2', stack.name)
self.assertEqual('update', stack.action)
self.assertEqual('failed', stack.status)
self.assertEqual('update_failed', stack.status_reason)
self.assertEqual(90, stack.timeout)
self.assertEqual('another-dummy-uuid', stack.current_traversal)
self.assertRaises(exception.NotFound, db_api.stack_update, self.ctx,
UUID2, values)
def test_stack_update_matches_traversal_id(self):
stack = create_stack(self.ctx, self.template, self.user_creds)
values = {
'current_traversal': 'another-dummy-uuid',
}
updated = db_api.stack_update(self.ctx, stack.id, values,
exp_trvsl='dummy-uuid')
self.assertTrue(updated)
stack = db_api.stack_get(self.ctx, stack.id)
self.assertEqual('another-dummy-uuid', stack.current_traversal)
        # update succeeds again when the expected traversal matches
matching_uuid = 'another-dummy-uuid'
updated = db_api.stack_update(self.ctx, stack.id, values,
exp_trvsl=matching_uuid)
self.assertTrue(updated)
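        # update fails when the expected traversal does not match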
diff_uuid = 'some-other-dummy-uuid'
updated = db_api.stack_update(self.ctx, stack.id, values,
exp_trvsl=diff_uuid)
self.assertFalse(updated)
@mock.patch.object(time, 'sleep')
def test_stack_update_retries_on_deadlock(self, sleep):
stack = create_stack(self.ctx, self.template, self.user_creds)
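        # every update attempt hits a deadlock; the call is retried until the
        # retry budget is exhausted, for 21 attempts in total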
with mock.patch('sqlalchemy.orm.query.Query.update',
side_effect=db_exception.DBDeadlock) as mock_update:
self.assertRaises(db_exception.DBDeadlock,
db_api.stack_update, self.ctx, stack.id, {})
self.assertEqual(21, mock_update.call_count)
def test_stack_set_status_release_lock(self):
stack = create_stack(self.ctx, self.template, self.user_creds)
values = {
'name': 'db_test_stack_name2',
'action': 'update',
'status': 'failed',
'status_reason': "update_failed",
'timeout': '90',
'current_traversal': 'another-dummy-uuid',
}
db_api.stack_lock_create(self.ctx, stack.id, UUID1)
observed = db_api.persist_state_and_release_lock(self.ctx, stack.id,
UUID1, values)
self.assertIsNone(observed)
stack = db_api.stack_get(self.ctx, stack.id)
self.assertEqual('db_test_stack_name2', stack.name)
self.assertEqual('update', stack.action)
self.assertEqual('failed', stack.status)
self.assertEqual('update_failed', stack.status_reason)
self.assertEqual(90, stack.timeout)
self.assertEqual('another-dummy-uuid', stack.current_traversal)
def test_stack_set_status_release_lock_failed(self):
stack = create_stack(self.ctx, self.template, self.user_creds)
values = {
'name': 'db_test_stack_name2',
'action': 'update',
'status': 'failed',
'status_reason': "update_failed",
'timeout': '90',
'current_traversal': 'another-dummy-uuid',
}
db_api.stack_lock_create(self.ctx, stack.id, UUID2)
observed = db_api.persist_state_and_release_lock(self.ctx, stack.id,
UUID1, values)
self.assertTrue(observed)
def test_stack_set_status_failed_release_lock(self):
stack = create_stack(self.ctx, self.template, self.user_creds)
values = {
'name': 'db_test_stack_name2',
'action': 'update',
'status': 'failed',
'status_reason': "update_failed",
'timeout': '90',
'current_traversal': 'another-dummy-uuid',
}
db_api.stack_lock_create(self.ctx, stack.id, UUID1)
observed = db_api.persist_state_and_release_lock(self.ctx, UUID2,
UUID1, values)
self.assertTrue(observed)
def test_stack_get_returns_a_stack(self):
stack = create_stack(self.ctx, self.template, self.user_creds)
ret_stack = db_api.stack_get(self.ctx, stack.id, show_deleted=False)
self.assertIsNotNone(ret_stack)
self.assertEqual(stack.id, ret_stack.id)
self.assertEqual(12, len(ret_stack.name))
def test_stack_get_returns_none_if_stack_does_not_exist(self):
stack = db_api.stack_get(self.ctx, UUID1, show_deleted=False)
self.assertIsNone(stack)
def test_stack_get_returns_none_if_tenant_id_does_not_match(self):
stack = create_stack(self.ctx, self.template, self.user_creds)
self.ctx.tenant = 'abc'
        stack = db_api.stack_get(self.ctx, stack.id, show_deleted=False)
self.assertIsNone(stack)
def test_stack_get_tenant_is_stack_user_project_id(self):
stack = create_stack(self.ctx, self.template, self.user_creds,
stack_user_project_id='astackuserproject')
self.ctx.tenant = 'astackuserproject'
ret_stack = db_api.stack_get(self.ctx, stack.id, show_deleted=False)
self.assertIsNotNone(ret_stack)
self.assertEqual(stack.id, ret_stack.id)
self.assertEqual(12, len(ret_stack.name))
def test_stack_get_can_return_a_stack_from_different_tenant(self):
# create a stack with the common tenant
stack = create_stack(self.ctx, self.template, self.user_creds)
# admin context can get the stack
admin_ctx = utils.dummy_context(user='admin_username',
tenant_id='admin_tenant',
is_admin=True)
ret_stack = db_api.stack_get(admin_ctx, stack.id,
show_deleted=False)
self.assertEqual(stack.id, ret_stack.id)
self.assertEqual(12, len(ret_stack.name))
def test_stack_get_by_name(self):
stack = create_stack(self.ctx, self.template, self.user_creds)
ret_stack = db_api.stack_get_by_name(self.ctx, stack.name)
self.assertIsNotNone(ret_stack)
self.assertEqual(stack.id, ret_stack.id)
self.assertEqual(12, len(ret_stack.name))
self.assertIsNone(db_api.stack_get_by_name(self.ctx, 'abc'))
self.ctx.tenant = 'abc'
self.assertIsNone(db_api.stack_get_by_name(self.ctx, 'abc'))
def test_stack_get_all(self):
values = [
{'name': 'stack1'},
{'name': 'stack2'},
{'name': 'stack3'},
{'name': 'stack4'}
]
[create_stack(self.ctx, self.template, self.user_creds,
**val) for val in values]
ret_stacks = db_api.stack_get_all(self.ctx)
self.assertEqual(4, len(ret_stacks))
names = [ret_stack.name for ret_stack in ret_stacks]
[self.assertIn(val['name'], names) for val in values]
def test_stack_get_all_by_owner_id(self):
parent_stack1 = create_stack(self.ctx, self.template, self.user_creds)
parent_stack2 = create_stack(self.ctx, self.template, self.user_creds)
values = [
{'owner_id': parent_stack1.id},
{'owner_id': parent_stack1.id},
{'owner_id': parent_stack2.id},
{'owner_id': parent_stack2.id},
]
[create_stack(self.ctx, self.template, self.user_creds,
**val) for val in values]
stack1_children = db_api.stack_get_all_by_owner_id(self.ctx,
parent_stack1.id)
self.assertEqual(2, len(stack1_children))
stack2_children = db_api.stack_get_all_by_owner_id(self.ctx,
parent_stack2.id)
self.assertEqual(2, len(stack2_children))
def test_stack_get_all_by_root_owner_id(self):
parent_stack1 = create_stack(self.ctx, self.template, self.user_creds)
parent_stack2 = create_stack(self.ctx, self.template, self.user_creds)
for i in range(3):
lvl1_st = create_stack(self.ctx, self.template, self.user_creds,
owner_id=parent_stack1.id)
for j in range(2):
create_stack(self.ctx, self.template, self.user_creds,
owner_id=lvl1_st.id)
for i in range(2):
lvl1_st = create_stack(self.ctx, self.template, self.user_creds,
owner_id=parent_stack2.id)
for j in range(4):
lvl2_st = create_stack(self.ctx, self.template,
self.user_creds, owner_id=lvl1_st.id)
for k in range(3):
create_stack(self.ctx, self.template,
self.user_creds, owner_id=lvl2_st.id)
stack1_children = db_api.stack_get_all_by_root_owner_id(
self.ctx,
parent_stack1.id)
        # 3 stacks on the first level + 6 stacks on the second
self.assertEqual(9, len(list(stack1_children)))
stack2_children = db_api.stack_get_all_by_root_owner_id(
self.ctx,
parent_stack2.id)
        # 2 first-level + 8 second-level + 24 third-level descendants
self.assertEqual(34, len(list(stack2_children)))
def test_stack_get_all_with_regular_tenant(self):
values = [
{'tenant': UUID1},
{'tenant': UUID1},
{'tenant': UUID2},
{'tenant': UUID2},
{'tenant': UUID2},
]
[create_stack(self.ctx, self.template, self.user_creds,
**val) for val in values]
self.ctx.tenant = UUID1
stacks = db_api.stack_get_all(self.ctx)
self.assertEqual(2, len(stacks))
self.ctx.tenant = UUID2
stacks = db_api.stack_get_all(self.ctx)
self.assertEqual(3, len(stacks))
self.ctx.tenant = UUID3
self.assertEqual([], db_api.stack_get_all(self.ctx))
def test_stack_get_all_with_admin_context(self):
values = [
{'tenant': UUID1},
{'tenant': UUID1},
{'tenant': UUID2},
{'tenant': UUID2},
{'tenant': UUID2},
]
[create_stack(self.ctx, self.template, self.user_creds,
**val) for val in values]
admin_ctx = utils.dummy_context(user='admin_user',
tenant_id='admin_tenant',
is_admin=True)
stacks = db_api.stack_get_all(admin_ctx)
self.assertEqual(5, len(stacks))
def test_stack_count_all_with_regular_tenant(self):
values = [
{'tenant': UUID1},
{'tenant': UUID1},
{'tenant': UUID2},
{'tenant': UUID2},
{'tenant': UUID2},
]
[create_stack(self.ctx, self.template, self.user_creds,
**val) for val in values]
self.ctx.tenant = UUID1
self.assertEqual(2, db_api.stack_count_all(self.ctx))
self.ctx.tenant = UUID2
self.assertEqual(3, db_api.stack_count_all(self.ctx))
def test_stack_count_all_with_admin_context(self):
values = [
{'tenant': UUID1},
{'tenant': UUID1},
{'tenant': UUID2},
{'tenant': UUID2},
{'tenant': UUID2},
]
[create_stack(self.ctx, self.template, self.user_creds,
**val) for val in values]
admin_ctx = utils.dummy_context(user='admin_user',
tenant_id='admin_tenant',
is_admin=True)
self.assertEqual(5, db_api.stack_count_all(admin_ctx))
def test_purge_deleted(self):
now = timeutils.utcnow()
delta = datetime.timedelta(seconds=3600 * 7)
deleted = [now - delta * i for i in range(1, 6)]
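        # deletion times are 7, 14, 21, 28 and 35 hours in the past, so the
        # purge ages used below decide which stacks survive each pass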
tmpl_files = [template_files.TemplateFiles(
{'foo': 'file contents %d' % i}) for i in range(5)]
[tmpl_file.store(self.ctx) for tmpl_file in tmpl_files]
templates = [create_raw_template(self.ctx,
files_id=tmpl_files[i].files_id
) for i in range(5)]
creds = [create_user_creds(self.ctx) for i in range(5)]
stacks = [create_stack(self.ctx, templates[i], creds[i],
deleted_at=deleted[i]) for i in range(5)]
resources = [create_resource(self.ctx, stacks[i]) for i in range(5)]
events = [create_event(self.ctx, stack_id=stacks[i].id)
for i in range(5)]
db_api.purge_deleted(age=1, granularity='days')
admin_ctx = utils.dummy_context(is_admin=True)
self._deleted_stack_existance(admin_ctx, stacks, resources,
events, tmpl_files, (0, 1, 2), (3, 4))
db_api.purge_deleted(age=22, granularity='hours')
self._deleted_stack_existance(admin_ctx, stacks, resources,
events, tmpl_files, (0, 1, 2), (3, 4))
db_api.purge_deleted(age=1100, granularity='minutes')
self._deleted_stack_existance(admin_ctx, stacks, resources,
events, tmpl_files, (0, 1), (2, 3, 4))
db_api.purge_deleted(age=3600, granularity='seconds')
self._deleted_stack_existance(admin_ctx, stacks, resources,
events, tmpl_files, (), (0, 1, 2, 3, 4))
        # a negative age is rejected
self.assertRaises(exception.Error, db_api.purge_deleted, -1, 'seconds')
def test_purge_project_deleted(self):
now = timeutils.utcnow()
delta = datetime.timedelta(seconds=3600 * 7)
deleted = [now - delta * i for i in range(1, 6)]
tmpl_files = [template_files.TemplateFiles(
{'foo': 'file contents %d' % i}) for i in range(5)]
[tmpl_file.store(self.ctx) for tmpl_file in tmpl_files]
templates = [create_raw_template(self.ctx,
files_id=tmpl_files[i].files_id
) for i in range(5)]
values = [
{'tenant': UUID1},
{'tenant': UUID1},
{'tenant': UUID1},
{'tenant': UUID2},
{'tenant': UUID2},
]
creds = [create_user_creds(self.ctx) for i in range(5)]
stacks = [create_stack(self.ctx, templates[i], creds[i],
deleted_at=deleted[i], **values[i]
) for i in range(5)]
resources = [create_resource(self.ctx, stacks[i]) for i in range(5)]
events = [create_event(self.ctx, stack_id=stacks[i].id)
for i in range(5)]
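        # stacks 0-2 belong to UUID1 (deleted 7/14/21 hours ago) and stacks
        # 3-4 belong to UUID2 (deleted 28/35 hours ago)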
db_api.purge_deleted(age=1, granularity='days', project_id=UUID1)
admin_ctx = utils.dummy_context(is_admin=True)
self._deleted_stack_existance(admin_ctx, stacks, resources,
events, tmpl_files, (0, 1, 2, 3, 4), ())
db_api.purge_deleted(age=22, granularity='hours', project_id=UUID1)
self._deleted_stack_existance(admin_ctx, stacks, resources,
events, tmpl_files, (0, 1, 2, 3, 4), ())
db_api.purge_deleted(age=1100, granularity='minutes', project_id=UUID1)
self._deleted_stack_existance(admin_ctx, stacks, resources,
events, tmpl_files, (0, 1, 3, 4), (2,))
db_api.purge_deleted(age=30, granularity='hours', project_id=UUID2)
self._deleted_stack_existance(admin_ctx, stacks, resources,
events, tmpl_files, (0, 1, 3), (2, 4))
db_api.purge_deleted(age=3600, granularity='seconds', project_id=UUID1)
self._deleted_stack_existance(admin_ctx, stacks, resources,
events, tmpl_files, (3,), (0, 1, 2, 4))
db_api.purge_deleted(age=3600, granularity='seconds', project_id=UUID2)
self._deleted_stack_existance(admin_ctx, stacks, resources,
events, tmpl_files, (), (0, 1, 2, 3, 4))
def test_purge_deleted_prev_raw_template(self):
now = timeutils.utcnow()
templates = [create_raw_template(self.ctx) for i in range(2)]
stacks = [create_stack(self.ctx, templates[0],
create_user_creds(self.ctx),
deleted_at=now - datetime.timedelta(seconds=10),
prev_raw_template=templates[1])]
db_api.purge_deleted(age=3600, granularity='seconds')
ctx = utils.dummy_context(is_admin=True)
self.assertIsNotNone(db_api.stack_get(ctx, stacks[0].id,
show_deleted=True))
self.assertIsNotNone(db_api.raw_template_get(ctx, templates[1].id))
stacks = [create_stack(self.ctx, templates[0],
create_user_creds(self.ctx),
deleted_at=now - datetime.timedelta(seconds=10),
prev_raw_template=templates[1],
tenant=UUID1)]
db_api.purge_deleted(age=3600, granularity='seconds', project_id=UUID1)
self.assertIsNotNone(db_api.stack_get(ctx, stacks[0].id,
show_deleted=True))
self.assertIsNotNone(db_api.raw_template_get(ctx, templates[1].id))
db_api.purge_deleted(age=0, granularity='seconds', project_id=UUID2)
self.assertIsNotNone(db_api.stack_get(ctx, stacks[0].id,
show_deleted=True))
self.assertIsNotNone(db_api.raw_template_get(ctx, templates[1].id))
def test_dont_purge_shared_raw_template_files(self):
now = timeutils.utcnow()
delta = datetime.timedelta(seconds=3600 * 7)
deleted = [now - delta * i for i in range(1, 6)]
        # the last two raw_templates reuse the first two template_files
        # objects (so those files should not be purged)
tmpl_files = [template_files.TemplateFiles(
{'foo': 'more file contents'}) for i in range(3)]
[tmpl_file.store(self.ctx) for tmpl_file in tmpl_files]
templates = [create_raw_template(self.ctx,
files_id=tmpl_files[i % 3].files_id
) for i in range(5)]
creds = [create_user_creds(self.ctx) for i in range(5)]
[create_stack(self.ctx, templates[i], creds[i],
deleted_at=deleted[i]) for i in range(5)]
db_api.purge_deleted(age=15, granularity='hours')
# The third raw_template_files object should be purged (along
# with the last three stacks/templates). However, the other
# two are shared with existing templates, so should not be
# purged.
self.assertIsNotNone(db_api.raw_template_files_get(
self.ctx, tmpl_files[0].files_id))
self.assertIsNotNone(db_api.raw_template_files_get(
self.ctx, tmpl_files[1].files_id))
self.assertRaises(exception.NotFound,
db_api.raw_template_files_get,
self.ctx, tmpl_files[2].files_id)
def test_dont_purge_project_shared_raw_template_files(self):
now = timeutils.utcnow()
delta = datetime.timedelta(seconds=3600 * 7)
deleted = [now - delta * i for i in range(1, 6)]
        # the last two raw_templates reuse the first two template_files
        # objects (so those files should not be purged)
tmpl_files = [template_files.TemplateFiles(
{'foo': 'more file contents'}) for i in range(3)]
[tmpl_file.store(self.ctx) for tmpl_file in tmpl_files]
templates = [create_raw_template(self.ctx,
files_id=tmpl_files[i % 3].files_id
) for i in range(5)]
creds = [create_user_creds(self.ctx) for i in range(5)]
[create_stack(self.ctx, templates[i], creds[i],
deleted_at=deleted[i], tenant=UUID1
) for i in range(5)]
db_api.purge_deleted(age=0, granularity='seconds', project_id=UUID3)
self.assertIsNotNone(db_api.raw_template_files_get(
self.ctx, tmpl_files[0].files_id))
self.assertIsNotNone(db_api.raw_template_files_get(
self.ctx, tmpl_files[1].files_id))
self.assertIsNotNone(db_api.raw_template_files_get(
self.ctx, tmpl_files[2].files_id))
db_api.purge_deleted(age=15, granularity='hours', project_id=UUID1)
self.assertIsNotNone(db_api.raw_template_files_get(
self.ctx, tmpl_files[0].files_id))
self.assertIsNotNone(db_api.raw_template_files_get(
self.ctx, tmpl_files[1].files_id))
self.assertRaises(exception.NotFound,
db_api.raw_template_files_get,
self.ctx, tmpl_files[2].files_id)
def _deleted_stack_existance(self, ctx, stacks, resources, events,
tmpl_files, existing, deleted):
for s in existing:
self.assertIsNotNone(db_api.stack_get(ctx, stacks[s].id,
show_deleted=True))
self.assertIsNotNone(db_api.raw_template_files_get(
ctx, tmpl_files[s].files_id))
self.assertIsNotNone(db_api.resource_get(
ctx, resources[s].id))
self.assertIsNotNone(ctx.session.query(
models.Event).get(events[s].id))
self.assertIsNotNone(ctx.session.query(
models.ResourcePropertiesData).filter_by(
id=resources[s].rsrc_prop_data.id).first())
self.assertIsNotNone(ctx.session.query(
models.ResourcePropertiesData).filter_by(
id=events[s].rsrc_prop_data.id).first())
for s in deleted:
self.assertIsNone(db_api.stack_get(ctx, stacks[s].id,
show_deleted=True))
rt_id = stacks[s].raw_template_id
self.assertRaises(exception.NotFound,
db_api.raw_template_get, ctx, rt_id)
self.assertEqual({}, db_api.resource_get_all_by_stack(
ctx, stacks[s].id))
self.assertRaises(exception.NotFound,
db_api.raw_template_files_get,
ctx, tmpl_files[s].files_id)
self.assertEqual([],
db_api.event_get_all_by_stack(ctx,
stacks[s].id))
self.assertIsNone(ctx.session.query(
models.Event).get(events[s].id))
self.assertIsNone(ctx.session.query(
models.ResourcePropertiesData).filter_by(
id=resources[s].rsrc_prop_data.id).first())
self.assertIsNone(ctx.session.query(
models.ResourcePropertiesData).filter_by(
id=events[s].rsrc_prop_data.id).first())
self.assertEqual([],
db_api.event_get_all_by_stack(ctx,
stacks[s].id))
self.assertIsNone(db_api.user_creds_get(
self.ctx, stacks[s].user_creds_id))
def test_purge_deleted_batch_arg(self):
now = timeutils.utcnow()
delta = datetime.timedelta(seconds=3600)
deleted = now - delta
for i in range(7):
create_stack(self.ctx, self.template, self.user_creds,
deleted_at=deleted)
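        # 7 expired stacks purged with batch_size=2 should need 4 batches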
with mock.patch('heat.db.sqlalchemy.api._purge_stacks') as mock_ps:
db_api.purge_deleted(age=0, batch_size=2)
self.assertEqual(4, mock_ps.call_count)
def test_stack_get_root_id(self):
root = create_stack(self.ctx, self.template, self.user_creds,
name='root stack')
child_1 = create_stack(self.ctx, self.template, self.user_creds,
name='child 1 stack', owner_id=root.id)
child_2 = create_stack(self.ctx, self.template, self.user_creds,
name='child 2 stack', owner_id=child_1.id)
child_3 = create_stack(self.ctx, self.template, self.user_creds,
name='child 3 stack', owner_id=child_2.id)
self.assertEqual(root.id, db_api.stack_get_root_id(
self.ctx, child_3.id))
self.assertEqual(root.id, db_api.stack_get_root_id(
self.ctx, child_2.id))
self.assertEqual(root.id, db_api.stack_get_root_id(
self.ctx, root.id))
self.assertEqual(root.id, db_api.stack_get_root_id(
self.ctx, child_1.id))
self.assertIsNone(db_api.stack_get_root_id(
self.ctx, 'non existent stack'))
def test_stack_count_total_resources(self):
def add_resources(stack, count, root_stack_id):
for i in range(count):
create_resource(
self.ctx,
stack,
False,
name='%s-%s' % (stack.name, i),
root_stack_id=root_stack_id
)
root = create_stack(self.ctx, self.template, self.user_creds,
name='root stack')
# stack with 3 children
s_1 = create_stack(self.ctx, self.template, self.user_creds,
name='s_1', owner_id=root.id)
s_1_1 = create_stack(self.ctx, self.template, self.user_creds,
name='s_1_1', owner_id=s_1.id)
s_1_2 = create_stack(self.ctx, self.template, self.user_creds,
name='s_1_2', owner_id=s_1.id)
s_1_3 = create_stack(self.ctx, self.template, self.user_creds,
name='s_1_3', owner_id=s_1.id)
# stacks 4 ancestors deep
s_2 = create_stack(self.ctx, self.template, self.user_creds,
name='s_2', owner_id=root.id)
s_2_1 = create_stack(self.ctx, self.template, self.user_creds,
name='s_2_1', owner_id=s_2.id)
s_2_1_1 = create_stack(self.ctx, self.template, self.user_creds,
name='s_2_1_1', owner_id=s_2_1.id)
s_2_1_1_1 = create_stack(self.ctx, self.template, self.user_creds,
name='s_2_1_1_1', owner_id=s_2_1_1.id)
s_3 = create_stack(self.ctx, self.template, self.user_creds,
name='s_3', owner_id=root.id)
s_4 = create_stack(self.ctx, self.template, self.user_creds,
name='s_4', owner_id=root.id)
add_resources(root, 3, root.id)
add_resources(s_1, 2, root.id)
add_resources(s_1_1, 4, root.id)
add_resources(s_1_2, 5, root.id)
add_resources(s_1_3, 6, root.id)
add_resources(s_2, 1, root.id)
add_resources(s_2_1_1_1, 1, root.id)
add_resources(s_3, 4, root.id)
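        # resources under the root tree: 3 + 2 + 4 + 5 + 6 + 1 + 1 + 4 = 26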
self.assertEqual(26, db_api.stack_count_total_resources(
self.ctx, root.id))
self.assertEqual(0, db_api.stack_count_total_resources(
self.ctx, s_4.id))
self.assertEqual(0, db_api.stack_count_total_resources(
self.ctx, 'asdf'))
self.assertEqual(0, db_api.stack_count_total_resources(
self.ctx, None))
class DBAPIResourceTest(common.HeatTestCase):
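    """Tests for the resource db_api functions."""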
def setUp(self):
super(DBAPIResourceTest, self).setUp()
self.ctx = utils.dummy_context()
self.template = create_raw_template(self.ctx)
self.user_creds = create_user_creds(self.ctx)
self.stack = create_stack(self.ctx, self.template, self.user_creds)
def test_resource_create(self):
res = create_resource(self.ctx, self.stack)
ret_res = db_api.resource_get(self.ctx, res.id)
self.assertIsNotNone(ret_res)
self.assertEqual('test_resource_name', ret_res.name)
self.assertEqual(UUID1, ret_res.physical_resource_id)
self.assertEqual('create', ret_res.action)
self.assertEqual('complete', ret_res.status)
self.assertEqual('create_complete', ret_res.status_reason)
self.assertEqual('{"foo": "123"}', json.dumps(ret_res.rsrc_metadata))
self.assertEqual(self.stack.id, ret_res.stack_id)
def test_resource_get(self):
res = create_resource(self.ctx, self.stack)
ret_res = db_api.resource_get(self.ctx, res.id)
self.assertIsNotNone(ret_res)
self.assertRaises(exception.NotFound, db_api.resource_get,
self.ctx, UUID2)
def test_resource_get_by_name_and_stack(self):
create_resource(self.ctx, self.stack)
ret_res = db_api.resource_get_by_name_and_stack(self.ctx,
'test_resource_name',
self.stack.id)
self.assertIsNotNone(ret_res)
self.assertEqual('test_resource_name', ret_res.name)
self.assertEqual(self.stack.id, ret_res.stack_id)
self.assertIsNone(db_api.resource_get_by_name_and_stack(self.ctx,
'abc',
self.stack.id))
def test_resource_get_by_physical_resource_id(self):
create_resource(self.ctx, self.stack)
ret_res = db_api.resource_get_by_physical_resource_id(self.ctx, UUID1)
self.assertIsNotNone(ret_res)
self.assertEqual(UUID1, ret_res.physical_resource_id)
self.assertIsNone(db_api.resource_get_by_physical_resource_id(self.ctx,
UUID2))
def test_resource_get_all_by_physical_resource_id(self):
create_resource(self.ctx, self.stack)
create_resource(self.ctx, self.stack)
ret_res = db_api.resource_get_all_by_physical_resource_id(self.ctx,
UUID1)
ret_list = list(ret_res)
self.assertEqual(2, len(ret_list))
for res in ret_list:
self.assertEqual(UUID1, res.physical_resource_id)
mt = db_api.resource_get_all_by_physical_resource_id(self.ctx, UUID2)
self.assertFalse(list(mt))
def test_resource_get_all_by_with_admin_context(self):
admin_ctx = utils.dummy_context(is_admin=True,
tenant_id='admin_tenant')
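        # An admin context is not scoped to the resources' tenant, so the
        # lookups below resolve even though both resources were created
        # under self.ctx's tenant.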
create_resource(self.ctx, self.stack, phys_res_id=UUID1)
create_resource(self.ctx, self.stack, phys_res_id=UUID2)
ret_res = db_api.resource_get_all_by_physical_resource_id(admin_ctx,
UUID1)
ret_list = list(ret_res)
self.assertEqual(1, len(ret_list))
self.assertEqual(UUID1, ret_list[0].physical_resource_id)
mt = db_api.resource_get_all_by_physical_resource_id(admin_ctx, UUID2)
ret_list = list(mt)
self.assertEqual(1, len(ret_list))
self.assertEqual(UUID2, ret_list[0].physical_resource_id)
def test_resource_get_all(self):
values = [
{'name': 'res1'},
{'name': 'res2'},
{'name': 'res3'},
]
[create_resource(self.ctx, self.stack, False, **val)
for val in values]
resources = db_api.resource_get_all(self.ctx)
self.assertEqual(3, len(resources))
names = [resource.name for resource in resources]
[self.assertIn(val['name'], names) for val in values]
def test_resource_get_all_by_stack(self):
self.stack1 = create_stack(self.ctx, self.template, self.user_creds)
self.stack2 = create_stack(self.ctx, self.template, self.user_creds)
values = [
{'name': 'res1', 'stack_id': self.stack.id},
{'name': 'res2', 'stack_id': self.stack.id},
{'name': 'res3', 'stack_id': self.stack.id},
{'name': 'res4', 'stack_id': self.stack1.id},
]
[create_resource(self.ctx, self.stack, False, **val)
for val in values]
# Test for all resources in a stack
resources = db_api.resource_get_all_by_stack(self.ctx, self.stack.id)
self.assertEqual(3, len(resources))
self.assertEqual('res1', resources.get('res1').name)
self.assertEqual('res2', resources.get('res2').name)
self.assertEqual('res3', resources.get('res3').name)
# Test for resources matching single entry
resources = db_api.resource_get_all_by_stack(self.ctx,
self.stack.id,
filters=dict(name='res1'))
self.assertEqual(1, len(resources))
self.assertEqual('res1', resources.get('res1').name)
# Test for resources matching multi entry
resources = db_api.resource_get_all_by_stack(self.ctx,
self.stack.id,
filters=dict(name=[
'res1',
'res2'
]))
self.assertEqual(2, len(resources))
self.assertEqual('res1', resources.get('res1').name)
self.assertEqual('res2', resources.get('res2').name)
self.assertEqual({}, db_api.resource_get_all_by_stack(
self.ctx, self.stack2.id))
def test_resource_get_all_active_by_stack(self):
values = [
{'name': 'res1', 'action': rsrc.Resource.DELETE,
'status': rsrc.Resource.COMPLETE},
{'name': 'res2', 'action': rsrc.Resource.DELETE,
'status': rsrc.Resource.IN_PROGRESS},
{'name': 'res3', 'action': rsrc.Resource.UPDATE,
'status': rsrc.Resource.IN_PROGRESS},
{'name': 'res4', 'action': rsrc.Resource.UPDATE,
'status': rsrc.Resource.COMPLETE},
{'name': 'res5', 'action': rsrc.Resource.INIT,
'status': rsrc.Resource.COMPLETE},
{'name': 'res6'},
]
[create_resource(self.ctx, self.stack, **val) for val in values]
resources = db_api.resource_get_all_active_by_stack(self.ctx,
self.stack.id)
self.assertEqual(5, len(resources))
for rsrc_id, res in resources.items():
self.assertIn(res.name, ['res2', 'res3', 'res4', 'res5', 'res6'])
def test_resource_get_all_by_root_stack(self):
self.stack1 = create_stack(self.ctx, self.template, self.user_creds)
self.stack2 = create_stack(self.ctx, self.template, self.user_creds)
create_resource(self.ctx, self.stack, name='res1',
root_stack_id=self.stack.id)
create_resource(self.ctx, self.stack, name='res2',
root_stack_id=self.stack.id)
create_resource(self.ctx, self.stack, name='res3',
root_stack_id=self.stack.id)
create_resource(self.ctx, self.stack1, name='res4',
root_stack_id=self.stack.id)
# Test for all resources in a stack
resources = db_api.resource_get_all_by_root_stack(
self.ctx, self.stack.id)
self.assertEqual(4, len(resources))
resource_names = [r.name for r in resources.values()]
self.assertEqual(['res1', 'res2', 'res3', 'res4'],
sorted(resource_names))
# Test for resources matching single entry
resources = db_api.resource_get_all_by_root_stack(
self.ctx, self.stack.id, filters=dict(name='res1'))
self.assertEqual(1, len(resources))
resource_names = [r.name for r in resources.values()]
self.assertEqual(['res1'], resource_names)
self.assertEqual(1, len(resources))
# Test for resources matching multi entry
resources = db_api.resource_get_all_by_root_stack(
self.ctx, self.stack.id, filters=dict(name=[
'res1',
'res2'
])
)
self.assertEqual(2, len(resources))
resource_names = [r.name for r in resources.values()]
self.assertEqual(['res1', 'res2'],
sorted(resource_names))
self.assertEqual({}, db_api.resource_get_all_by_root_stack(
self.ctx, self.stack2.id))
def test_resource_purge_deleted_by_stack(self):
val = {'name': 'res1', 'action': rsrc.Resource.DELETE,
'status': rsrc.Resource.COMPLETE}
resource = create_resource(self.ctx, self.stack, **val)
db_api.resource_purge_deleted(self.ctx, self.stack.id)
self.assertRaises(exception.NotFound, db_api.resource_get,
self.ctx, resource.id)
@mock.patch.object(time, 'sleep')
def test_resource_purge_deleted_by_stack_retry_on_deadlock(self, m_sleep):
val = {'name': 'res1', 'action': rsrc.Resource.DELETE,
'status': rsrc.Resource.COMPLETE}
create_resource(self.ctx, self.stack, **val)
with mock.patch('sqlalchemy.orm.query.Query.delete',
side_effect=db_exception.DBDeadlock) as mock_delete:
self.assertRaises(db_exception.DBDeadlock,
db_api.resource_purge_deleted,
self.ctx, self.stack.id)
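            # resource_purge_deleted is wrapped in a DB retry decorator, so
            # the deadlock is retried before being re-raised; 21 is
            # presumably one initial attempt plus 20 retries under the
            # default retry budget.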
self.assertEqual(21, mock_delete.call_count)
def test_engine_get_all_locked_by_stack(self):
values = [
{'name': 'res1', 'action': rsrc.Resource.DELETE,
'root_stack_id': self.stack.id,
'status': rsrc.Resource.COMPLETE},
{'name': 'res2', 'action': rsrc.Resource.DELETE,
'root_stack_id': self.stack.id,
'status': rsrc.Resource.IN_PROGRESS, 'engine_id': 'engine-001'},
{'name': 'res3', 'action': rsrc.Resource.UPDATE,
'root_stack_id': self.stack.id,
'status': rsrc.Resource.IN_PROGRESS, 'engine_id': 'engine-002'},
{'name': 'res4', 'action': rsrc.Resource.CREATE,
'root_stack_id': self.stack.id,
'status': rsrc.Resource.COMPLETE},
{'name': 'res5', 'action': rsrc.Resource.INIT,
'root_stack_id': self.stack.id,
'status': rsrc.Resource.COMPLETE},
{'name': 'res6', 'action': rsrc.Resource.CREATE,
'root_stack_id': self.stack.id,
'status': rsrc.Resource.IN_PROGRESS, 'engine_id': 'engine-001'},
{'name': 'res6'},
]
for val in values:
create_resource(self.ctx, self.stack, **val)
engines = db_api.engine_get_all_locked_by_stack(self.ctx,
self.stack.id)
self.assertEqual({'engine-001', 'engine-002'}, engines)
class DBAPIResourceReplacementTest(common.HeatTestCase):
def setUp(self):
self.useFixture(utils.ForeignKeyConstraintFixture())
super(DBAPIResourceReplacementTest, self).setUp()
self.ctx = utils.dummy_context()
self.template = create_raw_template(self.ctx)
self.user_creds = create_user_creds(self.ctx)
self.stack = create_stack(self.ctx, self.template, self.user_creds)
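    # resource_create_replacement() is handed the expected atomic_key and
    # engine id of the original resource and only inserts the replacement
    # row if they still match: it returns the new row on success, None when
    # the referenced template has disappeared, and raises UpdateInProgress
    # when another engine has updated or locked the original in the
    # meantime.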
def test_resource_create_replacement(self):
orig = create_resource(self.ctx, self.stack)
tmpl_id = create_raw_template(self.ctx).id
repl = db_api.resource_create_replacement(
self.ctx,
orig.id,
{'name': orig.name, 'replaces': orig.id,
'stack_id': orig.stack_id, 'current_template_id': tmpl_id},
1, None)
self.assertIsNotNone(repl)
self.assertEqual(orig.name, repl.name)
self.assertNotEqual(orig.id, repl.id)
self.assertEqual(orig.id, repl.replaces)
def test_resource_create_replacement_template_gone(self):
orig = create_resource(self.ctx, self.stack)
other_ctx = utils.dummy_context()
tmpl_id = create_raw_template(self.ctx).id
db_api.raw_template_delete(other_ctx, tmpl_id)
repl = db_api.resource_create_replacement(
self.ctx,
orig.id,
{'name': orig.name, 'replaces': orig.id,
'stack_id': orig.stack_id, 'current_template_id': tmpl_id},
1, None)
self.assertIsNone(repl)
def test_resource_create_replacement_updated(self):
orig = create_resource(self.ctx, self.stack)
other_ctx = utils.dummy_context()
tmpl_id = create_raw_template(self.ctx).id
db_api.resource_update_and_save(other_ctx, orig.id, {'atomic_key': 2})
self.assertRaises(exception.UpdateInProgress,
db_api.resource_create_replacement,
self.ctx,
orig.id,
{'name': orig.name, 'replaces': orig.id,
'stack_id': orig.stack_id,
'current_template_id': tmpl_id},
1, None)
def test_resource_create_replacement_updated_concurrent(self):
orig = create_resource(self.ctx, self.stack)
other_ctx = utils.dummy_context()
tmpl_id = create_raw_template(self.ctx).id
def update_atomic_key(*args, **kwargs):
db_api.resource_update_and_save(other_ctx, orig.id,
{'atomic_key': 2})
self.patchobject(db_api, '_try_resource_update',
new=mock.Mock(wraps=db_api._try_resource_update,
side_effect=update_atomic_key))
self.assertRaises(exception.UpdateInProgress,
db_api.resource_create_replacement,
self.ctx,
orig.id,
{'name': orig.name, 'replaces': orig.id,
'stack_id': orig.stack_id,
'current_template_id': tmpl_id},
1, None)
def test_resource_create_replacement_locked(self):
orig = create_resource(self.ctx, self.stack)
other_ctx = utils.dummy_context()
tmpl_id = create_raw_template(self.ctx).id
db_api.resource_update_and_save(other_ctx, orig.id, {'engine_id': 'a',
'atomic_key': 2})
self.assertRaises(exception.UpdateInProgress,
db_api.resource_create_replacement,
self.ctx,
orig.id,
{'name': orig.name, 'replaces': orig.id,
'stack_id': orig.stack_id,
'current_template_id': tmpl_id},
1, None)
def test_resource_create_replacement_locked_concurrent(self):
orig = create_resource(self.ctx, self.stack)
other_ctx = utils.dummy_context()
tmpl_id = create_raw_template(self.ctx).id
def lock_resource(*args, **kwargs):
db_api.resource_update_and_save(other_ctx, orig.id,
{'engine_id': 'a',
'atomic_key': 2})
self.patchobject(db_api, '_try_resource_update',
new=mock.Mock(wraps=db_api._try_resource_update,
side_effect=lock_resource))
self.assertRaises(exception.UpdateInProgress,
db_api.resource_create_replacement,
self.ctx,
orig.id,
{'name': orig.name, 'replaces': orig.id,
'stack_id': orig.stack_id,
'current_template_id': tmpl_id},
1, None)
class DBAPIStackLockTest(common.HeatTestCase):
def setUp(self):
super(DBAPIStackLockTest, self).setUp()
self.ctx = utils.dummy_context()
self.template = create_raw_template(self.ctx)
self.user_creds = create_user_creds(self.ctx)
self.stack = create_stack(self.ctx, self.template, self.user_creds)
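    # Lock API convention exercised below: stack_lock_create/steal/release
    # return None on success; on failure they return something truthy,
    # the holding engine's id where one exists, otherwise True.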
def test_stack_lock_create_success(self):
observed = db_api.stack_lock_create(self.ctx, self.stack.id, UUID1)
self.assertIsNone(observed)
def test_stack_lock_create_fail_double_same(self):
db_api.stack_lock_create(self.ctx, self.stack.id, UUID1)
observed = db_api.stack_lock_create(self.ctx, self.stack.id, UUID1)
self.assertEqual(UUID1, observed)
def test_stack_lock_create_fail_double_different(self):
db_api.stack_lock_create(self.ctx, self.stack.id, UUID1)
observed = db_api.stack_lock_create(self.ctx, self.stack.id, UUID2)
self.assertEqual(UUID1, observed)
def test_stack_lock_get_id_success(self):
db_api.stack_lock_create(self.ctx, self.stack.id, UUID1)
observed = db_api.stack_lock_get_engine_id(self.ctx, self.stack.id)
self.assertEqual(UUID1, observed)
def test_stack_lock_get_id_return_none(self):
observed = db_api.stack_lock_get_engine_id(self.ctx, self.stack.id)
self.assertIsNone(observed)
def test_stack_lock_steal_success(self):
db_api.stack_lock_create(self.ctx, self.stack.id, UUID1)
observed = db_api.stack_lock_steal(self.ctx, self.stack.id,
UUID1, UUID2)
self.assertIsNone(observed)
def test_stack_lock_steal_fail_gone(self):
db_api.stack_lock_create(self.ctx, self.stack.id, UUID1)
db_api.stack_lock_release(self.ctx, self.stack.id, UUID1)
observed = db_api.stack_lock_steal(self.ctx, self.stack.id,
UUID1, UUID2)
self.assertTrue(observed)
def test_stack_lock_steal_fail_stolen(self):
db_api.stack_lock_create(self.ctx, self.stack.id, UUID1)
# Simulate stolen lock
db_api.stack_lock_release(self.ctx, self.stack.id, UUID1)
db_api.stack_lock_create(self.ctx, self.stack.id, UUID2)
observed = db_api.stack_lock_steal(self.ctx, self.stack.id,
UUID3, UUID2)
self.assertEqual(UUID2, observed)
def test_stack_lock_release_success(self):
db_api.stack_lock_create(self.ctx, self.stack.id, UUID1)
observed = db_api.stack_lock_release(self.ctx, self.stack.id, UUID1)
self.assertIsNone(observed)
def test_stack_lock_release_fail_double(self):
db_api.stack_lock_create(self.ctx, self.stack.id, UUID1)
db_api.stack_lock_release(self.ctx, self.stack.id, UUID1)
observed = db_api.stack_lock_release(self.ctx, self.stack.id, UUID1)
self.assertTrue(observed)
def test_stack_lock_release_fail_wrong_engine_id(self):
db_api.stack_lock_create(self.ctx, self.stack.id, UUID1)
observed = db_api.stack_lock_release(self.ctx, self.stack.id, UUID2)
self.assertTrue(observed)
@mock.patch.object(time, 'sleep')
def test_stack_lock_retry_on_deadlock(self, sleep):
with mock.patch('sqlalchemy.orm.Session.add',
side_effect=db_exception.DBDeadlock) as mock_add:
self.assertRaises(db_exception.DBDeadlock,
db_api.stack_lock_create, self.ctx,
self.stack.id, UUID1)
self.assertEqual(4, mock_add.call_count)
class DBAPIResourceDataTest(common.HeatTestCase):
def setUp(self):
super(DBAPIResourceDataTest, self).setUp()
self.ctx = utils.dummy_context()
self.template = create_raw_template(self.ctx)
self.user_creds = create_user_creds(self.ctx)
self.stack = create_stack(self.ctx, self.template, self.user_creds)
self.resource = create_resource(self.ctx, self.stack)
self.resource.context = self.ctx
def test_resource_data_set_get(self):
create_resource_data(self.ctx, self.resource)
val = db_api.resource_data_get(
self.ctx, self.resource.id, 'test_resource_key')
self.assertEqual('test_value', val)
# Updating existing resource data
create_resource_data(self.ctx, self.resource, value='foo')
val = db_api.resource_data_get(
self.ctx, self.resource.id, 'test_resource_key')
self.assertEqual('foo', val)
# Testing with encrypted value
create_resource_data(self.ctx, self.resource,
                             key='encrypted_resource_key', redact=True)
val = db_api.resource_data_get(
            self.ctx, self.resource.id, 'encrypted_resource_key')
self.assertEqual('test_value', val)
# get all by querying for data
vals = db_api.resource_data_get_all(self.resource.context,
self.resource.id)
self.assertEqual(2, len(vals))
self.assertEqual('foo', vals.get('test_resource_key'))
        self.assertEqual('test_value', vals.get('encrypted_resource_key'))
# get all by using associated resource data
self.resource = db_api.resource_get(self.ctx, self.resource.id)
vals = db_api.resource_data_get_all(self.ctx, None, self.resource.data)
self.assertEqual(2, len(vals))
self.assertEqual('foo', vals.get('test_resource_key'))
        self.assertEqual('test_value', vals.get('encrypted_resource_key'))
def test_resource_data_delete(self):
create_resource_data(self.ctx, self.resource)
res_data = db_api.resource_data_get_by_key(self.ctx, self.resource.id,
'test_resource_key')
self.assertIsNotNone(res_data)
self.assertEqual('test_value', res_data.value)
db_api.resource_data_delete(self.ctx, self.resource.id,
'test_resource_key')
self.assertRaises(exception.NotFound, db_api.resource_data_get_by_key,
self.ctx, self.resource.id, 'test_resource_key')
self.assertIsNotNone(res_data)
self.assertRaises(exception.NotFound, db_api.resource_data_get_all,
self.resource.context,
self.resource.id)
class DBAPIEventTest(common.HeatTestCase):
def setUp(self):
super(DBAPIEventTest, self).setUp()
self.ctx = utils.dummy_context()
self.template = create_raw_template(self.ctx)
self.user_creds = create_user_creds(self.ctx)
def test_event_create(self):
stack = create_stack(self.ctx, self.template, self.user_creds)
event = create_event(self.ctx, stack_id=stack.id)
ret_event = self.ctx.session.query(models.Event).get(event.id)
self.assertIsNotNone(ret_event)
self.assertEqual(stack.id, ret_event.stack_id)
self.assertEqual('create', ret_event.resource_action)
self.assertEqual('complete', ret_event.resource_status)
self.assertEqual('res', ret_event.resource_name)
self.assertEqual(UUID1, ret_event.physical_resource_id)
self.assertEqual('create_complete', ret_event.resource_status_reason)
self.assertEqual({'foo2': 'ev_bar'}, ret_event.rsrc_prop_data.data)
def test_event_get_all_by_tenant(self):
self.stack1 = create_stack(self.ctx, self.template, self.user_creds,
tenant='tenant1')
self.stack2 = create_stack(self.ctx, self.template, self.user_creds,
tenant='tenant2')
values = [
{'stack_id': self.stack1.id, 'resource_name': 'res1'},
{'stack_id': self.stack1.id, 'resource_name': 'res2'},
{'stack_id': self.stack2.id, 'resource_name': 'res3'},
]
[create_event(self.ctx, **val) for val in values]
self.ctx.tenant = 'tenant1'
events = db_api.event_get_all_by_tenant(self.ctx)
self.assertEqual(2, len(events))
marker = events[0].uuid
expected = events[1].uuid
events = db_api.event_get_all_by_tenant(self.ctx,
marker=marker)
self.assertEqual(1, len(events))
self.assertEqual(expected, events[0].uuid)
events = db_api.event_get_all_by_tenant(self.ctx, limit=1)
self.assertEqual(1, len(events))
filters = {'resource_name': 'res2'}
events = db_api.event_get_all_by_tenant(self.ctx,
filters=filters)
self.assertEqual(1, len(events))
self.assertEqual('res2', events[0].resource_name)
sort_keys = 'resource_type'
events = db_api.event_get_all_by_tenant(self.ctx,
sort_keys=sort_keys)
self.assertEqual(2, len(events))
self.ctx.tenant = 'tenant2'
events = db_api.event_get_all_by_tenant(self.ctx)
self.assertEqual(1, len(events))
def test_event_get_all_by_stack(self):
self.stack1 = create_stack(self.ctx, self.template, self.user_creds)
self.stack2 = create_stack(self.ctx, self.template, self.user_creds)
values = [
{'stack_id': self.stack1.id, 'resource_name': 'res1'},
{'stack_id': self.stack1.id, 'resource_name': 'res2'},
{'stack_id': self.stack2.id, 'resource_name': 'res3'},
]
[create_event(self.ctx, **val) for val in values]
self.ctx.tenant = 'tenant1'
events = db_api.event_get_all_by_stack(self.ctx, self.stack1.id)
self.assertEqual(2, len(events))
self.ctx.tenant = 'tenant2'
events = db_api.event_get_all_by_stack(self.ctx, self.stack2.id)
self.assertEqual(1, len(events))
def test_event_count_all_by_stack(self):
self.stack1 = create_stack(self.ctx, self.template, self.user_creds)
self.stack2 = create_stack(self.ctx, self.template, self.user_creds)
values = [
{'stack_id': self.stack1.id, 'resource_name': 'res1'},
{'stack_id': self.stack1.id, 'resource_name': 'res2'},
{'stack_id': self.stack2.id, 'resource_name': 'res3'},
]
[create_event(self.ctx, **val) for val in values]
self.assertEqual(2, db_api.event_count_all_by_stack(self.ctx,
self.stack1.id))
self.assertEqual(1, db_api.event_count_all_by_stack(self.ctx,
self.stack2.id))
class DBAPIServiceTest(common.HeatTestCase):
def setUp(self):
super(DBAPIServiceTest, self).setUp()
self.ctx = utils.dummy_context()
def test_service_create_get(self):
service = create_service(self.ctx)
ret_service = db_api.service_get(self.ctx, service.id)
self.assertIsNotNone(ret_service)
self.assertEqual(service.id, ret_service.id)
self.assertEqual(service.hostname, ret_service.hostname)
self.assertEqual(service.binary, ret_service.binary)
self.assertEqual(service.host, ret_service.host)
self.assertEqual(service.topic, ret_service.topic)
self.assertEqual(service.engine_id, ret_service.engine_id)
self.assertEqual(service.report_interval, ret_service.report_interval)
self.assertIsNotNone(service.created_at)
self.assertIsNone(service.updated_at)
self.assertIsNone(service.deleted_at)
def test_service_get_all_by_args(self):
# Host-1
values = [{'id': str(uuid.uuid4()),
'hostname': 'host-1',
'host': 'engine-1'}]
# Host-2
for i in [0, 1, 2]:
values.append({'id': str(uuid.uuid4()),
'hostname': 'host-2',
'host': 'engine-%s' % i})
[create_service(self.ctx, **val) for val in values]
services = db_api.service_get_all(self.ctx)
self.assertEqual(4, len(services))
services_by_args = db_api.service_get_all_by_args(self.ctx,
hostname='host-2',
binary='heat-engine',
host='engine-0')
self.assertEqual(1, len(services_by_args))
self.assertEqual('host-2', services_by_args[0].hostname)
self.assertEqual('heat-engine', services_by_args[0].binary)
self.assertEqual('engine-0', services_by_args[0].host)
def test_service_update(self):
service = create_service(self.ctx)
values = {'hostname': 'host-updated',
'host': 'engine-updated',
'retry_interval': 120}
service = db_api.service_update(self.ctx, service.id, values)
self.assertEqual('host-updated', service.hostname)
self.assertEqual(120, service.retry_interval)
self.assertEqual('engine-updated', service.host)
        # simple update; expect updated_at to be refreshed
old_updated_date = service.updated_at
service = db_api.service_update(self.ctx, service.id, dict())
self.assertGreater(service.updated_at, old_updated_date)
def test_service_delete_soft_delete(self):
service = create_service(self.ctx)
# Soft delete
db_api.service_delete(self.ctx, service.id)
ret_service = db_api.service_get(self.ctx, service.id)
self.assertEqual(ret_service.id, service.id)
# Delete
db_api.service_delete(self.ctx, service.id, False)
ex = self.assertRaises(exception.EntityNotFound, db_api.service_get,
self.ctx, service.id)
self.assertEqual('Service', ex.kwargs.get('entity'))
class DBAPIResourceUpdateTest(common.HeatTestCase):
def setUp(self):
super(DBAPIResourceUpdateTest, self).setUp()
self.ctx = utils.dummy_context()
template = create_raw_template(self.ctx)
user_creds = create_user_creds(self.ctx)
stack = create_stack(self.ctx, template, user_creds)
self.resource = create_resource(self.ctx, stack, False,
atomic_key=0)
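    # resource_update() uses optimistic concurrency control: the caller
    # passes the atomic_key it last read plus the engine id it believes
    # holds the lock, and the write only applies if both still match.
    # Every accepted write bumps atomic_key by one, so a stale caller gets
    # False back instead of clobbering newer state.  Roughly (a sketch,
    # not the exact SQL):
    #   UPDATE resource SET ..., atomic_key = :key + 1
    #   WHERE id = :id AND atomic_key = :key AND engine_id IS :expected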
def test_unlocked_resource_update(self):
values = {'engine_id': 'engine-1',
'action': 'CREATE',
'status': 'IN_PROGRESS'}
db_res = db_api.resource_get(self.ctx, self.resource.id)
ret = db_api.resource_update(self.ctx, self.resource.id,
values, db_res.atomic_key, None)
self.assertTrue(ret)
db_res = db_api.resource_get(self.ctx, self.resource.id, refresh=True)
self.assertEqual('engine-1', db_res.engine_id)
self.assertEqual('CREATE', db_res.action)
self.assertEqual('IN_PROGRESS', db_res.status)
self.assertEqual(1, db_res.atomic_key)
def test_locked_resource_update_by_same_engine(self):
values = {'engine_id': 'engine-1',
'action': 'CREATE',
'status': 'IN_PROGRESS'}
db_res = db_api.resource_get(self.ctx, self.resource.id)
ret = db_api.resource_update(self.ctx, self.resource.id,
values, db_res.atomic_key, None)
self.assertTrue(ret)
db_res = db_api.resource_get(self.ctx, self.resource.id, refresh=True)
self.assertEqual('engine-1', db_res.engine_id)
self.assertEqual(1, db_res.atomic_key)
values = {'engine_id': 'engine-1',
'action': 'CREATE',
'status': 'FAILED'}
ret = db_api.resource_update(self.ctx, self.resource.id,
values, db_res.atomic_key, 'engine-1')
self.assertTrue(ret)
db_res = db_api.resource_get(self.ctx, self.resource.id, refresh=True)
self.assertEqual('engine-1', db_res.engine_id)
self.assertEqual('CREATE', db_res.action)
self.assertEqual('FAILED', db_res.status)
self.assertEqual(2, db_res.atomic_key)
def test_locked_resource_update_by_other_engine(self):
values = {'engine_id': 'engine-1',
'action': 'CREATE',
'status': 'IN_PROGRESS'}
db_res = db_api.resource_get(self.ctx, self.resource.id)
ret = db_api.resource_update(self.ctx, self.resource.id,
values, db_res.atomic_key, None)
self.assertTrue(ret)
db_res = db_api.resource_get(self.ctx, self.resource.id, refresh=True)
self.assertEqual('engine-1', db_res.engine_id)
self.assertEqual(1, db_res.atomic_key)
values = {'engine_id': 'engine-2',
'action': 'CREATE',
'status': 'FAILED'}
ret = db_api.resource_update(self.ctx, self.resource.id,
values, db_res.atomic_key, 'engine-2')
self.assertFalse(ret)
def test_release_resource_lock(self):
values = {'engine_id': 'engine-1',
'action': 'CREATE',
'status': 'IN_PROGRESS'}
db_res = db_api.resource_get(self.ctx, self.resource.id)
ret = db_api.resource_update(self.ctx, self.resource.id,
values, db_res.atomic_key, None)
self.assertTrue(ret)
db_res = db_api.resource_get(self.ctx, self.resource.id, refresh=True)
self.assertEqual('engine-1', db_res.engine_id)
self.assertEqual(1, db_res.atomic_key)
# Set engine id as None to release the lock
values = {'engine_id': None,
'action': 'CREATE',
'status': 'COMPLETE'}
ret = db_api.resource_update(self.ctx, self.resource.id,
values, db_res.atomic_key, 'engine-1')
self.assertTrue(ret)
db_res = db_api.resource_get(self.ctx, self.resource.id, refresh=True)
self.assertIsNone(db_res.engine_id)
self.assertEqual('CREATE', db_res.action)
self.assertEqual('COMPLETE', db_res.status)
self.assertEqual(2, db_res.atomic_key)
def test_steal_resource_lock(self):
values = {'engine_id': 'engine-1',
'action': 'CREATE',
'status': 'IN_PROGRESS'}
db_res = db_api.resource_get(self.ctx, self.resource.id)
ret = db_api.resource_update(self.ctx, self.resource.id,
values, db_res.atomic_key, None)
self.assertTrue(ret)
db_res = db_api.resource_get(self.ctx, self.resource.id, refresh=True)
self.assertEqual('engine-1', db_res.engine_id)
self.assertEqual(1, db_res.atomic_key)
        # Set engine_id to engine-2 and pass the previous holder (engine-1)
        # as the expected engine id so the DB API steals the lock
values = {'engine_id': 'engine-2',
'action': 'DELETE',
'status': 'IN_PROGRESS'}
ret = db_api.resource_update(self.ctx, self.resource.id,
values, db_res.atomic_key, 'engine-1')
self.assertTrue(ret)
db_res = db_api.resource_get(self.ctx, self.resource.id, refresh=True)
self.assertEqual('engine-2', db_res.engine_id)
self.assertEqual('DELETE', db_res.action)
self.assertEqual(2, db_res.atomic_key)
class DBAPISyncPointTest(common.HeatTestCase):
def setUp(self):
super(DBAPISyncPointTest, self).setUp()
self.ctx = utils.dummy_context()
self.template = create_raw_template(self.ctx)
self.user_creds = create_user_creds(self.ctx)
self.stack = create_stack(self.ctx, self.template, self.user_creds)
self.resources = [create_resource(self.ctx, self.stack, name='res1'),
create_resource(self.ctx, self.stack, name='res2'),
create_resource(self.ctx, self.stack, name='res3')]
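    # A sync point coordinates a convergence traversal: one row per entity
    # (resource id or stack id) per traversal id, keyed on is_update.  Like
    # resources, sync points carry an atomic_key that is incremented on
    # every accepted write, which the concurrent-update test relies on.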
def test_sync_point_create_get(self):
for res in self.resources:
# create sync_point for resources and verify
sync_point_rsrc = create_sync_point(
self.ctx, entity_id=str(res.id), stack_id=self.stack.id,
traversal_id=self.stack.current_traversal
)
ret_sync_point_rsrc = db_api.sync_point_get(
self.ctx, sync_point_rsrc.entity_id,
sync_point_rsrc.traversal_id, sync_point_rsrc.is_update
)
self.assertIsNotNone(ret_sync_point_rsrc)
self.assertEqual(sync_point_rsrc.entity_id,
ret_sync_point_rsrc.entity_id)
self.assertEqual(sync_point_rsrc.traversal_id,
ret_sync_point_rsrc.traversal_id)
self.assertEqual(sync_point_rsrc.is_update,
ret_sync_point_rsrc.is_update)
self.assertEqual(sync_point_rsrc.atomic_key,
ret_sync_point_rsrc.atomic_key)
self.assertEqual(sync_point_rsrc.stack_id,
ret_sync_point_rsrc.stack_id)
self.assertEqual(sync_point_rsrc.input_data,
ret_sync_point_rsrc.input_data)
# Finally create sync_point for stack and verify
sync_point_stack = create_sync_point(
self.ctx, entity_id=self.stack.id, stack_id=self.stack.id,
traversal_id=self.stack.current_traversal
)
ret_sync_point_stack = db_api.sync_point_get(
self.ctx, sync_point_stack.entity_id,
sync_point_stack.traversal_id, sync_point_stack.is_update
)
self.assertIsNotNone(ret_sync_point_stack)
self.assertEqual(sync_point_stack.entity_id,
ret_sync_point_stack.entity_id)
self.assertEqual(sync_point_stack.traversal_id,
ret_sync_point_stack.traversal_id)
self.assertEqual(sync_point_stack.is_update,
ret_sync_point_stack.is_update)
self.assertEqual(sync_point_stack.atomic_key,
ret_sync_point_stack.atomic_key)
self.assertEqual(sync_point_stack.stack_id,
ret_sync_point_stack.stack_id)
self.assertEqual(sync_point_stack.input_data,
ret_sync_point_stack.input_data)
def test_sync_point_update(self):
sync_point = create_sync_point(
self.ctx, entity_id=str(self.resources[0].id),
stack_id=self.stack.id, traversal_id=self.stack.current_traversal
)
self.assertEqual({}, sync_point.input_data)
self.assertEqual(0, sync_point.atomic_key)
# first update
rows_updated = db_api.sync_point_update_input_data(
self.ctx, sync_point.entity_id, sync_point.traversal_id,
sync_point.is_update, sync_point.atomic_key,
{'input_data': '{key: value}'}
)
self.assertEqual(1, rows_updated)
ret_sync_point = db_api.sync_point_get(self.ctx,
sync_point.entity_id,
sync_point.traversal_id,
sync_point.is_update)
self.assertIsNotNone(ret_sync_point)
# check if atomic_key was incremented on write
self.assertEqual(1, ret_sync_point.atomic_key)
self.assertEqual({'input_data': '{key: value}'},
ret_sync_point.input_data)
# second update
rows_updated = db_api.sync_point_update_input_data(
self.ctx, ret_sync_point.entity_id, ret_sync_point.traversal_id,
ret_sync_point.is_update, ret_sync_point.atomic_key,
{'input_data': '{key1: value1}'}
)
self.assertEqual(1, rows_updated)
ret_sync_point = db_api.sync_point_get(self.ctx,
sync_point.entity_id,
sync_point.traversal_id,
sync_point.is_update)
self.assertIsNotNone(ret_sync_point)
# check if atomic_key was incremented on write
self.assertEqual(2, ret_sync_point.atomic_key)
self.assertEqual({'input_data': '{key1: value1}'},
ret_sync_point.input_data)
def test_sync_point_concurrent_update(self):
sync_point = create_sync_point(
self.ctx, entity_id=str(self.resources[0].id),
stack_id=self.stack.id, traversal_id=self.stack.current_traversal
)
self.assertEqual({}, sync_point.input_data)
self.assertEqual(0, sync_point.atomic_key)
# update where atomic_key is 0 and succeeds.
rows_updated = db_api.sync_point_update_input_data(
self.ctx, sync_point.entity_id, sync_point.traversal_id,
sync_point.is_update, 0, {'input_data': '{key: value}'}
)
self.assertEqual(1, rows_updated)
# another update where atomic_key is 0 and does not update.
rows_updated = db_api.sync_point_update_input_data(
self.ctx, sync_point.entity_id, sync_point.traversal_id,
sync_point.is_update, 0, {'input_data': '{key: value}'}
)
self.assertEqual(0, rows_updated)
def test_sync_point_delete(self):
for res in self.resources:
sync_point_rsrc = create_sync_point(
self.ctx, entity_id=str(res.id), stack_id=self.stack.id,
traversal_id=self.stack.current_traversal
)
self.assertIsNotNone(sync_point_rsrc)
sync_point_stack = create_sync_point(
self.ctx, entity_id=self.stack.id,
stack_id=self.stack.id,
traversal_id=self.stack.current_traversal
)
self.assertIsNotNone(sync_point_stack)
rows_deleted = db_api.sync_point_delete_all_by_stack_and_traversal(
self.ctx, self.stack.id,
self.stack.current_traversal
)
self.assertGreater(rows_deleted, 0)
self.assertEqual(4, rows_deleted)
# Additionally check if sync_point_get returns None.
for res in self.resources:
ret_sync_point_rsrc = db_api.sync_point_get(
self.ctx, str(res.id), self.stack.current_traversal, True
)
self.assertIsNone(ret_sync_point_rsrc)
ret_sync_point_stack = db_api.sync_point_get(
self.ctx, self.stack.id, self.stack.current_traversal, True
)
self.assertIsNone(ret_sync_point_stack)
@mock.patch.object(time, 'sleep')
def test_syncpoint_create_deadlock(self, sleep):
with mock.patch('sqlalchemy.orm.Session.add',
side_effect=db_exception.DBDeadlock) as add:
for res in self.resources:
self.assertRaises(db_exception.DBDeadlock,
create_sync_point,
self.ctx, entity_id=str(res.id),
stack_id=self.stack.id,
traversal_id=self.stack.current_traversal)
self.assertEqual(len(self.resources) * 21, add.call_count)
class DBAPIMigratePropertiesDataTest(common.HeatTestCase):
def setUp(self):
super(DBAPIMigratePropertiesDataTest, self).setUp()
self.ctx = utils.dummy_context()
templ = create_raw_template(self.ctx)
user_creds = create_user_creds(self.ctx)
stack = create_stack(self.ctx, templ, user_creds)
stack2 = create_stack(self.ctx, templ, user_creds)
create_resource(self.ctx, stack, True, name='res1')
create_resource(self.ctx, stack2, True, name='res2')
create_event(self.ctx, True)
create_event(self.ctx, True)
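    # The migration copies data out of the legacy per-row columns
    # (Resource.properties_data and Event.resource_properties) into shared
    # resource_properties_data rows and nulls the old columns; batch_size
    # only controls how many rows are handled per chunk, not the result.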
def _test_migrate_resource(self, batch_size=50):
resources = self.ctx.session.query(models.Resource).all()
self.assertEqual(2, len(resources))
for resource in resources:
self.assertEqual('bar1', resource.properties_data['foo1'])
db_api.db_properties_data_migrate(self.ctx, batch_size=batch_size)
for resource in resources:
self.assertEqual('bar1', resource.rsrc_prop_data.data['foo1'])
self.assertFalse(resource.rsrc_prop_data.encrypted)
self.assertIsNone(resource.properties_data)
self.assertIsNone(resource.properties_data_encrypted)
def _test_migrate_event(self, batch_size=50):
events = self.ctx.session.query(models.Event).all()
self.assertEqual(2, len(events))
for event in events:
self.assertEqual('ev_bar', event.resource_properties['foo2'])
db_api.db_properties_data_migrate(self.ctx, batch_size=batch_size)
self.ctx.session.expire_all()
events = self.ctx.session.query(models.Event).all()
for event in events:
self.assertEqual('ev_bar', event.rsrc_prop_data.data['foo2'])
self.assertFalse(event.rsrc_prop_data.encrypted)
self.assertIsNone(event.resource_properties)
def test_migrate_event(self):
self._test_migrate_event()
def test_migrate_event_in_batches(self):
self._test_migrate_event(batch_size=1)
def test_migrate_resource(self):
self._test_migrate_resource()
def test_migrate_resource_in_batches(self):
self._test_migrate_resource(batch_size=1)
def test_migrate_encrypted_resource(self):
resources = self.ctx.session.query(models.Resource).all()
db_api.db_encrypt_parameters_and_properties(
self.ctx, 'i have a key for you if you want')
encrypted_data_pre_migration = resources[0].properties_data['foo1'][1]
db_api.db_properties_data_migrate(self.ctx)
resources = self.ctx.session.query(models.Resource).all()
self.assertTrue(resources[0].rsrc_prop_data.encrypted)
self.assertIsNone(resources[0].properties_data)
self.assertIsNone(resources[0].properties_data_encrypted)
self.assertEqual('cryptography_decrypt_v1',
resources[0].rsrc_prop_data.data['foo1'][0])
self.assertEqual(encrypted_data_pre_migration,
resources[0].rsrc_prop_data.data['foo1'][1])
db_api.db_decrypt_parameters_and_properties(
self.ctx, 'i have a key for you if you want')
self.ctx.session.expire_all()
resources = self.ctx.session.query(models.Resource).all()
self.assertEqual('bar1', resources[0].rsrc_prop_data.data['foo1'])
self.assertFalse(resources[0].rsrc_prop_data.encrypted)
self.assertIsNone(resources[0].properties_data)
self.assertIsNone(resources[0].properties_data_encrypted)
class DBAPICryptParamsPropsTest(common.HeatTestCase):
def setUp(self):
super(DBAPICryptParamsPropsTest, self).setUp()
self.ctx = utils.dummy_context()
self.template = self._create_template()
self.user_creds = create_user_creds(self.ctx)
self.stack = create_stack(self.ctx, self.template, self.user_creds)
self.resources = [create_resource(self.ctx, self.stack, name='res1')]
hidden_params_dict = {
'param2': 'bar',
'param_number': '456',
'param_boolean': '1',
'param_map': '{\"test\":\"json\"}',
'param_comma_list': '[\"Hola\", \"Senor\"]'}
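    # Only hidden parameters that were actually supplied by the user are
    # encrypted: each stored value becomes a
    # ('cryptography_decrypt_v1', <ciphertext>) pair in the environment,
    # while hidden defaults such as param3 are never written back at all.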
def _create_template(self):
"""Initialize sample template."""
self.t = template_format.parse('''
heat_template_version: 2013-05-23
parameters:
param1:
type: string
description: value1.
param2:
type: string
description: value2.
hidden: true
param3:
type: string
description: value3
hidden: true
default: "don't encrypt me! I'm not sensitive enough"
param_string_default_int:
type: string
description: String parameter with integer default value
default: 4353
hidden: true
param_number:
type: number
description: Number parameter
default: 4353
hidden: true
param_boolean:
type: boolean
description: boolean parameter
default: true
hidden: true
param_map:
type: json
description: json parameter
default: {"fee": {"fi":"fo"}}
hidden: true
param_comma_list:
type: comma_delimited_list
description: cdl parameter
default: ["hola", "senorita"]
hidden: true
resources:
a_resource:
type: GenericResourceType
''')
template = {
'template': self.t,
'files': {'foo': 'bar'},
'environment': {
'parameters': {
'param1': 'foo',
'param2': 'bar',
'param_number': '456',
'param_boolean': '1',
'param_map': '{\"test\":\"json\"}',
'param_comma_list': '[\"Hola\", \"Senor\"]'}}}
return db_api.raw_template_create(self.ctx, template)
def encrypt(self, enc_key=None, batch_size=50,
legacy_prop_data=False):
session = self.ctx.session
if enc_key is None:
enc_key = cfg.CONF.auth_encryption_key
self.assertEqual([], db_api.db_encrypt_parameters_and_properties(
self.ctx, enc_key, batch_size=batch_size))
for enc_tmpl in session.query(models.RawTemplate).all():
for param_name in self.hidden_params_dict.keys():
self.assertEqual(
'cryptography_decrypt_v1',
enc_tmpl.environment['parameters'][param_name][0])
self.assertEqual(
'foo', enc_tmpl.environment['parameters']['param1'])
            # test that encryption does not store (or encrypt) default
            # values in the template's environment['parameters']
self.assertIsNone(
enc_tmpl.environment['parameters'].get('param3'))
enc_resources = session.query(models.Resource).all()
self.assertNotEqual([], enc_resources)
for enc_resource in enc_resources:
if legacy_prop_data:
self.assertEqual(
'cryptography_decrypt_v1',
enc_resource.properties_data['foo1'][0])
else:
self.assertEqual(
'cryptography_decrypt_v1',
enc_resource.rsrc_prop_data.data['foo1'][0])
ev = enc_tmpl.environment['parameters']['param2'][1]
return ev
def decrypt(self, encrypt_value, enc_key=None,
batch_size=50, legacy_prop_data=False):
session = self.ctx.session
if enc_key is None:
enc_key = cfg.CONF.auth_encryption_key
self.assertEqual([], db_api.db_decrypt_parameters_and_properties(
self.ctx, enc_key, batch_size=batch_size))
for dec_tmpl in session.query(models.RawTemplate).all():
self.assertNotEqual(
encrypt_value,
dec_tmpl.environment['parameters']['param2'][1])
for param_name, param_value in self.hidden_params_dict.items():
self.assertEqual(
param_value,
dec_tmpl.environment['parameters'][param_name])
self.assertEqual(
'foo', dec_tmpl.environment['parameters']['param1'])
self.assertIsNone(
dec_tmpl.environment['parameters'].get('param3'))
# test that decryption does not store default
# values in template's environment['parameters']
self.assertIsNone(dec_tmpl.environment['parameters'].get(
'param3'))
decrypt_value = dec_tmpl.environment['parameters']['param2'][1]
dec_resources = session.query(models.Resource).all()
self.assertNotEqual([], dec_resources)
for dec_resource in dec_resources:
if legacy_prop_data:
self.assertEqual(
'bar1', dec_resource.properties_data['foo1'])
else:
self.assertEqual(
'bar1', dec_resource.rsrc_prop_data.data['foo1'])
return decrypt_value
def _test_db_encrypt_decrypt(self, batch_size=50, legacy_prop_data=False):
session = self.ctx.session
raw_templates = session.query(models.RawTemplate).all()
self.assertNotEqual([], raw_templates)
for r_tmpl in raw_templates:
for param_name, param_value in self.hidden_params_dict.items():
self.assertEqual(param_value,
r_tmpl.environment['parameters'][param_name])
self.assertEqual('foo',
r_tmpl.environment['parameters']['param1'])
resources = session.query(models.Resource).all()
self.assertNotEqual([], resources)
self.assertEqual(len(resources), len(raw_templates))
for resource in resources:
resource = db_api.resource_get(self.ctx, resource.id)
if legacy_prop_data:
self.assertEqual(
'bar1', resource.properties_data['foo1'])
else:
self.assertEqual(
'bar1', resource.rsrc_prop_data.data['foo1'])
# Test encryption
encrypt_value = self.encrypt(batch_size=batch_size,
legacy_prop_data=legacy_prop_data)
# Test that encryption is idempotent
encrypt_value2 = self.encrypt(batch_size=batch_size,
legacy_prop_data=legacy_prop_data)
self.assertEqual(encrypt_value, encrypt_value2)
# Test decryption
decrypt_value = self.decrypt(encrypt_value, batch_size=batch_size,
legacy_prop_data=legacy_prop_data)
# Test that decryption is idempotent
decrypt_value2 = self.decrypt(encrypt_value, batch_size=batch_size,
legacy_prop_data=legacy_prop_data)
self.assertEqual(decrypt_value, decrypt_value2)
# Test using a different encryption key to encrypt & decrypt
encrypt_value3 = self.encrypt(
enc_key='774c15be099ea74123a9b9592ff12680',
batch_size=batch_size, legacy_prop_data=legacy_prop_data)
decrypt_value3 = self.decrypt(
encrypt_value3, enc_key='774c15be099ea74123a9b9592ff12680',
batch_size=batch_size, legacy_prop_data=legacy_prop_data)
self.assertEqual(decrypt_value, decrypt_value3)
self.assertNotEqual(encrypt_value, decrypt_value)
self.assertNotEqual(encrypt_value3, decrypt_value3)
self.assertNotEqual(encrypt_value, encrypt_value3)
def test_db_encrypt_decrypt(self):
"""Test encryption and decryption for single template and resource."""
self._test_db_encrypt_decrypt()
def test_db_encrypt_decrypt_legacy_prop_data(self):
"""Test encryption and decryption for res with legacy prop data."""
# delete what setUp created
[self.ctx.session.delete(r) for r in
self.ctx.session.query(models.Resource).all()]
[self.ctx.session.delete(s) for s in
self.ctx.session.query(models.Stack).all()]
[self.ctx.session.delete(t) for t in
self.ctx.session.query(models.RawTemplate).all()]
tmpl = self._create_template()
stack = create_stack(self.ctx, tmpl, self.user_creds)
create_resource(self.ctx, stack, True, name='res1')
self._test_db_encrypt_decrypt(legacy_prop_data=True)
def test_db_encrypt_decrypt_in_batches(self):
"""Test encryption and decryption in for several templates and resources.
Test encryption and decryption with set batch size of
templates and resources.
"""
tmpl1 = self._create_template()
tmpl2 = self._create_template()
stack = create_stack(self.ctx, tmpl1, self.user_creds)
create_resource(self.ctx, stack, False, name='res1')
stack2 = create_stack(self.ctx, tmpl2, self.user_creds)
create_resource(self.ctx, stack2, False, name='res2')
self._test_db_encrypt_decrypt(batch_size=1)
def test_db_encrypt_decrypt_exception_continue(self):
"""Test that encryption and decryption proceed after an exception"""
def create_malformed_template():
"""Initialize a malformed template which should fail encryption."""
t = template_format.parse('''
heat_template_version: 2013-05-23
parameters:
param1:
type: string
description: value1.
param2:
type: string
description: value2.
hidden: true
param3:
type: string
description: value3
hidden: true
default: "don't encrypt me! I'm not sensitive enough"
resources:
a_resource:
type: GenericResourceType
''')
template = {
'template': t,
'files': {'foo': 'bar'},
'environment': ''} # <- environment should be a dict
return db_api.raw_template_create(self.ctx, template)
create_malformed_template()
self._create_template()
# Test encryption
enc_result = db_api.db_encrypt_parameters_and_properties(
self.ctx, cfg.CONF.auth_encryption_key, batch_size=50)
self.assertEqual(1, len(enc_result))
self.assertIs(AttributeError, type(enc_result[0]))
enc_tmpls = self.ctx.session.query(models.RawTemplate).all()
self.assertEqual('', enc_tmpls[1].environment)
self.assertEqual('cryptography_decrypt_v1',
enc_tmpls[2].environment['parameters']['param2'][0])
# Test decryption
dec_result = db_api.db_decrypt_parameters_and_properties(
self.ctx, cfg.CONF.auth_encryption_key, batch_size=50)
self.assertEqual(len(dec_result), 1)
self.assertIs(AttributeError, type(dec_result[0]))
dec_tmpls = self.ctx.session.query(models.RawTemplate).all()
self.assertEqual('', dec_tmpls[1].environment)
self.assertEqual('bar',
dec_tmpls[2].environment['parameters']['param2'])
def test_db_encrypt_no_env(self):
template = {
'template': self.t,
'files': {'foo': 'bar'},
'environment': None}
db_api.raw_template_create(self.ctx, template)
self.assertEqual([], db_api.db_encrypt_parameters_and_properties(
self.ctx, cfg.CONF.auth_encryption_key))
def test_db_encrypt_no_env_parameters(self):
template = {
'template': self.t,
'files': {'foo': 'bar'},
'environment': {'encrypted_param_names': ['a']}}
db_api.raw_template_create(self.ctx, template)
self.assertEqual([], db_api.db_encrypt_parameters_and_properties(
self.ctx, cfg.CONF.auth_encryption_key))
def test_db_encrypt_no_properties_data(self):
ctx = utils.dummy_context()
template = self._create_template()
user_creds = create_user_creds(ctx)
stack = create_stack(ctx, template, user_creds)
resources = [create_resource(ctx, stack, name='res1')]
resources[0].properties_data = None
self.assertEqual([], db_api.db_encrypt_parameters_and_properties(
ctx, cfg.CONF.auth_encryption_key))
def test_db_encrypt_decrypt_verbose_on(self):
info_logger = self.useFixture(
fixtures.FakeLogger(level=logging.INFO,
format="%(levelname)8s [%(name)s] "
"%(message)s"))
ctx = utils.dummy_context()
template = self._create_template()
user_creds = create_user_creds(ctx)
stack = create_stack(ctx, template, user_creds)
create_resource(ctx, stack, legacy_prop_data=True, name='res2')
db_api.db_encrypt_parameters_and_properties(
ctx, cfg.CONF.auth_encryption_key, verbose=True)
self.assertIn("Processing raw_template 1", info_logger.output)
self.assertIn("Finished encrypt processing of raw_template 1",
info_logger.output)
self.assertIn("Processing resource_properties_data 1",
info_logger.output)
self.assertIn("Finished processing resource_properties_data 1",
info_logger.output)
# only the resource with legacy properties data is processed
self.assertIn("Processing resource 2", info_logger.output)
self.assertIn("Finished processing resource 2", info_logger.output)
info_logger2 = self.useFixture(
fixtures.FakeLogger(level=logging.INFO,
format="%(levelname)8s [%(name)s] "
"%(message)s"))
db_api.db_decrypt_parameters_and_properties(
ctx, cfg.CONF.auth_encryption_key, verbose=True)
self.assertIn("Processing raw_template 1", info_logger2.output)
self.assertIn("Finished decrypt processing of raw_template 1",
info_logger2.output)
self.assertIn("Processing resource_properties_data 1",
info_logger.output)
self.assertIn("Finished processing resource_properties_data 1",
info_logger.output)
# only the resource with legacy properties data is processed
self.assertIn("Processing resource 2", info_logger2.output)
self.assertIn("Finished processing resource 2", info_logger2.output)
def test_db_encrypt_decrypt_verbose_off(self):
info_logger = self.useFixture(
fixtures.FakeLogger(level=logging.INFO,
format="%(levelname)8s [%(name)s] "
"%(message)s"))
ctx = utils.dummy_context()
template = self._create_template()
user_creds = create_user_creds(ctx)
stack = create_stack(ctx, template, user_creds)
create_resource(ctx, stack, name='res1')
db_api.db_encrypt_parameters_and_properties(
ctx, cfg.CONF.auth_encryption_key, verbose=False)
self.assertNotIn("Processing raw_template 1", info_logger.output)
self.assertNotIn("Processing resource 1", info_logger.output)
self.assertNotIn("Successfully processed raw_template 1",
info_logger.output)
self.assertNotIn("Successfully processed resource 1",
info_logger.output)
info_logger2 = self.useFixture(
fixtures.FakeLogger(level=logging.INFO,
format="%(levelname)8s [%(name)s] "
"%(message)s"))
db_api.db_decrypt_parameters_and_properties(
ctx, cfg.CONF.auth_encryption_key, verbose=False)
self.assertNotIn("Processing raw_template 1", info_logger2.output)
self.assertNotIn("Processing resource 1", info_logger2.output)
self.assertNotIn("Successfully processed raw_template 1",
info_logger2.output)
self.assertNotIn("Successfully processed resource 1",
info_logger2.output)
def test_db_encrypt_no_param_schema(self):
t = copy.deepcopy(self.t)
        del t['parameters']['param2']
template = {
'template': t,
'files': {'foo': 'bar'},
'environment': {'encrypted_param_names': [],
'parameters': {'param2': 'foo'}}}
db_api.raw_template_create(self.ctx, template)
self.assertEqual([], db_api.db_encrypt_parameters_and_properties(
self.ctx, cfg.CONF.auth_encryption_key))
def test_db_encrypt_non_string_param_type(self):
t = template_format.parse('''
heat_template_version: 2013-05-23
parameters:
param1:
type: string
description: value1.
param2:
type: string
description: value2.
hidden: true
param3:
type: string
description: value3
hidden: true
default: 1234
resources:
a_resource:
type: GenericResourceType
''')
template = {
'template': t,
'files': {},
'environment': {'parameters': {
'param1': 'foo',
'param2': 'bar',
'param3': 12345}}}
tmpl = db_api.raw_template_create(self.ctx, template)
self.assertEqual([], db_api.db_encrypt_parameters_and_properties(
self.ctx, cfg.CONF.auth_encryption_key))
tmpl = db_api.raw_template_get(self.ctx, tmpl.id)
enc_params = copy.copy(tmpl.environment['parameters'])
self.assertEqual([], db_api.db_decrypt_parameters_and_properties(
self.ctx, cfg.CONF.auth_encryption_key, batch_size=50))
tmpl = db_api.raw_template_get(self.ctx, tmpl.id)
dec_params = tmpl.environment['parameters']
self.assertNotEqual(enc_params['param3'], dec_params['param3'])
self.assertEqual('bar', dec_params['param2'])
self.assertEqual('12345', dec_params['param3'])
class ResetStackStatusTests(common.HeatTestCase):
def setUp(self):
super(ResetStackStatusTests, self).setUp()
self.ctx = utils.dummy_context()
self.template = create_raw_template(self.ctx)
self.user_creds = create_user_creds(self.ctx)
self.stack = create_stack(self.ctx, self.template, self.user_creds)
def test_status_reset(self):
db_api.stack_update(self.ctx, self.stack.id, {'status': 'IN_PROGRESS'})
db_api.stack_lock_create(self.ctx, self.stack.id, UUID1)
db_api.reset_stack_status(self.ctx, self.stack.id)
stack = db_api.stack_get(self.ctx, self.stack.id)
self.assertEqual('FAILED', stack.status)
self.assertEqual('Stack status manually reset',
stack.status_reason)
self.assertEqual(True, db_api.stack_lock_release(self.ctx,
self.stack.id,
UUID1))
def test_resource_reset(self):
resource_progress = create_resource(self.ctx, self.stack,
status='IN_PROGRESS',
engine_id=UUID2)
resource_complete = create_resource(self.ctx, self.stack)
db_api.reset_stack_status(self.ctx, self.stack.id)
resource_complete = db_api.resource_get(self.ctx, resource_complete.id)
resource_progress = db_api.resource_get(self.ctx, resource_progress.id)
self.assertEqual('complete', resource_complete.status)
self.assertEqual('FAILED', resource_progress.status)
self.assertIsNone(resource_progress.engine_id)
def test_hook_reset(self):
resource = create_resource(self.ctx, self.stack)
resource.context = self.ctx
create_resource_data(self.ctx, resource, key="pre-create")
create_resource_data(self.ctx, resource)
db_api.reset_stack_status(self.ctx, self.stack.id)
vals = db_api.resource_data_get_all(self.ctx, resource.id)
self.assertEqual({'test_resource_key': 'test_value'}, vals)
def test_nested_stack(self):
db_api.stack_update(self.ctx, self.stack.id, {'status': 'IN_PROGRESS'})
child = create_stack(self.ctx, self.template, self.user_creds,
owner_id=self.stack.id)
grandchild = create_stack(self.ctx, self.template, self.user_creds,
owner_id=child.id, status='IN_PROGRESS')
resource = create_resource(self.ctx, grandchild, status='IN_PROGRESS',
engine_id=UUID2)
db_api.reset_stack_status(self.ctx, self.stack.id)
grandchild = db_api.stack_get(self.ctx, grandchild.id)
self.stack = db_api.stack_get(self.ctx, self.stack.id)
resource = db_api.resource_get(self.ctx, resource.id)
self.assertEqual('FAILED', grandchild.status)
self.assertEqual('FAILED', resource.status)
self.assertIsNone(resource.engine_id)
self.assertEqual('FAILED', self.stack.status)
| apache-2.0 | -1,321,193,678,985,674,200 | 41.505918 | 81 | 0.569142 | false |
vpp-dev/vpp | test/test_vxlan_gpe.py | 1 | 10187 | #!/usr/bin/env python3
import socket
from util import ip4n_range, ip4_range
import unittest
from framework import VppTestCase, VppTestRunner, running_extended_tests
from template_bd import BridgeDomain
from scapy.layers.l2 import Ether
from scapy.packet import Raw
from scapy.layers.inet import IP, UDP
from scapy.layers.vxlan import VXLAN
from scapy.utils import atol
from vpp_ip_route import VppIpRoute, VppRoutePath
from vpp_ip import INVALID_INDEX
@unittest.skipUnless(running_extended_tests, "part of extended tests")
class TestVxlanGpe(BridgeDomain, VppTestCase):
""" VXLAN-GPE Test Case """
def __init__(self, *args):
BridgeDomain.__init__(self)
VppTestCase.__init__(self, *args)
def encapsulate(self, pkt, vni):
"""
Encapsulate the original payload frame by adding VXLAN-GPE header
with its UDP, IP and Ethernet fields
"""
return (Ether(src=self.pg0.remote_mac, dst=self.pg0.local_mac) /
IP(src=self.pg0.remote_ip4, dst=self.pg0.local_ip4) /
UDP(sport=self.dport, dport=self.dport, chksum=0) /
VXLAN(vni=vni, flags=self.flags) /
pkt)
def ip_range(self, start, end):
""" range of remote ip's """
return ip4_range(self.pg0.remote_ip4, start, end)
def encap_mcast(self, pkt, src_ip, src_mac, vni):
"""
Encapsulate the original payload frame by adding VXLAN-GPE header
with its UDP, IP and Ethernet fields
"""
return (Ether(src=src_mac, dst=self.mcast_mac) /
IP(src=src_ip, dst=self.mcast_ip4) /
UDP(sport=self.dport, dport=self.dport, chksum=0) /
VXLAN(vni=vni, flags=self.flags) /
pkt)
def decapsulate(self, pkt):
"""
Decapsulate the original payload frame by removing VXLAN-GPE header
"""
        # check that the I and P flags are set
self.assertEqual(pkt[VXLAN].flags, 0x0c)
return pkt[VXLAN].payload
# Method for checking VXLAN-GPE encapsulation.
#
def check_encapsulation(self, pkt, vni, local_only=False, mcast_pkt=False):
# Verify source MAC is VPP_MAC and destination MAC is MY_MAC resolved
# by VPP using ARP.
self.assertEqual(pkt[Ether].src, self.pg0.local_mac)
if not local_only:
if not mcast_pkt:
self.assertEqual(pkt[Ether].dst, self.pg0.remote_mac)
else:
self.assertEqual(pkt[Ether].dst, type(self).mcast_mac)
# Verify VXLAN-GPE tunnel src IP is VPP_IP and dst IP is MY_IP.
self.assertEqual(pkt[IP].src, self.pg0.local_ip4)
if not local_only:
if not mcast_pkt:
self.assertEqual(pkt[IP].dst, self.pg0.remote_ip4)
else:
self.assertEqual(pkt[IP].dst, type(self).mcast_ip4)
# Verify UDP destination port is VXLAN-GPE 4790, source UDP port
# could be arbitrary.
self.assertEqual(pkt[UDP].dport, type(self).dport)
# Verify VNI
self.assertEqual(pkt[VXLAN].vni, vni)
@classmethod
def create_vxlan_gpe_flood_test_bd(cls, vni, n_ucast_tunnels):
        # Create n_ucast_tunnels unicast vxlan tunnels under the bd
ip_range_start = 10
ip_range_end = ip_range_start + n_ucast_tunnels
next_hop_address = cls.pg0.remote_ip4
for dest_ip4 in ip4_range(next_hop_address, ip_range_start,
ip_range_end):
# add host route so dest_ip4n will not be resolved
rip = VppIpRoute(cls, dest_ip4, 32,
[VppRoutePath(next_hop_address,
INVALID_INDEX)],
register=False)
rip.add_vpp_config()
dest_ip4n = socket.inet_pton(socket.AF_INET, dest_ip4)
r = cls.vapi.vxlan_gpe_add_del_tunnel(
src_addr=cls.pg0.local_ip4n,
dst_addr=dest_ip4n,
vni=vni)
cls.vapi.sw_interface_set_l2_bridge(rx_sw_if_index=r.sw_if_index,
bd_id=vni)
@classmethod
def add_del_shared_mcast_dst_load(cls, is_add):
"""
add or del tunnels sharing the same mcast dst
to test vxlan_gpe ref_count mechanism
"""
n_shared_dst_tunnels = 20
vni_start = 1000
vni_end = vni_start + n_shared_dst_tunnels
for vni in range(vni_start, vni_end):
r = cls.vapi.vxlan_gpe_add_del_tunnel(
src_addr=cls.pg0.local_ip4n,
dst_addr=cls.mcast_ip4n,
mcast_sw_if_index=1,
vni=vni,
is_add=is_add)
if r.sw_if_index == 0xffffffff:
raise ValueError("bad sw_if_index: ~0")
@classmethod
def add_shared_mcast_dst_load(cls):
cls.add_del_shared_mcast_dst_load(is_add=1)
@classmethod
def del_shared_mcast_dst_load(cls):
cls.add_del_shared_mcast_dst_load(is_add=0)
@classmethod
def add_del_mcast_tunnels_load(cls, is_add):
"""
add or del tunnels to test vxlan_gpe stability
"""
n_distinct_dst_tunnels = 20
ip_range_start = 10
ip_range_end = ip_range_start + n_distinct_dst_tunnels
for dest_ip4n in ip4n_range(cls.mcast_ip4n, ip_range_start,
ip_range_end):
vni = bytearray(dest_ip4n)[3]
cls.vapi.vxlan_gpe_add_del_tunnel(
src_addr=cls.pg0.local_ip4n,
dst_addr=dest_ip4n,
mcast_sw_if_index=1,
vni=vni,
is_add=is_add)
@classmethod
def add_mcast_tunnels_load(cls):
cls.add_del_mcast_tunnels_load(is_add=1)
@classmethod
def del_mcast_tunnels_load(cls):
cls.add_del_mcast_tunnels_load(is_add=0)
# Class method to start the VXLAN-GPE test case.
# Overrides setUpClass method in VppTestCase class.
    # A try..except statement is used to ensure that the class teardown
    # is executed even if an exception is raised.
# @param cls The class pointer.
@classmethod
def setUpClass(cls):
super(TestVxlanGpe, cls).setUpClass()
try:
cls.dport = 4790
cls.flags = 0x0c
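            # 0x0c sets the I (VNI valid) and P (next-protocol present)
            # bits of the VXLAN-GPE flags field, matching what
            # decapsulate() asserts on received packets.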
# Create 2 pg interfaces.
cls.create_pg_interfaces(range(4))
for pg in cls.pg_interfaces:
pg.admin_up()
# Configure IPv4 addresses on VPP pg0.
cls.pg0.config_ip4()
# Resolve MAC address for VPP's IP address on pg0.
cls.pg0.resolve_arp()
# Our Multicast address
cls.mcast_ip4 = '239.1.1.1'
cls.mcast_ip4n = socket.inet_pton(socket.AF_INET, cls.mcast_ip4)
iplong = atol(cls.mcast_ip4)
cls.mcast_mac = "01:00:5e:%02x:%02x:%02x" % (
(iplong >> 16) & 0x7F, (iplong >> 8) & 0xFF, iplong & 0xFF)
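            # Standard IPv4 multicast-to-MAC mapping: the low 23 bits of
            # the group address are copied into the 01:00:5e:00:00:00
            # prefix, e.g. 239.1.1.1 -> 01:00:5e:01:01:01.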
# Create VXLAN-GPE VTEP on VPP pg0, and put vxlan_gpe_tunnel0
# and pg1 into BD.
cls.single_tunnel_bd = 11
r = cls.vapi.vxlan_gpe_add_del_tunnel(
src_addr=cls.pg0.local_ip4n,
dst_addr=cls.pg0.remote_ip4n,
vni=cls.single_tunnel_bd)
cls.vapi.sw_interface_set_l2_bridge(rx_sw_if_index=r.sw_if_index,
bd_id=cls.single_tunnel_bd)
cls.vapi.sw_interface_set_l2_bridge(
rx_sw_if_index=cls.pg1.sw_if_index, bd_id=cls.single_tunnel_bd)
            # Setup the second bridge domain (vni 12) to test multicast flooding
cls.n_ucast_tunnels = 10
cls.mcast_flood_bd = 12
cls.create_vxlan_gpe_flood_test_bd(cls.mcast_flood_bd,
cls.n_ucast_tunnels)
r = cls.vapi.vxlan_gpe_add_del_tunnel(
src_addr=cls.pg0.local_ip4n,
dst_addr=cls.mcast_ip4n,
mcast_sw_if_index=1,
vni=cls.mcast_flood_bd)
cls.vapi.sw_interface_set_l2_bridge(rx_sw_if_index=r.sw_if_index,
bd_id=cls.mcast_flood_bd)
cls.vapi.sw_interface_set_l2_bridge(
rx_sw_if_index=cls.pg2.sw_if_index, bd_id=cls.mcast_flood_bd)
# Add and delete mcast tunnels to check stability
cls.add_shared_mcast_dst_load()
cls.add_mcast_tunnels_load()
cls.del_shared_mcast_dst_load()
cls.del_mcast_tunnels_load()
            # Setup the third bridge domain (vni 13) to test unicast flooding
cls.ucast_flood_bd = 13
cls.create_vxlan_gpe_flood_test_bd(cls.ucast_flood_bd,
cls.n_ucast_tunnels)
cls.vapi.sw_interface_set_l2_bridge(
rx_sw_if_index=cls.pg3.sw_if_index, bd_id=cls.ucast_flood_bd)
except Exception:
super(TestVxlanGpe, cls).tearDownClass()
raise
@classmethod
def tearDownClass(cls):
super(TestVxlanGpe, cls).tearDownClass()
@unittest.skip("test disabled for vxlan-gpe")
def test_mcast_flood(self):
""" inherited from BridgeDomain """
pass
@unittest.skip("test disabled for vxlan-gpe")
def test_mcast_rcv(self):
""" inherited from BridgeDomain """
pass
# Method to define VPP actions before tear down of the test case.
# Overrides tearDown method in VppTestCase class.
# @param self The object pointer.
def tearDown(self):
super(TestVxlanGpe, self).tearDown()
def show_commands_at_teardown(self):
self.logger.info(self.vapi.cli("show bridge-domain 11 detail"))
self.logger.info(self.vapi.cli("show bridge-domain 12 detail"))
self.logger.info(self.vapi.cli("show bridge-domain 13 detail"))
self.logger.info(self.vapi.cli("show int"))
self.logger.info(self.vapi.cli("show vxlan-gpe"))
self.logger.info(self.vapi.cli("show trace"))
if __name__ == '__main__':
unittest.main(testRunner=VppTestRunner)
| apache-2.0 | -3,846,960,124,925,577,000 | 37.587121 | 79 | 0.57112 | false |
pyrou/RC-RESTserver | rcserver.py | 1 | 2372 | #!/usr/bin/env python
### BEGIN INIT INFO
# Provides: RCServer
# Required-Start: $syslog
# Required-Stop: $syslog
# Default-Start: 2 3 4 5
# Default-Stop: 0 1 6
# Short-Description: Start daemon at boot time
# Description: Enable service provided by daemon.
### END INIT INFO
import sys, time
from daemon import Daemon
from subprocess import call
import json, os, bottle
class MyDaemon(Daemon):
@bottle.hook("after_request")
def setup_response():
bottle.response.headers["Access-Control-Allow-Origin"] = "*"
bottle.response.content_type="application/json"
@bottle.route('/blyss/<key:re:[0-9A-F]{6}>/<channel:re:[0-5]>/<status:re:on|off>')
def blyss(key,channel,status):
call(["sudo", "/home/pi/RC-RESTserver/BlyssController/send", str(key), str(channel), ("1" if status == 'on' else "0")])
return json.dumps({'data':"ok"})
    @bottle.route('/x10/<device:re:[A-Pa-p]>/<unit:re:[1-9]|1[0-6]>/<status:re:on|off>')  # units 1-16
def x10rf(device,unit,status):
call(["sudo", "/home/pi/RC-RESTserver/X10RF-raspberry/send", str(device), str(unit), ("1" if status == 'on' else "0")])
return json.dumps({'data':"ok"})
@bottle.route('/tristate/<state:re:[01F]{12}>')
def tristate(state):
call(["sudo", "/home/pi/RC-RESTserver/rcswitch-pi/tristate", str(state)])
return json.dumps({'data':"ok"})
@bottle.route('/switch/<device:re:[0-1]{5}>/<unit:re:[1-5]>/<status:re:on|off>')
def rcswitch(device,unit,status):
call(["sudo", "/home/pi/RC-RESTserver/rcswitch-pi/send", str(device), str(unit), ("1" if status == 'on' else "0")])
return json.dumps({'data':"ok"})
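    # Illustrative usage sketch (added comment, not part of the original API):
    # assuming the daemon is running on its default port 80, the routes above
    # could be exercised with e.g.
    #   curl http://localhost/x10/A/1/on
    #   curl http://localhost/switch/10101/2/off
    # Each handler shells out to the matching sender binary and returns
    # {"data": "ok"} as JSON.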
@bottle.error(404)
def error404(error):
        # apply the CORS/JSON headers directly: setup_response is an attribute
        # of MyDaemon, not of the bottle module, so calling it via bottle fails
        bottle.response.headers["Access-Control-Allow-Origin"] = "*"
        bottle.response.content_type = "application/json"
return json.dumps({'error':"404"})
def run(self):
bottle.run(host="0.0.0.0", port=int(os.environ.get("PORT", 80)))
if __name__ == "__main__":
daemon = MyDaemon('/tmp/rcserver.pid') #, stderr="/dev/pts/0")
if len(sys.argv) == 2:
if 'start' == sys.argv[1]:
daemon.start()
elif 'stop' == sys.argv[1]:
daemon.stop()
elif 'restart' == sys.argv[1]:
daemon.restart()
elif 'debug' == sys.argv[1]:
daemon.run()
else:
print "Unknown command"
sys.exit(2)
sys.exit(0)
else:
print "usage: %s start|stop|restart" % sys.argv[0]
sys.exit(2)
| mit | 1,089,731,407,478,726,400 | 33.882353 | 127 | 0.60371 | false |
Sterncat/opticspy | opticspy/asphere.py | 1 | 2551 | from __future__ import division as __division__
import numpy as __np__
from numpy import sqrt as __sqrt__
from numpy import cos as __cos__
from numpy import sin as __sin__
import matplotlib.pyplot as __plt__
from matplotlib import cm as __cm__
class Coefficient(object):
"""
Return a set of Asphere Coefficient
R,k,a2,a3,a4,a5,a6,a7,a8,a9,a10
"""
__coefficients__ = []
def __init__(self,R=0,k=0,a2=0,a3=0,a4=0,a5=0,a6=0,a7=0,a8=0,a9=0,a10=0):
if type(R) == list:
self.__coefficients__ = R + [0]*(11-len(R))
else:
self.__coefficients__ = [R,k,a2,a3,a4,a5,a6,a7,a8,a9,a10]
def outputcoefficient(self):
return self.__coefficients__
def aspheresurface(self):
"""
Show the surface of an asphere.
=============================================================
Try:
A = opticspy.asphere.Coefficient(R=50,a2=0.18*10**(-8),a3 = 0.392629*10**(-13))
"""
R = self.__coefficients__[0]
theta = __np__.linspace(0, 2*__np__.pi, 100)
rho = __np__.linspace(0, R, 100)
[u,r] = __np__.meshgrid(theta,rho)
X = r*__cos__(u)
Y = r*__sin__(u)
Z = __aspherepolar__(self.__coefficients__,r)
fig = __plt__.figure(figsize=(12, 8), dpi=80)
ax = fig.gca(projection='3d')
surf = ax.plot_surface(X, Y, Z, rstride=1, cstride=1, cmap=__cm__.RdYlGn,
linewidth=0, antialiased=False, alpha = 0.6)
__plt__.show()
return 0
def aspherematrix(self):
l = 100
R = self.__coefficients__[0]
x1 = __np__.linspace(-R, R, l)
[X,Y] = __np__.meshgrid(x1,x1)
r = __sqrt__(X**2+Y**2)
Z = __aspherepolar__(self.__coefficients__,r)
for i in range(l):
for j in range(l):
if x1[i]**2+x1[j]**2 > R**2:
Z[i][j] = 0
fig = __plt__.figure(figsize=(12, 8), dpi=80)
ax = fig.gca(projection='3d')
surf = ax.plot_surface(X, Y, Z, rstride=1, cstride=1, cmap=__cm__.RdYlGn,
linewidth=0, antialiased=False, alpha = 0.6)
__plt__.show()
return Z
def asphereline(self):
R,k,a2,a3,a4,a5,a6,a7,a8,a9,a10 = self.__coefficients__
r = __np__.linspace(-R,R,100)
C = 1/R
		# even-asphere sag: conic term divided by (1 + sqrt(1-(1+k)C^2 r^2))
		Z = C*r**2/(1+__sqrt__(1-(1+k)*r**2*C**2)) + a2*r**4 + a3*r**6 + a4*r**8 + \
			a5*r**10 + a6*r**12 + a7*r**14 + a8*r**16 + a9*r**18 + a10*r**20
Z = -Z
fig = __plt__.figure(figsize=(12, 8), dpi=80)
__plt__.plot(r,Z)
__plt__.axis('equal')
__plt__.show()
def __aspherepolar__(coefficient,r):
R,k,a2,a3,a4,a5,a6,a7,a8,a9,a10 = coefficient
C = 1/R
	# even-asphere sag: conic term divided by (1 + sqrt(1-(1+k)C^2 r^2))
	Z = C*r**2/(1+__sqrt__(1-(1+k)*r**2*C**2)) + a2*r**4 + a3*r**6 + a4*r**8 + \
		a5*r**10 + a6*r**12 + a7*r**14 + a8*r**16 + a9*r**18 + a10*r**20
return -Z
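# Illustrative usage sketch (added comment; the parameter values below are
# examples only, not from the original module):
#   c = Coefficient(R=50, k=-1, a2=0.18e-8, a3=0.392629e-13)
#   c.asphereline()      # 2D sag profile across the aperture
#   c.aspheresurface()   # 3D surface plot over a polar grid of radius R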
| mit | -7,405,621,237,707,080,000 | 26.138298 | 81 | 0.548412 | false |
tbtraltaa/medianshape | medianshape/experiment/median/test.py | 1 | 11430 | # encoding: utf-8
'''
2D Median surface embedded in 3D
--------------------------------
'''
from __future__ import absolute_import
import importlib
import os
import numpy as np
from scipy.spatial import Delaunay
from medianshape.simplicial import pointgen3d, mesh
from medianshape.simplicial.meshgen import meshgen2d, meshgen3d, get_mesh_surface
from medianshape import inout
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
from matplotlib.ticker import LinearLocator, FormatStrFormatter
from matplotlib import cm
from medianshape.viz import plot2d, plot3d
from distmesh.plotting import axes_simpplot3d
from meshpy.tet import MeshInfo, Options, build
from medianshape.simplicial.utils import boundary_points
def func(x, y, sign=1):
return np.sin(np.pi*x)*np.cos(np.pi*y)
def sample_surf(scale, step=0.2):
'''
Return a tuple X, Y, Z with a test surface.
'''
x = y = np.arange(-4.0, 4.0, step)
X, Y = np.meshgrid(x, y)
from matplotlib.mlab import bivariate_normal
'''
Z1 = bivariate_normal(X, Y, 1.0, 1.0, 0.0, 0.0)
Z2 = bivariate_normal(X, Y, 1.5, 0.5, 1, 1)
#Z3 = bivariate_normal(X, Y, 1, 1, -2, -2)
Z = Z2 - Z1
'''
# Ups
ZU1 = bivariate_normal(X,Y, 1.5, 1, 0,-2)
ZU2 = bivariate_normal(X, Y, 1.5, 1.5, 4, 1)
ZU3 = bivariate_normal(X, Y, 1, 1, -4, 1)
#ZU4 = bivariate_normal(X, Y, 1.5, 1.5, -4, -4)
#ZU5 = bivariate_normal(X, Y, 1, 1, 4, -4)
ZU4 = bivariate_normal(X, Y, 4, 0.5, 0, -4)
# Downs
ZD1 = bivariate_normal(X, Y, 1.5, 1, 0, 1)
ZD2 = bivariate_normal(X, Y, 1.5, 1.5, -4, -2)
ZD3 = bivariate_normal(X, Y, 1, 1, 4, -2)
ZD4 = bivariate_normal(X, Y, 4, 1, 0, 4)
Z = ZU1 + ZU2 + ZU3 - ZD1 - ZD2 - ZD3 - ZD4
Zmax = np.amax(Z)
X = X * scale[0]/4.0
Y = Y * scale[1]/4.0
Z = Z/Zmax * scale[2]
return X, Y, Z
def interpolate_surf(points, values, ipoints, method = "cubic"):
from scipy.interpolate import griddata
return griddata(points, values, ipoints, method= method)
def surfaces(bbox=[-10,-10,-10, 10,10,10], l=0.5, overlaps =[0.4, 0.7]):
'''
'''
# Generating point grids for two surfaces
xmin = bbox[0]
xmax = bbox[3]
ymin = bbox[1]
ymax = bbox[4]
zmin = bbox[2]
zmax = bbox[5]
xlen = xmax - xmin
y = np.arange(ymin, ymax, l)
y = np.append(y, ymax)
xmin_points = np.ndarray((len(y), 2))
xmin_points[:,0] = xmin
xmin_points[:, 1] = y
xmax_points = np.ndarray((len(y), 2))
xmax_points[:,0] = xmax
xmax_points[:, 1] = y
xoverlaps = [xmin + xlen*o for o in overlaps]
xo_points = None
for i, o in enumerate(xoverlaps):
xo_tmp = np.ndarray((len(y), 2))
xo_tmp[:,0] = o
xo_tmp[:, 1] = y
if i == 0:
xo_points = xo_tmp
else:
xo_points = np.vstack((xo_points, xo_tmp))
fixed_points = np.concatenate((xmin_points, xmax_points, xo_points), axis=0)
print fixed_points
mesh = meshgen2d([xmin, ymin, xmax, ymax], l, fixed_points, include_corners=False)
#plot2d.plotmesh2d(mesh)
X, Y, Z1 = sample_surf([xmax*0.8, ymax*0.8, zmax*0.2])
Z2 = -Z1 - zmax*0.3
Z1 = Z1 + zmax*0.3
#z2 = elevate_surf(mesh.points[:,0], mesh.points[:,1])
fig = plt.figure()
ax = fig.gca(projection="3d")
surf = ax.plot_surface(X, Y, Z1, rstride=1, cstride=1, cmap=cm.winter,
linewidth=0, antialiased=False)
#surf = ax.plot_surface(X, Y, Z2, rstride=1, cstride=1, cmap=cm.autumn,
# linewidth=0, antialiased=False)
plt.show()
sample_points = np.hstack((X.reshape(-1,1), Y.reshape(-1,1)))
fig = plt.figure()
ax = fig.gca(projection="3d")
surf = ax.scatter(X, Y, Z1.reshape(-1,1), color='b')
surf = ax.scatter(X, Y, Z2.reshape(-1,1), color='r')
plt.show()
Z1 = interpolate_surf(sample_points, Z1.reshape(-1,1), mesh.points)
Z2 = interpolate_surf(sample_points, Z2.reshape(-1,1), mesh.points)
fig = plt.figure()
ax = fig.gca(projection="3d")
surf = ax.scatter(mesh.points[:,0], mesh.points[:,1], Z1, color='b')
surf = ax.scatter(mesh.points[:,0],mesh.points[:,1], Z2, color='r')
plt.show()
Z1[np.argwhere(mesh.points[:,1]==ymin)] = 0
Z1[np.argwhere(mesh.points[:,1]==ymax)] = 0
'''
for xo in xoverlaps:
Z1[np.argwhere(mesh.points[:,0]==xo)] = 0
'''
fig = plt.figure()
ax = fig.gca(projection="3d")
surf = ax.scatter(mesh.points[:,0], mesh.points[:,1], Z1, color='b')
surf = ax.scatter(mesh.points[:,0],mesh.points[:,1], Z2, color='r')
plt.show()
exit()
#surf = ax.scatter(mesh.points[:,0], mesh.points[:,1], z2, color="r")
ax.set_zlim(-1, 1)
plt.show()
X, Y = np.meshgrid(x, y, sparse=False)
# Z coordinate of surface1
Z1 = 7*np.sin(X)
# Z coordinate of surface2
Z2 = -7*np.sin(X)
# Illustrating the surfaces
# Triangulating the surfaces
s1 = np.concatenate((X.reshape(-1,1), Y.reshape(-1,1), Z1.reshape(-1,1)), axis=1).reshape(-1,3)
s2 = np.concatenate((X.reshape(-1,1), Y.reshape(-1,1), Z2.reshape(-1,1)), axis=1).reshape(-1,3)
fig = plt.figure()
ax = fig.gca(projection="3d")
surf = ax.scatter(s1[:,0], s1[:,1], s1[:,2])
surf = ax.scatter(s2[:,0], s2[:,1], s2[:,2])
ax.set_zlim(-10, 10)
ax.zaxis.set_major_locator(LinearLocator(10))
ax.zaxis.set_major_formatter(FormatStrFormatter('%.02f'))
plt.show()
leftline1 = np.where(s1[:,0]==xmin)[0]
rightline1 = np.where(s1[:, 0] == xmax)[0]
backline1 = np.where(s1[:,1]==xmin)[0]
frontline1 = np.where(s1[:, 1] == xmax)[0]
b1 = np.unique(np.concatenate((leftline1, rightline1, backline1, frontline1), axis=0))
print b1
leftline2 = np.where(s2[:,0]==xmin)[0]
rightline2 = np.where(s2[:, 0] == xmax)[0]
backline2 = np.where(s2[:,1]==xmin)[0]
frontline2 = np.where(s2[:, 1] == xmax)[0]
b2 = np.unique(np.concatenate((leftline2, rightline2, backline2, frontline2), axis=0))
intersection = np.where(s1[:,0]== intersect)[0]
closed_boundary = np.concatenate((leftline1, rightline2), axis=0)
print b2
print leftline1
print rightline1
print leftline2
print leftline2
fig = plt.figure()
ax = fig.gca(projection="3d")
surf = ax.scatter(backline1[:,0], backline1[:,1], backline1[:,2])
surf = ax.scatter(frontline1[:,0],frontline1[:,1],frontline1[:,2])
surf = ax.scatter(leftline1[:,0], leftline1[:,1], leftline1[:,2])
surf = ax.scatter(rightline1[:,0], rightline1[:,1], rightline1[:,2])
surf = ax.scatter(backline2[:,0], backline2[:,1], backline2[:,2])
surf = ax.scatter(frontline2[:,0],frontline2[:,1],frontline2[:,2])
surf = ax.scatter(leftline2[:,0], leftline2[:,1], leftline2[:,2])
surf = ax.scatter(rightline2[:,0], rightline2[:,1], rightline2[:,2])
ax.set_zlim(-10, 10)
ax.zaxis.set_major_locator(LinearLocator(10))
ax.zaxis.set_major_formatter(FormatStrFormatter('%.02f'))
plt.show()
s1_complex = Delaunay(s1[:,:-1])
s2_complex = Delaunay(s2[:,:-1])
fig = plt.figure()
ax = fig.gca(projection="3d")
ax.set_zlim(-10, 10)
ax.zaxis.set_major_locator(LinearLocator(10))
ax.zaxis.set_major_formatter(FormatStrFormatter('%.02f'))
ax.plot_trisurf(s1[:,0], s1[:,1], s1[:,2], triangles = s1_complex.simplices, cmap=cm.autumn)
ax.plot_trisurf(s2[:,0], s2[:,1], s2[:,2], triangles = s2_complex.simplices, cmap=cm.winter)
plt.show()
exit()
s_points = np.vstack((s1, s2))
s1_triangles = s1_complex.simplices
s2_triangles = s2_complex.simplices + len(s1_complex.points)
surfaces = np.vstack((s1_triangles, s2_triangles))
# Plotting the surfaces
fig = plt.figure()
ax = fig.gca(projection="3d")
ax.set_zlim(-10, 10)
ax.zaxis.set_major_locator(LinearLocator(10))
ax.zaxis.set_major_formatter(FormatStrFormatter('%.02f'))
ax.plot_trisurf(s_points[:,0], s_points[:,1], s_points[:,2], triangles = s1_triangles, cmap=cm.autumn)
ax.plot_trisurf(s_points[:,0], s_points[:,1], s_points[:,2], triangles = s2_triangles, cmap=cm.winter)
plt.show()
'''
# Plotting the surfaces
fig = plt.figure()
ax = fig.gca(projection="3d")
ax.set_zlim(-10, 10)
ax.zaxis.set_major_locator(LinearLocator(10))
ax.zaxis.set_major_formatter(FormatStrFormatter('%.02f'))
ax.plot_trisurf(points[:,0], points[:,1], points[:,2], triangles = surfaces, cmap=cm.autumn)
plt.show()
'''
# Grid Points sampled from boundary box
corners = boundary_points(bbox)
box = np.array(bbox).reshape(2, -1)
points = np.mgrid[tuple(slice(min, max+1, 1) for min, max in box.T)]
points = points.reshape(3, -1).T
# Points of PLC.
# Uncomment the line below to include sample poinds in bbox along with the surface points
#points = np.concatenate((s_points, points), axis=0).reshape(-1,3)
points = np.concatenate((s_points, corners), axis=0).reshape(-1,3)
fig = plt.figure()
ax = fig.gca(projection="3d")
surf = ax.scatter(points[:,0], points[:,1], points[:,2])
ax.set_zlim(-12, 12)
ax.zaxis.set_major_locator(LinearLocator(10))
ax.zaxis.set_major_formatter(FormatStrFormatter('%.02f'))
plt.show()
# Saving the current PLC to a file in .smesh format
with open('surf.smesh', 'w') as f:
f.write('%d %d %d %d\n'%(len(points), 3, 0,0))
for i, p in enumerate(points):
f.write("%d %f %f %f\n"%(i, p[0], p[1], p[2]))
f.write('%d %d\n'%(len(surfaces), 0))
for t in surfaces:
f.write("%d %d %d %d\n"%(3, t[0], t[1], t[2]))
f.write("%d\n"%0)
f.write("%d\n"%0)
np.savetxt("surface.face", surfaces)
np.savetxt("points.node", points)
# Build the mesh using Tetgen
mesh_info = MeshInfo()
mesh_info.set_points(points.tolist())
mesh_info.set_facets((surfaces.reshape(-1, 3)).tolist())
print len(mesh_info.facets)
opts = Options("YVfeq", verbose=True, nobisect=True, facesout=True, edgesout=True, docheck=True) # Overriding 'pq' with no options or flags
#opts = Options("", verbose=True, nobisect=True, facesout=True, edgesout=True, docheck=True, insertaddpoints=True) # Overriding 'pq' with no options or flags
mesh = build(mesh_info, options=opts, volume_constraints=True, max_volume=1)
# Plot the mesh
ax = plt.gca(projection='3d')
fig = ax.figure
m1 = np.amin(bbox[0:3])
m2 = np.amax(bbox[3:])
ax.set_xlim([m1, m2])
ax.set_ylim([m1, m2])
ax.set_zlim([m1, m2])
ax.set_aspect('equal')
axes_simpplot3d(ax, np.array(list(mesh.points)), np.array(list(mesh.elements)))
plt.show()
# Write it as a file in vtk format, so you can use Paraview to see it.
mesh.write_vtk("test.vtk")
input_surfaces = np.zeros((2,len(mesh.faces)))
    inputs = [s1_triangles, s2_triangles]
for i, s in enumerate(inputs):
for j, t in enumerate(np.array(mesh.faces)):
if np.all(s==t):
input_surfaces[i,j] = 1
lambdas = [0.001]
mus = [0.00001]
mesh1 = Mesh3D()
mesh1.simplices = np.array(mesh.elements)
mesh1.triangles = np.array(mesh.faces)
mesh1.edges = np.array(mesh.edges)
mesh1.points = np.array(mesh.points)
return mesh1, mesh1.simplices, mesh1.triangles, input_surfaces, lambdas, mus
if __name__ == "__main__":
surfaces()
| gpl-3.0 | 5,825,475,850,444,854,000 | 36.973422 | 161 | 0.606299 | false |
andreabrambilla/libres | python/res/fm/rms/rms_run.py | 1 | 5303 | from __future__ import print_function
import json
import os
import sys
import os.path
import time
import random
from contextlib import contextmanager
from .rms_config import RMSConfig
@contextmanager
def pushd(path):
cwd0 = os.getcwd()
os.chdir(path)
yield
os.chdir(cwd0)
class RMSRun(object):
_single_seed_file = "RMS_SEED"
_multi_seed_file = "random.seeds"
_max_seed = 2146483648
_seed_factor = 7907
def __init__(self, iens, project, workflow, run_path="rms",target_file=None, export_path="rmsEXPORT", import_path="rmsIMPORT", version=None, readonly=True):
if not os.path.isdir(project):
raise OSError("The project:{} does not exist as a directory.".format(project))
self.config = RMSConfig()
self.project = os.path.abspath(project)
self.workflow = workflow
self.run_path = run_path
self.version = version
self.readonly = readonly
self.import_path = import_path
self.export_path = export_path
if target_file is None:
self.target_file = None
else:
if os.path.isabs(target_file):
self.target_file = target_file
else:
self.target_file = os.path.join(os.getcwd(), target_file)
if os.path.isfile(self.target_file):
st = os.stat(self.target_file)
self.target_file_mtime = st.mtime
else:
self.target_file_mtime = None
self.init_seed(iens)
def init_seed(self, iens):
if "RMS_SEED" in os.environ:
seed = int(os.getenv("RMS_SEED"))
for x in range(iens):
seed *= RMSRun._seed_factor
else:
single_seed_file = os.path.join( self.run_path , RMSRun._single_seed_file)
multi_seed_file = os.path.join( self.run_path , RMSRun._multi_seed_file)
if os.path.exists( single_seed_file ):
# Using existing single seed file
with open(single_seed_file) as fileH:
seed = int( float(fileH.readline( )) )
elif os.path.exists( multi_seed_file ):
with open( multi_seed_file) as fileH:
seed_list = [ int(x) for x in fileH.readlines() ]
seed = seed_list[iens + 1]
else:
random.seed( )
seed = random.randint( 0 , RMSRun._max_seed )
self.seed = seed % RMSRun._max_seed
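    # Illustrative note (added comment): if RMS_SEED is set in the
    # environment, realisation iens gets RMS_SEED * 7907**iens (reduced
    # modulo _max_seed = 2146483648); otherwise the seed comes from an
    # existing RMS_SEED / random.seeds file in the run path, or is drawn
    # at random.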
def run(self):
child_pid = os.fork( )
if child_pid == 0:
self._run_child()
else:
self.wait( child_pid )
def _run_child(self):
if not os.path.exists( self.run_path ):
os.makedirs( self.run_path )
self_exe, _ = os.path.splitext(os.path.basename(sys.argv[0]))
exec_env_file = "%s_exec_env.json" % self_exe
exec_env = os.environ.copy()
if os.path.isfile(exec_env_file):
exec_env.update(json.load(open(exec_env_file)))
with pushd(self.run_path):
fileH = open("RMS_SEED_USED", "a+")
fileH.write("%s ... %d\n" % (time.strftime("%d-%m-%Y %H:%M:%S" , time.localtime(time.time())) , self.seed))
fileH.close()
if not os.path.exists( self.export_path ):
os.makedirs( self.export_path )
if not os.path.exists( self.import_path ):
os.makedirs( self.import_path )
self.exec_rms(exec_env)
def wait(self, child_pid):
_, wait_status = os.waitpid(child_pid, 0)
exit_status = os.WEXITSTATUS(wait_status)
if exit_status != 0:
raise Exception("The RMS run failed with exit status: {}".format(exit_status))
if self.target_file is None:
return
if not os.path.isfile(self.target_file):
raise Exception("The RMS run did not produce the expected file: {}".format(self.target_file))
if self.target_file_mtime is None:
return
st = os.stat(self.target_file)
if st.mtime == self.target_file_mtime:
raise Exception("The target file:{} is unmodified - interpreted as failure".format(self.target_file))
def exec_rms(self, exec_env):
args = [self.config.executable,
"-project", self.project,
"-seed", str(self.seed),
"-nomesa",
"-export_path", self.export_path,
"-import_path", self.import_path,
"-batch", self.workflow]
if self.version:
args += ["-v", self.version]
if self.readonly:
args += ["-readonly"]
if self.config.threads:
args += ["-threads", str(self.config.threads)]
if exec_env:
env = os.environ.copy()
for key, value in exec_env.items():
if value is None:
env.pop(key)
continue
value = str(value).strip()
if len(value) == 0:
env.pop(key)
continue
env[key] = value
os.execve( self.config.executable , args, env )
else:
os.execv(self.config.executable, args )
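# Illustrative usage sketch (added comment; argument values are examples only):
#   run = RMSRun(iens=0, project="/path/to/model.rmsproject", workflow="MAIN",
#                target_file="RMS_TARGET_OK", version="12.1")
#   run.run()   # forks, executes the RMS workflow and validates the target file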
| gpl-3.0 | 6,845,504,058,369,396,000 | 29.477011 | 160 | 0.538375 | false |
cmjatai/cmj | sapl/api/deprecated.py | 1 | 22559 |
import logging
from django.contrib.contenttypes.models import ContentType
from django.db.models import Q
from django.forms.fields import CharField, MultiValueField
from django.forms.widgets import MultiWidget, TextInput
from django.http import Http404
from django.utils import timezone
from django.utils.translation import ugettext_lazy as _
from django_filters.filters import CharFilter, ModelChoiceFilter, DateFilter
from django_filters.rest_framework.backends import DjangoFilterBackend
from django_filters.rest_framework.filterset import FilterSet
from rest_framework import serializers
from rest_framework.generics import ListAPIView
from rest_framework.mixins import ListModelMixin, RetrieveModelMixin
from rest_framework.permissions import (IsAuthenticated,
                                        IsAuthenticatedOrReadOnly, AllowAny)
from rest_framework.viewsets import GenericViewSet
from sapl.api.serializers import ModelChoiceSerializer, AutorSerializer,\
    ChoiceSerializer
from sapl.base.models import TipoAutor, Autor, CasaLegislativa
from sapl.materia.models import MateriaLegislativa
from sapl.parlamentares.models import Legislatura
from sapl.sessao.models import SessaoPlenaria, OrdemDia
from sapl.utils import SaplGenericRelation, generic_relations_for_model
class SaplGenericRelationSearchFilterSet(FilterSet):
q = CharFilter(method='filter_q')
def filter_q(self, queryset, name, value):
query = value.split(' ')
if query:
q = Q()
for qtext in query:
if not qtext:
continue
q_fs = Q(nome__icontains=qtext)
order_by = []
for gr in generic_relations_for_model(self._meta.model):
sgr = gr[1]
for item in sgr:
if item.related_model != self._meta.model:
continue
flag_order_by = True
for field in item.fields_search:
if flag_order_by:
flag_order_by = False
order_by.append('%s__%s' % (
item.related_query_name(),
field[0])
)
# if len(field) == 3 and field[2](qtext) is not
# None:
q_fs = q_fs | Q(**{'%s__%s%s' % (
item.related_query_name(),
field[0],
field[1]): qtext if len(field) == 2
else field[2](qtext)})
q = q & q_fs
if q:
queryset = queryset.filter(q).order_by(*order_by)
return queryset
class SearchForFieldWidget(MultiWidget):
def decompress(self, value):
if value is None:
return [None, None]
return value
def __init__(self, attrs=None):
widgets = (TextInput, TextInput)
MultiWidget.__init__(self, widgets, attrs)
class SearchForFieldField(MultiValueField):
widget = SearchForFieldWidget
def __init__(self, *args, **kwargs):
fields = (
CharField(),
CharField())
super(SearchForFieldField, self).__init__(fields, *args, **kwargs)
def compress(self, parameters):
if parameters:
return parameters
return None
class SearchForFieldFilter(CharFilter):
field_class = SearchForFieldField
class AutorChoiceFilterSet(SaplGenericRelationSearchFilterSet):
q = CharFilter(method='filter_q')
tipo = ModelChoiceFilter(queryset=TipoAutor.objects.all())
class Meta:
model = Autor
fields = ['q',
'tipo',
'nome', ]
def filter_q(self, queryset, name, value):
return super().filter_q(
queryset, name, value).distinct('nome').order_by('nome')
class AutorSearchForFieldFilterSet(AutorChoiceFilterSet):
q = SearchForFieldFilter(method='filter_q')
class Meta(AutorChoiceFilterSet.Meta):
pass
def filter_q(self, queryset, name, value):
value[0] = value[0].split(',')
value[1] = value[1].split(',')
params = {}
for key, v in list(zip(value[0], value[1])):
if v in ['True', 'False']:
v = '1' if v == 'True' else '0'
params[key] = v
return queryset.filter(**params).distinct('nome').order_by('nome')
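# Illustrative note (added comment): in the filter above, q_0 and q_1 arrive
# as comma-separated lists and are zipped into ORM lookups. For example,
# q_0=parlamentar_set__ativo&q_1=True is parsed into
# {'parlamentar_set__ativo': '1'} before being applied with .filter(**params).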
class AutoresPossiveisFilterSet(FilterSet):
logger = logging.getLogger(__name__)
data_relativa = DateFilter(method='filter_data_relativa')
tipo = CharFilter(method='filter_tipo')
class Meta:
model = Autor
fields = ['data_relativa', 'tipo', ]
def filter_data_relativa(self, queryset, name, value):
return queryset
def filter_tipo(self, queryset, name, value):
try:
self.logger.debug(
"Tentando obter TipoAutor correspondente à pk {}.".format(value))
tipo = TipoAutor.objects.get(pk=value)
except:
self.logger.error("TipoAutor(pk={}) inexistente.".format(value))
raise serializers.ValidationError(_('Tipo de Autor inexistente.'))
qs = queryset.filter(tipo=tipo)
return qs
@property
def qs(self):
qs = super().qs
data_relativa = self.form.cleaned_data['data_relativa'] \
if 'data_relativa' in self.form.cleaned_data else None
tipo = self.form.cleaned_data['tipo'] \
if 'tipo' in self.form.cleaned_data else None
if not tipo:
return qs
tipo = TipoAutor.objects.get(pk=tipo)
if not tipo.content_type:
return qs
filter_for_model = 'filter_%s' % tipo.content_type.model
if not hasattr(self, filter_for_model):
return qs
if not data_relativa:
data_relativa = timezone.now()
return getattr(self, filter_for_model)(qs, data_relativa).distinct()
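    # Illustrative note (added comment): the property above dispatches
    # dynamically on the TipoAutor's content type. For instance, when the
    # selected tipo points at the 'parlamentar' model, filter_for_model is
    # 'filter_parlamentar' and the corresponding method below is applied
    # with the reference date.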
def filter_parlamentar(self, queryset, data_relativa):
        # does not take leaves of absence (afastamentos) into account
legislatura_relativa = Legislatura.objects.filter(
data_inicio__lte=data_relativa,
data_fim__gte=data_relativa).first()
q = Q(
parlamentar_set__mandato__data_inicio_mandato__lte=data_relativa,
parlamentar_set__mandato__data_fim_mandato__isnull=True) | Q(
parlamentar_set__mandato__data_inicio_mandato__lte=data_relativa,
parlamentar_set__mandato__data_fim_mandato__gte=data_relativa)
if legislatura_relativa.atual():
q = q & Q(parlamentar_set__ativo=True)
return queryset.filter(q)
def filter_comissao(self, queryset, data_relativa):
return queryset.filter(
Q(comissao_set__data_extincao__isnull=True,
comissao_set__data_fim_comissao__isnull=True) |
Q(comissao_set__data_extincao__gte=data_relativa,
comissao_set__data_fim_comissao__isnull=True) |
Q(comissao_set__data_extincao__gte=data_relativa,
comissao_set__data_fim_comissao__isnull=True) |
Q(comissao_set__data_extincao__isnull=True,
comissao_set__data_fim_comissao__gte=data_relativa) |
Q(comissao_set__data_extincao__gte=data_relativa,
comissao_set__data_fim_comissao__gte=data_relativa),
comissao_set__data_criacao__lte=data_relativa)
def filter_frente(self, queryset, data_relativa):
return queryset.filter(
Q(frente_set__data_extincao__isnull=True) |
Q(frente_set__data_extincao__gte=data_relativa),
frente_set__data_criacao__lte=data_relativa)
def filter_bancada(self, queryset, data_relativa):
return queryset.filter(
Q(bancada_set__data_extincao__isnull=True) |
Q(bancada_set__data_extincao__gte=data_relativa),
bancada_set__data_criacao__lte=data_relativa)
def filter_bloco(self, queryset, data_relativa):
return queryset.filter(
Q(bloco_set__data_extincao__isnull=True) |
Q(bloco_set__data_extincao__gte=data_relativa),
bloco_set__data_criacao__lte=data_relativa)
def filter_orgao(self, queryset, data_relativa):
        # in the original implementation there were no rules to apply for orgao
return queryset
class AutorChoiceSerializer(ModelChoiceSerializer):
def get_text(self, obj):
return obj.nome
class Meta:
model = Autor
fields = ['id', 'nome']
class MateriaLegislativaOldSerializer(serializers.ModelSerializer):
class Meta:
model = MateriaLegislativa
fields = '__all__'
class SessaoPlenariaOldSerializer(serializers.ModelSerializer):
codReuniao = serializers.SerializerMethodField('get_pk_sessao')
codReuniaoPrincipal = serializers.SerializerMethodField('get_pk_sessao')
txtTituloReuniao = serializers.SerializerMethodField('get_name')
txtSiglaOrgao = serializers.SerializerMethodField('get_sigla_orgao')
txtApelido = serializers.SerializerMethodField('get_name')
txtNomeOrgao = serializers.SerializerMethodField('get_nome_orgao')
codEstadoReuniao = serializers.SerializerMethodField(
'get_estadoSessaoPlenaria')
txtTipoReuniao = serializers.SerializerMethodField('get_tipo_sessao')
txtObjeto = serializers.SerializerMethodField('get_assunto_sessao')
txtLocal = serializers.SerializerMethodField('get_endereco_orgao')
bolReuniaoConjunta = serializers.SerializerMethodField(
'get_reuniao_conjunta')
bolHabilitarEventoInterativo = serializers.SerializerMethodField(
'get_iterativo')
idYoutube = serializers.SerializerMethodField('get_url')
codEstadoTransmissaoYoutube = serializers.SerializerMethodField(
'get_estadoTransmissaoYoutube')
datReuniaoString = serializers.SerializerMethodField('get_date')
    # SessaoPlenaria constants (range 1-9) (only 3 are used)
SESSAO_FINALIZADA = 4
SESSAO_EM_ANDAMENTO = 3
SESSAO_CONVOCADA = 2
    # EstadoTransmissaoYoutube constants (0 to 2)
TRANSMISSAO_ENCERRADA = 2
TRANSMISSAO_EM_ANDAMENTO = 1
SEM_TRANSMISSAO = 0
class Meta:
model = SessaoPlenaria
fields = (
'codReuniao',
'codReuniaoPrincipal',
'txtTituloReuniao',
'txtSiglaOrgao',
'txtApelido',
'txtNomeOrgao',
'codEstadoReuniao',
'txtTipoReuniao',
'txtObjeto',
'txtLocal',
'bolReuniaoConjunta',
'bolHabilitarEventoInterativo',
'idYoutube',
'codEstadoTransmissaoYoutube',
'datReuniaoString'
)
def __init__(self, *args, **kwargs):
super(SessaoPlenariaOldSerializer, self).__init__(args, kwargs)
def get_pk_sessao(self, obj):
return obj.pk
def get_name(self, obj):
return obj.__str__()
def get_estadoSessaoPlenaria(self, obj):
if obj.finalizada:
return self.SESSAO_FINALIZADA
elif obj.iniciada:
return self.SESSAO_EM_ANDAMENTO
else:
return self.SESSAO_CONVOCADA
def get_tipo_sessao(self, obj):
return obj.tipo.__str__()
def get_url(self, obj):
return obj.url_video if obj.url_video else None
def get_iterativo(self, obj):
return obj.interativa if obj.interativa else False
def get_date(self, obj):
return "{} {}{}".format(
obj.data_inicio.strftime("%d/%m/%Y"),
obj.hora_inicio,
":00"
)
def get_estadoTransmissaoYoutube(self, obj):
if obj.url_video:
if obj.finalizada:
return self.TRANSMISSAO_ENCERRADA
else:
return self.TRANSMISSAO_EM_ANDAMENTO
else:
return self.SEM_TRANSMISSAO
def get_assunto_sessao(self, obj):
pauta_sessao = ''
ordem_dia = OrdemDia.objects.filter(sessao_plenaria=obj.pk)
pauta_sessao = ', '.join([i.materia.__str__() for i in ordem_dia])
return str(pauta_sessao)
def get_endereco_orgao(self, obj):
return self.casa().endereco
def get_reuniao_conjunta(self, obj):
return False
def get_sigla_orgao(self, obj):
return self.casa().sigla
def get_nome_orgao(self, obj):
return self.casa().nome
def casa(self):
casa = CasaLegislativa.objects.first()
return casa
class ModelChoiceView(ListAPIView):
"""
Deprecated
    TODO: migrate to customization in the automatic API
"""
    # FIXME apply the correct user permission
permission_classes = (IsAuthenticated,)
serializer_class = ModelChoiceSerializer
def get(self, request, *args, **kwargs):
self.model = ContentType.objects.get_for_id(
self.kwargs['content_type']).model_class()
pagination = request.GET.get('pagination', '')
if pagination == 'False':
self.pagination_class = None
return ListAPIView.get(self, request, *args, **kwargs)
def get_queryset(self):
return self.model.objects.all()
class AutorListView(ListAPIView):
"""
    Deprecated
    TODO: migrate to customization in the automatic API
    Listing of Autores with filtering for already registered authors
    and/or possible authors.
    - tr - type of the result
    Prepares the Autor list for 2 distinct scenarios
    - default = 1
    = 1 -> for (value, text) pairs generally used
    in combobox, radiobox, checkbox, etc. with the basic Autor
    search done by django-filter
    -> the process used in searches, the most common one.
    = 3 -> Returns instances of the Autor class filtered by
    django-filter
    - tipo - primary key of the Tipo de Autor to filter by
    - q - textual search on the Autor name or on the fields_search
    declared in the SaplGenericRelation field of the GenericFKs
    The textual search happens via django-filter when the
    variable `tr` equals 1 or 3. Otherwise,
    django-filter is disabled and the search is done
    on the model of the ContentType associated with the tipo.
    - q_0 / q_1 - q_0 is optional and, when used, makes the code ignore "q"...
    q_0 -> lookup fields to be filtered on any Model
    that implements SaplGenericRelation
    q_1 -> the value that will be searched in the q_0 lookup
    q_0 and q_1 can be separated by ","... this gives the
    possibility of filtering more than one field.
    http://localhost:8000
    /api/autor?tr=1&q_0=parlamentar_set__ativo&q_1=False
    /api/autor?tr=1&q_0=parlamentar_set__ativo&q_1=True
    /api/autor?tr=3&q_0=parlamentar_set__ativo&q_1=False
    /api/autor?tr=3&q_0=parlamentar_set__ativo&q_1=True
    http://localhost:8000
    /api/autor?tr=1
    &q_0=parlamentar_set__nome_parlamentar__icontains,
    parlamentar_set__ativo
    &q_1=Carvalho,False
    /api/autor?tr=1
    &q_0=parlamentar_set__nome_parlamentar__icontains,
    parlamentar_set__ativo
    &q_1=Carvalho,True
    /api/autor?tr=3
    &q_0=parlamentar_set__nome_parlamentar__icontains,
    parlamentar_set__ativo
    &q_1=Carvalho,False
    /api/autor?tr=3
    &q_0=parlamentar_set__nome_parlamentar__icontains,
    parlamentar_set__ativo
    &q_1=Carvalho,True
    it does not matter which field you pass from any of the linked
    Models... it is possible to see which models those are,
    as of the commit of this text, by running:
    In [6]: from sapl.utils import models_with_gr_for_model
    In [7]: models_with_gr_for_model(Autor)
    Out[7]:
    [sapl.parlamentares.models.Parlamentar,
    sapl.parlamentares.models.Frente,
    sapl.comissoes.models.Comissao,
    sapl.materia.models.Orgao,
    sapl.sessao.models.Bancada,
    sapl.sessao.models.Bloco]
    any attribute of these models can be passed
    for the search
"""
logger = logging.getLogger(__name__)
TR_AUTOR_CHOICE_SERIALIZER = 1
TR_AUTOR_SERIALIZER = 3
permission_classes = (IsAuthenticatedOrReadOnly,)
queryset = Autor.objects.all()
model = Autor
filter_class = AutorChoiceFilterSet
filter_backends = (DjangoFilterBackend, )
serializer_class = AutorChoiceSerializer
@property
def tr(self):
username = self.request.user.username
try:
tr = int(self.request.GET.get
('tr', AutorListView.TR_AUTOR_CHOICE_SERIALIZER))
if tr not in (AutorListView.TR_AUTOR_CHOICE_SERIALIZER,
AutorListView.TR_AUTOR_SERIALIZER):
return AutorListView.TR_AUTOR_CHOICE_SERIALIZER
except Exception as e:
self.logger.error('user=' + username + '. ' + str(e))
return AutorListView.TR_AUTOR_CHOICE_SERIALIZER
return tr
def get(self, request, *args, **kwargs):
if self.tr == AutorListView.TR_AUTOR_SERIALIZER:
self.serializer_class = AutorSerializer
self.permission_classes = (IsAuthenticated,)
if self.filter_class and 'q_0' in request.GET:
self.filter_class = AutorSearchForFieldFilterSet
return ListAPIView.get(self, request, *args, **kwargs)
class AutoresProvaveisListView(ListAPIView):
"""
Deprecated
    TODO: migrate to customization in the automatic API
"""
logger = logging.getLogger(__name__)
permission_classes = (IsAuthenticatedOrReadOnly,)
queryset = Autor.objects.all()
model = Autor
filter_class = None
filter_backends = []
serializer_class = ChoiceSerializer
def get_queryset(self):
params = {'content_type__isnull': False}
username = self.request.user.username
tipo = ''
try:
tipo = int(self.request.GET.get('tipo', ''))
if tipo:
params['id'] = tipo
except Exception as e:
self.logger.error('user= ' + username + '. ' + str(e))
pass
tipos = TipoAutor.objects.filter(**params)
if not tipos.exists() and tipo:
raise Http404()
r = []
for tipo in tipos:
q = self.request.GET.get('q', '').strip()
model_class = tipo.content_type.model_class()
fields = list(filter(
lambda field: isinstance(field, SaplGenericRelation) and
field.related_model == Autor,
model_class._meta.get_fields(include_hidden=True)))
"""
            fields - is an array of SaplGenericRelation that must have the
            fields_search attribute. Check the class documentation for the
            structure of fields_search.
"""
assert len(fields) >= 1, (_(
'Não foi encontrado em %(model)s um atributo do tipo '
'SaplGenericRelation que use o model %(model_autor)s') % {
'model': model_class._meta.verbose_name,
'model_autor': Autor._meta.verbose_name})
qs = model_class.objects.all()
q_filter = Q()
if q:
for item in fields:
if item.related_model != Autor:
continue
q_fs = Q()
for field in item.fields_search:
q_fs = q_fs | Q(**{'%s%s' % (
field[0],
field[1]): q})
q_filter = q_filter & q_fs
qs = qs.filter(q_filter).distinct(
fields[0].fields_search[0][0]).order_by(
fields[0].fields_search[0][0])
else:
qs = qs.order_by(fields[0].fields_search[0][0])
qs = qs.values_list(
'id', fields[0].fields_search[0][0])
r += list(qs)
if tipos.count() > 1:
r.sort(key=lambda x: x[1].upper())
return r
class AutoresPossiveisListView(ListAPIView):
"""
Deprecated
    TODO: migrate to customization in the automatic API
"""
permission_classes = (IsAuthenticatedOrReadOnly,)
queryset = Autor.objects.all()
model = Autor
pagination_class = None
filter_class = AutoresPossiveisFilterSet
serializer_class = AutorChoiceSerializer
class MateriaLegislativaViewSet(ListModelMixin,
RetrieveModelMixin,
GenericViewSet):
"""
Deprecated
    TODO: migrate to customization in the automatic API
"""
permission_classes = (IsAuthenticated,)
serializer_class = MateriaLegislativaOldSerializer
queryset = MateriaLegislativa.objects.all()
filter_backends = (DjangoFilterBackend,)
filter_fields = ('numero', 'ano', 'tipo', )
class SessaoPlenariaViewSet(ListModelMixin,
RetrieveModelMixin,
GenericViewSet):
"""
Deprecated
    TODO: migrate to customization in the automatic API
"""
permission_classes = (AllowAny,)
serializer_class = SessaoPlenariaOldSerializer
queryset = SessaoPlenaria.objects.all()
filter_backends = (DjangoFilterBackend,)
filter_fields = ('data_inicio', 'data_fim', 'interativa')
| gpl-3.0 | -3,141,493,697,441,615,400 | 32.598507 | 81 | 0.585136 | false |
geopython/GeoHealthCheck | GeoHealthCheck/plugins/check/checks.py | 1 | 7291 | import sys
from owslib.etree import etree
from GeoHealthCheck.util import CONFIG
from GeoHealthCheck.plugin import Plugin
from GeoHealthCheck.check import Check
from html import escape
""" Contains basic Check classes for a Probe object."""
class HttpStatusNoError(Check):
"""
Checks if HTTP status code is not in the 400- or 500-range.
"""
NAME = 'HTTP status should not be errored'
DESCRIPTION = 'Response should not contain a HTTP 400 or 500 range Error'
def __init__(self):
Check.__init__(self)
def perform(self):
"""Default check: Resource should at least give no error"""
status = self.probe.response.status_code
overall_status = status // 100
if overall_status in [4, 5]:
self.set_result(False, 'HTTP Error status=%d' % status)
class HttpHasHeaderValue(Check):
"""
Checks if header exists and has given header value.
See http://docs.python-requests.org/en/master/user/quickstart
"""
NAME = 'Has specific HTTP Header value'
DESCRIPTION = 'HTTP response has specific HTTP Header value'
PARAM_DEFS = {
'header_name': {
'type': 'string',
'description': 'The HTTP header name',
'default': None,
'required': True,
'range': None
},
'header_value': {
'type': 'string',
'description': 'The HTTP header value',
'default': None,
'required': True,
'range': None
}
}
"""Param defs"""
def __init__(self):
Check.__init__(self)
def perform(self):
result = True
msg = 'OK'
name = self.get_param('header_name')
value = self.get_param('header_value')
headers = self.probe.response.headers
if name not in headers:
result = False
msg = 'HTTP response has no header %s' % name
elif headers[name] != value:
result = False
msg = 'HTTP response header %s has no value %s' % (name, value)
self.set_result(result, msg)
class HttpHasContentType(HttpHasHeaderValue):
"""
Checks if HTTP response has content type.
"""
NAME = 'Has specific Content-Type'
DESCRIPTION = 'HTTP response has specific Content-Type'
PARAM_DEFS = Plugin.merge(HttpHasHeaderValue.PARAM_DEFS, {
'header_name': {
'value': 'content-type'
}
})
"""Params defs for header content type."""
def __init__(self):
HttpHasHeaderValue.__init__(self)
def perform(self):
HttpHasHeaderValue.perform(self)
class HttpHasImageContentType(Check):
"""
Checks if HTTP response has image content type.
"""
NAME = 'HTTP response is image'
DESCRIPTION = 'HTTP response has image/* Content-Type'
def __init__(self):
Check.__init__(self)
"""
Check if HTTP response has image/ ContentType header value
"""
def perform(self):
result = True
msg = 'OK'
name = 'content-type'
response = self.probe.response
headers = response.headers
if name not in headers:
result = False
msg = 'HTTP response has no header %s' % name
elif 'image/' not in headers[name]:
result = False
msg = 'HTTP response header %s is not image type' % name
if type(response.content) is str:
rsp_str = response.content
if len(rsp_str) > 256:
rsp_str = rsp_str[-256:]
msg += ' - error: ' + escape(rsp_str)
self.set_result(result, msg)
class XmlParse(Check):
"""
Checks if HTTP response is valid XML.
"""
NAME = 'Valid XML response'
DESCRIPTION = 'HTTP response contains valid XML'
def __init__(self):
Check.__init__(self)
def perform(self):
try:
etree.fromstring(
self.probe.response.content,
parser=etree.XMLParser(huge_tree=CONFIG['GHC_LARGE_XML']))
except Exception:
self.set_result(False, str(sys.exc_info()))
class JsonParse(Check):
"""
Checks if HTTP response is valid JSON.
"""
NAME = 'Valid JSON response'
DESCRIPTION = 'HTTP response contains valid JSON'
def __init__(self):
Check.__init__(self)
def perform(self):
import json
try:
json.loads(self.probe.response.content)
except Exception:
self.set_result(False, str(sys.exc_info()))
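# Illustrative sketch (added comment, not part of the original plugin set):
# a custom check would follow the same pattern as the classes above, e.g.
#
#   class HttpStatusIs200(Check):
#       NAME = 'HTTP status is 200'
#       DESCRIPTION = 'Response status code must be exactly 200'
#
#       def perform(self):
#           if self.probe.response.status_code != 200:
#               self.set_result(False, 'status=%d'
#                               % self.probe.response.status_code)
#
# Left as a comment so the module's plugin set is unchanged.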
class ContainsStrings(Check):
"""
Checks if HTTP response contains given strings (keywords).
"""
NAME = 'Response contains strings'
DESCRIPTION = \
'HTTP response contains all (comma-separated) strings specified'
PARAM_DEFS = {
'strings': {
'type': 'stringlist',
'description':
'The string text(s) that should be contained \
in response (comma-separated)',
'default': None,
'required': True,
'range': None
}
}
"""Param defs"""
def __init__(self):
Check.__init__(self)
def perform(self):
result = True
msg = 'OK'
for text in self.get_param('strings'):
try:
result = text in self.probe.response.text
if result is False:
msg = '%s not in response text' % text
break
except Exception:
result = False
msg = str(sys.exc_info())
break
self.set_result(result, msg)
class NotContainsStrings(ContainsStrings):
"""
Checks if HTTP response NOT contains given strings (keywords).
"""
NAME = 'Response NOT contains strings'
DESCRIPTION = """
HTTP response does not contain any of the
(comma-separated) strings specified
"""
PARAM_DEFS = {
'strings': {
'type': 'stringlist',
'description':
"""The string text(s) that should NOT be
contained in response (comma-separated)""",
'default': None,
'required': True,
'range': None
}
}
"""Param defs"""
def __init__(self):
ContainsStrings.__init__(self)
def perform(self):
result = True
msg = 'OK'
for text in self.get_param('strings'):
try:
result = text not in self.probe.response.text
if result is False:
msg = '%s in response text' % text
break
except Exception:
result = False
msg = str(sys.exc_info())
break
self.set_result(result, msg)
class NotContainsOwsException(NotContainsStrings):
"""
Checks if HTTP response NOT contains given OWS Exceptions.
"""
NAME = 'Response NOT contains OWS Exception'
DESCRIPTION = 'HTTP response does not contain an OWS Exception'
PARAM_DEFS = Plugin.merge(ContainsStrings.PARAM_DEFS, {
'strings': {
'value': ['ExceptionReport>', 'ServiceException>']
}
})
"""Param defs"""
| mit | -7,148,500,976,442,429,000 | 25.904059 | 77 | 0.551913 | false |
sourcesimian/vpn-porthole | vpnporthole/session.py | 1 | 8579 | import os
from docker.client import from_env
from pkg_resources import resource_stream
from vpnporthole.ip import IPv4Subnet
from vpnporthole.system import TmpDir, SystemCalls
class Session(object):
__dnsmasq_port = 53
__ip = None
def __init__(self, settings):
self.__settings = settings
self.__sc = SystemCalls(self._name(), self.__settings)
self.__dc = from_env(environment=self.__sc.get_docker_env()).api
def _local_user(self):
return os.environ['USER']
def _name(self):
return "vpnp/%s_%s" % (self.__settings.profile_name, self.__settings.ctx.local.user.name,)
def build(self):
name = self._name()
with TmpDir() as tmp:
hook_dir = os.path.join(tmp.path, 'vpnp')
os.makedirs(hook_dir)
hook_files = self.__settings.run_hook_files()
hook_files['exec'] = resource_stream("vpnporthole", "resources/exec").read().decode('utf-8')
for hook, content in hook_files.items():
hook_file = os.path.join(hook_dir, hook)
with open(hook_file, 'wt') as fh:
fh.write(content)
os.utime(hook_file, (0, 0))
for filename, content in self.__settings.build_files().items():
user_file = os.path.join(tmp.path, filename)
with open(user_file, 'wt') as fh:
fh.write(content)
os.utime(user_file, (0, 0))
stream = self.__dc.build(tmp.path, tag=name)
import json
for buf in stream:
block = json.loads(buf.decode('utf-8'))
if 'stream' in block:
self.__sc.stdout.write(block['stream'])
if 'error' in block:
self.__sc.stdout.write(block['error'] + '\n')
exit(3)
# image = block['stream'].split()[2]
print("Name: %s" % name)
return True
def start(self):
if self.run():
return self.local_up()
return False
def run(self):
if self.status():
self.__sc.stderr.write("Already running\n")
return False
if not self._images():
self.build()
self.__ip = None
self.__sc.container_ip(None)
self._container_hook('start')
self._container()
if not self.__ip:
self.__sc.stderr.write("Failed to start\n")
return False
self._container_hook('up')
self.__sc.on_connect()
return True
def local_up(self):
self._container()
for subnet in self.__settings.subnets():
self.__sc.add_route(subnet)
for domain in self.__settings.domains():
self.__sc.add_domain(domain)
return True
def add_route(self, subnet):
subnet = IPv4Subnet(subnet)
self._container()
self.__sc.add_route(subnet)
return True
def del_route(self, subnet):
subnet = IPv4Subnet(subnet)
self._container()
for sn in self.__sc.list_routes():
if sn in subnet:
self.__sc.del_route(sn)
return True
def add_domain(self, domain):
self._container()
self.__sc.add_domain(domain)
return True
def del_domain(self, domain):
self._container()
domains = self.__sc.list_domains()
if domain in domains:
self.__sc.del_domain(domain)
return True
def status(self):
if self._container():
return True
return False
def stop(self):
self.local_down()
self._container_hook('stop')
self.__sc.container_ip(None)
running = [c['Id'] for c in self._containers() if c['State'] == 'running']
for id in running:
try:
self.__dc.stop(id)
except Exception as e:
self.__sc.stderr.write("Error stopping: %s\n%s" % (id, e))
not_running = [c['Id'] for c in self._containers() if c['State'] != 'running']
for id in not_running:
self.__dc.remove_container(id)
return True
def local_down(self):
self._container()
self.__sc.del_all_domains()
self.__sc.del_all_routes(self.__settings.subnets())
self.__sc.on_disconnect()
return True
def purge(self):
self.stop()
for image in self._images():
self.__dc.remove_image(image, force=True)
return True
def shell(self):
container = self._container()
if not container:
return False
self.__sc.docker_shell(container['Id'])
return True
def info(self):
for image in self._images():
print('Image: %s\t%s\t%.1f MB' % (image['RepoTags'][0],
image['Id'][7:19],
image['Size'] / 1024 / 1024,))
container = self._container()
if self.__ip is None:
return True
print('Container: %s\t%s\t%s' % (container['Image'],
container['State'],
container['Id'][7:19],))
if container:
print('IP: %s' % self.__ip)
subnets = self.__sc.list_routes()
for subnet in subnets:
print('Route: %s' % subnet)
domains = self.__sc.list_domains()
for domain in domains:
print('Domain: %s' % domain)
return True
def _images(self):
tag = self._name()
all_images = self.__dc.images()
filtered_images = []
for image in all_images:
tags = image['RepoTags']
if tags:
if any([True for t in tags if t.startswith(tag)]):
filtered_images.append(image)
return filtered_images
def _containers(self):
name = self._name()
all_containers = self.__dc.containers(all=True)
return [c for c in all_containers
if c['Image'] == name]
def _container(self):
running = [c for c in self._containers()
if c['State'] == 'running']
if not running:
self.__ip = None
return None
if len(running) > 1:
print('WARNING: there is more than one container: %s' % running)
container = running[0]
info = self.__dc.inspect_container(container)
if info:
self.__ip = info['NetworkSettings']['IPAddress']
else:
self.__ip = None
self.__sc.container_ip(self.__ip)
return container
def _container_hook(self, hook):
if hook == 'start':
args = ['/vpnp/start']
name = self._name()
pe = self.__sc.docker_run_expect(name, args)
try:
old_pwd = None
while True:
i = pe.expect(['Username:', 'Password:', 'Established', 'Login failed.'])
if i < 0:
pe.wait()
return pe.exitstatus
if i == 0:
pe.sendline(self.__settings.username())
if i == 1:
pwd = self.__settings.password()
if old_pwd == pwd: # Prevent lockout
self.__sc.stderr.write(" <password was same as previous attempt> \n")
pe.send(chr(3))
pe.wait()
return 3
old_pwd = pwd
pe.sendline('%s' % pwd)
if i == 2:
break
if i == 3:
pass
except (Exception, KeyboardInterrupt) as e:
pe.send(chr(3))
pe.wait()
self.__sc.stderr.write('%s\n' % e)
raise
return 0
else:
container = self._container()
if container:
return self.__sc.docker_exec(self.__dc, container['Id'], ['/vpnp/%s' % hook])
def health(self):
if self._container():
return self._container_hook('health')
return 127 # "command not found"
def refresh(self):
if self._container():
return self._container_hook('refresh')
return 127 # "command not found"
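# Illustrative usage sketch (added comment; 'settings' is whatever profile
# object vpn-porthole passes in):
#   session = Session(settings)
#   session.build()    # build the Docker image for this profile
#   session.start()    # run the container, bring the VPN up, add routes/domains
#   session.status()   # True while the container is running
#   session.stop()     # tear down routes/domains, stop and remove the container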
| mit | 8,515,758,771,679,748,000 | 30.774074 | 104 | 0.484555 | false |
bartoldeman/easybuild-framework | easybuild/toolchains/cgompi.py | 1 | 1572 | ##
# Copyright 2013-2018 Ghent University
#
# This file is triple-licensed under GPLv2 (see below), MIT, and
# BSD three-clause licenses.
#
# This file is part of EasyBuild,
# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),
# with support of Ghent University (http://ugent.be/hpc),
# the Flemish Supercomputer Centre (VSC) (https://www.vscentrum.be),
# Flemish Research Foundation (FWO) (http://www.fwo.be/en)
# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en).
#
# https://github.com/easybuilders/easybuild
#
# EasyBuild is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation v2.
#
# EasyBuild is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>.
##
"""
EasyBuild support for cgompi compiler toolchain (includes Clang, GFortran and OpenMPI).
:author: Dmitri Gribenko (National Technical University of Ukraine "KPI")
"""
from easybuild.toolchains.clanggcc import ClangGcc
from easybuild.toolchains.mpi.openmpi import OpenMPI
class Cgompi(ClangGcc, OpenMPI):
"""Compiler toolchain with Clang, GFortran and OpenMPI."""
NAME = 'cgompi'
SUBTOOLCHAIN = ClangGcc.NAME
| gpl-2.0 | 8,352,324,462,698,308,000 | 37.341463 | 96 | 0.753817 | false |
atomai/lz-viewer | lz_assoc_viewer/minimum_solution.py | 1 | 5657 | import gzip
import Data_reader
##REQUIRES filename is a tabix file, names is the header of the file
##MODIFIES nothing
##EFFECTS finds the position of the minimum pvalue
def find_min_pvals(filename, filetype, num_minimums, region_buffer):
#create a file reader from the file
file_reader = Data_reader.Data_reader.factory(filename, filetype)
#skip the header
file_reader.skip_header()
#create the minimums dictionary
minimums = create_baseline_minimums(num_minimums)
#find the highest of the minimums
highest_min, highest_min_index = find_highest_min(minimums, num_minimums)
#loops through the lines in the file
line = file_reader.get_line()
    while True:  # the body below breaks on EOF or on the '#Genomic' footer line
if line == '' or line.split()[0] == '#Genomic':
break
#if the pvalue is not available
if file_reader.get_pval() == 'NA':
line = file_reader.get_line()
continue
#if the pvalue is equal to the highest minimum, we do not add it to dictionary
elif float(file_reader.get_pval()) >= highest_min:
line = file_reader.get_line()
continue
#lastly, we must check other attributes of this pval if we want to add it to the dictionary
else:
#determine if this pvalue shares a region with another minimum
shares_region, shared_index = index_of_shared_region(minimums, num_minimums, long(file_reader.get_pos()), region_buffer)
#if it does share a region:
if shares_region:
#determine which is smaller, and place the smaller minimum in the list
if float(file_reader.get_pval()) < minimums['value'][shared_index]:
minimums = replace_minimum(minimums, long(file_reader.get_pos()), float(file_reader.get_pval()), int(file_reader.get_chrom()), shared_index)
highest_min, highest_min_index = find_highest_min(minimums, num_minimums)
else:
line = file_reader.get_line()
continue
#if it does not share a region, place replace the previous highest minimum with the new minimum
else:
minimums = replace_minimum(minimums, long(file_reader.get_pos()), float(file_reader.get_pval()), int(file_reader.get_chrom()), highest_min_index)
highest_min, highest_min_index = find_highest_min(minimums, num_minimums)
line = file_reader.get_line()
minimums = sort_minimums(minimums, num_minimums)
return minimums
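# Illustrative note (added comment): the returned value is a dict of three
# parallel lists, e.g. for num_minimums=2 something like
#   {'position': [123456, 789012], 'value': [1e-12, 3e-9], 'chromosome': [1, 7]}
# sorted by ascending p-value after sort_minimums runs.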
##REQUIRES minimums has at least two minimums
##MODIFIES minimums
##EFFECTS sorts (decreasing order) the dictionary of minimums based on pvalue
def sort_minimums(minimums, num_minimums):
new_minimums = create_baseline_minimums(num_minimums)
index = 0
for min in minimums['value']:
best = find_min_of_mins(minimums)
new_minimums['position'][index] = minimums['position'][best]
new_minimums['value'][index] = minimums['value'][best]
new_minimums['chromosome'][index] = minimums['chromosome'][best]
minimums['value'][best] = 1
index += 1
return new_minimums
##REQUIRES minimums has at least 1 minimum
##MODIFIES minimums
##EFFECTS returns an updated dictionary of minimums
def replace_minimum(minimums, position, pvalue, chromosome, index):
minimums['position'][index] = position
minimums['value'][index] = pvalue
minimums['chromosome'][index] = chromosome
return minimums
##REQUIRES minimums has at least 1 minimum
##MODIFIES nothing
##EFFECTS returns a bool and a index, denoting that the current position is within a certain buffer region of another minimum
def index_of_shared_region(minimums, num_minimums, position, region_buffer):
for x in range(0, num_minimums):
position_diff = abs( position - minimums['position'][x] )
if position_diff < region_buffer:
return True, x
return False, -1
##REQUIRES minimums has at least one 'minimum' in it
##MODIFIES nothing
##EFFECTS returns the highest minimum and the index it is stored at
def find_highest_min(minimums, num_minimums):
current_max = 0
for x in range(0, num_minimums):
if minimums['value'][x] > current_max:
current_max = minimums['value'][x]
current_position = x
return current_max, current_position
##REQUIRES num_minimums is > 0
##MODIFIES nothing
##EFFECTS creates a minimums dictionary, including position, value and chromosome
def create_baseline_minimums(num_minimums):
minimums = {'position' : [], 'value' : [], 'chromosome' : [] }
for x in range( 0 , num_minimums ):
minimums['position'].append(-1000000)
minimums['value'].append(1)
minimums['chromosome'].append(0)
return minimums
##REQUIRES minimums is a dictionary of minimums
##MODIFIES nothing
##EFFECTS finds the index of the minimum of the minimums
def find_min_of_mins(minimums):
current_min = 1
counter = 0
for min in minimums['value']:
if current_min > min:
current_min = min
current_position = counter
counter += 1
return current_position
##REQUIRES: minimums is a dictionary of minimums
##MODIFIES nothing
##EFFECTS creates a top hits list
def create_hits(minimums):
hits = []
##create the hits list for flask
for x in range(0, 10):
chr = minimums['chromosome'][x]
chr = str(chr)
pos = minimums['position'][x]
pos = str(pos)
hits.append([chr + ":" + pos, chr + ":" + pos])
return hits
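#For example, a minimum on chromosome 7 at position 12345 becomes the entry
#['7:12345', '7:12345'] in the hits list passed to flask.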
##REQUIRES filename is a valid data file of the given filetype
##MODIFIES nothing
##EFFECTS returns a default region string "chrom:pos-(pos+200000)" built from the first data line
def get_basic_region(filename, filetype):
#create a file reader from the file
file_reader = Data_reader.Data_reader.factory(filename, filetype)
#skip the header
file_reader.skip_header()
#get a line
line = file_reader.get_line()
chrom = file_reader.get_chrom()
position = file_reader.get_pos()
return str(chrom) + ":" + str(position) + "-" + str(int(position) + 200000) | mit | 4,380,884,550,379,712,500 | 31.886228 | 149 | 0.696836 | false |
Onirik79/aaritmud | src/commands/command_list.py | 1 | 9334 | # -*- coding: utf-8 -*-
"""
Module for the list command, which is used to display the list of entities
that can be bought from a shopkeeper.
"""
#= IMPORT ======================================================================
from src.color import remove_colors
from src.enums import SHOP, TO
from src.database import database
from src.find_entity import INSTANCE
from src.gamescript import check_trigger
from src.interpret import translate_input
from src.log import log
from src.utility import one_argument
from src.web_resource import create_tooltip, create_icon
from src.entitypes.money import pretty_money_icons, pretty_money_value
#= FUNZIONI ====================================================================
def command_list(entity, argument="", behavioured=False):
"""
	Displays the list of entities that can be bought from a merchant.
"""
	# This can be normal if the command has been deferred
if not entity:
return False
entity = entity.split_entity(1)
if argument:
dealer = entity.find_entity_extensively(argument)
if not dealer:
entity.act("Non trovi nessun negoziante chiamato [white]%s[close]." % argument, TO.ENTITY)
entity.act("$n sembra cercare qualcuno un negoziante.", TO.OTHERS)
return False
if not dealer.shop:
entity.act("$N non sembra essere un negoziante.", TO.ENTITY, dealer)
entity.act("$n crede erroneamente che $N sia un negoziante.", TO.OTHERS, dealer)
entity.act("$n crede erroneamente che tu sia un negoziante.", TO.TARGET, dealer)
return False
	# Otherwise look for the first shopkeeper found in the player's location
else:
for dealer in entity.location.iter_contains():
if dealer.shop:
break
else:
entity.act("Qui non trovi nessun [white]negoziante[close].", TO.ENTITY)
entity.act("$n non sembra trovare nessun negoziante qui intorno.", TO.OTHERS)
return False
in_location = dealer.shop.in_location(dealer)
if not in_location and SHOP.DISPENSER not in dealer.shop.types:
entity.act("$N non ti mostra la merce perché non si trova nel suo negozio.", TO.ENTITY, dealer)
entity.act("$N non mostra la merce a $n perché non si trova nel suo negozio.", TO.OTHERS, dealer)
entity.act("Non mostri la tua merce a $n perché non ti trovi nel tuo negozio.", TO.TARGET, dealer)
return False
	# Indicate that an entity wants to interact with the dealer
if entity not in dealer.interactions:
dealer.interactions.append(entity)
storage = dealer.shop.get_storage(dealer)
if not storage:
if dealer.shop.proto_storages and dealer.shop.proto_storages[0].IS_MOB:
from_where = "da chi"
else:
from_where = "da dove"
entity.act("Non puoi avere la lista da $N perché non ha %s prendere la mercanzia!" % from_where, TO.ENTITY, dealer)
entity.act("$n non può avere la lista da $N perché non ha %s prendere la mercanzia!" % from_where, TO.OTHERS, dealer)
entity.act("$n non può avere la lista perché non hai %s prendere la mercanzia!" % from_where, TO.TARGET, dealer)
return False
if not dealer.shop.buyables:
entity.send_output("%s non possiede nessuna mercanzia" % dealer.get_name(looker=entity))
log.bug("Non è stato trovato nessun buyable impostato per %s" % dealer.code)
return False
	# Check whether the storage contains at least one object that the user can buy
if dealer.shop.storage_is_empty(storage):
entity.act("Ti accorgi che il negozio non possiede mercanzia, meglio tornare più tardi, dopo il rifornimento.", TO.ENTITY, dealer)
entity.act("$n si accorge che il negozio non possiede mercanzia.", TO.OTHERS, dealer)
entity.act("$n si accorge che il tuo negozio non possiede mercanzia.", TO.TARGET, dealer)
return False
force_return = check_trigger(entity, "before_list", entity, dealer, behavioured)
if force_return:
return True
force_return = check_trigger(dealer, "before_listed", entity, dealer, behavioured)
if force_return:
return True
if SHOP.DISPENSER in dealer.shop.types:
if not in_location:
entity.act("Leggi su di una targetta la lista delle mercanzie di $N anche se non si trova nel suo negozio.", TO.OTHERS, dealer)
entity.act("$n legge su di una targetta la lista delle mercanzie di $N anche se non si trova nel suo negozio.", TO.OTHERS, dealer)
entity.act("$n legge la tua targetta con la lista delle mercanzie anche se non si trova nel suo negozio.", TO.TARGET, dealer)
else:
entity.act("Leggi su di una targetta la lista delle mercanzie di $N.", TO.OTHERS, dealer)
entity.act("$n legge su di una targetta la lista delle mercanzie di $N.", TO.OTHERS, dealer)
entity.act("$n legge la tua targetta con la lista delle mercanzie.", TO.TARGET, dealer)
else:
entity.act("Chiedi la lista delle mercanzie di $N.", TO.OTHERS, dealer)
entity.act("$n chiede la lista delle mercanzie di $N.", TO.OTHERS, dealer)
entity.act("$n ti chiede la lista delle mercanzie.", TO.TARGET, dealer)
discount_exist = False
for buyable in dealer.shop.buyables:
if buyable.has_discount():
discount_exist = True
buy_translation = translate_input(entity, "buy", "en")
rows = []
rows.append('''<table class="mud">''')
discount_cell = ""
if SHOP.DISPENSER in dealer.shop.types:
name_cell = "Prodotti"
else:
name_cell = "Mercanzia"
if discount_exist:
discount_cell = '''<th>Sconto</th>'''
rows.append('''<tr><th></th><th>%s</th><th colspan="4">Prezzo</th><th>Livello</th><th></th><th></th>%s</tr>''' % (
name_cell, discount_cell))
for en in storage.get_list_of_entities(entity):
en = en[INSTANCE]
for buyable in dealer.shop.buyables:
if en.prototype != buyable.proto_entity:
continue
			# Unfortunately the visual stacking system will not allow quantities greater
			# than 1 to be displayed for objects whose long descriptions are identical,
			# so the quantity must necessarily be based on the physical stack
quantity = 10
if buyable.has_discount():
quantity = buyable.discount_quantity
if en.quantity < quantity:
quantity = en.quantity
name = en.get_name(looker=entity)
single_price, dummy_discount = buyable.get_price(en, quantity=1)
block_price, dummy_discount = buyable.get_price(en, quantity=quantity)
mithril, gold, silver, copper = pretty_money_icons(single_price)
rows.append('''<tr><td>%s</td>''' % create_icon(en.get_icon(), add_span=False))
rows.append('''<td>%s </td>''' % create_tooltip(entity.get_conn(), en.get_descr(looker=entity), name))
rows.append('''<td align="right">%s</td>''' % mithril)
rows.append('''<td align="right">%s</td>''' % gold)
rows.append('''<td align="right">%s</td>''' % silver)
rows.append('''<td align="right">%s</td>''' % copper)
rows.append('''<td align="center">%d</td>''' % en.level)
rows.append('''<td><input type="submit" value="%s" onclick="sendInput('%s 1 %s')" title="Comprerai %s per un prezzo di %s"/></td>''' % (
buy_translation.capitalize(),
buy_translation,
en.get_numbered_keyword(looker=entity),
remove_colors(name),
remove_colors(pretty_money_value(single_price))))
rows.append('''<td><input type="submit" value="%s x %d" onclick="sendInput('%s %d %s')" title="Comprerai %d unità di %s per un prezzo di %s"/></td>''' % (
buy_translation.capitalize(),
quantity,
buy_translation,
quantity,
en.get_numbered_keyword(looker=entity),
quantity,
remove_colors(name),
remove_colors(pretty_money_value(block_price))))
if discount_exist:
if buyable.has_discount():
rows.append('''<td align="center">%d%% per quantità maggiori di %d</td>''' % (buyable.discount_percent, buyable.discount_quantity))
else:
rows.append('''<td align="center">Nessuno</td>''')
rows.append('''</tr>''')
rows.append('''</table>''')
entity.send_output("".join(rows), break_line=False)
force_return = check_trigger(entity, "after_list", entity, dealer, behavioured)
if force_return:
return True
force_return = check_trigger(dealer, "after_listed", entity, dealer, behavioured)
if force_return:
return True
return True
#- Fine Funzione -
def get_syntax_template(entity):
if not entity:
log.bug("entity non è un parametro valido: %r" % entity)
return ""
# -------------------------------------------------------------------------
syntax = "list\n"
syntax += "list <commerciante se più di uno nella stanza>\n"
return syntax
#- Fine Funzione -
| gpl-2.0 | -7,623,521,375,441,563,000 | 45.67 | 166 | 0.608099 | false |
tensorflow/tensor2tensor | tensor2tensor/data_generators/timeseries_data_generator.py | 1 | 2451 | # coding=utf-8
# Copyright 2021 The Tensor2Tensor Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Data generator for the timeseries problem."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
def generate_data(timeseries_length, timeseries_params):
"""Generates synthetic timeseries using input parameters.
Each generated timeseries has timeseries_length data points.
Parameters for each timeseries are specified by timeseries_params.
Args:
timeseries_length: Number of data points to generate for each timeseries.
timeseries_params: Parameters used to generate the timeseries. The following
parameters need to be specified for each timeseries:
m = Slope of the timeseries used to compute the timeseries trend.
b = y-intercept of the timeseries used to compute the timeseries trend.
A = Timeseries amplitude used to compute timeseries period.
freqcoeff = Frequency coefficient used to compute timeseries period.
rndA = Random amplitude used to inject noise into the timeseries.
fn = Base timeseries function (np.cos or np.sin).
Example params for two timeseries.
[{"m": 0.006, "b": 300.0, "A":50.0, "freqcoeff":1500.0, "rndA":15.0,
"fn": np.sin},
{"m": 0.000, "b": 500.0, "A":35.0, "freqcoeff":3500.0, "rndA":25.0,
"fn": np.cos}]
Returns:
Multi-timeseries (list of list).
"""
x = range(timeseries_length)
multi_timeseries = []
for p in timeseries_params:
# Trend
y1 = [p["m"] * i + p["b"] for i in x]
# Period
y2 = [p["A"] * p["fn"](i / p["freqcoeff"]) for i in x]
# Noise
y3 = np.random.normal(0, p["rndA"], timeseries_length).tolist()
# Sum of Trend, Period and Noise. Replace negative values with zero.
y = [max(a + b + c, 0) for a, b, c in zip(y1, y2, y3)]
multi_timeseries.append(y)
return multi_timeseries
| apache-2.0 | 255,356,974,705,447,200 | 37.904762 | 80 | 0.694818 | false |
calmjs/calmjs.parse | src/calmjs/parse/tests/test_utils.py | 1 | 4040 | # -*- coding: utf-8 -*-
import unittest
import tempfile
from os.path import join
from os.path import pardir
from os.path import sep
from collections import namedtuple
from calmjs.parse import utils
class UtilsTestCase(unittest.TestCase):
def setUp(self):
self.old_dist = utils.ply_dist
self.py_major = utils.py_major
def tearDown(self):
utils.ply_dist = self.old_dist
utils.py_major = self.py_major
def test_name_something(self):
        # a quick and dirty stand-in for the ply distribution metadata
utils.ply_dist = namedtuple('Distribution', ['version'])('3.00')
utils.py_major = 2
lextab, yacctab = utils.generate_tab_names('some.package')
self.assertEqual(lextab, 'some.lextab_package_py2_ply3_00')
self.assertEqual(yacctab, 'some.yacctab_package_py2_ply3_00')
def test_name_unknown(self):
utils.ply_dist = None
utils.py_major = 3
lextab, yacctab = utils.generate_tab_names('some.package')
self.assertEqual(lextab, 'some.lextab_package_py3_plyunknown')
self.assertEqual(yacctab, 'some.yacctab_package_py3_plyunknown')
def test_repr_compat(self):
class fake_unicode(object):
def __repr__(self):
return "u'fake'"
previous = utils.unicode
self.addCleanup(setattr, utils, 'unicode', previous)
utils.unicode = fake_unicode
self.assertEqual("'fake'", utils.repr_compat(fake_unicode()))
utils.unicode = None
self.assertEqual("u'fake'", utils.repr_compat(fake_unicode()))
class FileNormTestCase(unittest.TestCase):
def test_find_common_same_base_same_level(self):
base = tempfile.mktemp()
source = join(base, 'src', 'file.js')
source_alt = join(base, 'src', 'alt', 'file.js')
source_min = join(base, 'src', 'file.min.js')
source_map = join(base, 'src', 'file.min.js.map')
# for generation of sourceMappingURL comment in source_min
self.assertEqual(
'file.min.js.map', utils.normrelpath(source_min, source_map))
# for pointing from source_map.source to the source
self.assertEqual(
'file.js', utils.normrelpath(source_map, source))
# for pointing from source_map.source to the source_min
self.assertEqual(
'file.min.js', utils.normrelpath(source_map, source_min))
self.assertEqual(
join('alt', 'file.js'), utils.normrelpath(source_map, source_alt))
def test_find_common_same_base_parents_common(self):
base = tempfile.mktemp()
source = join(base, 'src', 'file.js')
source_min = join(base, 'build', 'file.min.js')
source_map = join(base, 'build', 'file.min.js.map')
# mapping from source_map to source
self.assertEqual([pardir, 'src', 'file.js'], utils.normrelpath(
source_map, source).split(sep))
# for pointing from source_map.source to the source_min
self.assertEqual('file.min.js', utils.normrelpath(
source_map, source_min))
def test_find_double_parent(self):
base = tempfile.mktemp()
root = join(base, 'file.js')
nested = join(base, 'src', 'dir', 'blahfile.js')
self.assertEqual([pardir, pardir, 'file.js'], utils.normrelpath(
nested, root).split(sep))
self.assertEqual(['src', 'dir', 'blahfile.js'], utils.normrelpath(
root, nested).split(sep))
def test_find_same_prefix(self):
base = tempfile.mktemp()
src = join(base, 'basesrc', 'source.js')
tgt = join(base, 'basetgt', 'target.js')
self.assertEqual([pardir, 'basetgt', 'target.js'], utils.normrelpath(
src, tgt).split(sep))
def test_relative_dirs_ignored(self):
base = tempfile.mktemp()
absolute = join(base, 'file.js')
relative = join('somedir', 'file.js')
self.assertEqual(relative, utils.normrelpath(absolute, relative))
self.assertEqual(absolute, utils.normrelpath(relative, absolute))
| mit | 747,644,717,460,858,100 | 36.06422 | 78 | 0.621782 | false |
justmao945/lab | halftoning/error-diffusion/ed.py | 1 | 1454 | #!/usr/bin/env python2
# -*- coding:utf-8 -*-
'''
Here we use the Floyd-Steinberg transform matrix:
           |  *  7 |
  1/16  *  |       |
           | 3 5 1 |
to produce the error diffusion, where the star (*) indicates the current pixel
and the other elements represent the weights of the neighbouring pixels. See the
error_diffusion function for more information.
References:
<1> http://en.wikipedia.org/wiki/Error_diffusion
<2> Purdue University:Digital Image Processing Laboratory Image Halftoning.
'''
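# For reference, the kernel weights relative to the current pixel (*) used below are:
#            *      7/16
#   3/16   5/16     1/16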
import Image
import sys
def error_diffusion( pixel, size=(1,1) ):
""" Diffuse on a single channel, using Floyd-Steinberg kerenl.
@param pixel PIL PixelAccess object.
@param size A tuple to represent the size of pixel.
"""
for y in xrange(0, size[1]-1):
for x in xrange(1, size[0]-1):
oldpixel = pixel[x, y]
pixel[x, y] = 255 if oldpixel > 127 else 0
quant_error = oldpixel - pixel[x, y]
pixel[x+1, y ] = pixel[x+1, y ] + 7/16.0 * quant_error
pixel[x-1, y+1] = pixel[x-1, y+1] + 3/16.0 * quant_error
pixel[x, y+1] = pixel[x, y+1] + 5/16.0 * quant_error
pixel[x+1, y+1] = pixel[x+1, y+1] + 1/16.0 * quant_error
if __name__ == "__main__":
if len(sys.argv) != 2:
print "Usage: %s <image file>" % sys.argv[0]
exit(-1)
im = Image.open(sys.argv[1]).convert('L')
error_diffusion( im.load(), im.size )
#im.show()
im.save( "ed-%s.png" % sys.argv[1].split('.')[0] )
| mit | 7,517,468,272,118,521,000 | 28.08 | 75 | 0.596974 | false |
sigurdga/maps | maps_lib/Window.py | 1 | 5461 | # -*- Mode: Python; coding: utf-8; indent-tabs-mode: nil; tab-width: 4 -*-
### BEGIN LICENSE
# Copyright (C) 2012 Sigurd Gartmann [email protected]
# This program is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License version 3, as published
# by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranties of
# MERCHANTABILITY, SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR
# PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
### END LICENSE
### DO NOT EDIT THIS FILE ###
from gi.repository import Gio, Gtk # pylint: disable=E0611
import logging
logger = logging.getLogger('maps_lib')
from . helpers import get_builder, show_uri, get_help_uri
# This class is meant to be subclassed by MapsWindow. It provides
# common functions and some boilerplate.
class Window(Gtk.Window):
__gtype_name__ = "Window"
# To construct a new instance of this method, the following notable
# methods are called in this order:
# __new__(cls)
# __init__(self)
# finish_initializing(self, builder)
# __init__(self)
#
# For this reason, it's recommended you leave __init__ empty and put
# your initialization code in finish_initializing
def __new__(cls):
"""Special static method that's automatically called by Python when
constructing a new instance of this class.
Returns a fully instantiated BaseMapsWindow object.
"""
builder = get_builder('MapsWindow')
new_object = builder.get_object("maps_window")
new_object.finish_initializing(builder)
return new_object
def finish_initializing(self, builder):
"""Called while initializing this instance in __new__
finish_initializing should be called after parsing the UI definition
and creating a MapsWindow object with it in order to finish
initializing the start of the new MapsWindow instance.
"""
# Get a reference to the builder and set up the signals.
self.builder = builder
self.ui = builder.get_ui(self, True)
self.PreferencesDialog = None # class
self.preferences_dialog = None # instance
self.AboutDialog = None # class
self.settings = Gio.Settings("net.launchpad.maps")
self.settings.connect('changed', self.on_preferences_changed)
# Optional application indicator support
# Run 'quickly add indicator' to get started.
# More information:
# http://owaislone.org/quickly-add-indicator/
# https://wiki.ubuntu.com/DesktopExperienceTeam/ApplicationIndicators
try:
from maps import indicator
# self is passed so methods of this class can be called from indicator.py
# Comment this next line out to disable appindicator
self.indicator = indicator.new_application_indicator(self)
except ImportError:
pass
def on_mnu_contents_activate(self, widget, data=None):
show_uri(self, "ghelp:%s" % get_help_uri())
def on_mnu_about_activate(self, widget, data=None):
"""Display the about box for maps."""
if self.AboutDialog is not None:
about = self.AboutDialog() # pylint: disable=E1102
response = about.run()
about.destroy()
def on_mnu_preferences_activate(self, widget, data=None):
"""Display the preferences window for maps."""
""" From the PyGTK Reference manual
Say for example the preferences dialog is currently open,
and the user chooses Preferences from the menu a second time;
use the present() method to move the already-open dialog
where the user can see it."""
if self.preferences_dialog is not None:
logger.debug('show existing preferences_dialog')
self.preferences_dialog.present()
elif self.PreferencesDialog is not None:
logger.debug('create new preferences_dialog')
self.preferences_dialog = self.PreferencesDialog() # pylint: disable=E1102
self.preferences_dialog.connect('destroy', self.on_preferences_dialog_destroyed)
self.preferences_dialog.show()
# destroy command moved into dialog to allow for a help button
def on_mnu_close_activate(self, widget, data=None):
"""Signal handler for closing the MapsWindow."""
self.destroy()
def on_destroy(self, widget, data=None):
"""Called when the MapsWindow is closed."""
# Clean up code for saving application state should be added here.
Gtk.main_quit()
def on_preferences_changed(self, settings, key, data=None):
logger.debug('preference changed: %s = %s' % (key, str(settings.get_value(key))))
def on_preferences_dialog_destroyed(self, widget, data=None):
'''only affects gui
logically there is no difference between the user closing,
minimising or ignoring the preferences dialog'''
logger.debug('on_preferences_dialog_destroyed')
# to determine whether to create or present preferences_dialog
self.preferences_dialog = None
| gpl-3.0 | -782,138,757,194,549,000 | 41.333333 | 92 | 0.666545 | false |
cfe-lab/Kive | kive/portal/management/commands/graph_kive.py | 1 | 1760 | import itertools
import os
from django.conf import settings
from django.core.management import call_command
from django.core.management.base import BaseCommand
class Command(BaseCommand):
help = 'Generates class diagrams.'
def handle(self, *args, **options):
if 'django_extensions' not in settings.INSTALLED_APPS:
exit('django_extensions not found, try using --setting kive.UML_settings')
docs_path = os.path.join(os.path.pardir, 'docs', 'models')
apps = [app for app in settings.INSTALLED_APPS
if not (app.startswith('django') or
app in ('portal.apps.PortalConfig', 'rest_framework'))]
apps.sort()
for app in apps:
print(app)
exclude_models = ['User', 'Group']
if app != 'metadata':
exclude_models.append('AccessControl')
call_command("graph_models",
app,
pygraphviz=True,
group_models=True,
outputfile=os.path.join(docs_path, app+'.png'),
exclude_models=','.join(exclude_models))
readme_path = os.path.join(docs_path, 'README.md')
with open(readme_path, 'rU+') as f:
models_section = '### Models ###\n'
header = itertools.takewhile(lambda line: line != models_section,
f.readlines())
f.seek(0)
for line in header:
f.write(line)
f.write(models_section)
for app in apps:
f.write('#### {} ####\n'.format(app))
                # assumed fix: the original format string here was empty and its two
                # arguments were unused; embed each app's generated diagram instead
                f.write('![{0}]({0}.png)\n\n'.format(app))
f.truncate()
| bsd-3-clause | -1,346,922,852,982,657,500 | 38.111111 | 86 | 0.525 | false |
lcpt/xc | python_modules/rough_calculations/ng_rc_section.py | 1 | 3350 | # -*- coding: utf-8 -*-
__author__= "Luis C. Pérez Tato (LCPT)"
__copyright__= "Copyright 2016, LCPT"
__license__= "GPL"
__version__= "3.0"
__email__= "[email protected]"
from rough_calculations import ng_rebar_def
from materials.sia262 import SIA262_materials
from postprocess.reports import common_formats as fmt
from miscUtils import LogMessages as lmsg
class RCSection(object):
tensionRebars= None
concrete= SIA262_materials.c25_30
b= 0.25
h= 0.25
def __init__(self,tensionRebars,concrete,b,h):
self.tensionRebars= tensionRebars
self.concrete= concrete
self.b= b
self.h= h
self.stressLimitUnderPermanentLoads= 230e6
def setArmature(self,tensionRebars):
self.tensionRebars= tensionRebars
def getMinReinfAreaUnderFlexion(self):
return self.tensionRebars.getMinReinfAreaUnderFlexion(self.concrete,self.h)
def getMinReinfAreaUnderTension(self):
return self.tensionRebars.getMinReinfAreaUnderTension(self.concrete,self.h)
def getMR(self):
return self.tensionRebars.getMR(self.concrete,self.b,self.h)
def getVR(self,Nd,Md):
return self.tensionRebars.getVR(self.concrete,Nd,Md,self.b,self.h)
def writeResultFlexion(self,outputFile,Nd,Md,Vd):
famArm= self.tensionRebars
concrete= self.concrete
AsMin= self.getMinReinfAreaUnderFlexion()
ancrage= famArm.getBasicAnchorageLength(concrete)
outputFile.write(" Dimensions coupe; b= "+ fmt.Longs.format(self.b)+ "m, h= "+ fmt.Longs.format(self.h)+ "m\\\\\n")
ng_rebar_def.writeRebars(outputFile,concrete,famArm,AsMin)
if(abs(Md)>0):
MR= self.getMR()
outputFile.write(" Verif. en flexion: Md= "+ fmt.Esf.format(Md/1e3)+ " kN m, MR= "+ fmt.Esf.format(MR/1e3)+ "kN m")
ng_rebar_def.writeF(outputFile," F(M)", MR/Md)
if(abs(Vd)>0):
VR= self.getVR(Nd,Md)
outputFile.write(" Vérif. eff. tranchant: Vd= "+ fmt.Esf.format(Vd/1e3)+ "kN, VR= "+ fmt.Esf.format(VR/1e3)+ "kN")
ng_rebar_def.writeF(outputFile," F(V)",VR/Vd)
def writeResultTraction(self,outputFile,Nd):
famArm= self.tensionRebars
concrete= self.concrete
AsMin= self.getMinReinfAreaUnderTension()/2
ancrage= famArm.getBasicAnchorageLength(concrete)
ng_rebar_def.writeRebars(outputFile,concrete,famArm,AsMin)
if(abs(Nd)>0):
lmsg.error("ERROR; tension not implemented.")
def writeResultCompression(self,outputFile,Nd,AsTrsv):
''' Results for compressed rebars.
:param AsTrsv: Rebar area in transverse direction.
'''
famArm= self.tensionRebars #Even if they're not in tension...
concrete= self.concrete
AsMin= 0.2*AsTrsv # 20% of the transversal area.
ng_rebar_def.writeRebars(outputFile,concrete,famArm,AsMin)
if(abs(Nd)!=0.0):
lmsg.error("ERROR; not implemented.")
def writeResultStress(self,outputFile,M):
    '''Checking of stresses under permanent loads (SIA 262 fig. 31)'''
concrete= self.concrete
if(abs(M)>0):
stress= M/(0.9*self.h*self.tensionRebars.getAs())
outputFile.write(" Verif. contraintes: M= "+ fmt.Esf.format(M/1e3)+ " kN m, $\sigma_s$= "+ fmt.Esf.format(stress/1e6)+ " MPa\\\\\n")
outputFile.write(" $\sigma_{lim}$= "+ fmt.Esf.format(self.stressLimitUnderPermanentLoads/1e6)+ " MPa")
ng_rebar_def.writeF(outputFile," F($\sigma_s$)", self.stressLimitUnderPermanentLoads/stress)
| gpl-3.0 | -4,807,855,012,892,411,000 | 41.923077 | 139 | 0.698029 | false |
akrzos/cfme_tests | cfme/web_ui/__init__.py | 1 | 121196 | """Provides a number of objects to help with managing certain elements in the CFME UI.
Specifically there are two categories of objects, organizational and elemental.
* **Organizational**
* :py:class:`Region`
* :py:mod:`cfme.web_ui.menu`
* **Elemental**
* :py:class:`AngularCalendarInput`
* :py:class:`AngularSelect`
* :py:class:`ButtonGroup`
* :py:class:`Calendar`
* :py:class:`ColorGroup`
* :py:class:`CheckboxTable`
* :py:class:`CheckboxSelect`
* :py:class:`DHTMLSelect`
* :py:class:`DriftGrid`
* :py:class:`DynamicTable`
* :py:class:`EmailSelectForm`
* :py:class:`Filter`
* :py:class:`Form`
* :py:class:`InfoBlock`
* :py:class:`Input`
* :py:class:`MultiFill`
* :py:class:`Quadicon`
* :py:class:`Radio`
* :py:class:`ScriptBox`
* :py:class:`Select`
* :py:class:`ShowingInputs`
* :py:class:`SplitCheckboxTable`
* :py:class:`SplitTable`
* :py:class:`Table`
* :py:class:`Tree`
* :py:mod:`cfme.web_ui.accordion`
* :py:mod:`cfme.web_ui.cfme_exception`
* :py:mod:`cfme.web_ui.expression_editor`
* :py:mod:`cfme.web_ui.flash`
* :py:mod:`cfme.web_ui.form_buttons`
* :py:mod:`cfme.web_ui.jstimelines`
* :py:mod:`cfme.web_ui.listaccordion`
* :py:mod:`cfme.web_ui.menu`
* :py:mod:`cfme.web_ui.mixins`
* :py:mod:`cfme.web_ui.paginator`
* :py:mod:`cfme.web_ui.search`
* :py:mod:`cfme.web_ui.tabstrip`
* :py:mod:`cfme.web_ui.toolbar`
"""
import atexit
import os
import re
import types
from datetime import date
from collections import Sequence, Mapping, Callable
from tempfile import NamedTemporaryFile
from xml.sax.saxutils import quoteattr
from cached_property import cached_property
from selenium.common import exceptions as sel_exceptions
from selenium.common.exceptions import NoSuchElementException
from selenium.webdriver.remote.file_detector import LocalFileDetector
from multimethods import multimethod, multidispatch, Anything
import cfme.fixtures.pytest_selenium as sel
from cfme import exceptions, js
from cfme.fixtures.pytest_selenium import browser
from utils import attributize_string, castmap, version
# For backward compatibility with code that pulls in Select from web_ui instead of sel
from cfme.fixtures.pytest_selenium import Select
from utils.log import logger
from utils.pretty import Pretty
class Selector(object):
"""
Special Selector object allowing object resolution on attr access
The Selector is a simple class which allows a 'super' widget to support multiple
implementations. This is achieved by the use of a ``decide`` method which accesses
attrs of the object set by the ``__init__`` of the child class. These attributes
are then used to decide which type of object is on a page. In some cases, this can
avoid a version pick if the information used to instantiate both old and new implementations
    can be identical. This is most notable when using an "id" which remains constant from
implementation to implementation.
    As an example, imagine the normal "checkbox" is replaced with a fancy new web 2.0
checkbox. Both have an "input" element, and give it the same "id". When the decide method is
invoked, the "id" is inspected and used to determine if it is an old or a new style widget.
We then set a hidden attribute of the super widget and proxy all further attr requests to
that object.
This means that in order for things to behave as expect ALL implementations must also expose
the same "public" API.
"""
def __init__(self):
self._obj = None
def __getattr__(self, name):
if not self._obj:
self._obj = self.decide()
return getattr(self._obj, name)
def decide(self):
raise Exception('This widget does not have a "decide" method which is mandatory')
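# Illustrative sketch (not part of the original module): a Selector subclass only needs to
# set up its own attributes and provide ``decide``; attribute access is then proxied to the
# implementation that ``decide`` returns. ``OldCheckbox``/``NewCheckbox`` are hypothetical.
#
#     class HypotheticalCheckbox(Selector):
#         def __init__(self, id):
#             self.id = id
#             super(HypotheticalCheckbox, self).__init__()
#
#         def decide(self):
#             # inspect the element with this id and pick the matching implementation
#             return OldCheckbox(self.id) if "old" in self.id else NewCheckbox(self.id)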
class Region(Pretty):
"""
Base class for all UI regions/pages
Args:
locators: A dict of locator objects for the given region
title: A string containing the title of the page,
or a versioned dict of page title strings
identifying_loc: Single locator key from locators used by :py:meth:`Region.is_displayed`
to check if the region is currently visible
Usage:
page = Region(locators={
'configuration_button': (By.CSS_SELECTOR, "div.dhx_toolbar_btn[title='Configuration']"),
'discover_button': (By.CSS_SELECTOR,
"tr[title='Discover Cloud Providers']>td.td_btn_txt>" "div.btn_sel_text")
},
title='Cloud Providers',
identifying_loc='discover_button'
)
The elements can then accessed like so::
page.configuration_button
Locator attributes will return the locator tuple for that particular element,
and can be passed on to other functions, such as :py:func:`element` and :py:func:`click`.
Note:
When specifying a region title, omit the "Cloudforms Management Engine: " or "ManageIQ: "
prefix. They're included on every page, and different for the two versions of the appliance,
and :py:meth:`is_displayed` strips them off before checking for equality.
"""
pretty_attrs = ['title']
def __getattr__(self, name):
if hasattr(self, 'locators') and name in self.locators:
locator = self.locators[name]
if isinstance(locator, dict):
return version.pick(locator)
else:
return locator
else:
raise AttributeError("Region has no attribute named " + name)
def __init__(self, locators=None, title=None, identifying_loc=None, **kwargs):
self.locators = locators
self.identifying_loc = identifying_loc
self._title = title
self.infoblock = InfoBlock # Legacy support
@property
def title(self):
# support title being a versioned dict
if isinstance(self._title, dict):
self._title = version.pick(self._title)
return self._title
def is_displayed(self):
"""
Checks to see if the region is currently displayed.
Returns: A boolean describing if the region is currently displayed
"""
if not self.identifying_loc and not self.title:
logger.warning("Region doesn't have an identifying locator or title, "
"can't determine if current page.")
return True
# All page titles have a prefix; strip it off
try:
browser_title = browser().title.split(': ', 1)[1]
except IndexError:
browser_title = None
if self.identifying_loc and sel.is_displayed(
self.locators[self.identifying_loc], _no_deeper=True):
ident_match = True
else:
if not self.title:
logger.info('Identifying locator for region not found')
else:
logger.info('Identifying locator for region %s not found', self.title)
ident_match = False
if self.title is None:
# If we don't have a title we can't match it, and some Regions are multi-page
# so we can't have a title set.
title_match = True
elif self.title and browser_title == self.title:
title_match = True
else:
logger.info("Title %s doesn't match expected title %s", browser_title, self.title)
title_match = False
return title_match and ident_match
def get_context_current_page():
"""
Returns the current page name
Returns: A string containing the current page name
"""
url = browser().current_url()
stripped = url.lstrip('https://')
return stripped[stripped.find('/'):stripped.rfind('?')]
def _convert_header(header):
"""Convers header cell text into something usable as an identifier.
Static method which replaces spaces in headers with underscores and strips out
all other characters to give an identifier.
Args:
header: A header name to be converted.
Returns: A string holding the converted header.
"""
return re.sub('[^0-9a-zA-Z_]+', '', header.replace(' ', '_')).lower()
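# For example, _convert_header('Power State') returns 'power_state' and
# _convert_header('No. of VMs') returns 'no_of_vms'.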
class CachedTableHeaders(object):
"""the internal cache of headers
This allows columns to be moved and the Table updated. The :py:attr:`headers` stores
the header cache element and the list of headers are stored in _headers. The
attribute header_indexes is then created, before finally creating the items
attribute.
"""
def __init__(self, table):
self.headers = sel.elements('td | th', root=table.header_row)
self.indexes = {
_convert_header(cell.text): index
for index, cell in enumerate(self.headers)}
class Table(Pretty):
"""
Helper class for Table/List objects
Turns CFME custom Table/Lists into iterable objects using a generator.
Args:
table_locator: locator pointing to a table element with child thead and tbody elements
representing that table's header and body row containers
header_offset: In the case of a padding table row above the header, the row offset
can be used to skip rows in ``<thead>`` to locate the correct header row. This offset
is 1-indexed, not 0-indexed, so an offset of 1 is the first child row element
        body_offset: In the case of a padding table row above the body rows, the row offset
            can be used to skip rows in ``<tbody>`` to locate the first body row. This offset
            is 1-indexed, not 0-indexed, so an offset of 1 is the first child row element
Attributes:
header_indexes: A dict of header names related to their int index as a column.
Usage:
table = Table('//div[@id="prov_pxe_img_div"]//table')
The HTML code for the table looks something like this::
<div id="prov_pxe_img_div">
<table>
<thead>
<tr>
<th>Name</th>
<th>Animal</th>
<th>Size</th>
</tr>
</thead>
<tbody>
<tr>
<td>John</td>
<td>Monkey</td>
<td>Small</td>
</tr>
<tr>
<td>Mike</td>
<td>Tiger</td>
<td>Large</td>
</tr>
</tbody>
</table>
</div>
We can now click on an element in the list like so, by providing the column
name and the value that we are searching for::
table.click_cell('name', 'Mike')
We can also perform the same, by using the index of the column, like so::
table.click_cell(1, 'Tiger')
Additionally, the rows of a table can be iterated over, and that row's columns can be accessed
by name or index (left to right, 0-index)::
        for row in table.rows():
# Get the first cell in the row
row[0]
# Get the row's contents for the column with header 'Row Name'
# All of these will work, though the first is preferred
row.row_name, row['row_name'], row['Row Name']
    When doing bulk operations, such as selecting rows in a table based on their content,
    the ``*_by_cells`` methods are able to find matching rows much more quickly than iterating,
as the work can be done with fewer selenium calls.
* :py:meth:`find_rows_by_cells`
* :py:meth:`find_row_by_cells`
* :py:meth:`click_rows_by_cells`
* :py:meth:`click_row_by_cells`
Note:
A table is defined by the containers of the header and data areas, and offsets to them.
This allows a table to include one or more padding rows above the header row. In
the example above, there is no padding row, as our offset values are set to 0.
"""
pretty_attrs = ['_loc']
def __init__(self, table_locator, header_offset=0, body_offset=0):
self._headers = None
self._header_indexes = None
self._loc = table_locator
self.header_offset = int(header_offset)
self.body_offset = int(body_offset)
@property
def header_row(self):
"""Property representing the ``<tr>`` element that contains header cells"""
# thead/tr containing header data
# xpath is 1-indexed, so we need to add 1 to the offset to get the correct row
return sel.element('thead/tr[{}]'.format(self.header_offset + 1), root=sel.element(self))
@property
def body(self):
"""Property representing the ``<tbody>`` element that contains body rows"""
# tbody containing body rows
return sel.element('tbody', root=sel.element(self))
@cached_property
def _headers_cache(self):
return CachedTableHeaders(self)
def _update_cache(self):
"""refresh the cache in case we know its stale"""
try:
del self._headers_cache
except AttributeError:
            pass  # it's not cached, don't try to be eager
else:
self._headers_cache
@property
def headers(self):
"""List of ``<td>`` or ``<th>`` elements in :py:attr:`header_row`
"""
return self._headers_cache.headers
@property
def header_indexes(self):
"""Dictionary of header name: column index for this table's rows
Derived from :py:attr:`headers`
"""
return self._headers_cache.indexes
def locate(self):
return sel.move_to_element(self._loc)
@property
def _root_loc(self):
return self.locate()
def rows(self):
"""A generator method holding the Row objects
This generator yields Row objects starting at the first data row.
Yields:
:py:class:`Table.Row` object corresponding to the next row in the table.
"""
index = self.body_offset
row_elements = sel.elements('tr', root=self.body)
for row_element in row_elements[index:]:
yield self.create_row_from_element(row_element)
def find_row(self, header, value):
"""
Finds a row in the Table by iterating through each visible item.
Args:
header: A string or int, describing which column to inspect.
value: The value to be compared when trying to identify the correct row
to return.
Returns:
:py:class:`Table.Row` containing the requested cell, else ``None``.
"""
return self.find_row_by_cells({header: value})
def find_cell(self, header, value):
"""
        Finds an item in the Table by iterating through each visible item.
        This work used to be done by the :py:meth:`click_cell` method but
        has now been abstracted out so it can be called separately.
Args:
header: A string or int, describing which column to inspect.
value: The value to be compared when trying to identify the correct cell
to click.
Returns: WebElement of the element if item was found, else ``None``.
"""
matching_cell_rows = self.find_rows_by_cells({header: value})
try:
if isinstance(header, basestring):
return getattr(matching_cell_rows[0], header)
else:
return matching_cell_rows[0][header]
except IndexError:
return None
def find_rows_by_cells(self, cells, partial_check=False):
"""A fast row finder, based on cell content.
If you pass a regexp as a value, then it will be used with its ``.match()`` method.
Args:
cells: A dict of ``header: value`` pairs or a sequence of
nested ``(header, value)`` pairs.
partial_check: If to use the ``in`` operator rather than ``==``.
Returns: A list of containing :py:class:`Table.Row` objects whose contents
match all of the header: value pairs in ``cells``
"""
# accept dicts or supertuples
cells = dict(cells)
cell_text_loc = (
'.//td/descendant-or-self::*[contains(normalize-space(text()), "{}")]/ancestor::tr[1]')
matching_rows_list = list()
for value in cells.values():
# Get all td elements that contain the value text
matching_elements = sel.elements(cell_text_loc.format(value),
root=sel.move_to_element(self._root_loc))
if matching_elements:
matching_rows_list.append(set(matching_elements))
# Now, find the common row elements that matched all the input cells
# (though not yet matching values to headers)
if not matching_rows_list:
# If none matched, short out
return []
rows_elements = list(reduce(lambda set1, set2: set1 & set2, matching_rows_list))
# Convert them to rows
# This is slow, which is why we do it after reducing the row element pile,
# and not when building matching_rows_list, but it makes comparing header
# names and expected values easy
rows = [self.create_row_from_element(element) for element in rows_elements]
# Only include rows where the expected values are in the right columns
matching_rows = list()
def matching_row_filter(heading, value):
if isinstance(value, re._pattern_type):
return value.match(row[heading].text) is not None
elif partial_check:
return value in row[heading].text
else:
return row[heading].text == value
for row in rows:
if all(matching_row_filter(*cell) for cell in cells.items()):
matching_rows.append(row)
return matching_rows
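    # Illustrative usage (header names assumed from the example table above); plain values
    # are compared exactly while compiled regexps are matched with ``.match()``:
    #   table.find_rows_by_cells({'name': 'Mike', 'size': 'Large'})
    #   table.find_rows_by_cells({'name': re.compile('^Mi')})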
def find_row_by_cells(self, cells, partial_check=False):
"""Find the first row containing cells
Args:
cells: See :py:meth:`Table.find_rows_by_cells`
Returns: The first matching row found, or None if no matching row was found
"""
try:
rows = self.find_rows_by_cells(cells, partial_check=partial_check)
return rows[0]
except IndexError:
return None
def click_rows_by_cells(self, cells, click_column=None, partial_check=False):
"""Click the cell at ``click_column`` in the rows matched by ``cells``
Args:
cells: See :py:meth:`Table.find_rows_by_cells`
click_column: Which column in the row to click, defaults to None,
which will attempt to click the row element
Note:
The value of click_column can be a string or an int, and will be passed directly to
the item accessor (``__getitem__``) for :py:class:`Table.Row`
"""
rows = self.find_rows_by_cells(cells, partial_check=partial_check)
if click_column is None:
map(sel.click, rows)
else:
map(sel.click, [row[click_column] for row in rows])
def click_row_by_cells(self, cells, click_column=None, partial_check=False):
"""Click the cell at ``click_column`` in the first row matched by ``cells``
Args:
cells: See :py:meth:`Table.find_rows_by_cells`
click_column: See :py:meth:`Table.click_rows_by_cells`
"""
row = self.find_row_by_cells(cells, partial_check=partial_check)
if click_column is None:
sel.click(row)
else:
sel.click(row[click_column])
def create_row_from_element(self, row_element):
"""Given a row element in this table, create a :py:class:`Table.Row`
Args:
row_element: A table row (``<tr>``) WebElement representing a row in this table.
Returns: A :py:class:`Table.Row` for ``row_element``
"""
return Table.Row(row_element, self)
def click_cells(self, cell_map):
"""Submits multiple cells to be clicked on
Args:
cell_map: A mapping of header names and values, representing cells to click.
                As an example, ``{'name': ['wing', 'nut'], 'age': ['12']}`` would click on
the cells which had ``wing`` and ``nut`` in the name column and ``12`` in
the age column. The yaml example for this would be as follows::
list_items:
name:
- wing
- nut
age:
- 12
Raises:
NotAllItemsClicked: If some cells were unable to be found.
"""
failed_clicks = []
for header, values in cell_map.items():
if isinstance(values, basestring):
values = [values]
for value in values:
res = self.click_cell(header, value)
if not res:
failed_clicks.append("{}:{}".format(header, value))
if failed_clicks:
raise exceptions.NotAllItemsClicked(failed_clicks)
def click_cell(self, header, value):
"""Clicks on a cell defined in the row.
Uses the header identifier and a value to determine which cell to click on.
Args:
header: A string or int, describing which column to inspect.
value: The value to be compared when trying to identify the correct cell
to click the cell in.
Returns: ``True`` if item was found and clicked, else ``False``.
"""
cell = self.find_cell(header, value)
if cell:
sel.click(cell)
return True
else:
return False
class Row(Pretty):
"""An object representing a row in a Table.
The Row object returns a dymanically addressable attribute space so that
the tables headers are automatically generated.
Args:
row_element: A table row ``WebElement``
parent_table: :py:class:`Table` containing ``row_element``
Notes:
Attributes are dynamically generated. The index/key accessor is more flexible
than the attr accessor, as it can operate on int indices and header names.
"""
pretty_attrs = ['row_element', 'table']
def __init__(self, row_element, parent_table):
self.table = parent_table
self.row_element = row_element
@property
def columns(self):
"""A list of WebElements corresponding to the ``<td>`` elements in this row"""
return sel.elements('td', root=self.row_element)
def __getattr__(self, name):
"""
Returns Row element by header name
"""
return self.columns[self.table.header_indexes[_convert_header(name)]]
def __getitem__(self, index):
"""
Returns Row element by header index or name
"""
try:
return self.columns[index]
except TypeError:
# Index isn't an int, assume it's a string
return getattr(self, _convert_header(index))
# Let IndexError raise
def __str__(self):
return ", ".join(["'{}'".format(el.text) for el in self.columns])
def __eq__(self, other):
if isinstance(other, type(self)):
# Selenium elements support equality checks, so we can, too.
return self.row_element == other.row_element
else:
return id(self) == id(other)
def locate(self):
# table.create_row_from_element(row_instance) might actually work...
return sel.move_to_element(self.row_element)
class CAndUGroupTable(Table):
"""Type of tables used in C&U, not tested in others.
    Provides a ``.groups()`` generator which yields group objects. A group object consists of the
    rows that are located in the group plus the summary information. The main principle is that
    all the rows inside a group are stored in the group object's ``.rows``, and when the script
    encounters the end of the group, it stores the summary data after the data rows as attributes,
    so e.g. ``Totals:`` will become ``group.totals``. All the rows are represented as dictionaries.
"""
class States:
NORMAL_ROWS = 0
GROUP_SUMMARY = 1
class Group(object):
def __init__(self, group_id, headers, rows, info_rows):
self.id = group_id
self.rows = [dict(zip(headers, row)) for row in rows]
info_headers = headers[1:]
for info_row in info_rows:
name = info_row[0]
rest = info_row[1:]
data = dict(zip(info_headers, rest))
group_attr = attributize_string(name)
setattr(self, group_attr, data)
def __repr__(self):
return '<CAndUGroupTable.Group {}'.format(repr(self.id))
def paginated_rows(self):
from cfme.web_ui import paginator
for page in paginator.pages():
for row in self.rows():
yield row
def find_group(self, group_id):
"""Finds a group by its group ID (the string that is alone on the line)"""
for group in self.groups():
if group.id == group_id:
                return group
else:
raise KeyError('Group {} not found'.format(group_id))
def groups(self):
headers = map(sel.text, self.headers)
headers_length = len(headers)
rows = self.paginated_rows()
current_group_rows = []
current_group_summary_rows = []
current_group_id = None
state = self.States.NORMAL_ROWS
while True:
try:
row = rows.next()
except StopIteration:
if state == self.States.GROUP_SUMMARY:
row = None
else:
break
if state == self.States.NORMAL_ROWS:
if len(row.columns) == headers_length:
current_group_rows.append(tuple(map(sel.text, row.columns)))
else:
# Transition to the group summary
current_group_id = sel.text(row.columns[0]).strip()
state = self.States.GROUP_SUMMARY
elif state == self.States.GROUP_SUMMARY:
# row is None == we are at the end of the table so a slightly different behaviour
if row is not None:
fc_length = len(sel.text(row.columns[0]).strip())
if row is None or fc_length == 0:
# Done with group
yield self.Group(
current_group_id, headers, current_group_rows, current_group_summary_rows)
current_group_rows = []
current_group_summary_rows = []
current_group_id = None
state = self.States.NORMAL_ROWS
else:
current_group_summary_rows.append(tuple(map(sel.text, row.columns)))
else:
raise RuntimeError('This should never happen')
if current_group_id is not None or current_group_rows or current_group_summary_rows:
raise ValueError(
'GroupTable could not be parsed properly: {} {} {}'.format(
current_group_id, repr(current_group_rows), repr(current_group_summary_rows)))
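    # Illustrative usage (assumes the report contains a "Totals:" summary line):
    #   for group in table.groups():
    #       print group.id, len(group.rows)
    #       print group.totals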
class SplitTable(Table):
""":py:class:`Table` that supports the header and body rows being in separate tables
Args:
header_data: A tuple, containing an element locator and an offset value.
These point to the container of the header row. The offset is used in case
there is a padding row above the header, or in the case that the header
and the body are contained inside the same table element.
body_data: A tuple, containing an element locator and an offset value.
These point to the container of the body rows. The offset is used in case
there is a padding row above the body rows, or in the case that the header
and the body are contained inside the same table element.
Usage:
table = SplitTable(header_data=('//div[@id="header_table"]//table/tbody', 0),
body_data=('//div[@id="body_table"]//table/tbody', 1))
The HTML code for a split table looks something like this::
<div id="prov_pxe_img_div">
<table id="header_table">
<tbody>
<tr>
<td>Name</td>
<td>Animal</td>
<td>Size</td>
</tr>
</tbody>
</table>
<table id="body_table">
<tbody>
<tr>
<td>Useless</td>
<td>Padding</td>
<td>Row</td>
</tr>
<tr>
<td>John</td>
<td>Monkey</td>
<td>Small</td>
</tr>
<tr>
<td>Mike</td>
<td>Tiger</td>
<td>Large</td>
</tr>
</tbody>
</table>
</div>
Note the use of the offset to skip the "Useless Padding Row" in ``body_data``. Most split
tables require an offset for both the heading and body rows.
"""
def __init__(self, header_data, body_data):
self._header_loc, header_offset = header_data
self._body_loc, body_offset = body_data
self.header_offset = int(header_offset)
self.body_offset = int(body_offset)
@property
def _root_loc(self):
return self._body_loc
@property
def header_row(self):
"""Property representing the ``<tr>`` element that contains header cells"""
# thead/tr containing header data
# xpath is 1-indexed, so we need to add 1 to the offset to get the correct row
return sel.element(
'tr[{}]'.format(self.header_offset + 1), root=sel.element(self._header_loc))
@property
def body(self):
"""Property representing the element that contains body rows"""
# tbody containing body rows
return sel.element(self._body_loc)
def locate(self):
# Use the header locator as the overall table locator
return sel.move_to_element(self._header_loc)
class SortTable(Table):
"""This table is the same as :py:class:`Table`, but with added sorting functionality."""
@property
def _sort_by_cell(self):
try:
return sel.element("./th[contains(@class, 'sorting_')]", root=self.header_row)
except NoSuchElementException:
return None
@property
def sorted_by(self):
"""Return column name what is used for sorting now.
"""
cell = self._sort_by_cell
if cell is None:
return None
return sel.text("./a", root=cell).encode("utf-8")
@property
def sort_order(self):
"""Return order.
Returns: 'ascending' or 'descending'
"""
cell = self._sort_by_cell
if cell is None:
return None
cls = sel.get_attribute(cell, "class")
if "sorting_asc" in cls:
return "ascending"
elif "sorting_desc" in cls:
return "descending"
else:
return None
def click_header_cell(self, text):
"""Clicks on the header to change sorting conditions.
Args:
text: Header cell text.
"""
sel.click(sel.element("./th/a[normalize-space(.)='{}']".format(text), root=self.header_row))
def sort_by(self, header, order):
"""Sorts the table by given conditions
Args:
header: Text of the header cell to use for sorting.
order: ascending or descending
"""
order = order.lower().strip()
if header != self.sorted_by:
# Change column to order by
self.click_header_cell(header)
if self.sorted_by != header:
raise Exception(
"Detected malfunction in table ordering (wanted {}, got {})".format(
header, self.sorted_by))
if order != self.sort_order:
# Change direction
self.click_header_cell(header)
if self.sort_order != order:
raise Exception("Detected malfunction in table ordering (wanted {}, got {})".format(
order, self.sort_order))
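    # Illustrative usage: click the "Name" header until the rows are sorted ascending.
    #   table.sort_by('Name', 'ascending')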
class CheckboxTable(Table):
""":py:class:`Table` with support for checkboxes
Args:
table_locator: See :py:class:`cfme.web_ui.Table`
header_checkbox_locator: Locator of header checkbox (default `None`)
Specify in case the header checkbox is not part of the header row
body_checkbox_locator: Locator for checkboxes in body rows
header_offset: See :py:class:`cfme.web_ui.Table`
body_offset: See :py:class:`cfme.web_ui.Table`
"""
_checkbox_loc = ".//input[@type='checkbox']"
def __init__(self, table_locator, header_offset=0, body_offset=0,
header_checkbox_locator=None, body_checkbox_locator=None):
super(CheckboxTable, self).__init__(table_locator, header_offset, body_offset)
if body_checkbox_locator:
self._checkbox_loc = body_checkbox_locator
self._header_checkbox_loc = header_checkbox_locator
@property
def header_checkbox(self):
"""Checkbox used to select/deselect all rows"""
if self._header_checkbox_loc is not None:
return sel.element(self._header_checkbox_loc)
else:
return sel.element(self._checkbox_loc, root=self.header_row)
def select_all(self):
"""Select all rows using the header checkbox or one by one if not present"""
if self._header_checkbox_loc is None:
for row in self.rows():
self._set_row_checkbox(row, True)
else:
sel.uncheck(self.header_checkbox)
sel.check(self.header_checkbox)
def deselect_all(self):
"""Deselect all rows using the header checkbox or one by one if not present"""
if self._header_checkbox_loc is None:
for row in self.rows():
self._set_row_checkbox(row, False)
else:
sel.check(self.header_checkbox)
sel.uncheck(self.header_checkbox)
def _set_row_checkbox(self, row, set_to=False):
row_checkbox = sel.element(self._checkbox_loc, root=row.locate())
sel.checkbox(row_checkbox, set_to)
def _set_row(self, header, value, set_to=False):
""" Internal method used to select/deselect a row by column header and cell value
Args:
header: See :py:meth:`Table.find_row`
value: See :py:meth:`Table.find_row`
set_to: Select if `True`, deselect if `False`
"""
row = self.find_row(header, value)
if row:
self._set_row_checkbox(row, set_to)
return True
else:
return False
def select_rows_by_indexes(self, *indexes):
"""Select rows specified by row indexes (starting with 0)
"""
for i, row in enumerate(self.rows()):
if i in indexes:
self._set_row_checkbox(row, True)
def deselect_rows_by_indexes(self, *indexes):
"""Deselect rows specified by row indexes (starting with 0)
"""
for i, row in enumerate(self.rows()):
if i in indexes:
self._set_row_checkbox(row, False)
def select_row(self, header, value):
"""Select a single row specified by column header and cell value
Args:
header: See :py:meth:`Table.find_row`
value: See :py:meth:`Table.find_row`
Returns: `True` if successful, `False` otherwise
"""
return self._set_row(header, value, True)
def deselect_row(self, header, value):
"""Deselect a single row specified by column header and cell value
Args:
header: See :py:meth:`Table.find_row`
value: See :py:meth:`Table.find_row`
Returns: `True` if successful, `False` otherwise
"""
return self._set_row(header, value, False)
def _set_rows(self, cell_map, set_to=False):
""" Internal method used to select/deselect multiple rows
Args:
cell_map: See :py:meth:`Table.click_cells`
set_to: Select if `True`, deselect if `False`
"""
failed_selects = []
for header, values in cell_map.items():
if isinstance(values, basestring):
values = [values]
for value in values:
res = self._set_row(header, value, set_to)
if not res:
failed_selects.append("{}:{}".format(header, value))
if failed_selects:
raise exceptions.NotAllCheckboxesFound(failed_selects)
def select_rows(self, cell_map):
"""Select multiple rows
Args:
cell_map: See :py:meth:`Table.click_cells`
Raises:
NotAllCheckboxesFound: If some cells were unable to be found
"""
self._set_rows(cell_map, True)
def deselect_rows(self, cell_map):
"""Deselect multiple rows
Args:
cell_map: See :py:meth:`Table.click_cells`
Raises:
NotAllCheckboxesFound: If some cells were unable to be found
"""
self._set_rows(cell_map, False)
def _set_row_by_cells(self, cells, set_to=False, partial_check=False):
row = self.find_row_by_cells(cells, partial_check=partial_check)
self._set_row_checkbox(row, set_to)
def select_row_by_cells(self, cells, partial_check=False):
"""Select the first row matched by ``cells``
Args:
cells: See :py:meth:`Table.find_rows_by_cells`
"""
self._set_row_by_cells(cells, True, partial_check)
def deselect_row_by_cells(self, cells, partial_check=False):
"""Deselect the first row matched by ``cells``
Args:
cells: See :py:meth:`Table.find_rows_by_cells`
"""
self._set_row_by_cells(cells, False, partial_check)
def _set_rows_by_cells(self, cells, set_to=False, partial_check=False):
rows = self.find_rows_by_cells(cells)
for row in rows:
self._set_row_checkbox(row, set_to)
def select_rows_by_cells(self, cells, partial_check=False):
"""Select the rows matched by ``cells``
Args:
cells: See :py:meth:`Table.find_rows_by_cells`
"""
self._set_rows_by_cells(cells, True, partial_check)
def deselect_rows_by_cells(self, cells, partial_check=False):
"""Deselect the rows matched by ``cells``
Args:
cells: See :py:meth:`Table.find_rows_by_cells`
"""
self._set_rows_by_cells(cells, False, partial_check)
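# Illustrative sketch of driving a CheckboxTable (not part of the original module); the
# locator and cell values below are hypothetical, any table with row checkboxes applies.
#
#     chk_table = CheckboxTable("//div[@id='records_div']//table")
#     chk_table.select_row("Name", "vm-001")                 # single row by header/value
#     chk_table.select_rows({"Name": ["vm-002", "vm-003"]})  # several rows; raises
#                                                            # NotAllCheckboxesFound on misses
#     chk_table.deselect_all()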
class SplitCheckboxTable(SplitTable, CheckboxTable):
""":py:class:`SplitTable` with support for checkboxes
Args:
header_data: See :py:class:`cfme.web_ui.SplitTable`
body_data: See :py:class:`cfme.web_ui.SplitTable`
header_checkbox_locator: See :py:class:`cfme.web_ui.CheckboxTable`
body_checkbox_locator: See :py:class:`cfme.web_ui.CheckboxTable`
header_offset: See :py:class:`cfme.web_ui.Table`
body_offset: See :py:class:`cfme.web_ui.Table`
"""
_checkbox_loc = './/img[contains(@src, "item_chk")]'
def __init__(self, header_data, body_data,
header_checkbox_locator=None, body_checkbox_locator=None):
# To limit multiple inheritance surprises, explicitly call out to SplitTable's __init__
SplitTable.__init__(self, header_data, body_data)
# ...then set up CheckboxTable's locators here
self._header_checkbox_loc = header_checkbox_locator
if body_checkbox_locator:
self._checkbox_loc = body_checkbox_locator
class PagedTable(Table):
""":py:class:`Table` with support for paginator
Args:
table_locator: See :py:class:`cfme.web_ui.Table`
header_offset: See :py:class:`cfme.web_ui.Table`
body_offset: See :py:class:`cfme.web_ui.Table`
"""
def find_row_on_all_pages(self, header, value):
from cfme.web_ui import paginator
for _ in paginator.pages():
sel.wait_for_element(self)
row = self.find_row(header, value)
if row is not None:
return row
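# Illustrative sketch (assumed usage, hypothetical locator): find_row_on_all_pages walks
# every page via the paginator until the row turns up, and returns None if no page has it.
#
#     paged = PagedTable("//div[@id='list_grid']//table")
#     row = paged.find_row_on_all_pages("Name", "my-template")
#     if row is not None:
#         do_something_with(row)   # hypothetical follow-up action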
class SplitPagedTable(SplitTable, PagedTable):
""":py:class:`SplitTable` with support for paginator
Args:
header_data: See :py:class:`cfme.web_ui.SplitTable`
body_data: See :py:class:`cfme.web_ui.SplitTable`
header_offset: See :py:class:`cfme.web_ui.Table`
body_offset: See :py:class:`cfme.web_ui.Table`
"""
def __init__(self, header_data, body_data):
# To limit multiple inheritance surprises, explicitly call out to SplitTable's __init__
SplitTable.__init__(self, header_data, body_data)
def table_in_object(table_title):
"""If you want to point to tables inside object view, this is what you want to use.
Works both on down- and upstream.
Args:
        table_title: Text in the `p` or `h3` element preceding the table
Returns: XPath locator for the desired table.
"""
return ("//table[(preceding-sibling::p[1] | preceding-sibling::h3[1])[normalize-space(.)={}]]"
.format(quoteattr(table_title)))
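# Illustrative sketch: table_in_object builds an XPath for a titled sub-table on a details
# page, which can then be wrapped in a Table. The title below is hypothetical.
#
#     rel_table = Table(table_in_object("Relationships"))
#     row = rel_table.find_row("Name", "Infrastructure Provider")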
@multimethod(lambda loc, value: (sel.tag(loc), sel.get_attribute(loc, 'type')))
def fill_tag(loc, value):
""" Return a tuple of function to do the filling, and a value to log."""
raise NotImplementedError("Don't know how to fill {} into this type: {}".format(value, loc))
@fill_tag.method(("select", Anything))
def fill_select_tag(select, value):
return (sel.select, value)
@fill_tag.method((Anything, 'text'))
@fill_tag.method((Anything, 'textarea'))
def fill_text(textbox, val):
return (sel.set_text, val)
@fill_tag.method((Anything, 'password'))
def fill_password(pwbox, password):
return (sel.set_text, "********")
@fill_tag.method(('a', Anything))
@fill_tag.method(('img', Anything))
@fill_tag.method((Anything, 'image'))
@fill_tag.method((Anything, 'submit'))
def fill_click(el, val):
"""Click only when given a truthy value"""
def click_if(e, v):
if v:
sel.click(e)
return (click_if, val)
@fill_tag.method((Anything, 'file'))
def fill_file(fd, val):
return (sel.send_keys, val)
@fill_tag.method((Anything, 'checkbox'))
def fill_checkbox(cb, val):
return (sel.checkbox, bool(val))
@multidispatch
def fill(loc, content, **kwargs):
"""
Fills in a UI component with the given content.
Usage:
fill(textbox, "text to fill")
fill(myform, [ ... data to fill ...])
fill(radio, "choice to select")
Returns: True if any UI action was taken, False otherwise
"""
action, logval = fill_tag(loc, content)
if hasattr(loc, 'name'):
ident = loc.name
else:
ident = loc
logger.debug(' Filling in [%s], with value %s', ident, logval)
prev_state = action(loc, content)
sel.detect_observed_field(loc)
return prev_state
@fill.method((Mapping, Anything))
def _version_pick(m, a, **kwargs):
return fill(version.pick(m), a, **kwargs)
@fill.method((Table, Mapping))
def _sd_fill_table(table, cells):
""" How to fill a table with a value (by selecting the value as cells in the table)
See Table.click_cells
"""
table._update_cache()
logger.debug(' Clicking Table cell')
table.click_cells(cells)
return bool(cells)
@fill.method((CheckboxTable, object))
def _sd_fill_checkboxtable(table, cells):
""" How to fill a checkboxtable with a value (by selecting the right rows)
See CheckboxTable.select_by_cells
"""
table._update_cache()
logger.debug(' Selecting CheckboxTable row')
table.select_rows(cells)
return bool(cells)
@fill.method((Callable, object))
def fill_callable(f, val):
"""Fill in a Callable by just calling it with the value, allow for arbitrary actions"""
return f(val)
@fill.method((Select, types.NoneType))
@fill.method((Select, object))
def fill_select(slist, val):
logger.debug(' Filling in {} with value {}'.format(str(slist), val))
prev_sel = sel.select(slist, val)
slist.observer_wait()
return prev_sel
class Calendar(Pretty):
"""A CFME calendar form field
    Calendar fields are readonly, and managed by the dhtmlxCalendar widget. A Calendar field
will accept any object that can be coerced into a string, but the value may not match the format
expected by dhtmlxCalendar or CFME. For best results, either a ``datetime.date`` or
``datetime.datetime`` object should be used to create a valid date field.
Args:
name: "name" property of the readonly calendar field.
Usage:
calendar = web_ui.Calendar("miq_date_1")
web_ui.fill(calendar, date(2000, 1, 1))
web_ui.fill(calendar, '1/1/2001')
"""
def __init__(self, name):
self.name = name
def locate(self):
return sel.move_to_element(Input(self.name))
@fill.method((Calendar, object))
def _sd_fill_date(calendar, value):
input = sel.element(calendar)
if isinstance(value, date):
date_str = '{}/{}/{}'.format(value.month, value.day, value.year)
else:
date_str = str(value)
# need to write to a readonly field: resort to evil
if sel.get_attribute(input, 'ng-model') is not None:
sel.set_angularjs_value(input, date_str)
else:
sel.set_attribute(input, "value", date_str)
# Now when we set the value, we need to simulate a change event.
if sel.get_attribute(input, "data-date-autoclose"):
# New one
script = "$(\"#{}\").trigger('changeDate');"
else:
# Old one
script = (
"if(typeof $j == 'undefined') {var jq = $;} else {var jq = $j;} "
"jq(\"#{}\").change();")
try:
sel.execute_script(script.format(calendar.name))
except sel_exceptions.WebDriverException as e:
logger.warning(
"An exception was raised during handling of the Cal #{}'s change event:\n{}"
.format(calendar.name, str(e)))
sel.wait_for_ajax()
return True
@fill.method((object, types.NoneType))
@fill.method((types.NoneType, object))
def _sd_fill_none(*args, **kwargs):
""" Ignore a NoneType """
pass
class Form(Region):
"""
A class for interacting with Form elements on pages.
The Form class takes a set of locators and binds them together to create a
unified Form object. This Form object has a defined field order so that the
user does not have to worry about which order the information is provided.
This enables the data to be provided as a dict meaning it can be passed directly
from yamls. It inherits the base Region class, meaning that locators can still be
referenced in the same way a Region's locators can. You can also add one more field which will
be a :py:class:`dict` of metadata, determining mostly field validity. See :py:meth:`field_valid`
Args:
fields: A list of field name/locator tuples. The argument not only defines
the order of the elements but also which elements comprise part of the form.
identifying_loc: A locator which should be present if the form is visible.
Usage:
provider_form = web_ui.Form(
fields=[
('type_select', "//*[@id='server_emstype']"),
('name_text', "//*[@id='name']"),
('hostname_text', "//*[@id='hostname']"),
('ipaddress_text', "//*[@id='ipaddress']"),
('amazon_region_select', "//*[@id='hostname']"),
('api_port', "//*[@id='port']"),
])
Forms can then be filled in like so.::
provider_info = {
'type_select': "OpenStack",
'name_text': "RHOS-01",
'hostname_text': "RHOS-01",
'ipaddress_text': "10.0.0.0",
'api_port': "5000",
}
web_ui.fill(provider_form, provider_info)
Note:
Using supertuples in a list, although ordered due to the properties of a List,
        will not override the field order defined in the Form.
"""
pretty_attrs = ['fields']
def __init__(self, fields=None, identifying_loc=None):
self.metadata = {}
self.locators = {}
for field in fields:
try:
self.locators[field[0]] = field[1]
if len(field) == 3:
self.metadata[field[0]] = field[2]
except IndexError:
raise ValueError("fields= can be 2- or 3-tuples only! (name, loc[, metadata])")
self.fields = fields
self.identifying_loc = identifying_loc
def field_valid(self, field_name):
"""Add the validity constraints here."""
if field_name not in self.metadata:
return True
metadata = self.metadata[field_name]
if "removed_since" in metadata:
removed_since = metadata["removed_since"]
return version.current_version() < removed_since
if "appeared_in" in metadata:
appeared_in = metadata["appeared_in"]
return version.current_version() >= appeared_in
return True
def fill(self, fill_data):
fill(self, fill_data)
@fill.method((Form, Sequence))
def _fill_form_list(form, values, action=None, action_always=False):
"""Fills in field elements on forms
Takes a set of values in dict or supertuple format and locates form elements,
in the correct order, and fills them in.
Note:
Currently supports, text, textarea, select, checkbox, radio, password, a
and Table objects/elements.
Args:
values: a dict or supertuple formatted set of data where
each key is the name of the form locator from the page model. Some
objects/elements, such as :py:class:`Table` objects, support providing
multiple values to be clicked on in a single call.
action: a locator which will be clicked when the form filling is complete
action_always: if True, perform the action even if none of the
values to be filled in required any UI
interaction (eg, text boxes already had the
text to be filled in, checkbox already checked,
etc)
"""
logger.info('Beginning to fill in form...')
sel.wait_for_ajax()
values = list(val for key in form.fields for val in values if val[0] == key[0])
res = []
for field, value in values:
if value is not None and form.field_valid(field):
loc = form.locators[field]
logger.trace(' Dispatching fill for %s', field)
fill_prev = fill(loc, value) # re-dispatch to fill for each item
res.append(fill_prev != value) # note whether anything changed
elif value is None and isinstance(form.locators[field], Select):
fill_prev = fill(form.locators[field], None)
res.append(fill_prev != value)
else:
res.append(False)
if action and (any(res) or action_always): # only perform action if something changed
logger.debug(' Invoking end of form action')
fill(action, True) # re-dispatch with truthy value
logger.debug('Finished filling in form')
return any(res) or action_always
@fill.method((object, Mapping))
def _fill_form_dict(form, values, **kwargs):
"""Fill in a dict by converting it to a list"""
return _fill_form_list(form, values.items(), **kwargs)
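# Illustrative sketch of form filling through the fill() dispatcher (locators and values
# are hypothetical). A 3-tuple field carries metadata consumed by Form.field_valid, so a
# field marked removed_since/appeared_in is silently skipped on versions that lack it.
#
#     login_form = Form(fields=[
#         ('username', Input('user_name')),
#         ('password', Input('user_password')),
#         ('remember', Input('remember_me'), {'appeared_in': '5.5'}),
#     ])
#     fill(login_form, {'username': 'admin', 'password': 'smartvm'},
#          action='//button[@id="login"]')   # action is clicked only if something changed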
class Input(Pretty):
"""Class designed to handle things about ``<input>`` tags that have name attr in one place.
Also applies on ``textarea``, which is basically input with multiple lines (if it has name).
Args:
*names: Possible values (or) of the ``name`` attribute.
Keywords:
use_id: Whether to use ``id`` instead of ``name``. Useful if there is some input that does
not have ``name`` attribute present.
"""
pretty_attrs = ['_names', '_use_id']
def __init__(self, *names, **kwargs):
self._names = names
self._use_id = kwargs.pop("use_id", False)
@property
def names(self):
if len(self._names) == 1 and isinstance(self._names[0], dict):
return (version.pick(self._names[0]),)
else:
return self._names
def _generate_attr(self, name):
return "@{}={}".format("id" if self._use_id else "name", quoteattr(name))
def locate(self):
# If the end of the locator is changed, modify also the choice in Radio!!!
return '//*[(self::input or self::textarea) and ({})]'.format(
" or ".join(self._generate_attr(name) for name in self.names)
)
@property
def angular_help_block(self):
"""Returns the angular helper text (like 'Required')."""
loc = "{}/following-sibling::span".format(self.locate())
if sel.is_displayed(loc):
return sel.text(loc).strip()
else:
return None
def __add__(self, string):
return self.locate() + string
def __radd__(self, string):
return string + self.locate()
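# Illustrative sketch of Input usage (hypothetical names). Multiple names are OR-ed into a
# single XPath, and a dict argument is version-picked, so one widget definition can span
# several releases.
#
#     host_field = Input('hostname', 'host_name')            # matches either name attribute
#     port_field = Input({version.LOWEST: 'port', '5.5': 'api_port'})
#     fill(host_field, 'example.redhat.com')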
class FileInput(Input):
"""A file input handling widget.
Accepts a string. If the string is a file, then it is put in the input. Otherwise a temporary
file is generated and that one is fed to the file input.
"""
pass
@fill.method((FileInput, Anything))
def _fill_file_input(i, a):
# Engage the selenium's file detector so we can reliably transfer the file to the browser
with browser().file_detector_context(LocalFileDetector):
# We need a raw element so we can send_keys to it
input_el = sel.element(i.locate())
if browser().file_detector.is_local_file(a) is None:
# Create a temp file
f = NamedTemporaryFile()
f.write(str(a))
f.flush()
input_el.send_keys(f.name)
atexit.register(f.close)
else:
# It already is a file ...
input_el.send_keys(a)
# Since we used raw selenium element, wait for ajax here ...
sel.wait_for_ajax()
class Radio(Input):
""" A class for Radio button groups
Radio allows the usage of HTML radio elements without resorting to previous
practice of iterating over elements to find the value. The name of the radio
group is passed and then when choices are required, the locator is built.
Args:
name: The HTML elements ``name`` attribute that identifies a group of radio
buttons.
Usage:
radio = Radio("schedule__schedule_type")
A specific radio element can then be returned by running the following::
el = radio.choice('immediately')
click(el)
The :py:class:`Radio` object can be reused over and over with repeated calls to
the :py:func:`Radio.choice` method.
"""
def choice(self, val):
""" Returns the locator for a choice
Args:
val: A string representing the ``value`` attribute of the specific radio
element.
Returns: A string containing the XPATH of the specific radio element.
"""
# Ugly, but working - all the conditions are in parentheses
return re.sub(r"\]$", " and @value={}]".format(quoteattr(val)), self.locate())
def observer_wait(self, val):
sel.detect_observed_field(self.choice(val))
@fill.method((Radio, object))
def _fill_radio(radio, value):
"""How to fill a radio button group (by selecting the given value)"""
logger.debug(' Filling in Radio{} with value "{}"'.format(tuple(radio.names), value))
sel.click(radio.choice(value))
radio.observer_wait(value)
class Tree(Pretty):
""" A class directed at CFME Tree elements
The Tree class aims to deal with all kinds of CFME trees
Args:
locator: This is a locator object pointing to the ``<ul>`` element which contains the rest
of the table.
Returns: A :py:class:`Tree` object.
A Tree object is set up by using a locator which contains the node elements. This element
will usually be a ``<ul>`` in the case of a Dynatree.
Usage:
tree = web_ui.Tree((By.XPATH, '//table//tr[@title="Datastore"]/../..'))
The path can then be navigated to return the last object in the path list, like so::
tree.click_path('Automation', 'VM Lifecycle Management (VMLifecycle)',
'VM Migrate (Migrate)')
Each path element will be expanded along the way, but will not be clicked.
When used in a :py:class:`Form`, a list of path tuples is expected in the form fill data.
The paths will be passed individually to :py:meth:`Tree.check_node`::
form = Form(fields=[
('tree_field', List(locator)),
])
form_fill_data = {
'tree_field': [
('Tree Node', 'Value'),
('Tree Node', 'Branch Node', 'Value'),
]
]
    Note: Dynatrees rely on a ``<ul><li>`` setup. We class a ``<li>`` as a node.
"""
pretty_attrs = ['locator']
def __init__(self, locator):
self.locator = locator
@cached_property
def tree_id(self):
if isinstance(self.locator, basestring) and re.match(r"^[a-zA-Z0-9_-]+$", self.locator):
return self.locator
else:
el = sel.element(self.locator)
tag = sel.tag(el)
tree_id = None
if tag == "ul":
try:
parent = sel.element("..", root=el)
id_attr = sel.get_attribute(parent, "id")
if id_attr:
tree_id = id_attr
except sel.NoSuchElementException:
pass
elif tag == "div":
tree_id = sel.get_attribute(el, "id") or None
else:
raise ValueError("Unknown element ({}) passed to the Tree!".format(tag))
if tree_id is None:
raise ValueError("Could not retrieve the id for Tree {}".format(repr(tree_id)))
else:
return tree_id
def locate(self):
return "#{}".format(self.tree_id)
def root_el(self):
return sel.element(self)
def _get_tag(self):
if getattr(self, 'tag', None) is None:
self.tag = sel.tag(self)
return self.tag
def read_contents(self, by_id=False):
result = False
while result is False:
sel.wait_for_ajax()
result = sel.execute_script(
"{} return read_tree(arguments[0], arguments[1]);".format(js.read_tree),
self.locate(),
by_id)
return result
def expand_path(self, *path, **kwargs):
""" Exposes a path.
Args:
*path: The path as multiple positional string arguments denoting the course to take.
Keywords:
by_id: Whether to match ids instead of text.
Returns: The leaf web element.
"""
by_id = kwargs.pop("by_id", False)
result = False
# Ensure we pass str to the javascript. This handles objects that represent themselves
# using __str__ and generally, you should only pass str because that is what makes sense
path = castmap(str, path)
# We sometimes have to wait for ajax. In that case, JS function returns false
# Then we repeat and wait. It does not seem completely possible to wait for the data in JS
# as it runs on one thread it appears. So this way it will try to drill multiple times
# each time deeper and deeper :)
while result is False:
sel.wait_for_ajax()
try:
result = sel.execute_script(
"{} return find_leaf(arguments[0],arguments[1],arguments[2]);".format(
js.find_leaf),
self.locate(),
path,
by_id)
except sel.WebDriverException as e:
text = str(e)
match = re.search(r"TREEITEM /(.*?)/ NOT FOUND IN THE TREE", text)
if match is not None:
item = match.groups()[0]
raise exceptions.CandidateNotFound(
{'message': "{}: could not be found in the tree.".format(item),
'path': path,
'cause': e})
match = re.search(r"^CANNOT FIND TREE /(.*?)/$", text)
if match is not None:
tree_id = match.groups()[0]
raise exceptions.TreeNotFound(
"Tree {} / {} not found.".format(tree_id, self.locator))
# Otherwise ...
raise
return result
def click_path(self, *path, **kwargs):
""" Exposes a path and then clicks it.
Args:
*path: The path as multiple positional string arguments denoting the course to take.
Keywords:
by_id: Whether to match ids instead of text.
Returns: The leaf web element.
"""
# Ensure we pass str to the javascript. This handles objects that represent themselves
# using __str__ and generally, you should only pass str because that is what makes sense
path = castmap(str, path)
leaf = self.expand_path(*path, **kwargs)
logger.info("Path %r yielded menuitem %r", path, sel.text(leaf))
if leaf is not None:
sel.wait_for_ajax()
sel.click(leaf)
return leaf
@classmethod
def browse(cls, tree, *path):
"""Browse through tree via path.
If node not found, raises exception.
        If the browsing reached a leaf (str), returns True if it was also the last step, otherwise False.
If the result of the path is a subtree, it is returned.
Args:
tree: List with tree.
*path: Path to browse.
"""
# Ensure we pass str to the javascript. This handles objects that represent themselves
# using __str__ and generally, you should only pass str because that is what makes sense
path = castmap(str, path)
current = tree
for i, step in enumerate(path, start=1):
for node in current:
if isinstance(node, list):
if node[0] == step:
current = node[1]
break
else:
if node == step:
return i == len(path)
else:
raise Exception("Could not find node {}".format(step))
return current
@classmethod
def flatten_level(cls, tree):
"""Extracts just node names from current tree (top).
It makes:
.. code-block:: python
["asd", "fgh", ("ijk", [...]), ("lmn", [...])]
to
.. code-block:: python
["asd", "fgh", "ijk", "lmn"]
Useful for checking of contents of current tree level
"""
return map(lambda item: item[0] if isinstance(item, list) else item, tree)
def find_path_to(self, target, exact=False):
""" Method used to look up the exact path to an item we know only by its regexp or partial
description.
Expands whole tree during the execution.
Args:
target: Item searched for. Can be regexp made by
:py:func:`re.compile <python:re.compile>`,
otherwise it is taken as a string for `in` matching.
exact: Useful in string matching. If set to True, it matches the exact string.
Default is False.
Returns: :py:class:`list` with path to that item.
"""
if not isinstance(target, re._pattern_type):
if exact:
target = re.compile(r"^{}$".format(re.escape(str(target))))
else:
target = re.compile(r".*?{}.*?".format(re.escape(str(target))))
def _find_in_tree(t, p=None):
if p is None:
p = []
for item in t:
if isinstance(item, list):
if target.match(item[0]) is None:
subtree = _find_in_tree(item[1], p + [item[0]])
if subtree is not None:
return subtree
else:
return p + [item[0]]
else:
if target.match(item) is not None:
return p + [item]
else:
return None
result = _find_in_tree(self.read_contents())
if result is None:
raise NameError("{} not found in tree".format(target.pattern))
else:
return result
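# Illustrative sketch of Tree navigation (tree locator and node names are hypothetical).
#
#     accordion_tree = Tree('//ul[@class="dynatree-container"]')
#     accordion_tree.click_path('Datastore', 'ManageIQ (Locked)', 'System')
#     contents = accordion_tree.read_contents()               # nested [name, children] lists
#     path = accordion_tree.find_path_to(re.compile(r'^System$'))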
class CheckboxTree(Tree):
"""Tree that has a checkbox on each node, adds methods to check/uncheck them"""
node_checkbox = "../span[@class='dynatree-checkbox']"
def _is_checked(self, leaf):
return 'dynatree-selected' in \
sel.get_attribute(sel.element("..", root=leaf), 'class')
def _check_uncheck_node(self, path, check=False):
""" Checks or unchecks a node.
Args:
*path: The path as multiple positional string arguments denoting the course to take.
check: If ``True``, the node is checked, ``False`` the node is unchecked.
"""
leaf = self.expand_path(*path)
cb = sel.element(self.node_checkbox, root=leaf)
if check is not self._is_checked(leaf):
sel.click(cb)
def check_node(self, *path):
""" Convenience function to check a node
Args:
*path: The path as multiple positional string arguments denoting the course to take.
"""
self._check_uncheck_node(path, check=True)
def uncheck_node(self, *path):
""" Convenience function to uncheck a node
Args:
*path: The path as multiple positional string arguments denoting the course to take.
"""
self._check_uncheck_node(path, check=False)
@fill.method((Tree, Sequence))
def _fill_tree_seq(tree, values):
tree.click_path(*values)
@sel.select.method((CheckboxTree, Sequence))
@fill.method((CheckboxTree, Sequence))
def _select_chkboxtree_seq(cbtree, values):
"""values should be a list of tuple pairs, where the first item is the
path to select, and the second is whether to check or uncheck.
Usage:
select(cbtree, [(['Foo', 'Bar'], False),
(['Baz'], True)])
"""
for (path, to_select) in values:
if to_select:
cbtree.check_node(*path)
else:
cbtree.uncheck_node(*path)
class InfoBlock(Pretty):
DETAIL = "detail"
FORM = "form"
PF = "patternfly"
_TITLE_CACHE = {}
pretty_attrs = ["title"]
def __new__(cls, title, detail=None):
# Caching
if title not in cls._TITLE_CACHE:
cls._TITLE_CACHE[title] = super(InfoBlock, cls).__new__(cls)
cls._TITLE_CACHE[title].__init__(title)
instance = cls._TITLE_CACHE[title]
if detail is None:
return instance
else:
return instance.member(detail)
def __init__(self, title):
if all(map(lambda a: hasattr(self, a), ["title", "_type", "_member_cache"])):
return
self.title = title
self._type = None
self._member_cache = {}
@property
def type(self):
if self._type is None:
self.root # To retrieve it
return self._type
@property
def root(self):
possible_locators = [
# Detail type
version.pick({
'5.3': '//table//th[contains(normalize-space(.), "{}")]/../../../..'.format(
self.title),
version.LOWEST:
'//div[@class="modbox"]/h2[@class="modtitle"]'
'[contains(normalize-space(.), "{}")]/..'.format(self.title)
}),
# Form type
(
'//*[p[@class="legend"][contains(normalize-space(.), "{}")] and table/tbody/tr/td['
'contains(@class, "key")]]'.format(self.title)
),
# Newer Form type (master.20150311020845_547fd06 onwards)
(
'//*[h3[contains(normalize-space(.), "{}")] and table/tbody/tr/td['
'contains(@class, "key")]]'.format(self.title)
),
# Newer Form type used in AC tagging:
(
'//h3[contains(normalize-space(.), "{}")]/following-sibling::div/table/tbody/tr/td['
'contains(@class, "key")]/../../../..'.format(self.title)
),
# The root element must contain table element because listaccordions were caught by the
# locator. It used to be fieldset but it seems it can be really anything
# And here comes a new one, this time no table. (eg. 5.5.0.7 Configuration/About)
(
'//*[h3[contains(normalize-space(.), "{}")] and '
'div[contains(@class, "form-horizontal")]/div/label]'.format(self.title)
)
]
found = sel.elements("|".join(possible_locators))
if not found:
raise exceptions.BlockTypeUnknown("The block type requested is unknown")
root_el = found[0]
if sel.elements("./table/tbody/tr/td[contains(@class, 'key')]", root=root_el):
self._type = self.FORM
elif sel.elements("./div[contains(@class, 'form-horizontal')]/div/label", root=root_el):
self._type = self.PF
else:
self._type = self.DETAIL
return root_el
def member(self, name):
if name not in self._member_cache:
self._member_cache[name] = self.Member(self, name)
return self._member_cache[name]
def by_member_icon(self, icon):
"""In case you want to find the item by icon in the value field (like OS infra diff.)"""
if self._type == self.PF:
raise NotImplementedError(
"I haven't implemented icons+patternfly infoblock yet, so fix me if you see this.")
l = ".//table/tbody/tr/td[2]/img[contains(@src, {})]/../../td[1]".format(quoteattr(icon))
return self.member(sel.text(l))
def __call__(self, member):
"""A present for @smyers"""
return self.member(member)
##
#
# Shortcuts for old-style access
#
@classmethod
def text(cls, *args, **kwargs):
return cls(*args, **kwargs).text
@classmethod
def element(cls, *args, **kwargs):
return cls(*args, **kwargs).element
@classmethod
def elements(cls, *args, **kwargs):
return cls(*args, **kwargs).elements
@classmethod
def icon_href(cls, *args, **kwargs):
return cls(*args, **kwargs).icon_href
@classmethod
    def container(cls, *args, **kwargs):
try:
return sel.element(cls(*args, **kwargs).container)
except sel_exceptions.NoSuchElementException:
raise exceptions.ElementOrBlockNotFound(
"Either the element of the block could not be found")
class Member(Pretty):
pretty_attrs = "name", "ib"
def __init__(self, ib, name):
self.ib = ib
self.name = name
@property
def pair_locator(self):
if self.ib.type == InfoBlock.DETAIL:
return './/table/tbody/tr/td[1][@class="label"][normalize-space(.)="{}"]/..'.format(
self.name)
elif self.ib.type == InfoBlock.FORM:
return './/table/tbody/tr/td[1][@class="key"][normalize-space(.)="{}"]/..'.format(
self.name)
elif self.ib.type == InfoBlock.PF:
return (
'./div[contains(@class, "form-horizontal")]'
'/div[label[normalize-space(.)="{}"]]/div'.format(self.name))
@property
def pair(self):
return sel.element(self.pair_locator, root=self.ib.root)
@property
def container(self):
if self.ib.type == InfoBlock.PF:
# Because we get the element directly, not the two tds
return self.pair
else:
return sel.element("./td[2]", root=self.pair)
def locate(self):
return self.container
@property
def elements(self):
return sel.elements("./*", root=self.container)
@property
def element(self):
return self.elements[0]
@property
def text(self):
return sel.text(self.container).encode("utf-8").strip()
@property
def icon_href(self):
try:
return sel.get_attribute(sel.element("./img", root=self.container), "src")
except sel_exceptions.NoSuchElementException:
return None
@property
def title(self):
return sel.get_attribute(self.pair, "title") or None
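# Illustrative sketch of InfoBlock access (block/member titles are hypothetical). The
# constructor caches instances per title, and passing a member name returns a Member whose
# text/element/icon_href can then be inspected.
#
#     power_state = InfoBlock("Power Management", "Power State").text
#     # or, via the old-style shortcut:
#     power_state = InfoBlock.text("Power Management", "Power State")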
@fill.method((InfoBlock, Sequence))
def _ib_seq(ib, i):
for item in i:
sel.click(ib.member(item))
@fill.method((InfoBlock, basestring))
def _ib_str(ib, s):
    fill(ib, [s])
@fill.method((InfoBlock.Member, bool))
def _ib_m_seq(member, b):
if b:
sel.click(member)
class Quadicon(Pretty):
"""
Represents a single quadruple icon in the CFME UI.
A Quadicon contains multiple quadrants. These are accessed via attributes.
The qtype is currently one of the following and determines which attribute names
are present. They are mapped internally and can be reassigned easily if the UI changes.
A Quadicon is used by defining the name of the icon and the type. After that, it can be used
to obtain the locator of the Quadicon, or query its quadrants, via attributes.
Args:
name: The label of the icon.
qtype: The type of the quad icon. By default it is ``None``, therefore plain quad without any
retrievable data usable for selecting/clicking.
Usage:
qi = web_ui.Quadicon('hostname.local', 'host')
qi.creds
click(qi)
.. rubric:: Known Quadicon Types and Attributes
* **host** - *from the infra/host page* - has quads:
* a. **no_vm** - Number of VMs
* b. **state** - The current state of the host
* c. **vendor** - The vendor of the host
* d. **creds** - If the creds are valid
* **infra_prov** - *from the infra/providers page* - has quads:
* a. **no_host** - Number of hosts
* b. *Blank*
* c. **vendor** - The vendor of the provider
* d. **creds** - If the creds are valid
* **vm** - *from the infra/virtual_machines page* - has quads:
* a. **os** - The OS of the vm
* b. **state** - The current state of the vm
* c. **vendor** - The vendor of the vm's host
* d. **no_snapshot** - The number of snapshots
* g. **policy** - The state of the policy
* **cloud_prov** - *from the cloud/providers page* - has quads:
* a. **no_instance** - Number of instances
* b. **no_image** - Number of machine images
* c. **vendor** - The vendor of the provider
* d. **creds** - If the creds are valid
* **instance** - *from the cloud/instances page* - has quads:
* a. **os** - The OS of the instance
* b. **state** - The current state of the instance
* c. **vendor** - The vendor of the instance's host
* d. **no_snapshot** - The number of snapshots
* g. **policy** - The state of the policy
* **datastore** - *from the infra/datastores page* - has quads:
* a. **type** - File system type
* b. **no_vm** - Number of VMs
* c. **no_host** - Number of hosts
* d. **avail_space** - Available space
* **repository** - *from the infra/repositories page* - has no quads
* **cluster** - *from the infra/cluster page* - has no quads
* **resource_pool** - *from the infra/resource_pool page* - has no quads
* **stack** - *from the clouds/stacks page* - has no quads
Returns: A :py:class:`Quadicon` object.
"""
pretty_attrs = ['_name', '_qtype']
QUADS = {
"host": {
"no_vm": ("a", 'txt'),
"state": ("b", 'img'),
"vendor": ("c", 'img'),
"creds": ("d", 'img'),
},
"infra_prov": {
"no_host": ("a", 'txt'),
"vendor": ("c", 'img'),
"creds": ("d", 'img'),
},
"vm": {
"os": ("a", 'img'),
"state": ("b", 'img'),
"vendor": ("c", 'img'),
"no_snapshot": ("d", 'txt'),
"policy": ("g", 'img'),
},
"cloud_prov": {
"no_vm": ("a", 'txt'),
"no_image": ("b", 'txt'),
"vendor": ("b", 'img'),
"creds": ("d", 'img'),
},
"instance": {
"os": ("a", 'img'),
"state": ("b", 'img'),
"vendor": ("c", 'img'),
"no_snapshot": ("d", 'txt'),
"policy": ("g", 'img'),
},
"stack": {},
"datastore": {
"type": ("a", 'img'),
"no_vm": ("b", 'txt'),
"no_host": ("c", 'txt'),
"avail_space": ("d", 'img'),
},
"cluster": {},
"repository": {},
"resource_pool": {},
"template": {
"os": ("a", 'img'),
"state": ("b", 'img'),
"vendor": ("c", 'img'),
"no_snapshot": ("d", 'txt'),
},
"image": {
"os": ("a", 'img'),
"state": ("b", 'img'),
"vendor": ("c", 'img'),
"no_snapshot": ("d", 'txt'),
},
"middleware": {}, # Middleware quads have no fields
None: {}, # If you just want to find the quad and not mess with data
}
def __init__(self, name, qtype=None):
self._name = name
self.qtype = qtype
@property
def qtype(self):
return self._qtype
@qtype.setter
def qtype(self, value):
assert value in self.QUADS
self._qtype = value
@property
def _quad_data(self):
return self.QUADS[self.qtype]
def checkbox(self):
""" Returns: a locator for the internal checkbox for the quadicon"""
return "//input[@type='checkbox' and ../../..//a[{}]]".format(self.a_cond)
@property
def exists(self):
try:
self.locate()
return True
except sel.NoSuchElementException:
return False
@property
def a_cond(self):
return "@title={name} or @data-original-title={name}".format(name=quoteattr(self._name))
def locate(self):
""" Returns: a locator for the quadicon anchor"""
try:
return sel.move_to_element(
'div/a',
root="//div[contains(@id, 'quadicon') and ../../..//a[{}]]".format(self.a_cond))
except sel.NoSuchElementException:
quads = sel.elements("//div[contains(@id, 'quadicon')]/../../../tr/td/a")
if not quads:
raise sel.NoSuchElementException("Quadicon {} not found. No quads present".format(
self._name))
else:
quad_names = [self._get_title(quad) for quad in quads]
raise sel.NoSuchElementException(
"Quadicon {} not found. These quads are present:\n{}".format(
self._name, ", ".join(quad_names)))
def _locate_quadrant(self, corner):
""" Returns: a locator for the specific quadrant"""
return "//div[contains(@class, {}) and ../../../..//a[{}]]".format(
quoteattr("{}72".format(corner)), self.a_cond)
def __getattr__(self, name):
""" Queries the quadrants by name
Args:
name: The name of the quadrant identifier, as defined above.
Returns: A string containing a representation of what is in the quadrant.
"""
if name in self._quad_data:
corner, rtype = self._quad_data[name]
locator = self._locate_quadrant(corner)
# We have to have a try/except here as some quadrants
# do not exist if they have no data, e.g. current_state in a host
# with no credentials.
try:
el = sel.element(locator)
except sel_exceptions.NoSuchElementException:
return None
if rtype == 'txt':
return el.text
if rtype == 'img':
img_el = sel.element('.//img', root=el)
img_name = sel.get_attribute(img_el, 'src')
path, filename = os.path.split(img_name)
root, ext = os.path.splitext(filename)
return root
else:
return object.__getattribute__(self, name)
def __str__(self):
return self.locate()
@classmethod
def _get_title(cls, el):
title = sel.get_attribute(el, "title")
if title is not None:
return title
else:
return sel.get_attribute(el, "data-original-title")
@classmethod
def all(cls, qtype=None, this_page=False):
"""Allows iteration over Quadicons.
Args:
qtype: Quadicon type. Refer to the constructor for reference.
this_page: Whether to look for Quadicons only on current page (do not list pages).
Returns: :py:class:`list` of :py:class:`Quadicon`
"""
from cfme.web_ui import paginator # Prevent circular imports
if this_page:
            pages = (None, )  # Single, current page. Since we don't care about the value, using None
else:
pages = paginator.pages()
for page in pages:
for href in sel.elements("//div[contains(@id, 'quadicon')]/../../../tr/td/a"):
yield cls(cls._get_title(href), qtype)
@classmethod
def first(cls, qtype=None):
return cls(cls.get_first_quad_title(), qtype=qtype)
@staticmethod
def select_first_quad():
fill("//div[contains(@id, 'quadicon')]/../..//input", True)
@staticmethod
def get_first_quad_title():
first_quad = "//div[contains(@id, 'quadicon')]/../../../tr/td/a"
title = sel.get_attribute(first_quad, "title")
if title:
return title
else:
return sel.get_attribute(first_quad, "data-original-title") or "" # To ensure str
@classmethod
def any_present(cls):
try:
cls.get_first_quad_title()
except NoSuchElementException:
return False
except AttributeError:
# This is needed so that if there is no browser, we fail nicely, this in turn is
# needed to make the docs not error.
return False
else:
return True
@property
def name(self):
""" Returns name of the quadicon."""
return self._name
@property
def check_for_single_quadrant_icon(self):
""" Checks if the quad icon is a single quadrant icon."""
for quadrant_name in self._quad_data.iterkeys():
# These quadrant will be displayed if it is a regular quad
quadrant_id = self._quad_data[quadrant_name][0] # It is a tuple
if sel.is_displayed(self._locate_quadrant(quadrant_id)):
return False
return sel.is_displayed(self._locate_quadrant("e")) # Image has only 'e'
class DHTMLSelect(Select):
"""
A special Select object for CFME's icon enhanced DHTMLx Select elements.
Args:
loc: A locator.
Returns a :py:class:`cfme.web_ui.DHTMLSelect` object.
"""
@staticmethod
def _log(meth, val=None):
        val_string = " with value {}".format(val) if val else ""
        logger.debug('Filling in DHTMLSelect using (%s)%s', meth, val_string)
def _get_select_name(self):
""" Get's the name reference of the element from its hidden attribute.
"""
root_el = sel.element(self)
el = sel.element("div/input[2]", root=root_el)
name = sel.get_attribute(el, 'name')
return name
@property
def all_selected_options(self):
""" Returns all selected options.
        Note: Since the DHTML select can only have one option selected at a time, we
            simply return the first element (the only element).
Returns: A Web element.
"""
return [self.first_selected_option]
@property
def first_selected_option(self):
""" Returns the first selected option in the DHTML select
Note: In a DHTML select, there is only one option selectable at a time.
Returns: A webelement.
"""
name = self._get_select_name()
return browser().execute_script(
            'return {}.getOptionByIndex({}.getSelectedIndex()).content'.format(name, name))
@property
def options(self):
""" Returns a list of options of the select as webelements.
Returns: A list of Webelements.
"""
name = self._get_select_name()
return browser().execute_script('return {}.DOMlist.children'.format(name))
def select_by_index(self, index, _cascade=None):
""" Selects an option by index.
Args:
index: The select element's option by index.
"""
name = self._get_select_name()
if index is not None:
if not _cascade:
self._log('index', index)
browser().execute_script('{}.selectOption({})'.format(name, index))
def select_by_visible_text(self, text):
""" Selects an option by visible text.
Args:
text: The select element option's visible text.
"""
name = self._get_select_name()
if text is not None:
self._log('visible_text', text)
value = browser().execute_script(
'return {}.getOptionByLabel("{}").value'.format(name, text))
self.select_by_value(value, _cascade=True)
def select_by_value(self, value, _cascade=None):
""" Selects an option by value.
Args:
value: The select element's option value.
"""
name = self._get_select_name()
if value is not None:
if not _cascade:
self._log('value', value)
index = browser().execute_script('return {}.getIndexByValue("{}")'.format(name, value))
self.select_by_index(index, _cascade=True)
def locate(self):
return sel.move_to_element(self._loc)
@sel.select.method((DHTMLSelect, basestring))
def select_dhtml(dhtml, s):
dhtml.select_by_visible_text(s)
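# Illustrative sketch of DHTMLSelect (hypothetical locator). It plugs into sel.select just
# like an ordinary Select, driving the underlying DHTMLX widget through its JS API.
#
#     role_select = DHTMLSelect("//div[@id='widget_select_div']")
#     sel.select(role_select, "Average")        # dispatches to select_by_visible_text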
class Filter(Form):
""" Filters requests pages
This class inherits Form as its base and adds a few methods to assist in filtering
request pages.
Usage:
f = Filter(fields=[
('type', Select('//select[@id="type_choice"]')),
('approved', Input("state_choice__approved")),
('denied', Input"state_choice__denied")),
('pending_approval', Input("state_choice__pending_approval")),
('date', Select('//select[@id="time_period"]')),
('reason', Input("reason_text")),
])
f.apply_filter(type="VM Clone", approved=False,
pending_approval=False, date="Last 24 Hours", reason="Just Because")
"""
buttons = {
'default_off': '//div[@id="buttons_off"]/li/a/img[@alt="Set filters to default"]',
'default_on': '//div[@id="buttons_on"]/li/a/img[@alt="Set filters to default"]',
'apply': '//div[@id="buttons_on"]//a[@title="Apply the selected filters"]',
'reset': '//div[@id="buttons_on"]//a[@title="Reset filter changes"]'
}
def default_filter(self):
""" Method to reset the filter back to defaults.
"""
sel.click(self.buttons['default_off'])
sel.click(self.buttons['default_on'])
def reset_filter(self):
""" Method to reset the changes to the filter since last applying.
"""
sel.click(self.buttons['reset'])
def apply_filter(self, **kwargs):
""" Method to apply a filter.
First resets the filter to default and then applies the filter.
Args:
**kwargs: A dictionary of form elements to fill and their values.
"""
self.default_filter()
self.fill(kwargs)
sel.click(self.buttons['apply'])
class MultiSelect(Region):
"""Represents a UI widget where there are two select boxes, one with
possible selections, and another with selected items. Has two
arrow buttons to move items between the two"""
def __init__(self,
available_select=None,
selected_select=None,
select_arrow=None,
deselect_arrow=None):
self.available_select = available_select
self.selected_select = selected_select
self.select_arrow = select_arrow
self.deselect_arrow = deselect_arrow
@sel.select.method((MultiSelect, Sequence))
def select_multiselect(ms, values):
sel.select(ms.available_select, values)
sel.click(ms.select_arrow)
@fill.method((MultiSelect, Sequence))
def fill_multiselect(ms, items):
sel.select(ms, items)
class UpDownSelect(Region):
"""Multiselect with two arrows (up/down) next to it. Eg. in AE/Domain priority selection.
Args:
select_loc: Locator for the select box (without Select element wrapping)
up_loc: Locator of the Move Up arrow.
down_loc: Locator with Move Down arrow.
"""
def __init__(self, select_loc, up_loc, down_loc):
super(UpDownSelect, self).__init__(locators=dict(
select=Select(select_loc, multi=True),
up=up_loc,
down=down_loc,
))
def get_items(self):
return map(lambda el: el.text.encode("utf-8"), self.select.options)
def move_up(self, item):
item = str(item)
assert item in self.get_items()
self.select.deselect_all()
sel.select(self.select, item)
sel.click(self.up)
def move_down(self, item):
item = str(item)
assert item in self.get_items()
self.select.deselect_all()
sel.select(self.select, item)
sel.click(self.down)
def move_top(self, item):
item = str(item)
assert item in self.get_items()
self.select.deselect_all()
while item != self.get_items()[0]:
sel.select(self.select, item)
sel.click(self.up)
def move_bottom(self, item):
item = str(item)
assert item in self.get_items()
self.select.deselect_all()
while item != self.get_items()[-1]:
sel.select(self.select, item)
sel.click(self.down)
@fill.method((UpDownSelect, Sequence))
def _fill_uds_seq(uds, seq):
seq = map(str, seq)
for item in reversed(seq): # reversed because every new item at top pushes others down
uds.move_top(item)
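# Illustrative sketch of UpDownSelect (hypothetical locators). Filling with a sequence
# re-orders the listed items so they end up at the top, in the given order.
#
#     priority = UpDownSelect(
#         "select#seq_fields",
#         "//img[@alt='Move selected fields up']",
#         "//img[@alt='Move selected fields down']")
#     fill(priority, ["Name", "Description"])   # "Name" ends up first, "Description" second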
class ScriptBox(Pretty):
"""Represents a script box as is present on the customization templates pages.
This box has to be activated before keys can be sent. Since this can't be done
until the box element is visible, and some dropdowns change the element, it must
be activated "inline".
    Args:
        name: Name of the JS editor object on the page; if not given, a version-appropriate
            default is picked (``miqEditor`` / ``ManageIQ.editor``).
        ta_locator: Locator of the textarea backing the script box.
    """
pretty_attrs = ['locator']
def __init__(self, name=None, ta_locator="//textarea[contains(@id, 'method_data')]"):
self._name = name
self.ta_loc = ta_locator
@property
def name(self):
if not self._name:
self._name = version.pick({
version.LOWEST: 'miqEditor',
'5.5': 'ManageIQ.editor'})
return self._name
def get_value(self):
script = sel.execute_script('return {}.getValue();'.format(self.name))
script = script.replace('\\"', '"').replace("\\n", "\n")
return script
def workaround_save_issue(self):
# We need to fire off the handlers manually in some cases ...
sel.execute_script(
"{}._handlers.change.map(function(handler) {{ handler() }});".format(self.name))
sel.wait_for_ajax()
@fill.method((ScriptBox, Anything))
def fill_scriptbox(sb, script):
"""This function now clears and sets the ScriptBox.
"""
logger.info("Filling ScriptBox {} with\n{}".format(sb.name, script))
sel.execute_script('{}.setValue(arguments[0]);'.format(sb.name), script)
sel.wait_for_ajax()
sel.execute_script('{}.save();'.format(sb.name))
sel.wait_for_ajax()
class CheckboxSelect(Pretty):
"""Class used for filling those bunches of checkboxes I (@mfalesni) always hated to search for.
Can fill by values, text or both. To search the text for the checkbox, you have 2 choices:
* If the text can be got from parent's tag (like `<div><input type="checkbox">blablabla</div>`
where blablabla is the checkbox's description looked up), you can leave the
`text_access_func` unfilled.
* If there is more complicated layout and you don't mind a bit slower operation, you can pass
the text_access_func, which should be like `lambda checkbox_el: get_text_of(checkbox_el)`.
The checkbox `WebElement` is passed to it and the description text is the expected output
of the function.
Args:
search_root: Root element for checkbox search
text_access_func: Function returning descriptive text about passed CB element.
"""
pretty_attrs = ['_root']
def __init__(self, search_root, text_access_func=None):
self._root = search_root
self._access_func = text_access_func
@property
def checkboxes(self):
"""All checkboxes."""
return set(sel.elements(".//input[@type='checkbox']", root=sel.element(self._root)))
@property
def selected_checkboxes(self):
"""Only selected checkboxes."""
return {cb for cb in self.checkboxes if cb.is_selected()}
@property
def selected_values(self):
"""Only selected checkboxes' values."""
return {sel.get_attribute(cb, "value") for cb in self.selected_checkboxes}
@property
def unselected_checkboxes(self):
"""Only unselected checkboxes."""
return {cb for cb in self.checkboxes if not cb.is_selected()}
@property
def unselected_values(self):
"""Only unselected checkboxes' values."""
return {sel.get_attribute(cb, "value") for cb in self.unselected_checkboxes}
def checkbox_by_id(self, id):
"""Find checkbox's WebElement by id."""
return sel.element(
".//input[@type='checkbox' and @id='{}']".format(id), root=sel.element(self._root)
)
def select_all(self):
"""Selects all checkboxes."""
for cb in self.unselected_checkboxes:
sel.check(cb)
def unselect_all(self):
"""Unselects all checkboxes."""
for cb in self.selected_checkboxes:
sel.uncheck(cb)
def checkbox_by_text(self, text):
"""Returns checkbox's WebElement by searched by its text."""
if self._access_func is not None:
for cb in self.checkboxes:
txt = self._access_func(cb)
if txt == text:
return cb
else:
raise NameError("Checkbox with text {} not found!".format(text))
else:
# Has to be only single
return sel.element(
".//*[contains(., '{}')]/input[@type='checkbox']".format(text),
root=sel.element(self._root)
)
def check(self, values):
"""Checking function.
Args:
values: Dictionary with key=CB name, value=bool with status.
Look in the function to see.
"""
for name, value in values.iteritems():
if isinstance(name, sel.ByText):
sel.checkbox(self.checkbox_by_text(str(name)), value)
else:
sel.checkbox(self.checkbox_by_id(name), value)
@fill.method((CheckboxSelect, bool))
def fill_cb_select_bool(select, all_state):
if all_state is True:
return select.select_all()
else:
return select.unselect_all()
@fill.method((CheckboxSelect, list))
@fill.method((CheckboxSelect, set))
def fill_cb_select_set(select, names):
return select.check({k: True for k in names})
@fill.method((CheckboxSelect, Mapping))
def fill_cb_select_dictlist(select, dictlist):
return select.check(dictlist)
@fill.method((CheckboxSelect, basestring))
@fill.method((CheckboxSelect, sel.ByText))
def fill_cb_select_string(select, cb):
return fill(select, {cb})
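# Illustrative sketch of CheckboxSelect filling (hypothetical root locator). A bool toggles
# everything, a set/list checks the named boxes, and a mapping gives per-box state.
#
#     features = CheckboxSelect("//div[@id='features_div']")
#     fill(features, True)                                  # select_all
#     fill(features, {"feature_everything"})                # check by id/value
#     fill(features, {"feature_vm_view": False})            # explicit per-checkbox state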
class ShowingInputs(Pretty):
"""This class abstracts out as a container of inputs, that appear after preceeding was filled.
Args:
*locators: In-order-of-display specification of locators.
Keywords:
min_values: How many values are required (Default: 0)
"""
pretty_attrs = ['locators', 'min_values']
def __init__(self, *locators, **kwargs):
self._locators = locators
self._min = kwargs.get("min_values", 0)
def zip(self, with_values):
if len(with_values) < self._min:
raise ValueError("Not enough values provided ({}, expected {})".format(
len(with_values), self._min)
)
if len(with_values) > len(self._locators):
raise ValueError("Too many values provided!")
return zip(self._locators, with_values)
def __getitem__(self, i):
"""To delegate access to the separate locators"""
return self._locators[i]
@fill.method((ShowingInputs, Sequence))
def _fill_showing_inputs_seq(si, i):
for loc, val in si.zip(i):
fill(loc, val)
@fill.method((ShowingInputs, basestring))
def _fill_showing_inputs_str(si, s):
fill(si, [s])
class MultiFill(object):
"""Class designed to fill the same value to multiple fields
Args:
*fields: The fields where the value will be mirrored
"""
def __init__(self, *fields):
self.fields = fields
@fill.method((MultiFill, object))
def _fill_multi_obj(mf, o):
for field in mf.fields:
fill(field, o)
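# Illustrative sketch of MultiFill (hypothetical fields): the same value is mirrored into
# every wrapped field, which is handy for password/verify pairs.
#
#     password_pair = MultiFill(Input("password"), Input("verify"))
#     fill(password_pair, "smartvm")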
class DriftGrid(Pretty):
""" Class representing the table (grid) specific to host drift analysis comparison page
"""
def __init__(self, loc="//div[@id='drift_grid_div']"):
self.loc = loc
def get_cell(self, row_text, col_index):
""" Finds cell element of the grid specified by column index and row text
Args:
row_text: Title text of the cell's row
col_index: Column index of the cell, starting with 0 for 1st data-containing column
Note:
`col_index` of 0 is used for the 2nd actual column in the drift grid, because
the 1st column does not contain headers, only row descriptions.
Returns:
Selenium element of the cell.
"""
self.expand_all_sections()
cell_loc = ".//div/div[1][contains(., '{}')]/../div[{}]".format(row_text, col_index + 2)
cell = sel.element(cell_loc, root=self.loc)
return cell
def cell_indicates_change(self, row_text, col_index):
""" Finds out if a cell, specified by column index and row text, indicates change
Args:
row_text: Title text of the cell's row
col_index: Column index of the cell
Note:
`col_index` of 0 is used for the 2nd actual column in the drift grid, because
the 1st column does not contain headers, only row descriptions.
Returns:
``True`` if there is a change present, ``False`` otherwise
"""
cell = self.get_cell(row_text, col_index)
# Cell either contains an image
try:
cell_img = sel.element(".//img", root=cell)
if sel.get_attribute(cell_img, "alt") == 'Changed from previous':
return True
# or text
except NoSuchElementException:
if version.current_version() <= '5.3':
cell_textdiv = sel.element("./div", root=cell)
if 'mark' in sel.get_attribute(cell_textdiv, 'class'):
return True
else: # LOWEST
if 'color: rgb(33, 160, 236)' in sel.get_attribute(cell, 'style'):
return True
return False
def expand_all_sections(self):
""" Expands all sections to make the row elements found therein available
"""
while True:
# We need to do this one by one because the DOM changes on every expansion
try:
el = sel.element(
'.//div/span[contains(@class, "toggle") and contains(@class, "expand")]',
root=self.loc)
sel.click(el)
except NoSuchElementException:
break
class ButtonGroup(object):
def __init__(self, key):
""" A ButtonGroup is a set of buttons next to each other, as is used on the DefaultViews
page.
Args:
key: The name of the key field text before the button group.
"""
self.key = key
@property
def _icon_tag(self):
        if version.current_version() >= '5.6':
return 'i'
else:
return 'img'
@property
def _state_attr(self):
        if version.current_version() >= '5.6':
return 'title'
else:
return 'alt'
@property
def locator(self):
attr = re.sub(r"&", "&", quoteattr(self.key)) # We don't need it in xpath
if version.current_version() < "5.5":
return '//td[@class="key" and normalize-space(.)={}]/..'.format(attr)
else:
return (
'//label[contains(@class, "control-label") and normalize-space(.)={}]/..'
.format(attr))
def locate(self):
""" Moves to the element """
# Use the header locator as the overall table locator
return sel.move_to_element(self.locator)
@property
def locator_base(self):
if version.current_version() < "5.5":
return self.locator + "/td[2]"
else:
return self.locator + "/div"
@property
def active(self):
""" Returns the alt tag text of the active button in thr group. """
loc = sel.element(self.locator_base + '/ul/li[@class="active"]/{}'.format(self._icon_tag))
return loc.get_attribute(self._state_attr)
def status(self, alt):
""" Returns the status of the button identified by the Alt Text of the image. """
active_loc = self.locator_base + '/ul/li/{}[@{}="{}"]'.format(
self._icon_tag, self._state_attr, alt)
try:
sel.element(active_loc)
return True
except NoSuchElementException:
pass
inactive_loc = self.locator_base + '/ul/li/a/{}[@alt="{}"]'.format(self._icon_tag, alt)
try:
sel.element(inactive_loc)
return False
except NoSuchElementException:
pass
def choose(self, alt):
""" Sets the ButtonGroup to select the button identified by the alt text. """
if not self.status(alt):
inactive_loc = self.locator_base + '/ul/li/a/{}[@alt="{}"]'.format(self._icon_tag, alt)
sel.click(inactive_loc)
@fill.method((ButtonGroup, basestring))
def _fill_showing_button_group(tb, s):
tb.choose(s)
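# Illustrative sketch of ButtonGroup (hypothetical key/alt text), e.g. on the Default Views
# page; filling with a string clicks the button whose alt/title matches, if not active yet.
#
#     vm_views = ButtonGroup("VMs")
#     fill(vm_views, "Grid View")
#     assert vm_views.active == "Grid View"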
class ColorGroup(object):
def __init__(self, key):
""" A ColourGroup is a set of colour buttons next to each other, as is used on the DefaultViews
page.
Args:
key: The name of the key field text before the button group.
"""
self.key = key
self.locator = '//td[@class="key" and text()="{}"]/..'.format(self.key)
def locate(self):
""" Moves to the element """
# Use the header locator as the overall table locator
return sel.move_to_element(self.locator)
@property
def active(self):
""" Returns the alt tag text of the active button in thr group. """
loc = sel.element(self.locator + '/td[2]/div[contains(@title, "selected")]')
color = re.search('The (.*?) theme', loc.get_attribute('title')).groups()[0]
return color
def status(self, color):
""" Returns the status of the color button identified by the Title Text of the image. """
active_loc = self.locator + '/td[2]/div[contains(@title, "{}")' \
'and contains(@title, "selected")]'.format(color)
try:
sel.element(active_loc)
return True
except NoSuchElementException:
pass
inactive_loc = self.locator + '/td[2]/div[contains(@title, "{}")' \
'and contains(@title, "Click")]'.format(color)
try:
sel.element(inactive_loc)
return False
except NoSuchElementException:
pass
def choose(self, color):
""" Sets the ColorGroup to select the button identified by the title text. """
if not self.status(color):
inactive_loc = self.locator + '/td[2]/div[contains(@title, "{}")' \
'and contains(@title, "Click")]'.format(color)
sel.click(inactive_loc)
@fill.method((ColorGroup, basestring))
def _fill_showing_color_group(tb, s):
tb.choose(s)
class DynamicTable(Pretty):
"""A table that can add or remove the rows.
"""
pretty_attrs = "root_loc", "default_row_item"
ROWS = ".//tbody/tr[not(contains(@id, 'new_tr'))]"
DELETE_ALL = {
version.LOWEST: ".//tbody/tr/td/img[@alt='Delete']",
'5.6': './/tbody/tr/td/button/i[contains(@class, "minus")]'
}
def __init__(self, root_loc, default_row_item=None):
self.root_loc = root_loc
self.default_row_item = default_row_item
@property
def rows(self):
return map(lambda r_el: self.Row(self, r_el), sel.elements(self.ROWS, root=self.root_loc))
@cached_property
def header_names(self):
return map(sel.text, sel.elements(".//thead/tr/th", root=self.root_loc))
def click_add(self):
sel.click(sel.element(
".//tbody/tr[@id='new_tr']/td//img | .//tbody/tr[@id='new_tr']/td//i",
root=self.root_loc))
def click_save(self):
if version.current_version() < "5.6":
sel.click(sel.element(
".//tbody/tr[@id='new_tr']/td//input[@type='image']", root=self.root_loc))
else:
# 5.6+ uses the same button.
self.click_add()
def delete_row(self, by):
pass
def clear(self):
while True:
buttons = sel.elements(self.DELETE_ALL)
if not buttons:
break
sel.click(buttons[0])
def add_row(self, data):
self.click_add()
editing_row = self.Row(self, ".//tbody/tr[@id='new_tr']")
fill(editing_row, data)
self.click_save()
class Row(object):
def __init__(self, table, root):
self.table = table
self.root = root
@property
def values(self):
cells = sel.elements("./td", root=self.root)
return dict(zip(self.table.header_names, map(sel.text, cells)))
@property
def inputs(self):
result = []
for cell in sel.elements("./td", root=self.root):
inputs = sel.elements("./input", root=cell)
if not inputs:
result.append(None)
else:
result.append(inputs[0])
return result
@property
def inputs_for_filling(self):
return dict(zip(self.table.header_names, self.inputs))
@fill.method((DynamicTable.Row, Mapping))
def _fill_dt_row_map(dtr, m):
for name, input in dtr.inputs_for_filling.iteritems():
fill(input, m.get(name, None))
@fill.method((DynamicTable.Row, Anything))
def _fill_dt_row_other(dtr, anything):
mapping_fields = [name for name in dtr.table.header_names if name.strip()]
if isinstance(anything, (list, tuple)) and len(anything) == len(mapping_fields):
# Create the dict and fill by dict
fill(dtr, dict(zip(mapping_fields, anything)))
else:
# Use the default field
if dtr.table.default_row_item is None:
raise Exception("Cannot fill table row with anything when we dont know the def. field")
fill(dtr, {dtr.table.default_row_item: anything})
@fill.method((DynamicTable, list))
def _fill_dt_list(dt, l, clear_before=False):
if clear_before:
dt.clear()
for item in l:
dt.add_row(item)
@fill.method((DynamicTable, Anything))
def _fill_dt_anything(dt, anything, **kwargs):
fill(dt, [anything], **kwargs)
fill.prefer((DynamicTable, Anything), (object, Mapping))
fill.prefer((DynamicTable.Row, Anything), (object, Mapping))
fill.prefer((Select, types.NoneType), (object, types.NoneType))
fill.prefer((DHTMLSelect, types.NoneType), (object, types.NoneType))
fill.prefer((object, types.NoneType), (Select, object))
class AngularSelect(object):
BUTTON = "//button[@data-id='{}']"
def __init__(self, loc, none=None, multi=False):
self.none = none
if isinstance(loc, AngularSelect):
self._loc = loc._loc
else:
self._loc = self.BUTTON.format(loc)
self.multi = multi
def locate(self):
return sel.move_to_element(self._loc)
@property
def select(self):
return Select('select#{}'.format(self.did), multi=self.multi)
@property
def did(self):
return sel.element(self._loc).get_attribute('data-id')
@property
def is_broken(self):
return sel.is_displayed(self) and sel.is_displayed(self.select)
@property
def is_open(self):
el = sel.element(self._loc)
return el.get_attribute('aria-expanded') == "true"
def open(self):
sel.click(self._loc)
def select_by_visible_text(self, text):
if not self.is_open:
self.open()
new_loc = self._loc + '/../div/ul/li/a[contains(., "{}")]'.format(text)
e = sel.element(new_loc)
sel.execute_script("arguments[0].scrollIntoView();", e)
sel.click(new_loc)
def select_by_value(self, value):
value = str(value) # Because what we read from the page is a string
options_map = [a.value for a in self.select.all_options]
index = options_map.index(value)
if not self.is_open:
self.open()
new_loc = self._loc + '/../div/ul/li[@data-original-index={}]'.format(index)
e = sel.element(new_loc)
sel.execute_script("arguments[0].scrollIntoView();", e)
sel.click(new_loc)
@property
def all_options(self):
return self.select.all_options
@property
def classes(self):
"""Combines class from the button and from select."""
return sel.classes(self) | sel.classes("select#{}".format(self.did))
@property
def options(self):
return self.select.options
@property
def first_selected_option(self):
new_loc = self._loc + '/span'
e = sel.element(new_loc)
text = e.text
for option in self.all_options:
if option.text == text:
return option
return None
@property
def first_selected_option_text(self):
new_loc = self._loc + '/span'
e = sel.element(new_loc)
text = e.text
return text
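# A rough usage sketch for AngularSelect (not part of the original module); the
# data-id and the option text/value are assumptions:
#
#   sel_box = AngularSelect("user_email")
#   sel_box.select_by_visible_text("admin@localhost")
#   sel_box.select_by_value(1)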
@fill.method((AngularSelect, sel.ByText))
@fill.method((AngularSelect, basestring))
def _fill_angular_string(obj, s):
if s:
obj.select_by_visible_text(s)
else:
return
@fill.method((AngularSelect, sel.ByValue))
def _fill_angular_value(obj, s):
if s.value:
obj.select_by_value(s.value)
else:
return
@fill.method((AngularSelect, list))
def _fill_angular_list(obj, l):
for i in l:
fill(obj, i)
class AngularCalendarInput(Pretty):
pretty_attrs = "input_name", "click_away_element"
def __init__(self, input_name, click_away_element):
self.input_name = input_name
self.click_away_element = click_away_element
@property
def input(self):
return Input(self.input_name, use_id=True)
@property
def clear_button(self):
return sel.element("../a/img", root=self.input)
def locate(self):
return self.input.locate()
def fill(self, value):
if isinstance(value, date):
value = '{}/{}/{}'.format(value.month, value.day, value.year)
else:
value = str(value)
try:
sel.click(self.input)
sel.set_text(self.input, value)
finally:
# To ensure the calendar itself is closed
sel.click(self.click_away_element)
def clear(self):
if sel.text(self.input).strip():
sel.click(self.clear_button)
@fill.method((AngularCalendarInput, Anything))
def _fill_angular_calendar_input(obj, a):
return obj.fill(a)
class EmailSelectForm(Pretty):
"""Class encapsulating the e-mail selector, eg. in Control/Alarms editing."""
fields = Region(locators=dict(
from_address=Input('from'),
user_emails={
version.LOWEST: Select("//select[@id='user_email']"),
"5.5": AngularSelect("user_email")},
manual_input=Input('email'),
add_email_manually={
version.LOWEST: "(//img | //i)[@title='Add' and contains(@onclick, 'add_email')]",
"5.5": "//div[@alt='Add']/i"}
))
@property
def to_emails(self):
"""Returns list of e-mails that are selected"""
return [
sel.text(el)
for el
in sel.elements("//a[contains(@href, 'remove_email')]")
]
@property
def user_emails(self):
"""Returns list of e-mail that users inside CFME have so that they can be selected"""
try:
return [
sel.get_attribute(el, "value")
for el
in self.fields.user_emails.options
if len(sel.get_attribute(el, "value").strip()) > 0
]
except NoSuchElementException: # It disappears when empty
return []
def remove_email(self, email):
"""Remove specified e-mail
Args:
email: E-mail to remove
"""
if email in self.to_emails:
sel.click("//a[contains(@href, 'remove_email')][normalize-space(.)='{}']".format(email))
return email not in self.to_emails
else:
return True
@to_emails.setter
def to_emails(self, emails):
"""Function for filling e-mails
Args:
emails: List of e-mails that should be filled. Any existing e-mails that are not in this
variable will be deleted.
"""
if isinstance(emails, basestring):
emails = [emails]
# Delete e-mails that have nothing to do here
for email in self.to_emails:
if email not in emails:
assert self.remove_email(email), "Could not remove e-mail '{}'".format(email)
# Add new
for email in emails:
if email in self.to_emails:
continue
if email in self.user_emails:
sel.select(self.fields.user_emails, sel.ByValue(email))
else:
fill(self.fields.manual_input, email)
sel.click(self.fields.add_email_manually)
assert email in self.to_emails, "Adding e-mail '{}' manually failed!".format(email)
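# A short usage sketch (illustrative only; the addresses are assumptions):
#
#   form = EmailSelectForm()
#   form.to_emails = ["admin@example.com", "ops@example.com"]  # syncs the selection
#   form.remove_email("ops@example.com")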
@fill.method((EmailSelectForm, basestring))
@fill.method((EmailSelectForm, list))
@fill.method((EmailSelectForm, set))
@fill.method((EmailSelectForm, tuple))
def fill_email_select_form(form, emails):
form.to_emails = emails
class BootstrapSwitch(object):
def __init__(self, input_id):
"""A Bootstrap On/Off switch
Args:
input_id: The HTML ID of the input element associated with the checkbox
"""
self.input_id = input_id
self.loc_container = "//input[@id={}]/..".format(quoteattr(self.input_id))
self.on_off = "{}/span[contains(@class, 'bootstrap-switch-handle-{}')]".format(
self.loc_container, '{}')
def fill(self, val):
"""Convenience function"""
if val:
self.check()
else:
self.uncheck()
def check(self):
"""Checks the bootstrap box"""
el = sel.element(self.on_off.format("off"))
sel.click(el)
def uncheck(self):
"""Unchecks the bootstrap box"""
el = sel.element(self.on_off.format("on"))
sel.click(el)
def is_selected(self):
if sel.is_displayed("//div[contains(@class, 'bootstrap-switch-on')]{}"
.format(self.loc_container)):
return True
else:
return False
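# A minimal sketch of driving the switch (the input id is an assumption):
#
#   switch = BootstrapSwitch("advanced_mode")
#   switch.fill(True)            # clicks the 'off' handle, turning the switch on
#   assert switch.is_selected()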
@fill.method((BootstrapSwitch, bool))
def fill_bootstrap_switch(bs, val):
bs.fill(val)
class OldCheckbox(object):
def __init__(self, input_id):
"""An original HTML checkbox element
Args:
input_id: The HTML ID of the input element associated with the checkbox
"""
self.input_id = input_id
self.locator = "//input[@id={}]".format(quoteattr(input_id))
def fill(self, val):
"""
Checks or unchecks
Args:
value: The value the checkbox should represent as a bool (or None to do nothing)
Returns: Previous state of the checkbox
"""
if val is not None:
selected = self.is_selected()
if selected is not val:
logger.debug("Setting checkbox {} to {}".format(str(self.locator), str(val)))
sel.click(self._el)
return selected
def check(self):
"""Convenience function"""
self.fill(True)
def uncheck(self):
"""Convenience function"""
self.fill(False)
def _el(self):
return sel.move_to_element(self.locator)
def is_selected(self):
return self._el().is_selected()
@fill.method((OldCheckbox, bool))
def fill_oldcheckbox_switch(ob, val):
ob.fill(val)
class CFMECheckbox(Selector):
def __init__(self, input_id):
self.input_id = input_id
super(CFMECheckbox, self).__init__()
def decide(self):
ref_loc = "//input[@id={}]/../span" \
"[contains(@class, 'bootstrap-switch-label')]".format(quoteattr(self.input_id))
if sel.is_displayed(ref_loc):
return BootstrapSwitch(self.input_id)
else:
return OldCheckbox(self.input_id)
@fill.method((CFMECheckbox, bool))
def fill_cfmecheckbox_switch(ob, val):
ob.fill(val)
def breadcrumbs():
"""Returns a list of breadcrumbs.
Returns:
:py:class:`list` of breadcrumbs if they are present, :py:class:`NoneType` otherwise.
"""
result = map(sel.text_sane, sel.elements('//ol[contains(@class, "breadcrumb")]/li'))
return result if result else None
SUMMARY_TITLE_LOCATORS = [
'//h1'
]
SUMMARY_TITLE_LOCATORS = '|'.join(SUMMARY_TITLE_LOCATORS)
def summary_title():
"""Returns a title of the page.
Returns:
:py:class:`str` if present, :py:class:`NoneType` otherwise.
"""
try:
return sel.text_sane(SUMMARY_TITLE_LOCATORS)
except sel.NoSuchElementException:
return None
| gpl-2.0 | 5,192,834,815,882,375,000 | 32.996073 | 103 | 0.577915 | false |
pypyr/pypyr-cli | tests/unit/pypyr/pypeloaders/fileloader_test.py | 1 | 3950 | """fileloader.py unit tests."""
from pathlib import Path
from unittest.mock import mock_open, patch
from pypyr.errors import PipelineNotFoundError
import pypyr.pypeloaders.fileloader
import pytest
# ------------------------- get_pipeline_path --------------------------------#
cwd = Path.cwd()
def test_get_pipeline_path_in_working_dir():
"""Find a pipeline in the working dir."""
working_dir = cwd.joinpath('tests')
path_found = pypyr.pypeloaders.fileloader.get_pipeline_path(
'testpipelinewd',
working_dir)
expected_path = cwd.joinpath('tests',
'testpipelinewd.yaml')
assert path_found == expected_path
def test_get_pipeline_path_in_working_dir_pipelines():
"""Find a pipeline in the working dir pipelines."""
working_dir = cwd.joinpath('tests')
path_found = pypyr.pypeloaders.fileloader.get_pipeline_path('testpipeline',
working_dir)
expected_path = cwd.joinpath('tests',
'pipelines',
'testpipeline.yaml')
assert path_found == expected_path
def test_get_pipeline_path_in_pypyr_dir():
"""Find a pipeline in the pypyr install dir."""
working_dir = cwd.joinpath('tests')
path_found = pypyr.pypeloaders.fileloader.get_pipeline_path('donothing',
working_dir)
expected_path = cwd.joinpath('pypyr',
'pipelines',
'donothing.yaml')
assert path_found == expected_path
def test_get_pipeline_path_raises():
"""Failure to find pipeline should raise PipelineNotFoundError."""
with pytest.raises(PipelineNotFoundError) as err:
pypyr.pypeloaders.fileloader.get_pipeline_path('unlikelypipeherexyz',
cwd)
current_path = cwd.joinpath('pipelines')
pypyr_path = cwd.joinpath('pypyr',
'pipelines')
expected_msg = (
f'unlikelypipeherexyz.yaml not found in any of the following:\n'
f'{cwd}\n{current_path}\n{pypyr_path}')
assert str(err.value) == f"{expected_msg}"
# ------------------------- get_pipeline_path --------------------------------#
# ------------------------- get_pipeline_definition --------------------------#
@patch('ruamel.yaml.YAML.load', return_value='mocked pipeline def')
@patch('pypyr.pypeloaders.fileloader.get_pipeline_path',
return_value='arb/path/x.yaml')
def test_get_pipeline_definition_pass(mocked_get_path,
mocked_yaml):
"""get_pipeline_definition passes correct params to all methods."""
with patch('pypyr.pypeloaders.fileloader.open',
mock_open(read_data='pipe contents')) as mocked_open:
pipeline_def = pypyr.pypeloaders.fileloader.get_pipeline_definition(
'pipename', '/working/dir')
assert pipeline_def == 'mocked pipeline def'
mocked_get_path.assert_called_once_with(
pipeline_name='pipename', working_directory='/working/dir')
mocked_open.assert_called_once_with('arb/path/x.yaml')
mocked_yaml.assert_called_once_with(mocked_open.return_value)
@patch('pypyr.pypeloaders.fileloader.get_pipeline_path',
return_value='arb/path/x.yaml')
def test_get_pipeline_definition_file_not_found(mocked_get_path):
"""get_pipeline_definition raises file not found."""
with patch('pypyr.pypeloaders.fileloader.open',
mock_open(read_data='pipe contents')) as mocked_open:
mocked_open.side_effect = FileNotFoundError('deliberate err')
with pytest.raises(FileNotFoundError):
pypyr.pypeloaders.fileloader.get_pipeline_definition(
'pipename', '/working/dir')
# ------------------------- get_pipeline_definition --------------------------#
| apache-2.0 | 4,627,104,753,105,716,000 | 36.980769 | 79 | 0.587342 | false |
fersauce/prueba | recetario/principal/views.py | 1 | 2183 | from django.shortcuts import render_to_response, get_object_or_404
from django.http.response import HttpResponse, HttpResponseRedirect
from principal.models import Receta, Comentario
from django.contrib.auth.models import User
from django.template.context import RequestContext
from principal.forms import ContactoForm
from django.core.mail.message import EmailMessage
def sobre(request):
html='<html><body>Proyecto de Ejemplo en MDW</body></html>'
return HttpResponse(html)
def inicio(request):
recetas = Receta.objects.all()
return render_to_response('inicio.html',{'recetas':recetas})
def usuarios(request):
usuarios = User.objects.all()
recetas = Receta.objects.all()
return render_to_response('usuarios.html', {'usuarios':usuarios,
'recetas':recetas})
def listaRecetas(request):
recetas = Receta.objects.all()
return render_to_response('recetas.html', {'datos':recetas},
context_instance = RequestContext(request))
def detalleReceta(request, idReceta):
dato = get_object_or_404(Receta, pk=idReceta)
comentarios = Comentario.objects.filter(receta=dato)
return render_to_response('receta.html', {'receta':dato,
'comentarios':comentarios},
context_instance=RequestContext(request))
def contacto(request):
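    # Contact view: on POST, validates the form and e-mails the message to the
    # site owner, then redirects home; otherwise it renders an empty form.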
if(request.method == 'POST'):
formulario = ContactoForm(request.POST)
if(formulario.is_valid()):
titulo = 'Mensaje desde el recetario de MDW'
contenido = formulario.cleaned_data['mensaje']+"\n"
contenido += 'Comunicarse a: ' + formulario.cleaned_data['correo']
correo = EmailMessage(titulo, contenido,
to=['[email protected]'])
correo.send()
return HttpResponseRedirect('/')
else:
formulario = ContactoForm()
return render_to_response('contactoform.html', {'formulario':formulario},
context_instance = RequestContext(request))
| gpl-2.0 | 2,600,570,381,765,135,400 | 41.66 | 78 | 0.626202 | false |
unreal666/outwiker | plugins/diagrammer/diagrammer/libs/blockdiag/noderenderer/note.py | 3 | 2286 | # -*- coding: utf-8 -*-
# Copyright 2011 Takeshi KOMIYA
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from blockdiag.noderenderer import install_renderer
from blockdiag.noderenderer.base import NodeShape
from blockdiag.utils import XY
class Note(NodeShape):
def render_shape(self, drawer, _, **kwargs):
fill = kwargs.get('fill')
m = self.metrics.cell(self.node)
r = self.metrics.cellsize * 2
tr = m.topright
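        # The outline below traces the note shape: the top-right corner is cut
        # back by `r` so the folded-corner triangle drawn later fits there.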
note = [m.topleft, XY(tr.x - r, tr.y), XY(tr.x, tr.y + r),
m.bottomright, m.bottomleft, m.topleft]
box = self.metrics.cell(self.node).box
# draw outline
if kwargs.get('shadow'):
note = self.shift_shadow(note)
if kwargs.get('style') == 'blur':
drawer.polygon(note, fill=fill, outline=fill,
filter='transp-blur')
else:
drawer.polygon(note, fill=fill, outline=fill)
elif self.node.background:
drawer.polygon(note, fill=self.node.color,
outline=self.node.color)
drawer.image(box, self.node.background)
drawer.polygon(note, fill="none",
outline=self.node.linecolor, style=self.node.style)
else:
drawer.polygon(note, fill=self.node.color,
outline=self.node.linecolor, style=self.node.style)
# draw folded
if not kwargs.get('shadow'):
folded = [XY(tr.x - r, tr.y),
XY(tr.x - r, tr.y + r),
XY(tr.x, tr.y + r)]
drawer.line(folded, fill=self.node.linecolor,
style=self.node.style)
def setup(self):
install_renderer('note', Note)
| gpl-3.0 | 1,789,136,837,365,262,000 | 36.47541 | 78 | 0.590989 | false |
dsweet04/rekall | rekall-core/rekall/plugins/common/profile_index.py | 1 | 13574 | # Rekall Memory Forensics
# Copyright 2014 Google Inc. All Rights Reserved.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or (at
# your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
"""This module implements profile indexing.
Rekall relies on accurate profiles for reliable analysis of memory artifacts. We
depend on selecting the correct profile from the profile repository, but
sometimes it's hard to determine the exact profile to use. The profile
repository has index files that are used to lookup the correct profile quickly,
based on a limited set of symbols and offsets that are known, or can be easily
detected, about the image.
"""
__author__ = (
"Michael Cohen <[email protected]>",
"Adam Sindelar <[email protected]>",
"Jordi Sanchez <[email protected]>"
)
import hashlib
from rekall import obj
from rekall_lib import utils
class IndexProfileLoader(obj.ProfileSectionLoader):
name = "$INDEX"
def LoadIntoProfile(self, session, profile, index):
profile.LoadIndex(index)
return profile
class Index(obj.Profile):
"""A profile which contains an index to locate other profiles."""
index = None
base_offset = 0
PERFECT_MATCH = 1.0
GOOD_MATCH = 0.75
def LoadIndex(self, index):
self.index = index
def copy(self):
result = super(Index, self).copy()
result.index = self.index.copy()
return result
def _TestSymbols(self, address_space, offset, possible_values):
"""Match any of the possible_values at offset.
Return True if there is a match.
"""
for value in possible_values:
value = value.decode("hex")
data = address_space.read(offset, len(value))
if value == data:
return data
def _TestProfile(self, address_space, image_base, profile, symbols,
minimal_match=1):
"""Match _all_ the symbols against this data."""
count_matched = 0
count_unmatched = 0
for offset, possible_values in symbols:
# The possible_values can be a single string which means there is
# only one option. If it is a list, then any of the symbols may
# match at this offset to be considered a match.
if isinstance(possible_values, basestring):
possible_values = [possible_values]
# If the offset is not mapped in we can not compare it. Skip it.
offset_to_check = image_base + offset
if address_space.vtop(offset_to_check) == None:
continue
match = self._TestSymbols(
address_space=address_space,
offset=offset_to_check,
possible_values=possible_values)
if match:
self.session.logging.debug(
"%s matched offset %#x+%#x=%#x (%r)",
profile, offset, image_base, offset+image_base, match)
count_matched += 1
else:
# FIXME: We get here if the comparison point does not match -
# does it make sense to allow some points to not match? Should
# we consider these a failure to match?
count_unmatched += 1
# Require at least this many comparison points to be matched.
if count_matched < minimal_match:
return 0
if count_matched > 0:
self.session.logging.debug(
"%s matches %d/%d comparison points",
profile, count_matched, count_matched + count_unmatched)
return float(count_matched) / (count_matched + count_unmatched)
return 0
def IndexHits(self, image_base, address_space=None, minimal_match=1):
if address_space == None:
address_space = self.session.GetParameter("default_address_space")
for profile, symbols in self.index.iteritems():
match = self._TestProfile(
address_space=address_space,
image_base=image_base,
profile=profile,
minimal_match=minimal_match,
symbols=symbols)
yield match, profile
def LookupIndex(self, image_base, address_space=None, minimal_match=1):
partial_matches = []
for match, profile in self.IndexHits(image_base, address_space,
minimal_match=minimal_match):
if match == self.PERFECT_MATCH:
# Yield perfect matches right away.
yield (profile, self.PERFECT_MATCH)
elif match > 0:
# Imperfect matches will be saved and returned in order of
# accuracy.
partial_matches.append((match, profile))
partial_matches.sort(reverse=True)
for match, profile in partial_matches:
yield (profile, match)
class SymbolOffsetIndex(Index):
"""A specialized index that works on symbols-offsets."""
def __init__(self, *args, **kwargs):
super(SymbolOffsetIndex, self).__init__(*args, **kwargs)
if not self.index:
self.index = {}
@utils.safe_property
def hashes(self):
return self.index.get("$HASHES", {})
@utils.safe_property
def traits(self):
return self.index.get("$TRAITS", {})
@utils.safe_property
def profiles(self):
return self.index.get("$PROFILES", {})
@utils.safe_property
def duplicates(self):
return [p for p in self.index.get("$PROFILES") if p not in self.hashes]
def LookupProfile(self, symbols):
"""Returns which profiles in the index match a dict of symbols.
Returns:
A list of tuples of (profile, num_matched_traits).
"""
profiles = []
try:
relative_symbols = self.RelativizeSymbols(symbols.copy())
except ValueError as e:
self.session.logging.debug(str(e))
return []
for profile, traits in self.traits.iteritems():
matched_traits = 0
for trait in traits:
# A trait is a list of symbol-offset tuples.
match = all([relative_symbols.get(symbol) == offset
for (symbol, offset) in trait])
if match:
matched_traits += 1
if matched_traits > 0:
profiles.append((profile, matched_traits))
return profiles
def LookupHash(self, profile_hash):
"""Returns the profile with hash profile_hash."""
return self.hashes.get(profile_hash)
@classmethod
def FilterSymbols(cls, symbols):
"""Filters a dict of symbols, discarding irrelevant ones."""
return symbols
@classmethod
def CalculateRawProfileHash(cls, profile):
"""Calculates a hash of a list of symbols."""
# Skip superfluous symbols.
symbols = profile["$CONSTANTS"]
ordered_symbol_list = sorted(
["(%s, %d)" % (k, v)
for (k, v) in cls.FilterSymbols(symbols).iteritems()])
hasher = hashlib.sha256()
hasher.update("|".join(ordered_symbol_list))
return hasher.hexdigest()
@classmethod
def CalculateRawSymbolsHash(cls, profile):
"""Calculates a hash of a list of symbols."""
# Skip superfluous symbols.
symbols = profile["$CONSTANTS"]
ordered_symbol_list = sorted(symbols.keys())
hasher = hashlib.sha256()
hasher.update("|".join(ordered_symbol_list))
return hasher.hexdigest()
def ProfileMetadata(self, profile_name):
return self.profiles.get(profile_name)
@classmethod
def ProfileMatchesTrait(cls, profile, trait):
"""Whether a profile matches another profile's trait.
A trait is a list of tuples (symbol, offset) that uniquely identify
a profile.
"""
return all([profile.get_constant(t[0]) == t[1] for t in trait])
@classmethod
def RawProfileMatchesTrait(cls, profile, trait):
"""Whether a raw profile (JSON) matches another profile's trait.
A trait is a list of tuples (symbol, offset) that uniquely identify
a profile.
"""
        try:
            return all([profile.get(t[0]) == t[1] for t in trait])
        except Exception:
            # Malformed raw profile data; treat it as a non-match.
            return False
@classmethod
def BuildIndex(cls, hashes=None, traits=None, duplicates=None, spec=None,
iomanager=None):
"""Builds a SymbolOffset index from traits, profiles, hashes and a spec.
Args:
hashes: A dictionary of hash:profile_id. Hashes must be obtained via
the SymbolOffsetIndex.CalculateRawProfileHash() method.
traits: A dictionary of profile_id:traits. Traits are the result
of calling the SymbolOffsetIndex.FindTraits() method.
profiles: A dictionary of profile_id metadata. Profile metadata
is obtained via SymbolOffsetIndex.GetProfileMetadata().
duplicates: A list of newly found profile ids that are duplicate.
"""
spec = spec or {}
metadata = dict(Type="Index",
ProfileClass=spec.get("implementation", cls.__name__),
BaseSymbol=spec.get("base_symbol"))
hashes = hashes or {}
traits = traits or {}
# Assert all profiles that have hashes have traits as well
if not all([profile in hashes.values() for profile in traits]):
raise ValueError("Not all profiles with traits have hashes")
# Assert all profiles that have traits have hashes as well
if not all([profile in traits for profile in hashes.values()]):
raise ValueError("Not all profiles with hashes have traits")
profiles = dict([(profile_id,
cls.GetProfileMetadata(
iomanager=iomanager, profile_id=profile_id))
for profile_id in traits])
duplicates = duplicates or []
for duplicate_profile in duplicates:
profiles[duplicate_profile] = cls.GetProfileMetadata(
iomanager=iomanager, profile_id=duplicate_profile)
index = {
"$METADATA": metadata,
"$INDEX": {
"$TRAITS": traits or {},
"$PROFILES": profiles or {},
"$HASHES": hashes or {},
}
}
return index
@classmethod
def GetProfileMetadata(cls, iomanager=None, profile_id=None):
profile_metadata = dict()
file_mtime = iomanager.Metadata(profile_id)["LastModified"]
profile_metadata["LastModified"] = file_mtime
return profile_metadata
def __len__(self):
return len(self.traits)
def __iter__(self):
"""Yields tuples of profile_id, traits.
Each trait is a list of tuples of (symbol, offset) that make this
profile unique within the repository.
"""
for profile, traits in self.index.get("$TRAITS").iteritems():
yield profile, traits
def RelativizeSymbols(self, symbols, base_symbol=None):
"""Modifies a dict of symbols so its offsets relative to base_symbol.
If no base_symbol is provided and the index itself doesn't define one
then returns the symbols as is.
Args:
symbols: A dictionary of symbol:value
base_symbol: The name of the symbol to base others' values on.
"""
if not base_symbol:
base_symbol = self.metadata("BaseSymbol")
if not base_symbol:
return symbols
base_value = symbols.get(base_symbol)
if not base_value:
raise ValueError("Symbol %s not found in profile", base_symbol)
new_symbols = symbols.copy()
for symbol, value in new_symbols.iteritems():
new_symbols[symbol] = value - base_value
return new_symbols
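# A worked example of the relativization above (addresses are made up): with
# base_symbol "linux_proc_banner" at 0xffffffff81a000a0, the symbol dict
#   {"linux_proc_banner": 0xffffffff81a000a0, "init_task": 0xffffffff81c124c0}
# becomes
#   {"linux_proc_banner": 0x0, "init_task": 0x212420}
# so lookups no longer depend on the kernel's load address (KASLR slide).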
class LinuxSymbolOffsetIndex(SymbolOffsetIndex):
"""Specialized symbol-offset index for linux."""
@classmethod
def FilterSymbols(cls, symbols):
"""Filters a dict of symbols, discarding irrelevant ones."""
return dict([(k, v) for (k, v) in symbols.iteritems()
if not "." in k and k != "__irf_end"])
@classmethod
def BuildIndex(cls, hashes=None, traits=None, duplicates=None, spec=None,
iomanager=None):
index = super(LinuxSymbolOffsetIndex, cls).BuildIndex(
hashes=hashes, traits=traits, spec=spec, duplicates=duplicates,
iomanager=iomanager)
# By default, we'll calculate KASLR from linux_proc_banner which is
# present on all kernels.
spec = spec or {}
index["$METADATA"]["BaseSymbol"] = spec.get("base_symbol",
"linux_proc_banner")
return index
| gpl-2.0 | 6,259,217,831,076,171,000 | 34.534031 | 80 | 0.602328 | false |
eallrich/watchword | ww/api/migrations/0002_auto_20160213_1817.py | 1 | 2019 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('api', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Flare',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('created', models.DateTimeField(auto_now_add=True)),
('updated', models.DateTimeField(auto_now=True)),
('config', models.CharField(max_length=255, blank=True)),
('signal', models.CharField(default=b'email', max_length=10, choices=[(b'email', b'Email'), (b'webhook', b'Webhook')])),
('user', models.ForeignKey(to=settings.AUTH_USER_MODEL)),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='Launch',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('created', models.DateTimeField(auto_now_add=True)),
('updated', models.DateTimeField(auto_now=True)),
('message', models.CharField(max_length=255, blank=True)),
('trigger_state', models.CharField(max_length=5, choices=[(b'fresh', b'Fresh'), (b'quiet', b'Quiet'), (b'alert', b'Alert'), (b'alarm', b'Alarm'), (b'sleep', b'Sleep')])),
('flare', models.ForeignKey(to='api.Flare')),
('watch', models.ForeignKey(to='api.Watch')),
],
options={
'abstract': False,
},
),
migrations.AddField(
model_name='watch',
name='flares',
field=models.ManyToManyField(to='api.Flare'),
),
]
| mit | 6,896,003,905,374,241,000 | 39.38 | 186 | 0.538385 | false |