| repo_name | path | copies | size | content | license | hash | line_mean | line_max | alpha_frac | autogenerated |
|---|---|---|---|---|---|---|---|---|---|---|
| stringlengths 5-92 | stringlengths 4-232 | stringclasses 19 values | stringlengths 4-7 | stringlengths 721-1.04M | stringclasses 15 values | int64 -9,223,277,421,539,062,000 to 9,223,102,107B | float64 6.51-99.9 | int64 15-997 | float64 0.25-0.97 | bool 1 class |
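Each record below is one row of this table, with the `content` column holding a full source file. As a rough illustration only (the `records` variable and the thresholds are hypothetical, not part of this dump), the numeric columns are typically used to filter such a corpus:

```python
def keep(record):
    """Keep rows that look like hand-written, reasonably formatted code."""
    return (not record["autogenerated"]        # drop generated files
            and record["alpha_frac"] >= 0.25   # enough alphabetic content
            and record["line_max"] <= 997)     # no extremely long lines

# 'records' is assumed to be an iterable of dicts keyed by the column names above
kept = [r for r in records if keep(r)]
```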
gymnasium/edx-platform | lms/djangoapps/badges/models.py | 1 | 12332 |
"""
Database models for the badges app
"""
from importlib import import_module
from config_models.models import ConfigurationModel
from django.conf import settings
from django.contrib.auth.models import User
from django.core.exceptions import ValidationError
from django.db import models
from django.utils.translation import ugettext_lazy as _
from jsonfield import JSONField
from lazy import lazy
from model_utils.models import TimeStampedModel
from opaque_keys import InvalidKeyError
from opaque_keys.edx.django.models import CourseKeyField
from opaque_keys.edx.keys import CourseKey
from badges.utils import deserialize_count_specs
from xmodule.modulestore.django import modulestore
def validate_badge_image(image):
"""
Validates that a particular image is small enough to be a badge and square.
"""
if image.width != image.height:
raise ValidationError(_(u"The badge image must be square."))
if not image.size < (250 * 1024):
raise ValidationError(_(u"The badge image file size must be less than 250KB."))
def validate_lowercase(string):
"""
Validates that a string is lowercase.
"""
if not string.islower():
raise ValidationError(_(u"This value must be all lowercase."))
class CourseBadgesDisabledError(Exception):
"""
Exception raised when Course Badges aren't enabled, but an attempt to fetch one is made anyway.
"""
class BadgeClass(models.Model):
"""
Specifies a badge class to be registered with a backend.
"""
slug = models.SlugField(max_length=255, unique=True)
issuing_component = models.SlugField(max_length=50, default='', blank=True, validators=[validate_lowercase])
display_name = models.CharField(max_length=255)
course_id = CourseKeyField(max_length=255, blank=True, default=None)
description = models.TextField()
criteria = models.TextField() # TODO: Badgr and Open Badges spec can take both text and url criteria
mode = models.CharField(max_length=100, default='', blank=True)
image = models.ImageField(upload_to='badge_classes', validators=[validate_badge_image])
def __unicode__(self):
return u"<Badge '{slug}' for '{issuing_component}', {course_id} {mode}>".format(
slug=self.slug, issuing_component=self.issuing_component,
course_id=unicode(self.course_id), mode=self.mode
)
@classmethod
def get_badge_class(
cls, slug=None, issuing_component=None, display_name=None, description=None, criteria=None, image_file_handle=None,
mode='', course_id=None, create=True
):
"""
Looks up a badge class by its slug, or combination of mode and course_id and returns it should it exist.
If it does not exist, and create is True, creates it according to the arguments. Otherwise, returns None.
The expectation is that an XBlock or platform developer should not need to concern themselves with whether
or not a badge class has already been created, but should just feed all requirements to this function
and it will 'do the right thing'. It should be the exception, rather than the common case, that a badge class
would need to be looked up without also being created were it missing.
"""
if course_id and not modulestore().get_course(course_id).issue_badges:
raise CourseBadgesDisabledError("This course does not have badges enabled.")
if not course_id:
course_id = CourseKeyField.Empty
try:
if slug:
return cls.objects.get(slug=slug)
else:
if mode:
return cls.objects.get(mode=mode, course_id=course_id)
else: # allow setting a BadgeClass with no mode, can be used for all modes
return cls.objects.get(course_id=course_id)
except cls.DoesNotExist:
if not create:
return None
badge_class = cls(
slug=slug,
issuing_component=issuing_component,
display_name=display_name,
course_id=course_id,
mode=mode,
description=description,
criteria=criteria,
)
badge_class.image.save(image_file_handle.name, image_file_handle)
badge_class.full_clean()
badge_class.save()
return badge_class
@lazy
def backend(self):
"""
Loads the badging backend.
"""
module, klass = settings.BADGING_BACKEND.rsplit('.', 1)
module = import_module(module)
return getattr(module, klass)()
def get_for_user(self, user):
"""
Get the assertion for this badge class for this user, if it has been awarded.
"""
return self.badgeassertion_set.filter(user=user)
def award(self, user, evidence_url=None):
"""
Contacts the backend to have a badge assertion created for this badge class for this user.
"""
return self.backend.award(self, user, evidence_url=evidence_url)
# def save(self, **kwargs):
# #"""
# # Slugs must always be lowercase.
# #"""
# super(BadgeClass, self).save(**kwargs)
class Meta(object):
app_label = "badges"
unique_together = (('mode', 'course_id'),)
verbose_name_plural = "Badge Classes"
class BadgeAssertion(TimeStampedModel):
"""
Tracks badges on our side of the badge baking transaction
"""
user = models.ForeignKey(User, on_delete=models.CASCADE)
badge_class = models.ForeignKey(BadgeClass, on_delete=models.CASCADE)
data = JSONField()
backend = models.CharField(max_length=50)
image_url = models.URLField()
assertion_url = models.URLField()
def __unicode__(self):
return u"<{username} Badge Assertion for {slug} for {issuing_component}".format(
username=self.user.username, slug=self.badge_class.slug,
issuing_component=self.badge_class.issuing_component,
)
@classmethod
def assertions_for_user(cls, user, course_id=None):
"""
Get all assertions for a user, optionally constrained to a course.
"""
if course_id:
return cls.objects.filter(user=user, badge_class__course_id=course_id)
return cls.objects.filter(user=user)
class Meta(object):
app_label = "badges"
# Abstract model doesn't index this, so we have to.
BadgeAssertion._meta.get_field('created').db_index = True # pylint: disable=protected-access
class CourseCompleteImageConfiguration(models.Model):
"""
Contains the icon configuration for badges for a specific course mode.
"""
mode = models.CharField(
max_length=125,
help_text=_(u'The course mode for this badge image. For example, "verified" or "honor".'),
unique=True,
)
icon = models.ImageField(
# Actual max is 256KB, but need overhead for badge baking. This should be more than enough.
help_text=_(
u"Badge images must be square PNG files. The file size should be under 250KB."
),
upload_to='course_complete_badges',
validators=[validate_badge_image]
)
default = models.BooleanField(
help_text=_(
u"Set this value to True if you want this image to be the default image for any course modes "
u"that do not have a specified badge image. You can have only one default image."
),
default=False,
)
def __unicode__(self):
return u"<CourseCompleteImageConfiguration for '{mode}'{default}>".format(
mode=self.mode,
default=u" (default)" if self.default else u''
)
def clean(self):
"""
Make sure there's not more than one default.
"""
# pylint: disable=no-member
if self.default and CourseCompleteImageConfiguration.objects.filter(default=True).exclude(id=self.id):
raise ValidationError(_(u"There can be only one default image."))
@classmethod
def image_for_mode(cls, mode):
"""
Get the image for a particular mode.
"""
try:
return cls.objects.get(mode=mode).icon
except cls.DoesNotExist:
# Fall back to default, if there is one.
return cls.objects.get(default=True).icon
class Meta(object):
app_label = "badges"
class CourseEventBadgesConfiguration(ConfigurationModel):
"""
Determines the settings for meta course awards-- such as completing a certain
number of courses or enrolling in a certain number of them.
"""
courses_completed = models.TextField(
blank=True, default='',
help_text=_(
u"On each line, put the number of completed courses to award a badge for, a comma, and the slug of a "
u"badge class you have created that has the issuing component 'openedx__course'. "
u"For example: 3,enrolled_3_courses"
)
)
courses_enrolled = models.TextField(
blank=True, default='',
help_text=_(
u"On each line, put the number of enrolled courses to award a badge for, a comma, and the slug of a "
u"badge class you have created that has the issuing component 'openedx__course'. "
u"For example: 3,enrolled_3_courses"
)
)
course_groups = models.TextField(
blank=True, default='',
help_text=_(
u"Each line is a comma-separated list. The first item in each line is the slug of a badge class you "
u"have created that has an issuing component of 'openedx__course'. The remaining items in each line are "
u"the course keys the learner needs to complete to be awarded the badge. For example: "
u"slug_for_compsci_courses_group_badge,course-v1:CompSci+Course+First,course-v1:CompsSci+Course+Second"
)
)
def __unicode__(self):
return u"<CourseEventBadgesConfiguration ({})>".format(u"Enabled" if self.enabled else u"Disabled")
@property
def completed_settings(self):
"""
Parses the settings from the courses_completed field.
"""
return deserialize_count_specs(self.courses_completed)
@property
def enrolled_settings(self):
"""
        Parses the settings from the courses_enrolled field.
"""
return deserialize_count_specs(self.courses_enrolled)
@property
def course_group_settings(self):
"""
        Parses the course group settings. For example, the format is:
slug_for_compsci_courses_group_badge,course-v1:CompSci+Course+First,course-v1:CompsSci+Course+Second
"""
specs = self.course_groups.strip()
if not specs:
return {}
specs = [line.split(',', 1) for line in specs.splitlines()]
return {
slug.strip().lower(): [CourseKey.from_string(key.strip()) for key in keys.strip().split(',')]
for slug, keys in specs
}
def clean_fields(self, exclude=tuple()):
"""
Verify the settings are parseable.
"""
errors = {}
error_message = _(u"Please check the syntax of your entry.")
if 'courses_completed' not in exclude:
try:
self.completed_settings
except (ValueError, InvalidKeyError):
errors['courses_completed'] = [unicode(error_message)]
if 'courses_enrolled' not in exclude:
try:
self.enrolled_settings
except (ValueError, InvalidKeyError):
errors['courses_enrolled'] = [unicode(error_message)]
if 'course_groups' not in exclude:
store = modulestore()
try:
for key_list in self.course_group_settings.values():
for course_key in key_list:
if not store.get_course(course_key):
                            raise ValueError(u"The course {course_key} does not exist.".format(course_key=course_key))
except (ValueError, InvalidKeyError):
errors['course_groups'] = [unicode(error_message)]
if errors:
raise ValidationError(errors)
class Meta(object):
app_label = "badges"
| agpl-3.0 | -6,750,894,004,181,752,000 | 37.179567 | 127 | 0.631771 | false |
lcrees/twoq | twoq/tests/auto/queuing.py | 1 | 2927 |
# -*- coding: utf-8 -*-
'''auto queuing call chain test mixins'''
class AQMixin(object):
###########################################################################
## queue manipulation #####################################################
###########################################################################
def test_repr(self):
from stuf.six import strings
self.assertTrue(isinstance(
self.qclass([1, 2, 3, 4, 5, 6]).__repr__(), strings,
))
def test_ro(self):
self.assertListEqual(
self.qclass([1, 2, 3, 4, 5, 6]).ro().peek(), [1, 2, 3, 4, 5, 6],
)
def test_extend(self):
self.assertEqual(
self.qclass().extend([1, 2, 3, 4, 5, 6]).outsync().end(),
[1, 2, 3, 4, 5, 6],
)
def test_outextend(self):
self.assertEqual(
self.qclass().outextend([1, 2, 3, 4, 5, 6]).end(),
[1, 2, 3, 4, 5, 6],
)
def test_extendleft(self):
self.assertListEqual(
self.qclass().extendleft([1, 2, 3, 4, 5, 6]).outsync().end(),
[6, 5, 4, 3, 2, 1]
)
def test_append(self):
autoq = self.qclass().append('foo').outsync()
self.assertEqual(autoq.end(), 'foo')
def test_appendleft(self):
autoq = self.qclass().appendleft('foo').outsync()
self.assertEqual(autoq.end(), 'foo')
def test_inclear(self):
self.assertEqual(len(list(self.qclass([1, 2, 5, 6]).inclear())), 0)
def test_outclear(self):
self.assertEqual(
len(list(self.qclass([1, 2, 5, 6]).outclear().outgoing)), 0
)
###########################################################################
## queue balancing ########################################################
###########################################################################
def test_insync(self):
q = self.qclass([1, 2, 3, 4, 5, 6]).outshift().inclear().shift()
self.assertListEqual(list(q.incoming), list(q.outgoing))
def test_inshift(self):
q = self.qclass([1, 2, 3, 4, 5, 6]).outshift().sync()
self.assertListEqual(list(q.incoming), list(q.outgoing))
def test_outsync(self):
q = self.qclass([1, 2, 3, 4, 5, 6]).outshift()
self.assertListEqual(list(q.incoming), list(q.outgoing))
def test_outshift(self):
q = self.qclass([1, 2, 3, 4, 5, 6]).outsync()
self.assertListEqual(list(q.incoming), list(q.outgoing))
##########################################################################
# queue information ######################################################
##########################################################################
def test_results(self):
self.assertListEqual(
list(self.qclass(1, 2, 3, 4, 5, 6).outsync().results()),
[1, 2, 3, 4, 5, 6],
)
| bsd-3-clause | 2,572,901,103,623,996,000 | 33.845238 | 79 | 0.413393 | false |
shekkizh/TensorflowProjects | ImageArt/ImageColoring.py | 1 | 7092 |
__author__ = 'Charlie'
"""Image coloring by fully convolutional networks - incomplete """
import numpy as np
import tensorflow as tf
import os, sys, inspect
from datetime import datetime
import scipy.misc as misc
lib_path = os.path.realpath(
os.path.abspath(os.path.join(os.path.split(inspect.getfile(inspect.currentframe()))[0], "..")))
if lib_path not in sys.path:
sys.path.insert(0, lib_path)
import TensorflowUtils as utils
FLAGS = tf.flags.FLAGS
tf.flags.DEFINE_string("data_dir", "Data_zoo/CIFAR10_data/", """Path to the CIFAR10 data""")
tf.flags.DEFINE_string("mode", "train", "Network mode train/ test")
tf.flags.DEFINE_string("test_image_path", "", "Path to test image - read only if mode is test")
tf.flags.DEFINE_integer("batch_size", "128", "train batch size")
tf.flags.DEFINE_string("logs_dir", "logs/ImageColoring_logs/", """Path to save logs and checkpoint if needed""")
DATA_URL = 'http://www.cs.toronto.edu/~kriz/cifar-10-binary.tar.gz'
LEARNING_RATE = 1e-3
MAX_ITERATIONS = 100001
NUM_EXAMPLES_PER_EPOCH_FOR_TRAIN = 20000
IMAGE_SIZE = 32
def read_cifar10(filename_queue):
class CIFAR10Record(object):
pass
result = CIFAR10Record()
label_bytes = 1
result.height = IMAGE_SIZE
result.width = IMAGE_SIZE
result.depth = 3
image_bytes = result.height * result.width * result.depth
record_bytes = label_bytes + image_bytes
reader = tf.FixedLengthRecordReader(record_bytes=record_bytes)
result.key, value = reader.read(filename_queue)
record_bytes = tf.decode_raw(value, tf.uint8)
depth_major = tf.cast(tf.reshape(tf.slice(record_bytes, [label_bytes], [image_bytes]),
[result.depth, result.height, result.width]), tf.float32)
image = tf.transpose(depth_major, [1, 2, 0])
# extended_image = tf.reshape(image, (result.height, result.width, result.depth))
result.color_image = image
print result.color_image.get_shape()
print "Converting image to gray scale"
result.gray_image = 0.21 * result.color_image[ :, :, 2] + 0.72 * result.color_image[ :, :,
1] + 0.07 * result.color_image[ :, :, 0]
result.gray_image = tf.expand_dims(result.gray_image, 2)
print result.gray_image.get_shape()
return result
def get_image(image_dir):
image = misc.imread(image_dir)
image = np.ndarray.reshape(image.astype(np.float32), ((1,) + image.shape))
return image
def inputs():
data_dir = os.path.join(FLAGS.data_dir, 'cifar-10-batches-bin')
filenames = [os.path.join(data_dir, 'data_batch_%d.bin' % i) for i in xrange(1, 6)]
for f in filenames:
if not tf.gfile.Exists(f):
raise ValueError('Failed to find file: ' + f)
filename_queue = tf.train.string_input_producer(filenames)
read_input = read_cifar10(filename_queue)
num_preprocess_threads = 8
min_queue_examples = int(0.4 * NUM_EXAMPLES_PER_EPOCH_FOR_TRAIN)
print "Shuffling"
input_gray, input_colored = tf.train.shuffle_batch([read_input.gray_image, read_input.color_image],
batch_size=FLAGS.batch_size,
num_threads=num_preprocess_threads,
capacity=min_queue_examples + 3 * FLAGS.batch_size,
min_after_dequeue=min_queue_examples)
input_gray = (input_gray - 128) / 255.0
input_colored = (input_colored - 128) / 255.0
return input_gray, input_colored
def inference(image):
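    # Shape walk-through (derived from the weight shapes below): a 9x9 conv maps the single
    # grayscale channel to 32 feature maps, two strided 3x3 convs go 32->64->128 (downsampling,
    # per the conv2d_strided helpers), two strided transposed convs mirror that back up
    # (128->64->32), and a final 9x9 conv with tanh emits 3 colour channels in [-1, 1].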
W1 = utils.weight_variable_xavier_initialized([9, 9, 1, 32])
b1 = utils.bias_variable([32])
tf.histogram_summary("W1", W1)
tf.histogram_summary("b1", b1)
h_conv1 = tf.nn.relu(utils.conv2d_basic(image, W1, b1))
W2 = utils.weight_variable_xavier_initialized([3, 3, 32, 64])
b2 = utils.bias_variable([64])
tf.histogram_summary("W2", W2)
tf.histogram_summary("b2", b2)
h_conv2 = tf.nn.relu(utils.conv2d_strided(h_conv1, W2, b2))
W3 = utils.weight_variable_xavier_initialized([3, 3, 64, 128])
b3 = utils.bias_variable([128])
tf.histogram_summary("W3", W3)
tf.histogram_summary("b3", b3)
h_conv3 = tf.nn.relu(utils.conv2d_strided(h_conv2, W3, b3))
# upstrides
W4 = utils.weight_variable_xavier_initialized([3, 3, 64, 128])
b4 = utils.bias_variable([64])
tf.histogram_summary("W4", W4)
tf.histogram_summary("b4", b4)
h_conv4 = tf.nn.relu(utils.conv2d_transpose_strided(h_conv3, W4, b4))
W5 = utils.weight_variable_xavier_initialized([3, 3, 32, 64])
b5 = utils.bias_variable([32])
tf.histogram_summary("W5", W5)
tf.histogram_summary("b5", b5)
h_conv5 = tf.nn.relu(utils.conv2d_transpose_strided(h_conv4, W5, b5))
W6 = utils.weight_variable_xavier_initialized([9, 9, 32, 3])
b6 = utils.bias_variable([3])
tf.histogram_summary("W6", W6)
tf.histogram_summary("b6", b6)
pred_image = tf.nn.tanh(utils.conv2d_basic(h_conv5, W6, b6))
return pred_image
def loss(pred, colored):
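    # Note: tf.nn.l2_loss(t) returns sum(t ** 2) / 2, so 2 * l2_loss is the plain sum of squared
    # differences; taking the square root and dividing by the batch size yields the value logged
    # as "RMSE" below (a scaled root-sum-of-squares rather than a strict per-pixel RMSE).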
rmse = tf.sqrt(2 * tf.nn.l2_loss(tf.sub(colored, pred))) / FLAGS.batch_size
tf.scalar_summary("RMSE", rmse)
return rmse
def train(loss_val, step):
learning_rate = tf.train.exponential_decay(LEARNING_RATE, step, 0.4 * MAX_ITERATIONS, 0.99)
train_op = tf.train.AdamOptimizer(learning_rate).minimize(loss_val, global_step=step)
return train_op
def main(argv=None):
utils.maybe_download_and_extract(FLAGS.data_dir, DATA_URL, is_tarfile=True)
print "Setting up model..."
global_step = tf.Variable(0,trainable=False)
gray, color = inputs()
pred = 255 * inference(gray) + 128
tf.image_summary("Gray", gray, max_images=1)
tf.image_summary("Ground_truth", color, max_images=1)
tf.image_summary("Prediction", pred, max_images=1)
image_loss = loss(pred, color)
train_op = train(image_loss, global_step)
summary_op = tf.merge_all_summaries()
with tf.Session() as sess:
print "Setting up summary writer, queue, saver..."
sess.run(tf.initialize_all_variables())
summary_writer = tf.train.SummaryWriter(FLAGS.logs_dir, sess.graph)
saver = tf.train.Saver()
ckpt = tf.train.get_checkpoint_state(FLAGS.logs_dir)
if ckpt and ckpt.model_checkpoint_path:
print "Restoring model from checkpoint..."
saver.restore(sess, ckpt.model_checkpoint_path)
tf.train.start_queue_runners(sess)
for step in xrange(MAX_ITERATIONS):
if step % 400 == 0:
loss_val, summary_str = sess.run([image_loss, summary_op])
print "Step %d, Loss: %g" % (step, loss_val)
summary_writer.add_summary(summary_str, global_step=step)
if step % 1000 == 0:
saver.save(sess, FLAGS.logs_dir + "model.ckpt", global_step=step)
print "%s" % datetime.now()
sess.run(train_op)
if __name__ == "__main__":
tf.app.run()
| mit | -4,695,333,736,710,493,000 | 36.925134 | 112 | 0.628173 | false |
tbabej/freeipa | ipalib/pkcs10.py | 1 | 9170 |
# Authors:
# Rob Crittenden <[email protected]>
#
# Copyright (C) 2010 Red Hat
# see file 'COPYING' for use and warranty information
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import print_function
import sys
import base64
import nss.nss as nss
from pyasn1.type import univ, char, namedtype, tag
from pyasn1.codec.der import decoder
import six
if six.PY3:
unicode = str
PEM = 0
DER = 1
SAN_DNSNAME = 'DNS name'
SAN_RFC822NAME = 'RFC822 Name'
SAN_OTHERNAME_UPN = 'Other Name (OID.1.3.6.1.4.1.311.20.2.3)'
SAN_OTHERNAME_KRB5PRINCIPALNAME = 'Other Name (OID.1.3.6.1.5.2.2)'
def get_subject(csr, datatype=PEM):
"""
Given a CSR return the subject value.
This returns an nss.DN object.
"""
request = load_certificate_request(csr, datatype)
try:
return request.subject
finally:
del request
def get_extensions(csr, datatype=PEM):
"""
Given a CSR return OIDs of certificate extensions.
The return value is a tuple of strings
"""
request = load_certificate_request(csr, datatype)
# Work around a bug in python-nss where nss.oid_dotted_decimal
# errors on unrecognised OIDs
#
# https://bugzilla.redhat.com/show_bug.cgi?id=1246729
#
def get_prefixed_oid_str(ext):
"""Returns a string like 'OID.1.2...'."""
if ext.oid_tag == 0:
return repr(ext)
else:
return nss.oid_dotted_decimal(ext.oid)
return tuple(get_prefixed_oid_str(ext)[4:]
for ext in request.extensions)
class _PrincipalName(univ.Sequence):
componentType = namedtype.NamedTypes(
namedtype.NamedType('name-type', univ.Integer().subtype(
explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))
),
namedtype.NamedType('name-string', univ.SequenceOf(char.GeneralString()).subtype(
explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))
),
)
class _KRB5PrincipalName(univ.Sequence):
componentType = namedtype.NamedTypes(
namedtype.NamedType('realm', char.GeneralString().subtype(
explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))
),
namedtype.NamedType('principalName', _PrincipalName().subtype(
explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))
),
)
def _decode_krb5principalname(data):
principal = decoder.decode(data, asn1Spec=_KRB5PrincipalName())[0]
realm = (str(principal['realm']).replace('\\', '\\\\')
.replace('@', '\\@'))
name = principal['principalName']['name-string']
name = '/'.join(str(n).replace('\\', '\\\\')
.replace('/', '\\/')
.replace('@', '\\@') for n in name)
name = '%s@%s' % (name, realm)
return name
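# Illustration with hypothetical values: a decoded principal with realm 'EXAMPLE.COM' and
# name components ['host', 'server.example.com'] is rendered by the function above as
# 'host/[email protected]' (special characters in the components are backslash-escaped).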
class _AnotherName(univ.Sequence):
componentType = namedtype.NamedTypes(
namedtype.NamedType('type-id', univ.ObjectIdentifier()),
namedtype.NamedType('value', univ.Any().subtype(
explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))
),
)
class _GeneralName(univ.Choice):
componentType = namedtype.NamedTypes(
namedtype.NamedType('otherName', _AnotherName().subtype(
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))
),
namedtype.NamedType('rfc822Name', char.IA5String().subtype(
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))
),
namedtype.NamedType('dNSName', char.IA5String().subtype(
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))
),
namedtype.NamedType('x400Address', univ.Sequence().subtype(
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 3))
),
namedtype.NamedType('directoryName', univ.Choice().subtype(
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 4))
),
namedtype.NamedType('ediPartyName', univ.Sequence().subtype(
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 5))
),
namedtype.NamedType('uniformResourceIdentifier', char.IA5String().subtype(
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 6))
),
namedtype.NamedType('iPAddress', univ.OctetString().subtype(
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 7))
),
namedtype.NamedType('registeredID', univ.ObjectIdentifier().subtype(
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 8))
),
)
class _SubjectAltName(univ.SequenceOf):
componentType = _GeneralName()
def get_subjectaltname(csr, datatype=PEM):
"""
Given a CSR return the subjectaltname value, if any.
The return value is a tuple of strings or None
"""
request = load_certificate_request(csr, datatype)
for extension in request.extensions:
if extension.oid_tag == nss.SEC_OID_X509_SUBJECT_ALT_NAME:
break
else:
return None
del request
nss_names = nss.x509_alt_name(extension.value, nss.AsObject)
asn1_names = decoder.decode(extension.value.data,
asn1Spec=_SubjectAltName())[0]
names = []
for nss_name, asn1_name in zip(nss_names, asn1_names):
name_type = nss_name.type_string
if name_type == SAN_OTHERNAME_KRB5PRINCIPALNAME:
name = _decode_krb5principalname(asn1_name['otherName']['value'])
else:
name = nss_name.name
names.append((name_type, name))
return tuple(names)
# Unfortunately, NSS can only parse the extension request attribute, so
# we have to parse friendly name ourselves (see RFC 2986)
class _Attribute(univ.Sequence):
componentType = namedtype.NamedTypes(
namedtype.NamedType('type', univ.ObjectIdentifier()),
namedtype.NamedType('values', univ.Set()),
)
class _Attributes(univ.SetOf):
componentType = _Attribute()
class _CertificationRequestInfo(univ.Sequence):
componentType = namedtype.NamedTypes(
namedtype.NamedType('version', univ.Integer()),
namedtype.NamedType('subject', univ.Sequence()),
namedtype.NamedType('subjectPublicKeyInfo', univ.Sequence()),
namedtype.OptionalNamedType('attributes', _Attributes().subtype(
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0)))
)
class _CertificationRequest(univ.Sequence):
componentType = namedtype.NamedTypes(
namedtype.NamedType('certificationRequestInfo',
_CertificationRequestInfo()),
namedtype.NamedType('signatureAlgorithm', univ.Sequence()),
namedtype.NamedType('signatureValue', univ.BitString()),
)
_FRIENDLYNAME = univ.ObjectIdentifier('1.2.840.113549.1.9.20')
def get_friendlyname(csr, datatype=PEM):
"""
Given a CSR return the value of the friendlyname attribute, if any.
The return value is a string.
"""
if datatype == PEM:
csr = strip_header(csr)
csr = base64.b64decode(csr)
csr = decoder.decode(csr, asn1Spec=_CertificationRequest())[0]
for attribute in csr['certificationRequestInfo']['attributes']:
if attribute['type'] == _FRIENDLYNAME:
return unicode(attribute['values'][0])
return None
def strip_header(csr):
"""
Remove the header and footer from a CSR.
"""
headerlen = 40
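    # 40 == len("-----BEGIN NEW CERTIFICATE REQUEST-----") + 1, i.e. the marker plus the newline
    # assumed to follow it; the 36 used below is the same for the marker without "NEW ".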
s = csr.find("-----BEGIN NEW CERTIFICATE REQUEST-----")
if s == -1:
headerlen = 36
s = csr.find("-----BEGIN CERTIFICATE REQUEST-----")
if s >= 0:
e = csr.find("-----END")
csr = csr[s+headerlen:e]
return csr
def load_certificate_request(csr, datatype=PEM):
"""
Given a base64-encoded certificate request, with or without the
header/footer, return a request object.
"""
if datatype == PEM:
csr = strip_header(csr)
csr = base64.b64decode(csr)
# A fail-safe so we can always read a CSR. python-nss/NSS will segfault
# otherwise
if not nss.nss_is_initialized():
nss.nss_init_nodb()
return nss.CertificateRequest(csr)
if __name__ == '__main__':
nss.nss_init_nodb()
# Read PEM request from stdin and print out its components
csrlines = sys.stdin.readlines()
csr = ''.join(csrlines)
print(load_certificate_request(csr))
print(get_subject(csr))
print(get_subjectaltname(csr))
print(get_friendlyname(csr))
| gpl-3.0 | -1,926,462,383,126,963,000 | 33.603774 | 89 | 0.648637 | false |
tshirtman/ultimate-smash-friends | usf/screens/configure.py | 1 | 2357 |
################################################################################
# copyright 2009 Gabriel Pettier <[email protected]> #
# #
# This file is part of Ultimate Smash Friends. #
# #
# Ultimate Smash Friends is free software: you can redistribute it and/or #
# modify it under the terms of the GNU General Public License as published by #
# the Free Software Foundation, either version 3 of the License, or (at your #
# option) any later version. #
# #
# Ultimate Smash Friends is distributed in the hope that it will be useful, but#
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or#
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for #
# more details. #
# #
# You should have received a copy of the GNU General Public License along with #
# Ultimate Smash Friends. If not, see <http://www.gnu.org/licenses/>. #
################################################################################
'''
The Base Configuration screen, show buttons to other configuration screens.
'''
from usf.screens.screen import Screen
from usf.widgets.box import VBox
from usf.widgets.button import Button
from usf.translation import _
class Configure(Screen):
def init(self):
self.add(VBox())
self.name = _("configure")
#I18N:option screen
self.widget.add(Button(_('Audio')))
self.widget.add(Button(_('Display')))
self.widget.add(Button(_('Keyboard')))
self.widget.add(Button(_('Back')), margin=100)
def callback(self, action):
if action.text == _('Audio'):
return {'goto': 'sound'}
if action.text == _('Display'):
return {'goto': 'display'}
if action.text == _('Keyboard'):
return {'goto': 'keyboard'}
if action.text == _('Back'):
return {'goto': 'back'}
| gpl-3.0 | -8,758,575,220,416,393,000 | 42.648148 | 80 | 0.474332 | false |
nickmilon/mongoUtils | mongoUtils/importsExports.py | 1 | 8999 |
"""Classes used to import/export data to mongoDB
"""
from Hellas.Thebes import format_header
xlrd = None # reserved to import xlrd on demand
def _xlrd_on_demand():
global xlrd
if xlrd is None:
try:
import xlrd
except ImportError:
print ("this module requires xlrd library please install (pip install xlrd")
raise
return xlrd
def import_workbook(workbook, db, fields=None, ws_options={'dt_python': True}, stats_every=1000):
"""save all workbook's sheets to a db
consider using :class:`~ImportXls` class instead which is more flexible but imports only a single sheet
:Parameters: see :class:`~ImportXls` class
:Example:
>>> from pymongo import MongoClient
>>> from mongoUtils import _PATH_TO_DATA
>>> db = MongoClient().test
>>> res = import_workbook(_PATH_TO_DATA + "example_workbook.xlsx", db)
>>> res
[{'rows': 368, 'db': 'test', 'collection': 'weather'}, {'rows': 1007, 'db': 'test', 'collection': 'locations'}]
"""
_xlrd_on_demand()
workbook = xlrd.open_workbook(workbook, on_demand=True)
return [ImportXls(workbook, i, db, fields=fields, ws_options=ws_options, stats_every=stats_every)()
for i in range(0, workbook.nsheets)]
class Import(object):
"""generic class for importing into a mongoDB collection, successors should use/extend this class
:Parameters:
        - db: a pymongo database object that will be used for output
- collection: a pymongo collection object that will be used for output
- drop_collection: (defaults to True)
- True drops output collection on init before writing to it
- False appends to output collection
- stats_every: int print import stats every stats_every rows or 0 to cancel stats (defaults to 10000)
"""
format_stats = "|{db:16s}|{collection:16s}|{rows:15,d}|"
format_stats_header = format_header(format_stats)
def __init__(self, collection, drop_collection=True, stats_every=10000):
if drop_collection:
collection.database.drop_collection(collection.name)
self.info = {'db': collection.database.name, 'collection': collection.name, 'rows': 0}
self.stats_every = stats_every
self.collection = collection
def import_to_collection(self):
"""successors should implement this"""
raise NotImplementedError
def _import_to_collection_before(self):
"""successors can call this or implement their's"""
if self.stats_every > 0:
print(self.format_stats_header)
def _import_to_collection_after(self):
"""successors can call this or implement their's"""
if self.stats_every > 0:
self.print_stats()
def print_stats(self):
print(self.format_stats.format(**self.info))
def __call__(self):
return self.import_to_collection()
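# A minimal successor sketch (hypothetical, not part of the module): subclasses only need to
# provide import_to_collection(), keep self.info['rows'] up to date, and reuse the before/after
# hooks defined above.
#
#   class ImportDocs(Import):
#       def __init__(self, docs, collection, **kwargs):
#           super(ImportDocs, self).__init__(collection, **kwargs)
#           self.docs = docs
#
#       def import_to_collection(self):
#           self._import_to_collection_before()
#           for doc in self.docs:
#               self.info['rows'] += 1
#               self.collection.insert_one(doc)
#           self._import_to_collection_after()
#           return self.info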
class ImportXls(Import):
"""save an an xls sheet to a collection
`see <https://github.com/python-excel/xlrd>`_
:Parameters:
- workbook: path to a workbook or an xlrd workbook object
- sheet: name of a work sheet in workbook or an int (sheet number in workbook)
- db: a pymongo database object
- coll_name: str output collection name or None to create name from sheet name (defaults to None)
        - row_start: int or None; starting row, or None to start from the first row (defaults to None)
        - row_end: int or None; ending row, or None to end at the last row (defaults to None)
- fields:
- a list with field names
- or True (to treat first row as field names)
- or None (for auto creating field names i.e: [fld_1, fld_2, etc]
- or a function that:
- takes one argument (a list of row values)
- returns a dict (if this dict contains a key '_id' this value will be used for _id)
- >>> lambda x: {'coordinates': [x[0] , x[1]]}
- ws_options: (optional) a dictionary specifying how to treat cell values
- dt_python : bool convert dates to python datetime
        - integers_only : round float values to int, helpful because all numeric cell values are represented as floats in sheets
- negatives_to_0 : treat all negative numbers as 0's
- drop_collection: (defaults to True)
- True drops output collection on init before writing to it
- False appends to output collection
- stats_every: int print import stats every stats_every rows or 0 to cancel stats (defaults to 10000)
- drop_collection: if True drops collection on init otherwise appends to collection
:Example:
>>> from pymongo import MongoClient
>>> from mongoUtils import _PATH_TO_DATA
>>> db = MongoClient().test
>>> res = ImportXls(_PATH_TO_DATA + "example_workbook.xlsx", 0, db)()
>>> res
{'rows': 367, 'db': u'test', 'collection': u'weather'}
"""
def __init__(self,
workbook, sheet,
db, coll_name=None,
row_start=None, row_end=None,
fields=True,
ws_options={'dt_python': True, 'integers_only': False, 'negatives_to_0': False},
stats_every=10000,
drop_collection=True):
_xlrd_on_demand()
if not isinstance(workbook, xlrd.book.Book):
workbook = xlrd.open_workbook(workbook, on_demand=True)
self.workbook = workbook
self.sheet = workbook.sheet_by_index(sheet) if isinstance(sheet, int) else workbook.sheet_by_name(sheet)
self._ws_options = {}
self.ws_options_set(ws_options)
coll_name = self.fix_name(self.sheet.name) if coll_name is None else coll_name
if row_start is None:
row_start = 1 if fields is True else 0
self.row_start = row_start
self.row_end = row_end
collection = db[coll_name]
super(ImportXls, self).__init__(collection, drop_collection=drop_collection, stats_every=stats_every)
self.auto_field_names(fields)
@property
def ws_options(self):
return self._ws_options
def ws_options_set(self, options_dict):
self._ws_options.update(options_dict)
def fix_name(self, name, cnt=0):
if name == '':
return 'fld_{}'.format(cnt)
else:
return name.replace(' ', '_').replace('.', '_').replace('$', '_')
def auto_field_names(self, fields):
row0_values = self.sheet.row_values(0)
if fields is True:
self._fields_or_fun = [self.fix_name(fn, cnt) for cnt, fn in enumerate(row0_values)]
elif fields is None:
self._fields_or_fun = ['fld_{}'.format(i) for i in range(len(row0_values))]
elif isinstance(fields, list):
self._fields_or_fun = [self.fix_name(fn, cnt) for cnt, fn in enumerate(fields)]
else: # then it has to be function
self._fields_or_fun = fields
return self._fields_or_fun
def row_to_doc(self, valueslist, _id=None):
if isinstance(self._fields_or_fun, list):
doc = dict(list(zip(self._fields_or_fun, valueslist)))
else:
doc = self._fields_or_fun(valueslist)
if _id is not None and doc.get('_id') is None:
doc['_id'] = _id
return doc
def ws_convert_cell(self, cl):
"""
:Parameters:
- cl an xlrd cell object
"""
# XL_CELL_BLANK XL_CELL_BOOLEAN XL_CELL_NUMBER XL_CELL_TEXT
tp = cl.ctype
vl = cl.value
if tp == xlrd.XL_CELL_NUMBER: # number
if self._ws_options.get('integers_only') is True:
if vl % 1 == 0:
vl = int(vl + 0.49999) # kind of round
if vl < 0 and self._ws_options.get('negatives_to_0'):
vl = 0
elif tp == xlrd.XL_CELL_DATE and self._ws_options.get('dt_python') is True:
vl = xlrd.xldate.xldate_as_datetime(vl, self.sheet.book.datemode)
return vl
def import_to_collection(self):
super(ImportXls, self)._import_to_collection_before()
outlist = []
for i in range(self.row_start, self.row_end or self.sheet.nrows):
self.info['rows'] += 1
row_values = [self.ws_convert_cell(cl) for cl in self.sheet.row(i)]
outlist.append(self.row_to_doc(row_values, i))
if self.stats_every and i % self.stats_every == 0:
self.print_stats()
if len(outlist) == 200:
try:
self.collection.insert_many(outlist)
outlist = []
except Exception:
print (outlist)
raise
if len(outlist) > 0:
self.collection.insert_many(outlist)
super(ImportXls, self)._import_to_collection_after()
return self.info
| apache-2.0 | -6,477,179,523,395,941,000 | 40.662037 | 119 | 0.595066 | false |
Leberwurscht/OfflineDict | buildindex.py | 1 | 1491 |
#!/usr/bin/python
# -*- coding: utf8 -*-
import sys, re
filename = sys.argv[1]
tokensize = int(sys.argv[2])
numbersize = int(sys.argv[3])
numbersize2 = int(sys.argv[4])
def normalize(s):
r = s.lower()
r = r.replace(u'ä',u'a');
r = r.replace(u'ö',u'o');
r = r.replace(u'ü',u'u');
r = r.replace(u'Ä',u'A');
r = r.replace(u'Ö',u'O');
r = r.replace(u'Ü',u'U');
r = r.replace(u'ß',u'ss');
r = r.replace(u'ñ',u'n');
r = r.replace(u'á',u'a');
r = r.replace(u'é',u'e');
r = r.replace(u'í',u'i');
r = r.replace(u'ó',u'o');
r = r.replace(u'ú',u'u');
r = r.replace(u'Á',u'A');
r = r.replace(u'É',u'E');
r = r.replace(u'Í',u'I');
r = r.replace(u'Ó',u'O');
r = r.replace(u'Ú',u'U');
return r.encode("utf8")
pos = 0
for line in open(filename):
linelength = len(line)
if line.strip() and not line[0]=="#":
length = len(line)
line = unicode(line, 'utf8')
i=line.rindex('\t')
line = line[0:i]
red = re.sub(r'\[.*?\]|\{.*?\}','',line,flags=re.UNICODE).strip()
tokens = re.split(r'\W', red, flags=re.UNICODE)
for token in tokens:
ntoken = normalize(token)
if len(ntoken)>tokensize: raise Exception("increase tokensize")
if pos>10**numbersize-1: raise Exception("increase numbersize")
if length>10**numbersize2-1: raise Exception("increase numbersize2")
if ntoken: print ("%-"+str(tokensize)+"s %"+str(numbersize)+"d %"+str(numbersize2)+"d") % (ntoken, pos, length)
pos += linelength
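# Reading the index back (a sketch, not part of this script): each emitted line holds a
# normalized token, the byte offset of the matching dictionary line and its length, so a
# hypothetical consumer could look a token up and then do:
#   dictfile.seek(pos)
#   entry = dictfile.read(length)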
| mpl-2.0 | 982,324,380,487,193,000 | 28.46 | 117 | 0.570944 | false |
isohybrid/dotfile | vim/bundle/git:--github.com-klen-python-mode/pylibs/logilab/astng/scoped_nodes.py | 1 | 34414 |
# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
# contact http://www.logilab.fr/ -- mailto:[email protected]
# copyright 2003-2010 Sylvain Thenault, all rights reserved.
# contact mailto:[email protected]
#
# This file is part of logilab-astng.
#
# logilab-astng is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation, either version 2.1 of the License, or (at your
# option) any later version.
#
# logilab-astng is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
# for more details.
#
# You should have received a copy of the GNU Lesser General Public License along
# with logilab-astng. If not, see <http://www.gnu.org/licenses/>.
"""This module contains the classes for "scoped" node, i.e. which are opening a
new local scope in the language definition : Module, Class, Function (and
Lambda, GenExpr, DictComp and SetComp to some extent).
"""
from __future__ import with_statement
__doctype__ = "restructuredtext en"
import sys
from itertools import chain
from logilab.common.compat import builtins
from logilab.common.decorators import cached
from logilab.astng import BUILTINS_MODULE
from logilab.astng.exceptions import NotFoundError, NoDefault, \
ASTNGBuildingException, InferenceError
from logilab.astng.node_classes import Const, DelName, DelAttr, \
Dict, From, List, Name, Pass, Raise, Return, Tuple, Yield, \
are_exclusive, LookupMixIn, const_factory as cf, unpack_infer
from logilab.astng.bases import NodeNG, InferenceContext, Instance,\
YES, Generator, UnboundMethod, BoundMethod, _infer_stmts, copy_context, \
BUILTINS_NAME
from logilab.astng.mixins import FilterStmtsMixin
from logilab.astng.bases import Statement
from logilab.astng.manager import ASTNGManager
def remove_nodes(func, cls):
def wrapper(*args, **kwargs):
nodes = [n for n in func(*args, **kwargs) if not isinstance(n, cls)]
if not nodes:
raise NotFoundError()
return nodes
return wrapper
def function_to_method(n, klass):
if isinstance(n, Function):
if n.type == 'classmethod':
return BoundMethod(n, klass)
if n.type != 'staticmethod':
return UnboundMethod(n)
return n
def std_special_attributes(self, name, add_locals=True):
if add_locals:
locals = self.locals
else:
locals = {}
if name == '__name__':
return [cf(self.name)] + locals.get(name, [])
if name == '__doc__':
return [cf(self.doc)] + locals.get(name, [])
if name == '__dict__':
return [Dict()] + locals.get(name, [])
raise NotFoundError(name)
MANAGER = ASTNGManager()
def builtin_lookup(name):
"""lookup a name into the builtin module
return the list of matching statements and the astng for the builtin
module
"""
builtin_astng = MANAGER.astng_from_module(builtins)
if name == '__dict__':
return builtin_astng, ()
try:
stmts = builtin_astng.locals[name]
except KeyError:
stmts = ()
return builtin_astng, stmts
# TODO move this Mixin to mixins.py; problem: 'Function' in _scope_lookup
class LocalsDictNodeNG(LookupMixIn, NodeNG):
""" this class provides locals handling common to Module, Function
and Class nodes, including a dict like interface for direct access
to locals information
"""
# attributes below are set by the builder module or by raw factories
# dictionary of locals with name as key and node defining the local as
# value
def qname(self):
"""return the 'qualified' name of the node, eg module.name,
module.class.name ...
"""
if self.parent is None:
return self.name
return '%s.%s' % (self.parent.frame().qname(), self.name)
def frame(self):
"""return the first parent frame node (i.e. Module, Function or Class)
"""
return self
def scope(self):
"""return the first node defining a new scope (i.e. Module,
Function, Class, Lambda but also GenExpr, DictComp and SetComp)
"""
return self
def _scope_lookup(self, node, name, offset=0):
"""XXX method for interfacing the scope lookup"""
try:
stmts = node._filter_stmts(self.locals[name], self, offset)
except KeyError:
stmts = ()
if stmts:
return self, stmts
if self.parent: # i.e. not Module
# nested scope: if parent scope is a function, that's fine
# else jump to the module
pscope = self.parent.scope()
if not pscope.is_function:
pscope = pscope.root()
return pscope.scope_lookup(node, name)
return builtin_lookup(name) # Module
def set_local(self, name, stmt):
"""define <name> in locals (<stmt> is the node defining the name)
if the node is a Module node (i.e. has globals), add the name to
globals
if the name is already defined, ignore it
"""
#assert not stmt in self.locals.get(name, ()), (self, stmt)
self.locals.setdefault(name, []).append(stmt)
__setitem__ = set_local
def _append_node(self, child):
"""append a child, linking it in the tree"""
self.body.append(child)
child.parent = self
def add_local_node(self, child_node, name=None):
"""append a child which should alter locals to the given node"""
if name != '__class__':
# add __class__ node as a child will cause infinite recursion later!
self._append_node(child_node)
self.set_local(name or child_node.name, child_node)
def __getitem__(self, item):
"""method from the `dict` interface returning the first node
associated with the given name in the locals dictionary
:type item: str
:param item: the name of the locally defined object
:raises KeyError: if the name is not defined
"""
return self.locals[item][0]
def __iter__(self):
"""method from the `dict` interface returning an iterator on
`self.keys()`
"""
return iter(self.keys())
def keys(self):
"""method from the `dict` interface returning a tuple containing
locally defined names
"""
return self.locals.keys()
def values(self):
"""method from the `dict` interface returning a tuple containing
locally defined nodes which are instance of `Function` or `Class`
"""
return [self[key] for key in self.keys()]
def items(self):
"""method from the `dict` interface returning a list of tuple
containing each locally defined name with its associated node,
which is an instance of `Function` or `Class`
"""
return zip(self.keys(), self.values())
def __contains__(self, name):
return name in self.locals
has_key = __contains__
# Module #####################################################################
class Module(LocalsDictNodeNG):
_astng_fields = ('body',)
fromlineno = 0
lineno = 0
# attributes below are set by the builder module or by raw factories
# the file from which as been extracted the astng representation. It may
# be None if the representation has been built from a built-in module
file = None
# the module name
name = None
# boolean for astng built from source (i.e. ast)
pure_python = None
# boolean for package module
package = None
# dictionary of globals with name as key and node defining the global
# as value
globals = None
# names of python special attributes (handled by getattr impl.)
special_attributes = set(('__name__', '__doc__', '__file__', '__path__',
'__dict__'))
# names of module attributes available through the global scope
scope_attrs = set(('__name__', '__doc__', '__file__', '__path__'))
def __init__(self, name, doc, pure_python=True):
self.name = name
self.doc = doc
self.pure_python = pure_python
self.locals = self.globals = {}
self.body = []
def block_range(self, lineno):
"""return block line numbers.
start from the beginning whatever the given lineno
"""
return self.fromlineno, self.tolineno
def scope_lookup(self, node, name, offset=0):
if name in self.scope_attrs and not name in self.locals:
try:
return self, self.getattr(name)
except NotFoundError:
return self, ()
return self._scope_lookup(node, name, offset)
def pytype(self):
return '%s.module' % BUILTINS_MODULE
def display_type(self):
return 'Module'
def getattr(self, name, context=None, ignore_locals=False):
if name in self.special_attributes:
if name == '__file__':
return [cf(self.file)] + self.locals.get(name, [])
if name == '__path__' and self.package:
return [List()] + self.locals.get(name, [])
return std_special_attributes(self, name)
if not ignore_locals and name in self.locals:
return self.locals[name]
if self.package:
try:
return [self.import_module(name, relative_only=True)]
except ASTNGBuildingException:
raise NotFoundError(name)
except Exception:# XXX pylint tests never pass here; do we need it?
import traceback
traceback.print_exc()
raise NotFoundError(name)
getattr = remove_nodes(getattr, DelName)
def igetattr(self, name, context=None):
"""inferred getattr"""
# set lookup name since this is necessary to infer on import nodes for
# instance
context = copy_context(context)
context.lookupname = name
try:
return _infer_stmts(self.getattr(name, context), context, frame=self)
except NotFoundError:
raise InferenceError(name)
def fully_defined(self):
"""return True if this module has been built from a .py file
and so contains a complete representation including the code
"""
return self.file is not None and self.file.endswith('.py')
def statement(self):
"""return the first parent node marked as statement node
consider a module as a statement...
"""
return self
def previous_sibling(self):
"""module has no sibling"""
return
def next_sibling(self):
"""module has no sibling"""
return
if sys.version_info < (2, 8):
def absolute_import_activated(self):
for stmt in self.locals.get('absolute_import', ()):
if isinstance(stmt, From) and stmt.modname == '__future__':
return True
return False
else:
absolute_import_activated = lambda self: True
def import_module(self, modname, relative_only=False, level=None):
"""import the given module considering self as context"""
if relative_only and level is None:
level = 0
absmodname = self.relative_to_absolute_name(modname, level)
try:
return MANAGER.astng_from_module_name(absmodname)
except ASTNGBuildingException:
# we only want to import a sub module or package of this module,
# skip here
if relative_only:
raise
return MANAGER.astng_from_module_name(modname)
def relative_to_absolute_name(self, modname, level):
"""return the absolute module name for a relative import.
The relative import can be implicit or explicit.
"""
        # XXX this returns nonsense when called on an absolute import
# like 'pylint.checkers.logilab.astng.utils'
# XXX doesn't return absolute name if self.name isn't absolute name
if self.absolute_import_activated() and level is None:
return modname
if level:
if self.package:
level = level - 1
package_name = self.name.rsplit('.', level)[0]
elif self.package:
package_name = self.name
else:
package_name = self.name.rsplit('.', 1)[0]
if package_name:
if not modname:
return package_name
return '%s.%s' % (package_name, modname)
return modname
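    # Illustration with hypothetical module names: for a plain module 'pkg.sub.mod',
    # relative_to_absolute_name('utils', level=1) -> 'pkg.sub.utils'; for the package
    # 'pkg.sub' itself (an __init__ module), the same call also yields 'pkg.sub.utils'.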
def wildcard_import_names(self):
"""return the list of imported names when this module is 'wildcard
imported'
It doesn't include the '__builtins__' name which is added by the
current CPython implementation of wildcard imports.
"""
# take advantage of a living module if it exists
try:
living = sys.modules[self.name]
except KeyError:
pass
else:
try:
return living.__all__
except AttributeError:
return [name for name in living.__dict__.keys()
if not name.startswith('_')]
# else lookup the astng
#
# We separate the different steps of lookup in try/excepts
# to avoid catching too many Exceptions
# However, we can not analyse dynamically constructed __all__
try:
all = self['__all__']
except KeyError:
return [name for name in self.keys() if not name.startswith('_')]
try:
explicit = all.assigned_stmts().next()
except InferenceError:
return [name for name in self.keys() if not name.startswith('_')]
except AttributeError:
# not an assignment node
# XXX infer?
return [name for name in self.keys() if not name.startswith('_')]
try:
# should be a Tuple/List of constant string / 1 string not allowed
return [const.value for const in explicit.elts]
except AttributeError:
return [name for name in self.keys() if not name.startswith('_')]
class ComprehensionScope(LocalsDictNodeNG):
def frame(self):
return self.parent.frame()
scope_lookup = LocalsDictNodeNG._scope_lookup
class GenExpr(ComprehensionScope):
_astng_fields = ('elt', 'generators')
def __init__(self):
self.locals = {}
self.elt = None
self.generators = []
class DictComp(ComprehensionScope):
_astng_fields = ('key', 'value', 'generators')
def __init__(self):
self.locals = {}
self.key = None
self.value = None
self.generators = []
class SetComp(ComprehensionScope):
_astng_fields = ('elt', 'generators')
def __init__(self):
self.locals = {}
self.elt = None
self.generators = []
class _ListComp(NodeNG):
"""class representing a ListComp node"""
_astng_fields = ('elt', 'generators')
elt = None
generators = None
if sys.version_info >= (3, 0):
class ListComp(_ListComp, ComprehensionScope):
"""class representing a ListComp node"""
def __init__(self):
self.locals = {}
else:
class ListComp(_ListComp):
"""class representing a ListComp node"""
# Function ###################################################################
class Lambda(LocalsDictNodeNG, FilterStmtsMixin):
_astng_fields = ('args', 'body',)
# function's type, 'function' | 'method' | 'staticmethod' | 'classmethod'
type = 'function'
def __init__(self):
self.locals = {}
self.args = []
self.body = []
def pytype(self):
if 'method' in self.type:
return '%s.instancemethod' % BUILTINS_MODULE
return '%s.function' % BUILTINS_MODULE
def display_type(self):
if 'method' in self.type:
return 'Method'
return 'Function'
def callable(self):
return True
def argnames(self):
"""return a list of argument names"""
if self.args.args: # maybe None with builtin functions
names = _rec_get_names(self.args.args)
else:
names = []
if self.args.vararg:
names.append(self.args.vararg)
if self.args.kwarg:
names.append(self.args.kwarg)
return names
def infer_call_result(self, caller, context=None):
"""infer what a function is returning when called"""
return self.body.infer(context)
def scope_lookup(self, node, name, offset=0):
if node in self.args.defaults:
frame = self.parent.frame()
# line offset to avoid that def func(f=func) resolve the default
# value to the defined function
offset = -1
else:
# check this is not used in function decorators
frame = self
return frame._scope_lookup(node, name, offset)
class Function(Statement, Lambda):
_astng_fields = ('decorators', 'args', 'body')
special_attributes = set(('__name__', '__doc__', '__dict__'))
is_function = True
# attributes below are set by the builder module or by raw factories
blockstart_tolineno = None
decorators = None
def __init__(self, name, doc):
self.locals = {}
self.args = []
self.body = []
self.decorators = None
self.name = name
self.doc = doc
self.extra_decorators = []
self.instance_attrs = {}
def set_line_info(self, lastchild):
self.fromlineno = self.lineno
# lineno is the line number of the first decorator, we want the def statement lineno
if self.decorators is not None:
self.fromlineno += len(self.decorators.nodes)
self.tolineno = lastchild.tolineno
self.blockstart_tolineno = self.args.tolineno
def block_range(self, lineno):
"""return block line numbers.
start from the "def" position whatever the given lineno
"""
return self.fromlineno, self.tolineno
def getattr(self, name, context=None):
"""this method doesn't look in the instance_attrs dictionary since it's
done by an Instance proxy at inference time.
"""
if name == '__module__':
return [cf(self.root().qname())]
if name in self.instance_attrs:
return self.instance_attrs[name]
return std_special_attributes(self, name, False)
def is_method(self):
"""return true if the function node should be considered as a method"""
# check we are defined in a Class, because this is usually expected
# (e.g. pylint...) when is_method() return True
return self.type != 'function' and isinstance(self.parent.frame(), Class)
def decoratornames(self):
"""return a list of decorator qualified names"""
result = set()
decoratornodes = []
if self.decorators is not None:
decoratornodes += self.decorators.nodes
decoratornodes += self.extra_decorators
for decnode in decoratornodes:
for infnode in decnode.infer():
result.add(infnode.qname())
return result
decoratornames = cached(decoratornames)
def is_bound(self):
"""return true if the function is bound to an Instance or a class"""
return self.type == 'classmethod'
def is_abstract(self, pass_is_abstract=True):
"""return true if the method is abstract
It's considered as abstract if the only statement is a raise of
NotImplementError, or, if pass_is_abstract, a pass statement
"""
for child_node in self.body:
if isinstance(child_node, Raise):
if child_node.raises_not_implemented():
return True
if pass_is_abstract and isinstance(child_node, Pass):
return True
return False
# empty function is the same as function with a single "pass" statement
if pass_is_abstract:
return True
def is_generator(self):
"""return true if this is a generator function"""
# XXX should be flagged, not computed
try:
return self.nodes_of_class(Yield, skip_klass=Function).next()
except StopIteration:
return False
def infer_call_result(self, caller, context=None):
"""infer what a function is returning when called"""
if self.is_generator():
yield Generator(self)
return
returns = self.nodes_of_class(Return, skip_klass=Function)
for returnnode in returns:
if returnnode.value is None:
yield Const(None)
else:
try:
for infered in returnnode.value.infer(context):
yield infered
except InferenceError:
yield YES
def _rec_get_names(args, names=None):
"""return a list of all argument names"""
if names is None:
names = []
for arg in args:
if isinstance(arg, Tuple):
_rec_get_names(arg.elts, names)
else:
names.append(arg.name)
return names
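# Example (hypothetical source): for the Python 2 signature 'def f(a, (b, c)):' the helper
# above flattens the nested tuple argument and returns ['a', 'b', 'c'].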
# Class ######################################################################
def _class_type(klass, ancestors=None):
"""return a Class node type to differ metaclass, interface and exception
from 'regular' classes
"""
# XXX we have to store ancestors in case we have a ancestor loop
if klass._type is not None:
return klass._type
if klass.name == 'type':
klass._type = 'metaclass'
elif klass.name.endswith('Interface'):
klass._type = 'interface'
elif klass.name.endswith('Exception'):
klass._type = 'exception'
else:
if ancestors is None:
ancestors = set()
if klass in ancestors:
# XXX we are in loop ancestors, and have found no type
klass._type = 'class'
return 'class'
ancestors.add(klass)
# print >> sys.stderr, '_class_type', repr(klass)
for base in klass.ancestors(recurs=False):
if _class_type(base, ancestors) != 'class':
klass._type = base.type
break
if klass._type is None:
klass._type = 'class'
return klass._type
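# Worked examples of the classification above (added for clarity, following the
# rules in _class_type): a class literally named 'type' is tagged 'metaclass',
# 'FooInterface' becomes 'interface', 'FooException' becomes 'exception', and
# anything else stays 'class' unless one of its ancestors already carries a
# special type.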
def _iface_hdlr(iface_node):
"""a handler function used by interfaces to handle suspicious
interface nodes
"""
return True
class Class(Statement, LocalsDictNodeNG, FilterStmtsMixin):
# some of the attributes below are set by the builder module or
    # by raw factories
# a dictionary of class instances attributes
_astng_fields = ('decorators', 'bases', 'body') # name
decorators = None
special_attributes = set(('__name__', '__doc__', '__dict__', '__module__',
'__bases__', '__mro__', '__subclasses__'))
blockstart_tolineno = None
_type = None
type = property(_class_type,
doc="class'type, possible values are 'class' | "
"'metaclass' | 'interface' | 'exception'")
def __init__(self, name, doc):
self.instance_attrs = {}
self.locals = {}
self.bases = []
self.body = []
self.name = name
self.doc = doc
def _newstyle_impl(self, context=None):
if context is None:
context = InferenceContext()
if self._newstyle is not None:
return self._newstyle
for base in self.ancestors(recurs=False, context=context):
if base._newstyle_impl(context):
self._newstyle = True
break
if self._newstyle is None:
self._newstyle = False
return self._newstyle
_newstyle = None
newstyle = property(_newstyle_impl,
doc="boolean indicating if it's a new style class"
"or not")
def set_line_info(self, lastchild):
self.fromlineno = self.lineno
self.blockstart_tolineno = self.bases and self.bases[-1].tolineno or self.fromlineno
if lastchild is not None:
self.tolineno = lastchild.tolineno
# else this is a class with only a docstring, then tolineno is (should be) already ok
def block_range(self, lineno):
"""return block line numbers.
start from the "class" position whatever the given lineno
"""
return self.fromlineno, self.tolineno
def pytype(self):
if self.newstyle:
return '%s.type' % BUILTINS_MODULE
return '%s.classobj' % BUILTINS_MODULE
def display_type(self):
return 'Class'
def callable(self):
return True
def infer_call_result(self, caller, context=None):
"""infer what a class is returning when called"""
yield Instance(self)
def scope_lookup(self, node, name, offset=0):
if node in self.bases:
frame = self.parent.frame()
# line offset to avoid that class A(A) resolve the ancestor to
# the defined class
offset = -1
else:
frame = self
return frame._scope_lookup(node, name, offset)
# list of parent class as a list of string (i.e. names as they appear
# in the class definition) XXX bw compat
def basenames(self):
return [bnode.as_string() for bnode in self.bases]
basenames = property(basenames)
def ancestors(self, recurs=True, context=None):
"""return an iterator on the node base classes in a prefixed
depth first order
:param recurs:
boolean indicating if it should recurse or return direct
ancestors only
"""
# FIXME: should be possible to choose the resolution order
# XXX inference make infinite loops possible here (see BaseTransformer
# manipulation in the builder module for instance)
yielded = set([self])
if context is None:
context = InferenceContext()
for stmt in self.bases:
with context.restore_path():
try:
for baseobj in stmt.infer(context):
if not isinstance(baseobj, Class):
# duh ?
continue
if baseobj in yielded:
continue # cf xxx above
yielded.add(baseobj)
yield baseobj
if recurs:
for grandpa in baseobj.ancestors(True, context):
if grandpa in yielded:
continue # cf xxx above
yielded.add(grandpa)
yield grandpa
except InferenceError:
# XXX log error ?
continue
def local_attr_ancestors(self, name, context=None):
"""return an iterator on astng representation of parent classes
which have <name> defined in their locals
"""
for astng in self.ancestors(context=context):
if name in astng:
yield astng
def instance_attr_ancestors(self, name, context=None):
"""return an iterator on astng representation of parent classes
which have <name> defined in their instance attribute dictionary
"""
for astng in self.ancestors(context=context):
if name in astng.instance_attrs:
yield astng
def has_base(self, node):
return node in self.bases
def local_attr(self, name, context=None):
"""return the list of assign node associated to name in this class
locals or in its parents
:raises `NotFoundError`:
          if no attribute with this name has been found in this class or
its parent classes
"""
try:
return self.locals[name]
except KeyError:
            # get it from the first parent implementing it, if any
for class_node in self.local_attr_ancestors(name, context):
return class_node.locals[name]
raise NotFoundError(name)
local_attr = remove_nodes(local_attr, DelAttr)
def instance_attr(self, name, context=None):
"""return the astng nodes associated to name in this class instance
attributes dictionary and in its parents
:raises `NotFoundError`:
          if no attribute with this name has been found in this class or
its parent classes
"""
values = self.instance_attrs.get(name, [])
# get all values from parents
for class_node in self.instance_attr_ancestors(name, context):
values += class_node.instance_attrs[name]
if not values:
raise NotFoundError(name)
return values
instance_attr = remove_nodes(instance_attr, DelAttr)
def instanciate_class(self):
"""return Instance of Class node, else return self"""
return Instance(self)
def getattr(self, name, context=None):
"""this method doesn't look in the instance_attrs dictionary since it's
done by an Instance proxy at inference time.
It may return a YES object if the attribute has not been actually
found but a __getattr__ or __getattribute__ method is defined
"""
values = self.locals.get(name, [])
if name in self.special_attributes:
if name == '__module__':
return [cf(self.root().qname())] + values
# FIXME : what is expected by passing the list of ancestors to cf:
# you can just do [cf(tuple())] + values without breaking any test
# this is ticket http://www.logilab.org/ticket/52785
if name == '__bases__':
return [cf(tuple(self.ancestors(recurs=False, context=context)))] + values
# XXX need proper meta class handling + MRO implementation
if name == '__mro__' and self.newstyle:
# XXX mro is read-only but that's not our job to detect that
return [cf(tuple(self.ancestors(recurs=True, context=context)))] + values
return std_special_attributes(self, name)
# don't modify the list in self.locals!
values = list(values)
for classnode in self.ancestors(recurs=True, context=context):
values += classnode.locals.get(name, [])
if not values:
raise NotFoundError(name)
return values
def igetattr(self, name, context=None):
"""inferred getattr, need special treatment in class to handle
descriptors
"""
# set lookup name since this is necessary to infer on import nodes for
# instance
context = copy_context(context)
context.lookupname = name
try:
for infered in _infer_stmts(self.getattr(name, context), context,
frame=self):
# yield YES object instead of descriptors when necessary
if not isinstance(infered, Const) and isinstance(infered, Instance):
try:
infered._proxied.getattr('__get__', context)
except NotFoundError:
yield infered
else:
yield YES
else:
yield function_to_method(infered, self)
except NotFoundError:
if not name.startswith('__') and self.has_dynamic_getattr(context):
# class handle some dynamic attributes, return a YES object
yield YES
else:
raise InferenceError(name)
def has_dynamic_getattr(self, context=None):
"""return True if the class has a custom __getattr__ or
__getattribute__ method
"""
# need to explicitly handle optparse.Values (setattr is not detected)
if self.name == 'Values' and self.root().name == 'optparse':
return True
try:
self.getattr('__getattr__', context)
return True
except NotFoundError:
#if self.newstyle: XXX cause an infinite recursion error
try:
getattribute = self.getattr('__getattribute__', context)[0]
if getattribute.root().name != BUILTINS_NAME:
# class has a custom __getattribute__ defined
return True
except NotFoundError:
pass
return False
def methods(self):
"""return an iterator on all methods defined in the class and
its ancestors
"""
done = {}
for astng in chain(iter((self,)), self.ancestors()):
for meth in astng.mymethods():
if meth.name in done:
continue
done[meth.name] = None
yield meth
def mymethods(self):
"""return an iterator on all methods defined in the class"""
for member in self.values():
if isinstance(member, Function):
yield member
def interfaces(self, herited=True, handler_func=_iface_hdlr):
"""return an iterator on interfaces implemented by the given
class node
"""
# FIXME: what if __implements__ = (MyIFace, MyParent.__implements__)...
try:
implements = Instance(self).getattr('__implements__')[0]
except NotFoundError:
return
if not herited and not implements.frame() is self:
return
found = set()
missing = False
for iface in unpack_infer(implements):
if iface is YES:
missing = True
continue
if not iface in found and handler_func(iface):
found.add(iface)
yield iface
if missing:
raise InferenceError()
# ---- end of file (license: bsd-2-clause) ----

# ---- botswana-harvard/tshilo-dikotla :: td_infant/models/infant_birth_data.py (license: gpl-2.0) ----
from django.core.validators import MinValueValidator, MaxValueValidator
from django.db import models
from edc_constants.choices import YES_NO, GENDER
from .infant_crf_model import InfantCrfModel
class InfantBirthData(InfantCrfModel):
""" A model completed by the user on the infant's birth exam. """
infant_gender = models.CharField(
max_length=6,
choices=GENDER,
verbose_name="What is the gender of the infant?",
help_text="")
weight_kg = models.DecimalField(
max_digits=3,
decimal_places=2,
verbose_name="What was the infant's birth weight? ",
help_text="Measured in Kilograms (kg)")
infant_length = models.DecimalField(
max_digits=4,
decimal_places=2,
validators=[MinValueValidator(0), MaxValueValidator(90)],
verbose_name="What was the infant's length at birth? ",
help_text="Measured in centimeters, (cm)")
head_circumference = models.DecimalField(
max_digits=4,
decimal_places=2,
validators=[MinValueValidator(0), MaxValueValidator(41)],
verbose_name="What was the head circumference in centimeters? ",
help_text="Measured in centimeters, (cm)")
apgar_score = models.CharField(
max_length=3,
choices=YES_NO,
verbose_name="Was Apgar Score performed? ",
help_text="If 'No' go to question 10. Otherwise continue")
apgar_score_min_1 = models.IntegerField(
verbose_name="At 1 minute: ",
help_text="",
blank=True,
null=True,
validators=[MaxValueValidator(10),
MinValueValidator(0)])
apgar_score_min_5 = models.IntegerField(
verbose_name="At 5 minutes: ",
help_text="",
blank=True,
null=True,
validators=[MaxValueValidator(10),
MinValueValidator(0)])
apgar_score_min_10 = models.IntegerField(
verbose_name="At 10 minutes: ",
help_text="",
blank=True,
null=True,
validators=[MaxValueValidator(10),
MinValueValidator(0)])
congenital_anomalities = models.CharField(
max_length=3,
choices=YES_NO,
verbose_name="Were any congenital anomalies identified? ",
help_text="If 'Yes' please complete the Congenital Anomalies Form",)
other_birth_info = models.TextField(
max_length=250,
verbose_name="Other birth information ",
blank=True,
null=True)
class Meta:
app_label = 'td_infant'
verbose_name = "Infant Birth: Data"
verbose_name_plural = "Infant Birth: Data"

# ---- EnriqueSoria/Series-my :: series.py (license: mit) ----
# -*- coding: utf-8 -*-
directorios = \
r'''
D:/Series
'''.split('\n')
'''
Spanish display names for some genres
'''
gen = {
'Crime': u'Crimen',
'Action': u'Acción',
'Drama': u'Drama',
'Comedy': u'Comedia',
'Adventure': u'Aventuras',
'Thriller': u'Thriller'
}
##############################################################################
' HTML '
##############################################################################
html_header = u'''<!DOCTYPE html><html lang="es"><head>
<meta charset="utf-8"><meta http-equiv="X-UA-Compatible" content="IE=edge"> <meta name="viewport" content="width=device-width, initial-scale=1"> <meta name="description" content=""><meta name="author" content=""><link rel="icon" href="favicon.ico"><title>Series</title>
<link href="css/bootstrap.min.css" rel="stylesheet"><link href="css/jumbotron-narrow.css" rel="stylesheet">
</head><body>
<h1 class="header" align="center">Series<br></h1><div>'''
html_serie_row = '''<div class="row">'''
html_serie = u'''
<!--- Serie --->
<div class="col-xs-4">
<div class="row">
<div class="col-xs-4"><img src="{img}" alt="{titulo}" class="img-thumbnail"></div>
<div class="col-xs-8" align="left">
<h2>{titulo} ({anyo})</h2>
<ul>
<li><b>Genero</b>: {genero}</li>
<li><b>Temporadas</b>: {temporadas}</li>
<li><b>Mas info</b>: {masinfo}</li>
</ul><br>
<p><a class="btn btn-info" data-toggle='collapse' data-target="#{toggle}" aria-expanded="false" aria-controls="{toggle}">Ver capítulos</a></p>
<div class="collapse" id="{toggle}">
<div class="well">
{enlaces}
</div>
</div>
</div>
</div>
</div>
'''
html_serie_finrow = '''</div>'''
html_season = u'''<a href='#'>%s</a>'''
html_footer = u'''<footer class="footer"></footer></div>
<script src="//ajax.googleapis.com/ajax/libs/jquery/2.0.3/jquery.min.js"></script>
<!-- Latest compiled and minified CSS -->
<link rel="stylesheet" href="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.1/css/bootstrap.min.css">
<!-- Optional theme -->
<link rel="stylesheet" href="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.1/css/bootstrap-theme.min.css">
<!-- Latest compiled and minified JavaScript -->
<script src="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.1/js/bootstrap.min.js"></script>
</body></html>'''
def series_links(d):
'''
    Return a list of links to the episodes of each season
    of a given series.
    We look for the usual naming patterns:
1x01, S01E03, 101...
'''
path = d[u'path']
    patterns = [
        # Of the form: 1x01, 12x24
'(\d{1,2}x\d\d)',
# S01E01, S12E24
'(S\d\dE\d\d)',
# 101, 1224
'(\d{3,4})']
patterns = [re.compile(regex) for regex in patterns]
capitulos = []
for temporada in [x for x in ls(path) if not '.' in x]:
for capitulo in ls('%s/%s' %(path,temporada)):
print capitulo
# 1x03
p = re.search(patterns[0], capitulo)
if p and len(p.groups()):
cap = p.groups()[0]
capitulos.append( (cap, u'%s/%s/%s' % (utf(path), utf(temporada) , utf(capitulo) )) )
print cap
continue
# S01E03
p = re.search(patterns[1], capitulo)
if p and len(p.groups()):
cap = p.groups()[0]
                cap = u'%s%sx%s%s' % (cap[1] if cap[1] != '0' else '', cap[2], cap[4], cap[5])
capitulos.append( ( cap, u'%s/%s/%s' % (utf(path), utf(temporada) , utf(capitulo)) ))
print cap
continue
# 103
p = re.search(patterns[2], capitulo)
if p and len(p.groups()):
cap = p.groups()[0]
if len(cap)==3: cap = u'%sx%s%s' % (cap[0], cap[1], cap[2])
else: cap = u'%s%sx%s%s' % (cap[0], cap[1], cap[2], cap[3])
capitulos.append( ( cap, u'%s/%s/%s' % (utf(path), utf(temporada) , utf(capitulo) )))
print cap
continue
            # If the filename contains any digit, add it anyway
if re.search('\d', capitulo):
capitulos.append( ( capitulo, u'%s/%s/%s' % (path, temporada, capitulo) ) )
return capitulos
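# Rough shape of the value series_links() returns (paths below are hypothetical):
#   [(u'1x01', u'D:/Series/Show/Temporada 1/Show 1x01.avi'),
#    (u'1x02', u'D:/Series/Show/Temporada 1/Show 1x02.avi'), ...]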
def serie_HTML(d, download=False):
    ''' Return the HTML for a given series '''
return html_serie.format(
img = d[u'img'] if not download else 'imgs/%s.jpg' % download_image(d),
titulo = d[u'name'].decode('utf-8', 'replace'),
anyo = d[u'year'],
genero = gen[d[u'maingenre']],
temporadas = u' '.join( [html_season % idx for idx in xrange(1,d[u'seasons']+1)]),
masinfo = u'',
toggle = d[u'name'].decode('utf-8', 'replace').split(' ')[0],
enlaces = u'<br>'.join( [(u'<a href="file:///%s">%s</a>' % (cap[1], cap[0])) for cap in series_links(d)])
)
##############################################################################
' Helper functions '
##############################################################################
def read(pathFNAME):
'''
    Open a file, read it and return a dictionary.
'''
with open(pathFNAME, 'r', 'utf-8') as fn:
return eval(fn.read())
def paths_de_las_series(orden=lambda (p,d): d[u'name']):
'''
    Walk through all the directories and record where the series are,
    in sorted order.
'''
paths = []
for pathBase in [d for d in directorios if d]:
for path in ls(pathBase):
if not '.' in path:
if 'info.json' in ls('%s/%s'%(pathBase, path)):
# Save the path
camino = '%s/%s' % (pathBase, path)
inform = read('%s/info.json' % (camino))
inform[u'path'] = camino
paths.append((camino, inform))
return sorted(paths, key=orden)
utf = lambda x: x.decode('utf-8', 'replace')
def urlify(name):
'''
    Return the string in a URL-friendly (slug) form
'''
name = name#.decode('utf-8', 'replace')
for l, ll in zip(u'áàéèíìóòúù:',u'aaeeiioouu_'):
name = name.replace(l,ll)
return (name.encode('ASCII', 'replace')).replace(' ', '-')
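# For example (illustrative inputs): urlify(u'Breaking Bad') -> 'Breaking-Bad',
# and urlify(u'Física o química') -> 'Fisica-o-quimica' (lowercase accented vowels
# and ':' are normalised before the ASCII encoding step).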
def download_image(d):
'''
    Download the poster image for the series
'''
    # File name
fName = urlify(d[u'name'])
    # Check whether it has already been downloaded
if ('%s.jpg' % fName) in ls('D:/Series/_web/imgs/'):
pass
else:
call("wget %s -O %s.jpg" % (d[u'poster'][u'large'], fName) )
sleep(2)
mv('%s.jpg' % fName, 'D:/Series/_web/imgs/%s.jpg' % fName)
return fName
##############################################################################
' Main code '
##############################################################################
if __name__=='__main__':
'''
    Main code
'''
from shutil import move as mv
from os import listdir as ls
from time import sleep
from subprocess import call
import re
import codecs
open = codecs.open
    ''' Build the HTML '''
html = html_header
ps = paths_de_las_series()
la, lb, lc = len(ps[0::3]), len(ps[1::3]), len(ps[2::3])
for a, b, c in zip( ps[0::3] , \
ps[1::3] + ([0] if la>lb else []), \
ps[2::3] + ([0] if la>lc else [])):
html += html_serie_row
html += serie_HTML(a[1]) if a else ''
html += serie_HTML(b[1]) if b else ''
html += serie_HTML(c[1]) if c else ''
html += html_serie_finrow
html += html_footer
    ''' Save the HTML '''
location = r'./_web/index.html'
with open(location, 'w', 'utf-8') as f:
f.write(html)

# ---- advisory/djangosaml2_tenant :: setup.py (license: apache-2.0) ----
# Copyright (C) 2015 Education Advisory Board
# Copyright (C) 2011-2012 Yaco Sistemas
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from setuptools import setup, find_packages
def read(*rnames):
return open(os.path.join(os.path.dirname(__file__), *rnames)).read()
setup(
name='djangosaml2_tenant',
version='0.22.0',
description='pysaml2 integration for multi-tenant in Django',
long_description='\n\n'.join([read('README'), read('CHANGES')]),
classifiers=[
"Development Status :: 3 - Alpha",
"Environment :: Web Environment",
"Intended Audience :: Developers",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Internet :: WWW/HTTP :: WSGI",
"Topic :: Security",
"Topic :: Software Development :: Libraries :: Application Frameworks",
],
keywords="django,pysaml2,saml2,federated authentication,multi-tenant",
author="Education Advisory Board",
author_email="[email protected]",
url="https://github.com/advisory/djangosaml2_tenant",
license='Apache 2.0',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=[
'pysaml2==2.2.0',
'python-memcached==1.48',
],
)

# ---- Adamssss/projectEuler :: Problem 001-150 Python/pb050.py (license: mit) ----
import math
import time
t1 = time.time()
prime = [2,3,5]
primen = 2
while primen < 547:
b = prime[primen]
t = 1
while (t == 1):
b = b+2
i = 0
t = 0
while (prime[i]*prime[i] < b)and (t == 0):
i=i+1
if (b%prime[i] == 0):
t = 1
if (t == 0):
primen += 1
prime.append(b)
# define a method to check if it is a prime
def isPrime(num):
if num%2 == 0:
return False
i = 3
    # use i*i <= num so perfect squares of primes (9, 25, 49, ...) are caught
    while i*i <= num:
if num%i == 0:
return False
i += 2
return True
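# quick sanity checks (illustrative): isPrime(7) -> True, isPrime(9) -> False, isPrime(15) -> False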
# the sum of the first 546 consecutive primes is the largest such sum below 1 million
def sumOf(start,number):
total = 0
i = 0
while i<number:
total += prime[start+i]
i += 1
return total
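# e.g. sumOf(0, 5) adds the first five primes: 2 + 3 + 5 + 7 + 11 = 28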
# print(sumOf(0,546))
for i in range(0,500):
for j in range(0,i+1):
test = sumOf(j,546-i)
if isPrime(test):
break
if isPrime(test):
print (test)
break
print("time:",time.time()-t1)

# ---- angadpc/Alexa-Project- :: twilio/rest/api/v2010/account/available_phone_number/local.py (license: mit) ----
# coding=utf-8
"""
This code was generated by
\ / _ _ _| _ _
| (_)\/(_)(_|\/| |(/_ v1.0.0
/ /
"""
from twilio.base import deserialize
from twilio.base import values
from twilio.base.instance_resource import InstanceResource
from twilio.base.list_resource import ListResource
from twilio.base.page import Page
class LocalList(ListResource):
def __init__(self, version, account_sid, country_code):
"""
Initialize the LocalList
:param Version version: Version that contains the resource
:param account_sid: The 34 character string that uniquely identifies your account.
:param country_code: The ISO Country code to lookup phone numbers for.
:returns: twilio.rest.api.v2010.account.available_phone_number.local.LocalList
:rtype: twilio.rest.api.v2010.account.available_phone_number.local.LocalList
"""
super(LocalList, self).__init__(version)
# Path Solution
self._solution = {
'account_sid': account_sid,
'country_code': country_code,
}
self._uri = '/Accounts/{account_sid}/AvailablePhoneNumbers/{country_code}/Local.json'.format(**self._solution)
def stream(self, area_code=values.unset, contains=values.unset,
sms_enabled=values.unset, mms_enabled=values.unset,
voice_enabled=values.unset,
exclude_all_address_required=values.unset,
exclude_local_address_required=values.unset,
exclude_foreign_address_required=values.unset, beta=values.unset,
near_number=values.unset, near_lat_long=values.unset,
distance=values.unset, in_postal_code=values.unset,
in_region=values.unset, in_rate_center=values.unset,
in_lata=values.unset, limit=None, page_size=None):
"""
Streams LocalInstance records from the API as a generator stream.
This operation lazily loads records as efficiently as possible until the limit
is reached.
The results are returned as a generator, so this operation is memory efficient.
:param unicode area_code: The area_code
:param unicode contains: The contains
:param bool sms_enabled: The sms_enabled
:param bool mms_enabled: The mms_enabled
:param bool voice_enabled: The voice_enabled
:param bool exclude_all_address_required: The exclude_all_address_required
:param bool exclude_local_address_required: The exclude_local_address_required
:param bool exclude_foreign_address_required: The exclude_foreign_address_required
:param bool beta: The beta
:param unicode near_number: The near_number
:param unicode near_lat_long: The near_lat_long
:param unicode distance: The distance
:param unicode in_postal_code: The in_postal_code
:param unicode in_region: The in_region
:param unicode in_rate_center: The in_rate_center
:param unicode in_lata: The in_lata
:param int limit: Upper limit for the number of records to return. stream()
guarantees to never return more than limit. Default is no limit
:param int page_size: Number of records to fetch per request, when not set will use
the default value of 50 records. If no page_size is defined
but a limit is defined, stream() will attempt to read the
limit with the most efficient page size, i.e. min(limit, 1000)
:returns: Generator that will yield up to limit results
:rtype: list[twilio.rest.api.v2010.account.available_phone_number.local.LocalInstance]
"""
limits = self._version.read_limits(limit, page_size)
page = self.page(
area_code=area_code,
contains=contains,
sms_enabled=sms_enabled,
mms_enabled=mms_enabled,
voice_enabled=voice_enabled,
exclude_all_address_required=exclude_all_address_required,
exclude_local_address_required=exclude_local_address_required,
exclude_foreign_address_required=exclude_foreign_address_required,
beta=beta,
near_number=near_number,
near_lat_long=near_lat_long,
distance=distance,
in_postal_code=in_postal_code,
in_region=in_region,
in_rate_center=in_rate_center,
in_lata=in_lata,
page_size=limits['page_size'],
)
return self._version.stream(page, limits['limit'], limits['page_limit'])
def list(self, area_code=values.unset, contains=values.unset,
sms_enabled=values.unset, mms_enabled=values.unset,
voice_enabled=values.unset, exclude_all_address_required=values.unset,
exclude_local_address_required=values.unset,
exclude_foreign_address_required=values.unset, beta=values.unset,
near_number=values.unset, near_lat_long=values.unset,
distance=values.unset, in_postal_code=values.unset,
in_region=values.unset, in_rate_center=values.unset,
in_lata=values.unset, limit=None, page_size=None):
"""
Lists LocalInstance records from the API as a list.
Unlike stream(), this operation is eager and will load `limit` records into
memory before returning.
:param unicode area_code: The area_code
:param unicode contains: The contains
:param bool sms_enabled: The sms_enabled
:param bool mms_enabled: The mms_enabled
:param bool voice_enabled: The voice_enabled
:param bool exclude_all_address_required: The exclude_all_address_required
:param bool exclude_local_address_required: The exclude_local_address_required
:param bool exclude_foreign_address_required: The exclude_foreign_address_required
:param bool beta: The beta
:param unicode near_number: The near_number
:param unicode near_lat_long: The near_lat_long
:param unicode distance: The distance
:param unicode in_postal_code: The in_postal_code
:param unicode in_region: The in_region
:param unicode in_rate_center: The in_rate_center
:param unicode in_lata: The in_lata
:param int limit: Upper limit for the number of records to return. list() guarantees
never to return more than limit. Default is no limit
:param int page_size: Number of records to fetch per request, when not set will use
the default value of 50 records. If no page_size is defined
but a limit is defined, list() will attempt to read the limit
with the most efficient page size, i.e. min(limit, 1000)
:returns: Generator that will yield up to limit results
:rtype: list[twilio.rest.api.v2010.account.available_phone_number.local.LocalInstance]
"""
return list(self.stream(
area_code=area_code,
contains=contains,
sms_enabled=sms_enabled,
mms_enabled=mms_enabled,
voice_enabled=voice_enabled,
exclude_all_address_required=exclude_all_address_required,
exclude_local_address_required=exclude_local_address_required,
exclude_foreign_address_required=exclude_foreign_address_required,
beta=beta,
near_number=near_number,
near_lat_long=near_lat_long,
distance=distance,
in_postal_code=in_postal_code,
in_region=in_region,
in_rate_center=in_rate_center,
in_lata=in_lata,
limit=limit,
page_size=page_size,
))
def page(self, area_code=values.unset, contains=values.unset,
sms_enabled=values.unset, mms_enabled=values.unset,
voice_enabled=values.unset, exclude_all_address_required=values.unset,
exclude_local_address_required=values.unset,
exclude_foreign_address_required=values.unset, beta=values.unset,
near_number=values.unset, near_lat_long=values.unset,
distance=values.unset, in_postal_code=values.unset,
in_region=values.unset, in_rate_center=values.unset,
in_lata=values.unset, page_token=values.unset,
page_number=values.unset, page_size=values.unset):
"""
Retrieve a single page of LocalInstance records from the API.
Request is executed immediately
:param unicode area_code: The area_code
:param unicode contains: The contains
:param bool sms_enabled: The sms_enabled
:param bool mms_enabled: The mms_enabled
:param bool voice_enabled: The voice_enabled
:param bool exclude_all_address_required: The exclude_all_address_required
:param bool exclude_local_address_required: The exclude_local_address_required
:param bool exclude_foreign_address_required: The exclude_foreign_address_required
:param bool beta: The beta
:param unicode near_number: The near_number
:param unicode near_lat_long: The near_lat_long
:param unicode distance: The distance
:param unicode in_postal_code: The in_postal_code
:param unicode in_region: The in_region
:param unicode in_rate_center: The in_rate_center
:param unicode in_lata: The in_lata
:param str page_token: PageToken provided by the API
:param int page_number: Page Number, this value is simply for client state
:param int page_size: Number of records to return, defaults to 50
:returns: Page of LocalInstance
:rtype: twilio.rest.api.v2010.account.available_phone_number.local.LocalPage
"""
params = values.of({
'AreaCode': area_code,
'Contains': contains,
'SmsEnabled': sms_enabled,
'MmsEnabled': mms_enabled,
'VoiceEnabled': voice_enabled,
'ExcludeAllAddressRequired': exclude_all_address_required,
'ExcludeLocalAddressRequired': exclude_local_address_required,
'ExcludeForeignAddressRequired': exclude_foreign_address_required,
'Beta': beta,
'NearNumber': near_number,
'NearLatLong': near_lat_long,
'Distance': distance,
'InPostalCode': in_postal_code,
'InRegion': in_region,
'InRateCenter': in_rate_center,
'InLata': in_lata,
'PageToken': page_token,
'Page': page_number,
'PageSize': page_size,
})
response = self._version.page(
'GET',
self._uri,
params=params,
)
return LocalPage(self._version, response, self._solution)
def __repr__(self):
"""
Provide a friendly representation
:returns: Machine friendly representation
:rtype: str
"""
return '<Twilio.Api.V2010.LocalList>'
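# Hedged usage sketch (not part of the generated file; the accessor names below are
# assumed from the surrounding twilio 6.x API rather than confirmed here):
#
#   from twilio.rest import Client
#   client = Client(account_sid, auth_token)
#   numbers = client.available_phone_numbers('US').local.list(area_code='510')
#   for number in numbers:
#       print(number.phone_number)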
class LocalPage(Page):
def __init__(self, version, response, solution):
"""
Initialize the LocalPage
:param Version version: Version that contains the resource
:param Response response: Response from the API
:param account_sid: The 34 character string that uniquely identifies your account.
:param country_code: The ISO Country code to lookup phone numbers for.
:returns: twilio.rest.api.v2010.account.available_phone_number.local.LocalPage
:rtype: twilio.rest.api.v2010.account.available_phone_number.local.LocalPage
"""
super(LocalPage, self).__init__(version, response)
# Path Solution
self._solution = solution
def get_instance(self, payload):
"""
Build an instance of LocalInstance
:param dict payload: Payload response from the API
:returns: twilio.rest.api.v2010.account.available_phone_number.local.LocalInstance
:rtype: twilio.rest.api.v2010.account.available_phone_number.local.LocalInstance
"""
return LocalInstance(
self._version,
payload,
account_sid=self._solution['account_sid'],
country_code=self._solution['country_code'],
)
def __repr__(self):
"""
Provide a friendly representation
:returns: Machine friendly representation
:rtype: str
"""
return '<Twilio.Api.V2010.LocalPage>'
class LocalInstance(InstanceResource):
def __init__(self, version, payload, account_sid, country_code):
"""
Initialize the LocalInstance
:returns: twilio.rest.api.v2010.account.available_phone_number.local.LocalInstance
:rtype: twilio.rest.api.v2010.account.available_phone_number.local.LocalInstance
"""
super(LocalInstance, self).__init__(version)
# Marshaled Properties
self._properties = {
'friendly_name': payload['friendly_name'],
'phone_number': payload['phone_number'],
'lata': payload['lata'],
'rate_center': payload['rate_center'],
'latitude': deserialize.decimal(payload['latitude']),
'longitude': deserialize.decimal(payload['longitude']),
'region': payload['region'],
'postal_code': payload['postal_code'],
'iso_country': payload['iso_country'],
'address_requirements': payload['address_requirements'],
'beta': payload['beta'],
'capabilities': payload['capabilities'],
}
# Context
self._context = None
self._solution = {
'account_sid': account_sid,
'country_code': country_code,
}
@property
def friendly_name(self):
"""
:returns: The friendly_name
:rtype: unicode
"""
return self._properties['friendly_name']
@property
def phone_number(self):
"""
:returns: The phone_number
:rtype: unicode
"""
return self._properties['phone_number']
@property
def lata(self):
"""
:returns: The lata
:rtype: unicode
"""
return self._properties['lata']
@property
def rate_center(self):
"""
:returns: The rate_center
:rtype: unicode
"""
return self._properties['rate_center']
@property
def latitude(self):
"""
:returns: The latitude
:rtype: unicode
"""
return self._properties['latitude']
@property
def longitude(self):
"""
:returns: The longitude
:rtype: unicode
"""
return self._properties['longitude']
@property
def region(self):
"""
:returns: The region
:rtype: unicode
"""
return self._properties['region']
@property
def postal_code(self):
"""
:returns: The postal_code
:rtype: unicode
"""
return self._properties['postal_code']
@property
def iso_country(self):
"""
:returns: The iso_country
:rtype: unicode
"""
return self._properties['iso_country']
@property
def address_requirements(self):
"""
:returns: The address_requirements
:rtype: unicode
"""
return self._properties['address_requirements']
@property
def beta(self):
"""
:returns: The beta
:rtype: bool
"""
return self._properties['beta']
@property
def capabilities(self):
"""
:returns: The capabilities
:rtype: unicode
"""
return self._properties['capabilities']
def __repr__(self):
"""
Provide a friendly representation
:returns: Machine friendly representation
:rtype: str
"""
return '<Twilio.Api.V2010.LocalInstance>'

# ---- r8/scrapy-kinopoisk :: kinopoisk/pipelines.py (license: gpl-3.0) ----
# -*- coding: utf-8 -*-
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html
import os
import sys
import codecs
from slugify import slugify
from time import strptime, strftime
from html2text import html2text
class MarkdownPipeline(object):
"""Scrapy pipeline to save reviews as markdown document"""
def parse_datetime(self, str_datetime):
"""Parse date string in russian"""
dictionary = {u"января": 'Jan', u"февраля": 'Feb', u"марта": 'Mar',
u"апреля": 'Apr', u"мая": 'May', u"июня": 'Jun', u"июля": 'Jul',
u"августа": 'Aug', u"сентября": 'Sep', u"октября": 'Oct',
u"ноября": 'Nov', u"декабря": 'Dec'}
for russian, english in dictionary.items():
str_datetime = str_datetime.replace(russian, english)
return strptime(str_datetime, '%d %b %Y %H:%M')
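    # Example (added for illustration): parse_datetime(u'25 декабря 2013 18:30')
    # replaces the Russian month with 'Dec' and returns the struct_time for
    # 25 Dec 2013, 18:30.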
def fix_typography(self, s):
"""Fix typographic symbols"""
s = s.replace(u'\x97', u'\u2014') # Fix dashes
s = s.replace(u'\x85', u'\u2026') # Fix ellipsis
return s
def process_item(self, item, spider):
"""Process and save review item"""
settings = spider.settings
if not os.path.exists(settings['MARKDOWN_OUTPUT']):
os.mkdir(settings['MARKDOWN_OUTPUT'])
file_name = strftime('%Y-%m-%d-', self.parse_datetime(item['review_datetime'][0])) + slugify(item['movie_title'][0]) + '.md'
try:
output_file = codecs.open(settings['MARKDOWN_OUTPUT'] + '/' + file_name, 'w', 'utf-8')
except IOError:
            print 'Error opening target file: %s/%s' % (settings['MARKDOWN_OUTPUT'], file_name)
sys.exit(1)
if len(item['review_title']) > 0:
title = item['review_title'][0]
else:
title = item['movie_title'][0]
title = self.fix_typography(title)
output_file.write("%s\n" % title)
output_file.write("%s\n\n" % ('=' * len(title)))
output_file.write("* **User Id:** %s\n" % item['user_id'])
output_file.write("* **Movie Title:** %s\n" % item['movie_title'][0])
output_file.write("* **Movie Original Title:** %s\n" % item['movie_original_title'][0])
output_file.write("* **Movie Link:** [{0}]({0})\n".format(item['movie_link'][0]))
output_file.write("* **Review Date:** %s\n" % item['review_datetime'][0])
output_file.write("* **Review Grade:** %s\n" % item['review_grade'][0])
output_file.write("* **Review Link:** [{0}]({0})\n".format(item['review_link']))
output_file.write("\n")
review_text = html2text(item['review_text'])
review_text = self.fix_typography(review_text)
output_file.write(review_text)
output_file.close()
return item

# ---- asphalt-framework/asphalt-wamp :: tests/test_client.py (license: apache-2.0) ----
import asyncio
import logging
import os
import re
from typing import Dict, Any
import pytest
from autobahn.wamp import ApplicationError
from autobahn.wamp.types import Challenge, PublishOptions, CallOptions
from asphalt.core import executor, Context, qualified_name
from asphalt.exceptions import ExtrasProvider
from asphalt.exceptions.api import ExceptionReporter
from asphalt.wamp.client import WAMPClient, AsphaltSession, ConnectionError
from asphalt.wamp.events import SessionJoinEvent, SessionLeaveEvent
from asphalt.wamp.extras_providers import WAMPExtrasProvider
class TestAsphaltSession:
@pytest.fixture
def session(self, request):
return AsphaltSession('default', request.param, 'foo', 'bar')
@pytest.mark.parametrize('session', ['ticket'], indirect=['session'])
def test_challenge_mismatch(self, session):
challenge = Challenge('wampcra')
exc = pytest.raises(ConnectionError, session.onChallenge, challenge)
assert exc.match('expected authentication method "ticket" but received a "wampcra" '
'challenge instead')
@pytest.mark.parametrize('session', ['ticket'], indirect=['session'])
def test_ticket_challenge(self, session):
challenge = Challenge('ticket')
assert session.onChallenge(challenge) == 'bar'
@pytest.mark.parametrize('session', ['wampcra'], indirect=['session'])
def test_wampcra_challenge(self, session):
challenge = Challenge('wampcra', {'challenge': b'\xff\x00345jfsdf'})
retval = session.onChallenge(challenge)
assert isinstance(retval, bytes)
@pytest.mark.parametrize('session', ['wampcra'], indirect=['session'])
def test_wampcra_salted_challenge(self, session):
challenge = Challenge('wampcra', {'challenge': b'\xff\x00345jfsdf', 'salt': '5ihod',
'iterations': 5, 'keylen': 32})
retval = session.onChallenge(challenge)
assert isinstance(retval, bytes)
class TestWAMPClient:
@pytest.fixture
def otherclient(self, request, event_loop, context):
kwargs = getattr(request, 'param', {})
kwargs.setdefault('host', os.getenv('CROSSBAR_HOST', 'localhost'))
kwargs.setdefault('max_reconnection_attempts', 0)
client = WAMPClient(**kwargs)
event_loop.run_until_complete(client.start(context))
yield client
event_loop.run_until_complete(client.stop())
@pytest.mark.asyncio
async def test_client_events(self, wampclient: WAMPClient):
def listener(event):
events.append(event)
events = []
wampclient.realm_joined.connect(listener)
wampclient.realm_left.connect(listener)
await wampclient.connect()
await wampclient.stop()
assert len(events) == 2
assert isinstance(events[0], SessionJoinEvent)
assert isinstance(events[1], SessionLeaveEvent)
@pytest.mark.parametrize('connect_first', [False, True])
@pytest.mark.asyncio
async def test_call(self, wampclient: WAMPClient, connect_first):
if connect_first:
await wampclient.connect()
result = await wampclient.call('wamp.session.count')
assert result == 1
@pytest.mark.asyncio
async def test_register_call_progress(self, wampclient: WAMPClient):
async def progressive_procedure(ctx, start, end):
for value in range(start, end):
ctx.progress(value)
return end
progress_values = []
await wampclient.register(progressive_procedure, 'test.progressive')
result = await wampclient.call('test.progressive', 2, 6,
options=CallOptions(on_progress=progress_values.append))
assert progress_values == [2, 3, 4, 5]
assert result == 6
@pytest.mark.asyncio
async def test_register_call_blocking(self, wampclient: WAMPClient):
@executor
def add(ctx, x, y):
return x + y
await wampclient.register(add, 'test.add')
result = await wampclient.call('test.add', 2, 3)
assert result == 5
@pytest.mark.asyncio
async def test_register_call_plain(self, wampclient: WAMPClient):
def add(ctx, x, y):
return x + y
await wampclient.register(add, 'test.add')
result = await wampclient.call('test.add', 2, 3)
assert result == 5
@pytest.mark.parametrize('wampclient', [
{'auth_method': 'wampcra', 'auth_id': 'testuser', 'auth_secret': 'testpass'}
], indirect=True)
@pytest.mark.asyncio
async def test_auth_wampcra(self, wampclient: WAMPClient):
await wampclient.connect()
result = await wampclient.call('wamp.session.get', wampclient.session_id)
assert result['authid'] == wampclient.details.authid == 'testuser'
@pytest.mark.parametrize('wampclient', [
{'auth_method': 'ticket', 'auth_id': 'device1', 'auth_secret': 'abc123'}
], indirect=True)
@pytest.mark.asyncio
async def test_auth_ticket(self, wampclient: WAMPClient):
await wampclient.connect()
result = await wampclient.call('wamp.session.get', wampclient.session_id)
assert result['authid'] == wampclient.details.authid == 'device1'
@pytest.mark.parametrize('wampclient', [
{'auth_method': 'ticket', 'auth_id': 'device1', 'auth_secret': 'abc124'}
], indirect=True)
@pytest.mark.asyncio
async def test_auth_failure(self, wampclient: WAMPClient):
with pytest.raises(ConnectionError) as exc:
await wampclient.connect()
assert exc.match('ticket in static WAMP-Ticket authentication is invalid')
@pytest.mark.asyncio
async def test_publish_autoconnect(self, wampclient: WAMPClient):
result = await wampclient.publish('test.topic', options=PublishOptions(acknowledge=True))
assert result
@pytest.mark.parametrize('connect_first', [False, True])
@pytest.mark.asyncio
async def test_publish_subscribe(self, wampclient: WAMPClient, connect_first):
async def subscriber(ctx, *args):
await q.put(args)
raise Exception()
q = asyncio.Queue()
if connect_first:
await wampclient.connect()
await wampclient.subscribe(subscriber, 'test.topic')
publication_id = await wampclient.publish(
'test.topic', 2, 3, options=PublishOptions(exclude_me=False, acknowledge=True))
assert isinstance(publication_id, int)
event = await asyncio.wait_for(q.get(), 2)
assert event == (2, 3)
@pytest.mark.parametrize('connect_first', [False, True])
@pytest.mark.asyncio
async def test_map_exception(self, wampclient: WAMPClient, connect_first):
class TestException(Exception):
pass
async def error(ctx):
raise TestException
if connect_first:
await wampclient.connect()
wampclient.map_exception(TestException, 'test.exception')
await wampclient.register(error, 'test.error')
with pytest.raises(TestException):
await wampclient.call('test.error')
@pytest.mark.asyncio
async def test_connect_procedure_registration_failure(self, wampclient: WAMPClient,
otherclient: WAMPClient):
"""
Test that a failure in registering the registry's procedures causes the connection attempt
to fail.
"""
await otherclient.register(lambda ctx: None, 'blah')
with pytest.raises(ApplicationError):
await wampclient.register(lambda ctx: None, 'blah')
assert wampclient.session_id is None
@pytest.mark.parametrize('wampclient', [
{'port': 8081, 'max_reconnection_attempts': 1, 'reconnect_delay': 0.3}], indirect=True)
@pytest.mark.asyncio
async def test_connect_retry(self, wampclient: WAMPClient, caplog):
"""Test that if the client can't connect, it will retry after a delay."""
with pytest.raises(ConnectionRefusedError):
await wampclient.connect()
messages = [record.message for record in caplog.records
if record.name == 'asphalt.wamp.client' and
record.message.startswith('Connection failed')]
assert len(messages) == 1
assert re.fullmatch("Connection failed \(attempt 1\): ConnectionRefusedError\(.+?\); "
"reconnecting in 0.3 seconds", messages[0])
@pytest.mark.asyncio
async def test_close_wait_handlers(self, event_loop, wampclient: WAMPClient,
otherclient: WAMPClient, caplog):
"""
Test that WAMPClient.close() waits for any running handler tasks to finish before
disconnecting from the router.
"""
async def sleep_subscriber(ctx):
nonlocal close_task
close_task = event_loop.create_task(wampclient.stop())
await asyncio.sleep(0.3)
async def sleep_sum(ctx, x, y):
await asyncio.sleep(0.3)
return x + y
caplog.set_level(logging.INFO)
close_task = None
await wampclient.register(sleep_sum)
await wampclient.subscribe(sleep_subscriber, 'testtopic')
await otherclient.publish('testtopic', options=PublishOptions(acknowledge=True))
result = await otherclient.call('sleep_sum', 1, 2)
assert result == 3
await close_task
messages = [record.message for record in caplog.records
if record.name == 'asphalt.wamp.client' and
record.message.startswith('Waiting for')]
assert messages == ['Waiting for 2 WAMP subscription/procedure handler tasks to finish']
@pytest.mark.asyncio
async def test_connect_twice(self, wampclient: WAMPClient):
"""
Test that when connect() is called while connected, it just returns a Future that resolves
immediately.
"""
retval = wampclient.connect()
assert isinstance(retval, asyncio.Task)
await retval
retval = wampclient.connect()
assert isinstance(retval, asyncio.Future)
await retval
def test_session_id_not_connected(self, wampclient: WAMPClient):
assert wampclient.session_id is None
def test_session_details_not_connected(self, wampclient: WAMPClient):
assert wampclient.details is None
@pytest.mark.parametrize('custom_exception', [False, True])
@pytest.mark.asyncio
async def test_report_applicationerror(self, wampclient: WAMPClient, context: Context,
custom_exception):
class DummyReporter(ExceptionReporter):
def report_exception(self, ctx: Context, exception: BaseException, message: str,
extra: Dict[str, Any]) -> None:
errors.append((exception, message, extra))
class CustomError(Exception):
pass
def handler(ctx):
if custom_exception:
raise CustomError
else:
raise ApplicationError('dummy.error')
errors = []
context.add_resource(DummyReporter(), types=[ExceptionReporter])
wampclient.map_exception(CustomError, 'dummy.error')
await wampclient.register(handler, 'dummyprocedure')
with pytest.raises(CustomError):
await wampclient.call('dummyprocedure')
assert not errors
@pytest.mark.parametrize('wampclient', [
{'auth_method': 'ticket', 'auth_id': 'device1', 'auth_secret': 'abc123'}
], indirect=True)
@pytest.mark.asyncio
async def test_sentry_extras_provider_procedure(self, wampclient: WAMPClient,
context: Context, monkeypatch):
class DummyReporter(ExceptionReporter):
def report_exception(self, ctx: Context, exception: BaseException, message: str,
extra: Dict[str, Any]) -> None:
errors.append((exception, message, extra))
def handler(ctx):
raise Exception('foo')
errors = []
context.add_resource(DummyReporter(), types=[ExceptionReporter])
context.add_resource(WAMPExtrasProvider(), types=[ExtrasProvider])
await wampclient.register(handler, 'dummyprocedure')
monkeypatch.setattr('asphalt.wamp.extras_providers.SENTRY_CLASS_NAME',
qualified_name(DummyReporter))
with pytest.raises(ApplicationError):
await wampclient.call('dummyprocedure')
assert len(errors) == 1
exc, message, extra = errors[0]
assert type(exc) is Exception
assert str(exc) == 'foo'
assert message == "Error running handler for procedure 'dummyprocedure'"
assert extra == {'extra': {'procedure': 'dummyprocedure'},
'user_context': {'auth_role': 'authorized_users',
'id': 'device1',
'session_id': wampclient.session_id}
}
@pytest.mark.parametrize('wampclient', [
{'auth_method': 'ticket', 'auth_id': 'device1', 'auth_secret': 'abc123'}
], indirect=True)
@pytest.mark.asyncio
async def test_sentry_extras_provider_subscriber(self, wampclient: WAMPClient,
context: Context, monkeypatch):
class DummyReporter(ExceptionReporter):
def report_exception(self, ctx: Context, exception: BaseException, message: str,
extra: Dict[str, Any]) -> None:
errors.append((exception, message, extra))
def handler(ctx):
ctx.loop.call_soon(event.set)
raise Exception('foo')
event = asyncio.Event()
errors = []
context.add_resource(DummyReporter(), types=[ExceptionReporter])
context.add_resource(WAMPExtrasProvider(), types=[ExtrasProvider])
await wampclient.subscribe(handler, 'dummytopic')
monkeypatch.setattr('asphalt.wamp.extras_providers.SENTRY_CLASS_NAME',
qualified_name(DummyReporter))
await wampclient.publish('dummytopic', options=dict(acknowledge=True, exclude_me=False))
await event.wait()
assert len(errors) == 1
exc, message, extra = errors[0]
assert type(exc) is Exception
assert str(exc) == 'foo'
assert message == "Error running subscription handler for topic 'dummytopic'"
assert extra == {'extra': {'topic': 'dummytopic'},
'user_context': {'auth_role': 'authorized_users',
'id': 'device1',
'session_id': wampclient.session_id}
}

# ---- datagutten/comics :: comics/accounts/urls.py (license: agpl-3.0) ----
from django.conf import settings
from django.conf.urls import patterns, url
from django.contrib.auth import views as auth_views
from django.views.generic.base import TemplateView
from invitation import views as invitation_views
from registration import views as reg_views
from comics.accounts.forms import (
AuthenticationForm, PasswordResetForm, RegistrationForm)
from comics.accounts import views as account_views
urlpatterns = patterns(
'',
### django-invitation
url(r'^invite/complete/$',
TemplateView.as_view(
template_name='invitation/invitation_complete.html'),
{
'extra_context': {'active': {
'invite': True,
}},
},
name='invitation_complete'),
url(r'^invite/$',
invitation_views.invite,
{
'extra_context': {'active': {
'invite': True,
}},
},
name='invitation_invite'),
url(r'^invited/(?P<invitation_key>\w+)/$',
invitation_views.invited,
{
'extra_context': {'active': {'register': True}},
},
name='invitation_invited'),
url(r'^register/$',
invitation_views.register,
{
'backend': 'comics.accounts.backends.RegistrationBackend',
'form_class': RegistrationForm,
'extra_context': {'active': {'register': True}},
},
name='registration_register'),
### django-registration
#url(r'^register/$',
# reg_views.register,
# {
# 'backend': 'comics.accounts.backends.RegistrationBackend',
# 'extra_context': {'active': {'register': True}},
# },
# name='registration_register'),
url(r'^register/complete/$',
TemplateView.as_view(
template_name='registration/registration_complete.html'),
name='registration_complete'),
url(r'^register/closed/$',
TemplateView.as_view(
template_name='registration/registration_closed.html'),
name='registration_disallowed'),
url(r'^activate/complete/$',
TemplateView.as_view(
template_name='registration/activation_complete.html'),
name='registration_activation_complete'),
url(r'^activate/(?P<activation_key>\w+)/$',
reg_views.activate,
{'backend': 'comics.accounts.backends.RegistrationBackend'},
name='registration_activate'),
### django.contrib.auth
url(r'^login/$',
auth_views.login,
{
'authentication_form': AuthenticationForm,
'extra_context': {'active': {'login': True}},
'template_name': 'auth/login.html',
},
name='login'),
url(r'^logout/$',
auth_views.logout,
{'next_page': '/account/login/'},
name='logout'),
url(r'^password/change/$',
auth_views.password_change,
{
'template_name': 'auth/password_change.html',
'extra_context': {'active': {
'account': True,
'password_change': True,
}},
},
name='password_change'),
url(r'^password/change/done/$',
auth_views.password_change_done,
{'template_name': 'auth/password_change_done.html'},
name='password_change_done'),
url(r'^password/reset/$',
auth_views.password_reset,
{
'template_name': 'auth/password_reset.html',
'email_template_name': 'auth/password_reset_email.txt',
'subject_template_name': 'auth/password_reset_email_subject.txt',
'password_reset_form': PasswordResetForm,
},
name='password_reset'),
url(r'^password/reset/confirm/(?P<uidb64>[0-9A-Za-z]+)-(?P<token>.+)/$',
auth_views.password_reset_confirm,
{'template_name': 'auth/password_reset_confirm.html'},
name='password_reset_confirm'),
url(r'^password/reset/complete/$',
auth_views.password_reset_complete,
{'template_name': 'auth/password_reset_complete.html'},
name='password_reset_complete'),
url(r'^password/reset/done/$',
auth_views.password_reset_done,
{'template_name': 'auth/password_reset_done.html'},
name='password_reset_done'),
### comics.accounts
url(r'^$',
account_views.account_details, name='account'),
url(r'^secret-key/$',
account_views.secret_key, name='secret_key'),
url(r'^toggle-comic/$',
account_views.mycomics_toggle_comic, name='toggle_comic'),
url(r'^edit-comics/$',
account_views.mycomics_edit_comics, name='edit_comics'),
)
if 'comics.sets' in settings.INSTALLED_APPS:
urlpatterns += patterns(
'',
url(r'^import-set/$',
account_views.mycomics_import_named_set, name='import_named_set'),
)

# ---- imoverclocked/ServoBot :: apwm_home/controller/controller/wsgi.py (license: mit) ----
"""
WSGI config for controller project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "controller.settings")
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)

# ---- bsquidwrd/Squid-Bot :: gaming/migrations/0007_auto_20161029_2354.py (license: mit) ----
# -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 2016-10-30 06:54
from __future__ import unicode_literals
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
dependencies = [
('gaming', '0006_auto_20161029_2347'),
]
operations = [
migrations.AlterField(
model_name='channel',
name='created_date',
field=models.DateTimeField(default=django.utils.timezone.now),
),
migrations.AlterField(
model_name='channel',
name='expire_date',
field=models.DateTimeField(blank=True, default=django.utils.timezone.now, null=True),
),
migrations.AlterField(
model_name='gamesearch',
name='created_date',
field=models.DateTimeField(default=django.utils.timezone.now),
),
migrations.AlterField(
model_name='gamesearch',
name='expire_date',
field=models.DateTimeField(default=django.utils.timezone.now),
),
]

# ---- hashbang/provisor :: provisor/utils.py (license: mit) ----
def drop_privileges(uid_name='nobody', gid_name='nogroup'):
import grp, pwd, os, resource
if os.getuid() != 0: # not root. #yolo
return
running_uid = pwd.getpwnam(uid_name).pw_uid
running_gid = grp.getgrnam(gid_name).gr_gid
os.setgroups([])
os.setgid(running_gid)
os.setuid(running_uid)
os.umask(0o077)
resource.setrlimit(resource.RLIMIT_CORE, (0, 0))
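# Illustrative call: drop_privileges() right after acquiring any root-only
# resources; with the defaults it switches to nobody/nogroup and simply
# returns if the process is not running as root.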
def getch():
import sys, termios, tty
fd = sys.stdin.fileno()
old_settings = termios.tcgetattr(fd)
try:
tty.setraw(sys.stdin.fileno())
ch = sys.stdin.read(1)
finally:
termios.tcsetattr(fd, termios.TCSADRAIN, old_settings)
return ch
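# e.g. ch = getch()  # reads one raw keypress from stdin without waiting for Enter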
def validate_pubkey(value):
import base64
if len(value) > 8192 or len(value) < 80:
raise ValueError("Expected length to be between 80 and 8192 characters")
value = value.replace("\"", "").replace("'", "").replace("\\\"", "")
value = value.split(' ')
types = [ 'ecdsa-sha2-nistp256', 'ecdsa-sha2-nistp384',
'ecdsa-sha2-nistp521', 'ssh-rsa', 'ssh-dss', 'ssh-ed25519' ]
if value[0] not in types:
raise ValueError(
"Expected " + ', '.join(types[:-1]) + ', or ' + types[-1]
)
try:
base64.decodestring(bytes(value[1]))
except TypeError:
raise ValueError("Expected string of base64 encoded data")
return "%s %s" % (value[0], value[1])
def validate_username(value):
import re
from reserved import RESERVED_USERNAMES
# Regexp must be kept in sync with
# https://github.com/hashbang/hashbang.sh/blob/master/src/hashbang.sh#L186-196
if re.compile(r"^[a-z][a-z0-9]{,30}$").match(value) is None:
raise ValueError('Username is invalid')
if value in RESERVED_USERNAMES:
raise ValueError('Username is reserved')
return value
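# Illustrative outcomes: validate_username('alice42') returns 'alice42', while
# 'Alice' or '42alice' fail the ^[a-z][a-z0-9]{,30}$ check, and anything listed
# in RESERVED_USERNAMES is rejected as reserved.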

# ---- dirn/Simon :: tests/test_meta.py ----
"""Tests of the Meta class"""
try:
import unittest2 as unittest
except ImportError:
import unittest
from bson import ObjectId
import mock
import pymongo
from simon.meta import Meta
def skip_with_mongoclient(f):
if pymongo.version_tuple[:2] >= (2, 4):
return unittest.skip('`MongoClient` is supported.')
else:
return f
def skip_without_mongoclient(f):
if pymongo.version_tuple[:2] >= (2, 4):
return f
else:
return unittest.skip('`MongoClient` is not supported.')
class TestClass(object):
"""This class can be used with `TestMeta` tests."""
class TestMeta(unittest.TestCase):
def tearDown(self):
if hasattr(TestClass, '_meta'):
delattr(TestClass, '_meta')
def test_add_to_original(self):
"""Test the `add_to_original()` method."""
meta = Meta(None)
meta.add_to_original(TestClass, '_meta')
# Test the default
# Use assertEqual for all of these tests to make them easier to
# read and maintain.
self.assertEqual(meta.auto_timestamp, True)
self.assertEqual(meta.class_name, 'TestClass')
self.assertEqual(meta.collection, 'testclasss')
self.assertEqual(meta.database, 'default')
self.assertEqual(meta.field_map, {'id': '_id'})
self.assertEqual(meta.map_id, True)
self.assertEqual(meta.required_fields, None)
self.assertEqual(meta.sort, None)
self.assertEqual(meta.typed_fields, {'_id': ObjectId})
if pymongo.version_tuple[:2] >= (2, 4):
self.assertEqual(meta.write_concern, 1)
else:
self.assertEqual(meta.write_concern, True)
self.assertFalse(hasattr(meta, 'safe'))
self.assertFalse(hasattr(meta, 'w'))
# core_attributes is a bit tougher to test
self.assertTrue(all(k.startswith('_') for k in meta.core_attributes))
self.assertIn('_document', meta.core_attributes)
self.assertIn('_meta', meta.core_attributes)
# Make sure the meta attribute is removed
self.assertFalse(hasattr(meta, 'meta'))
# And most importantly of all...
self.assertTrue(hasattr(TestClass, '_meta'))
self.assertEqual(TestClass._meta, meta)
def test_auto_timestamp(self):
"""Test the `auto_timestamp` attribute."""
meta = Meta(mock.Mock(auto_timestamp=False))
meta.add_to_original(TestClass, '_meta')
self.assertFalse(TestClass._meta.auto_timestamp)
def test_collection(self):
"""Test the `collection` attribute."""
meta = Meta(mock.Mock(collection='collection'))
meta.add_to_original(TestClass, '_meta')
self.assertEqual(TestClass._meta.collection, 'collection')
def test_database(self):
"""Test the `database` attribute."""
meta = Meta(mock.Mock(database='database'))
meta.add_to_original(TestClass, '_meta')
self.assertEqual(TestClass._meta.database, 'database')
def test_extra_attributes(self):
"""Test that extra attributes are not added."""
meta = Meta(mock.Mock(bad_attribute=1))
self.assertFalse(hasattr(meta, 'bad_attribute'))
def test_field_map(self):
"""Test the `field_map` attribute."""
meta = Meta(mock.Mock(field_map={'fake': 'real'}))
meta.add_to_original(TestClass, '_meta')
self.assertEqual(TestClass._meta.field_map,
{'fake': 'real', 'id': '_id'})
meta = Meta(mock.Mock(field_map={'fake': 'real'}, map_id=False))
meta.add_to_original(TestClass, '_meta')
self.assertEqual(TestClass._meta.field_map, {'fake': 'real'})
def test_field_map_typeerror(self):
"""Test the `field_map` attribute for `TypeError`."""
meta = Meta(mock.Mock(field_map=1))
with self.assertRaises(TypeError) as e:
meta.add_to_original(TestClass, '_meta')
actual = str(e.exception)
expected = "'field_map' must be a dict."
self.assertEqual(actual, expected)
meta = Meta(mock.Mock(field_map='a'))
with self.assertRaises(TypeError) as e:
meta.add_to_original(TestClass, '_meta')
actual = str(e.exception)
expected = "'field_map' must be a dict."
self.assertEqual(actual, expected)
def test_init(self):
"""Test the `__init__()` method."""
mock_meta = mock.Mock()
meta = Meta(mock_meta)
# test that what you give for the meta class is used as meta
self.assertEqual(meta.meta, mock_meta)
# Use assertEqual for all of these tests to make them easier to
# read and maintain.
self.assertEqual(meta.auto_timestamp, True)
self.assertEqual(meta.database, 'default')
self.assertEqual(meta.field_map, {})
self.assertEqual(meta.map_id, True)
self.assertEqual(meta.required_fields, None)
self.assertEqual(meta.sort, None)
self.assertEqual(meta.typed_fields, {})
if pymongo.version_tuple[:2] >= (2, 4):
self.assertEqual(meta.write_concern, 1)
else:
self.assertEqual(meta.write_concern, True)
self.assertFalse(hasattr(meta, 'safe'))
self.assertFalse(hasattr(meta, 'w'))
# make sure attributes added later haven't been added
self.assertFalse(hasattr(meta, 'class_name'))
self.assertFalse(hasattr(meta, 'collection'))
def test_map_id(self):
"""Test the `map_id` attribute."""
meta = Meta(mock.Mock(map_id=False))
meta.add_to_original(TestClass, '_meta')
self.assertFalse(TestClass._meta.map_id)
def test_repr(self):
"""Test the `__repr__()` method."""
meta = Meta(None)
meta.add_to_original(TestClass, '_meta')
self.assertEqual('{0!r}'.format(meta),
'<Meta options for TestClass>')
def test_required_fields(self):
"""Test the `required_fields` attribute."""
# single value
meta = Meta(mock.Mock(required_fields='a'))
meta.add_to_original(TestClass, '_meta')
self.assertEqual(TestClass._meta.required_fields, ('a',))
# multiple values
meta = Meta(mock.Mock(required_fields=['a', 'b']))
meta.add_to_original(TestClass, '_meta')
self.assertEqual(TestClass._meta.required_fields, ['a', 'b'])
@skip_with_mongoclient
def test_safe(self):
"""Test the `safe` attribute."""
meta = Meta(mock.Mock(safe=False))
meta.add_to_original(TestClass, '_meta')
self.assertFalse(TestClass._meta.write_concern)
@skip_without_mongoclient
def test_safe_deprecationwarning(self):
("Test that `safe` triggers `DeprecationWarning` for PyMongo "
"with MongoClient.")
with mock.patch('simon.meta.warnings') as warnings:
meta = Meta(mock.Mock(safe=True))
meta.add_to_original(TestClass, '_meta')
message = 'safe has been deprecated. Please use w instead.'
warnings.warn.assert_called_with(message, DeprecationWarning)
def test_sort(self):
"""Test the `sort` attribute."""
# single value
meta = Meta(mock.Mock(sort='a'))
meta.add_to_original(TestClass, '_meta')
self.assertEqual(TestClass._meta.sort, ('a',))
# multiple values
meta = Meta(mock.Mock(sort=['a', '-b']))
meta.add_to_original(TestClass, '_meta')
self.assertEqual(TestClass._meta.sort, ['a', '-b'])
def test_str(self):
"""Test the `__str__()` method."""
meta = Meta(None)
meta.add_to_original(TestClass, '_meta')
self.assertEqual('{0!s}'.format(meta),
'TestClass.Meta')
def test_typed_fields(self):
"""Test the `typed_fields` attribute."""
# default
meta = Meta(None)
meta.add_to_original(TestClass, '_meta')
self.assertEqual(TestClass._meta.typed_fields, {'_id': ObjectId})
# custom
meta = Meta(mock.Mock(typed_fields={'a': int}))
meta.add_to_original(TestClass, '_meta')
self.assertEqual(TestClass._meta.typed_fields,
{'_id': ObjectId, 'a': int})
# list
meta = Meta(mock.Mock(typed_fields={'a': [int]}))
meta.add_to_original(TestClass, '_meta')
self.assertEqual(TestClass._meta.typed_fields,
{'_id': ObjectId, 'a': [int]})
# nested
meta = Meta(mock.Mock(typed_fields={'a.b': int}))
meta.add_to_original(TestClass, '_meta')
self.assertEqual(TestClass._meta.typed_fields,
{'_id': ObjectId, 'a.b': int})
# with _id
meta = Meta(mock.Mock(typed_fields={'a': int, 'id': None}))
meta.add_to_original(TestClass, '_meta')
self.assertEqual(TestClass._meta.typed_fields, {'a': int, '_id': None})
def test_typed_fields_typeerror(self):
"""Test the `typed_fields` attribute for `TypeError`."""
meta = Meta(mock.Mock(typed_fields={'a': 1}))
with self.assertRaises(TypeError) as e:
meta.add_to_original(TestClass, '_meta')
actual = str(e.exception)
expected = 'Fields must be a type, a typed list, or None.'
self.assertEqual(actual, expected)
meta = Meta(mock.Mock(typed_fields={'a': 'b'}))
with self.assertRaises(TypeError) as e:
meta.add_to_original(TestClass, '_meta')
actual = str(e.exception)
expected = 'Fields must be a type, a typed list, or None.'
self.assertEqual(actual, expected)
meta = Meta(mock.Mock(typed_fields={'a': ['b']}))
with self.assertRaises(TypeError) as e:
meta.add_to_original(TestClass, '_meta')
actual = str(e.exception)
expected = 'Fields must be a type, a typed list, or None.'
self.assertEqual(actual, expected)
def test_unicode(self):
"""Test the `__unicode__()` method."""
meta = Meta(None)
meta.add_to_original(TestClass, '_meta')
self.assertEqual(u'{0}'.format(meta),
u'TestClass.Meta')
@skip_without_mongoclient
def test_w(self):
"""Test the `w` attribute."""
meta = Meta(mock.Mock(w=0))
meta.add_to_original(TestClass, '_meta')
self.assertEqual(TestClass._meta.write_concern, 0)
    def test_write_concern(self):
"""Test the write concern attributes."""
        if pymongo.version_tuple[:2] >= (2, 4):
            have_attribute = 'w'
            have_not_attribute = 'safe'
            have_on = 1
            have_not_on = True
            have_off = 0
            have_not_off = False
        else:
            have_attribute = 'safe'
            have_not_attribute = 'w'
            have_on = True
            have_not_on = 1
            have_off = False
            have_not_off = 0
# The correct attribute on
meta = Meta(mock.Mock(**{have_attribute: have_on}))
meta.add_to_original(TestClass, '_meta')
self.assertEqual(TestClass._meta.write_concern, have_on)
# The correct attribute off
meta = Meta(mock.Mock(**{have_attribute: have_off}))
meta.add_to_original(TestClass, '_meta')
self.assertEqual(TestClass._meta.write_concern, have_off)
# The wrong attribute on
meta = Meta(mock.Mock(**{have_not_attribute: have_not_on}))
meta.add_to_original(TestClass, '_meta')
self.assertEqual(TestClass._meta.write_concern, have_on)
# The wrong attribute off
meta = Meta(mock.Mock(**{have_not_attribute: have_not_off}))
meta.add_to_original(TestClass, '_meta')
self.assertEqual(TestClass._meta.write_concern, have_off)
| bsd-3-clause | -4,480,163,938,053,110,000 | 28.72 | 79 | 0.587904 | false |
abhijeet-talaulikar/Automatic-Helmet-Detection | K-Fold/Logistic_Regression.py | 1 | 2663 | import numpy as np
import matplotlib.pyplot as plt
from sklearn.metrics import roc_curve, auc
from sklearn.model_selection import KFold
from sklearn.linear_model import LogisticRegression
from sklearn.metrics import *
from timeit import default_timer as timer
from random import randint
from sklearn.feature_selection import *
from sklearn.decomposition import PCA
helmet_data = np.genfromtxt ('helmet.csv', delimiter=",")
face_data = np.genfromtxt ('face.csv', delimiter=",")
data_full = np.concatenate((helmet_data, face_data), 0)
np.random.shuffle(data_full) #shuffle the tuples
#feature reduction (on HOG part)
#gain, j = mutual_info_classif(data_full[:, 8:-1], data_full[:, -1], discrete_features='auto', n_neighbors=3, copy=True, random_state=None), 0
#for i in np.arange(len(gain)):
# if gain[i] <= 0.001:
# data_full = np.delete(data_full, 8+i-j, 1)
# j += 1
#data = np.copy(data_full)
#principal component analysis
pca = PCA(n_components=150)
data = pca.fit_transform(data_full[:, 8:-1])
data = np.concatenate((data_full[:, 0:8], data, np.array([data_full[:, -1]]).T), axis=1)
precision, recall, f1, accuracy, support, fn, roc_auc = 0, 0, 0, 0, 0, 0, 0
colors = ['cyan', 'indigo', 'seagreen', 'yellow', 'blue', 'darkorange']
k = 10
kf = KFold(n_splits = k)
start = timer()
for train, test in kf.split(data):
X_train, X_test = data[train, 0:-1], data[test, 0:-1]
y_train, y_test = data[train, -1], data[test, -1]
clf = LogisticRegression().fit(X_train, y_train)
y_pred = clf.predict(X_test)
#ROC curve
y_prob = clf.predict_proba(X_test)[:,1]
fpr, tpr, thresholds = roc_curve(y_test, y_prob, pos_label=1)
roc_auc += auc(fpr, tpr)
plt.plot(fpr, tpr, color=colors[randint(0, len(colors)-1)])
precision += precision_score(y_test, y_pred, average = 'macro')
recall += recall_score(y_test, y_pred, average = 'macro')
f1 += f1_score(y_test, y_pred, average = 'macro')
accuracy += accuracy_score(y_test, y_pred)
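    # y_test - y_pred is positive exactly where the true label is 1 but the
    # prediction is 0, i.e. false negatives; accumulate their per-fold rate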
y = y_test - y_pred
fn += sum(y[y > 0]) / len(y_test)
end = timer()
precision /= k
recall /= k
f1 /= k
accuracy /= k
fn /= k
print("Precision \t: %s" % round(precision, 4))
print("Recall \t\t: %s" % round(recall, 4))
print("F1 \t\t: %s" % round(f1, 4))
print("Accuracy \t: %s" % round(accuracy, 4))
print("False Neg \t: %s%%" % round(fn * 100, 4))
print("Mean AUC \t: %s" % round(roc_auc / k, 4))
print("\nExecution time: %s ms" % round((end - start) * 1000, 4))
#ROC curve
plt.title('Logistic Regression (Mean AUC = %s)' % round(roc_auc / k, 4))
plt.legend(loc='lower right')
plt.plot([0,1],[0,1],'r--')
plt.xlim([-0.05,1.0])
plt.ylim([0.0,1.05])
plt.ylabel('True Positive Rate')
plt.xlabel('False Positive Rate')
plt.show()
| gpl-3.0 | -5,013,241,237,569,052,000 | 32.2875 | 142 | 0.662035 | false |
cristobaltapia/sajou | sajou/sections.py | 1 | 3525 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
""" Define the classes and methods to work with sections.
"""
import numpy as np
class BeamSection(object):
"""Defines a beam section
Parameters
----------
name: str
name of the section
material: Material instance
material of the section defined as an instance of Material object
data: tuple
properties of the section
type: str
defines the type of cross-section
+-------------------------+------------------------------+
| type | *data* format |
+=========================+==============================+
|'rectangular': |``data=(width, height,)`` |
+-------------------------+------------------------------+
|'circular': |``data=(r, )`` |
+-------------------------+------------------------------+
|'I-section': |``data=(H, h_f, w_web, w_f)`` |
+-------------------------+------------------------------+
|'general': |``data=(A, I_3,)`` |
+-------------------------+------------------------------+
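
    Example
    -------
    A minimal usage sketch; ``steel`` stands for any Material instance defined
    elsewhere and the numbers are purely illustrative::

        section = BeamSection('B1', steel, (0.2, 0.4), type='rectangular')
        section.print_properties()  # "Properties: {'width': 0.2, 'height': 0.4}"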
"""
def __init__(self, name, material, data, type='rectangular'):
self._name = name
self._material = material
self._data = data
self._type = type
self._area = 0
self._Iz = 0
self._Iy = 0
self._Jx = 0
self.compute_properties()
def print_properties(self):
"""Prints the properties of the BeamSection instance
        :returns: str -- formatted summary of the section properties
"""
if self._type == 'rectangular':
props = {'width': self._data[0], 'height': self._data[1]}
else:
props = 'undefined'
return 'Properties: ' + str(props)
def compute_properties(self):
"""Compute all the mechanical properties for the given section
        :returns: None -- results are stored in the instance attributes
"""
# Calculate the area
self._area = self.calc_area()
self._Iz, self._Iy = self.calc_inertia()
def calc_area(self):
"""Calculate the area of the section
        :returns: float -- cross-sectional area
"""
type = self._type
if type == 'rectangular':
width = self._data[0]
height = self._data[1]
return width * height
elif type == 'general':
return self._data[0]
elif type == 'circular':
radius = self._data[0]
return np.pi * radius**2
def calc_inertia(self):
"""Calculate the moment of inertia of the beam section
:returns: Iz, Iy
"""
type = self._type
if type == 'rectangular':
width = self._data[0]
height = self._data[1]
I_z = width * height**3 / 12.
I_y = height * width**3 / 12.
return I_z, I_y
elif type == 'general':
return self._data[1], 0
def __str__(self):
"""
Returns the printable string for this object
"""
return 'Beam Section: {name}, type: {t}'.format(name=self._name,
t=self._type)
def __repr__(self):
"""
Returns the printable string for this object
"""
return 'Beam Section: {name}, type: {t}'.format(name=self._name,
t=self._type)
| mit | 9,096,255,454,926,391,000 | 28.621849 | 73 | 0.41844 | false |
n3wb13/OpenNfrGui-5.0-1 | lib/python/Plugins/Extensions/MediaPortal/additions/fun/geo_de.py | 1 | 4389 | # -*- coding: utf-8 -*-
from Plugins.Extensions.MediaPortal.plugin import _
from Plugins.Extensions.MediaPortal.resources.imports import *
from Plugins.Extensions.MediaPortal.resources.simpleplayer import SimplePlayer, SimplePlaylist
from Plugins.Extensions.MediaPortal.resources.twagenthelper import twAgentGetPage
STV_Version = "GEO.de v0.95"
STV_siteEncoding = 'iso8859-1'
class GEOdeGenreScreen(MPScreen, ThumbsHelper):
def __init__(self, session):
self.plugin_path = mp_globals.pluginPath
self.skin_path = mp_globals.pluginPath + mp_globals.skinsPath
path = "%s/%s/dokuListScreen.xml" % (self.skin_path, config.mediaportal.skin.value)
if not fileExists(path):
path = self.skin_path + mp_globals.skinFallback + "/dokuListScreen.xml"
print path
with open(path, "r") as f:
self.skin = f.read()
f.close()
MPScreen.__init__(self, session)
ThumbsHelper.__init__(self)
self["actions"] = ActionMap(["MP_Actions"], {
"yellow" : self.keyTxtPageUp,
"blue" : self.keyTxtPageDown,
"ok" : self.keyOK,
"cancel" : self.keyCancel,
"5" : self.keyShowThumb,
"up" : self.keyUp,
"down" : self.keyDown,
"right" : self.keyRight,
"0" : self.closeAll,
"left" : self.keyLeft
}, -1)
self['title'] = Label(STV_Version)
self['ContentTitle'] = Label("GEOaudio - Hören und Reisen")
self['F1'] = Label(_("Exit"))
self['F3'] = Label(_("Text-"))
self['F4'] = Label(_("Text+"))
self['Page'] = Label(_("Page:"))
self.keyLocked = True
self.baseUrl = "http://www.geo.de"
self.filmliste = []
self.ml = MenuList([], enableWrapAround=True, content=eListboxPythonMultiContent)
self['liste'] = self.ml
self.lastservice = self.session.nav.getCurrentlyPlayingServiceReference()
self.onClose.append(self.restoreLastService)
self.onLayoutFinish.append(self.layoutFinished)
def layoutFinished(self):
self.keyLocked = True
stvLink = self.baseUrl + '/GEO/reisen/podcast/reise-podcast-geoaudio-hoeren-und-reisen-5095.html'
print "getPage: ",stvLink
twAgentGetPage(stvLink).addCallback(self.genreData).addErrback(self.dataError)
def genreData(self, data):
print "genreData:"
for m in re.finditer('id:"(.*?)".*?name:"(.*?)".*?mp3:"(.*?)".*?iption:"(.*?)".*?poster: "(.*?)"', data, re.S):
# print "Podcasts found"
id, name, mp3, desc, img = m.groups()
self.filmliste.append(("%s. " % id, decodeHtml2(name), mp3, decodeHtml2(desc),img))
if self.keyLocked:
self.keyLocked = False
if not self.filmliste:
self.filmliste.append(('Keine Podcasts gefunden !','','','',''))
self.ml.setList(map(self.GEOdeListEntry, self.filmliste))
self.th_ThumbsQuery(self.filmliste, 1, 0, 4, None, None, 1, 1, mode=1)
self.showInfos()
def showInfos(self):
stvTitle = self['liste'].getCurrent()[0][1]
stvImage = self['liste'].getCurrent()[0][4]
stvDesc = self['liste'].getCurrent()[0][3]
print stvImage
self['name'].setText(stvTitle)
self['handlung'].setText(stvDesc)
CoverHelper(self['coverArt']).getCover(stvImage)
def keyOK(self):
if self.keyLocked:
return
self.session.open(
GEOdePlayer,
self.filmliste,
playIdx = self['liste'].getSelectedIndex()
)
def restoreLastService(self):
if config.mediaportal.restorelastservice.value == "1" and not config.mediaportal.backgroundtv.value:
self.session.nav.playService(self.lastservice)
class GEOdePlaylist(SimplePlaylist):
def playListEntry(self, entry):
width = self['liste'].instance.size().width()
height = self['liste'].l.getItemSize().height()
self.ml.l.setFont(0, gFont('mediaportal', height - 2 * mp_globals.sizefactor))
res = [entry]
res.append((eListboxPythonMultiContent.TYPE_TEXT, 0, 0, width, height, 0, RT_HALIGN_LEFT | RT_VALIGN_CENTER, entry[0] + entry[1]))
return res
class GEOdePlayer(SimplePlayer):
def __init__(self, session, playList, playIdx):
print "GEOdePlayer:"
SimplePlayer.__init__(self, session, playList, playIdx=playIdx, playAll=True, listTitle="GEOaudio - Hören und Reisen", autoScrSaver=True, ltype='geo.de', playerMode='MP3')
def getVideo(self):
stvLink = self.playList[self.playIdx][2]
stvTitle = "%s%s" % (self.playList[self.playIdx][0], self.playList[self.playIdx][1])
stvImage = self.playList[self.playIdx][4]
self.playStream(stvTitle, stvLink, imgurl=stvImage)
def openPlaylist(self, pl_class=GEOdePlaylist):
SimplePlayer.openPlaylist(self, pl_class) | gpl-2.0 | 2,190,676,951,641,538,800 | 34.088 | 173 | 0.696693 | false |
osuripple/lets | helpers/aeshelper.py | 1 | 10866 | """
A pure python (slow) implementation of rijndael with a decent interface
To include -
from rijndael import rijndael
To do a key setup -
r = rijndael(key, block_size = 16)
key must be a string of length 16, 24, or 32
blocksize must be 16, 24, or 32. Default is 16
To use -
ciphertext = r.encrypt(plaintext)
plaintext = r.decrypt(ciphertext)
If any strings are of the wrong length a ValueError is thrown
"""
# ported from the Java reference code by Bram Cohen, April 2001
# this code is public domain, unless someone makes
# an intellectual property claim against the reference
# code, in which case it can be made public domain by
# deleting all the comments and renaming all the variables
import copy
import base64
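# ShiftRows offsets for each block size; for every row the first value is the
# shift used when encrypting and the second the shift used when decrypting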
shifts = [[[0, 0], [1, 3], [2, 2], [3, 1]],
[[0, 0], [1, 5], [2, 4], [3, 3]],
[[0, 0], [1, 7], [3, 5], [4, 4]]]
# [keysize][block_size]
num_rounds = {16: {16: 10, 24: 12, 32: 14}, 24: {16: 12, 24: 12, 32: 14}, 32: {16: 14, 24: 14, 32: 14}}
A = [[1, 1, 1, 1, 1, 0, 0, 0],
[0, 1, 1, 1, 1, 1, 0, 0],
[0, 0, 1, 1, 1, 1, 1, 0],
[0, 0, 0, 1, 1, 1, 1, 1],
[1, 0, 0, 0, 1, 1, 1, 1],
[1, 1, 0, 0, 0, 1, 1, 1],
[1, 1, 1, 0, 0, 0, 1, 1],
[1, 1, 1, 1, 0, 0, 0, 1]]
# produce log and alog tables, needed for multiplying in the
# field GF(2^m) (generator = 3)
alog = [1]
for i in range(255):
j = (alog[-1] << 1) ^ alog[-1]
if j & 0x100 != 0:
j ^= 0x11B
alog.append(j)
log = [0] * 256
for i in range(1, 255):
log[alog[i]] = i
# multiply two elements of GF(2^m)
def mul(a, b):
if a == 0 or b == 0:
return 0
return alog[(log[a & 0xFF] + log[b & 0xFF]) % 255]
# substitution box based on F^{-1}(x)
box = [[0] * 8 for i in range(256)]
box[1][7] = 1
for i in range(2, 256):
j = alog[255 - log[i]]
for t in range(8):
box[i][t] = (j >> (7 - t)) & 0x01
B = [0, 1, 1, 0, 0, 0, 1, 1]
# affine transform: box[i] <- B + A*box[i]
cox = [[0] * 8 for i in range(256)]
for i in range(256):
for t in range(8):
cox[i][t] = B[t]
for j in range(8):
cox[i][t] ^= A[t][j] * box[i][j]
# S-boxes and inverse S-boxes
S = [0] * 256
Si = [0] * 256
for i in range(256):
S[i] = cox[i][0] << 7
for t in range(1, 8):
S[i] ^= cox[i][t] << (7-t)
Si[S[i] & 0xFF] = i
# T-boxes
G = [[2, 1, 1, 3],
[3, 2, 1, 1],
[1, 3, 2, 1],
[1, 1, 3, 2]]
AA = [[0] * 8 for i in range(4)]
for i in range(4):
for j in range(4):
AA[i][j] = G[i][j]
AA[i][i+4] = 1
for i in range(4):
pivot = AA[i][i]
if pivot == 0:
t = i + 1
while AA[t][i] == 0 and t < 4:
t += 1
assert t != 4, 'G matrix must be invertible'
for j in range(8):
AA[i][j], AA[t][j] = AA[t][j], AA[i][j]
pivot = AA[i][i]
for j in range(8):
if AA[i][j] != 0:
AA[i][j] = alog[(255 + log[AA[i][j] & 0xFF] - log[pivot & 0xFF]) % 255]
for t in range(4):
if i != t:
for j in range(i+1, 8):
AA[t][j] ^= mul(AA[i][j], AA[t][i])
AA[t][i] = 0
iG = [[0] * 4 for i in range(4)]
for i in range(4):
for j in range(4):
iG[i][j] = AA[i][j + 4]
def mul4(a, bs):
if a == 0:
return 0
r = 0
for b in bs:
r <<= 8
if b != 0:
r |= mul(a, b)
return r
T1 = []
T2 = []
T3 = []
T4 = []
T5 = []
T6 = []
T7 = []
T8 = []
U1 = []
U2 = []
U3 = []
U4 = []
for t in range(256):
s = S[t]
T1.append(mul4(s, G[0]))
T2.append(mul4(s, G[1]))
T3.append(mul4(s, G[2]))
T4.append(mul4(s, G[3]))
s = Si[t]
T5.append(mul4(s, iG[0]))
T6.append(mul4(s, iG[1]))
T7.append(mul4(s, iG[2]))
T8.append(mul4(s, iG[3]))
U1.append(mul4(t, iG[0]))
U2.append(mul4(t, iG[1]))
U3.append(mul4(t, iG[2]))
U4.append(mul4(t, iG[3]))
# round constants
rcon = [1]
r = 1
for t in range(1, 30):
r = mul(2, r)
rcon.append(r)
del A
del AA
del pivot
del B
del G
del box
del log
del alog
del i
del j
del r
del s
del t
del mul
del mul4
del cox
del iG
class rijndael:
def __init__(self, key, block_size = 16):
if block_size != 16 and block_size != 24 and block_size != 32:
raise ValueError('Invalid block size: ' + str(block_size))
if len(key) != 16 and len(key) != 24 and len(key) != 32:
raise ValueError('Invalid key size: ' + str(len(key)))
self.block_size = block_size
ROUNDS = num_rounds[len(key)][block_size]
BC = block_size // 4
# encryption round keys
Ke = [[0] * BC for i in range(ROUNDS + 1)]
# decryption round keys
Kd = [[0] * BC for i in range(ROUNDS + 1)]
ROUND_KEY_COUNT = (ROUNDS + 1) * BC
KC = len(key) // 4
# copy user material bytes into temporary ints
tk = []
for i in range(0, KC):
tk.append((ord(key[i * 4]) << 24) | (ord(key[i * 4 + 1]) << 16) |
(ord(key[i * 4 + 2]) << 8) | ord(key[i * 4 + 3]))
# copy values into round key arrays
t = 0
j = 0
while j < KC and t < ROUND_KEY_COUNT:
Ke[t // BC][t % BC] = tk[j]
Kd[ROUNDS - (t // BC)][t % BC] = tk[j]
j += 1
t += 1
tt = 0
rconpointer = 0
while t < ROUND_KEY_COUNT:
# extrapolate using phi (the round key evolution function)
tt = tk[KC - 1]
tk[0] ^= (S[(tt >> 16) & 0xFF] & 0xFF) << 24 ^ \
(S[(tt >> 8) & 0xFF] & 0xFF) << 16 ^ \
(S[ tt & 0xFF] & 0xFF) << 8 ^ \
(S[(tt >> 24) & 0xFF] & 0xFF) ^ \
(rcon[rconpointer] & 0xFF) << 24
rconpointer += 1
if KC != 8:
for i in range(1, KC):
tk[i] ^= tk[i-1]
else:
for i in range(1, KC // 2):
tk[i] ^= tk[i-1]
tt = tk[KC // 2 - 1]
tk[KC // 2] ^= (S[ tt & 0xFF] & 0xFF) ^ \
(S[(tt >> 8) & 0xFF] & 0xFF) << 8 ^ \
(S[(tt >> 16) & 0xFF] & 0xFF) << 16 ^ \
(S[(tt >> 24) & 0xFF] & 0xFF) << 24
for i in range(KC // 2 + 1, KC):
tk[i] ^= tk[i-1]
# copy values into round key arrays
j = 0
while j < KC and t < ROUND_KEY_COUNT:
Ke[t // BC][t % BC] = tk[j]
Kd[ROUNDS - (t // BC)][t % BC] = tk[j]
j += 1
t += 1
# inverse MixColumn where needed
for r in range(1, ROUNDS):
for j in range(BC):
tt = Kd[r][j]
Kd[r][j] = U1[(tt >> 24) & 0xFF] ^ \
U2[(tt >> 16) & 0xFF] ^ \
U3[(tt >> 8) & 0xFF] ^ \
U4[ tt & 0xFF]
self.Ke = Ke
self.Kd = Kd
def encrypt(self, plaintext):
if len(plaintext) != self.block_size:
raise ValueError('wrong block length, expected ' + str(self.block_size) + ' got ' + str(len(plaintext)))
Ke = self.Ke
BC = self.block_size // 4
ROUNDS = len(Ke) - 1
if BC == 4:
SC = 0
elif BC == 6:
SC = 1
else:
SC = 2
s1 = shifts[SC][1][0]
s2 = shifts[SC][2][0]
s3 = shifts[SC][3][0]
a = [0] * BC
# temporary work array
t = []
# plaintext to ints + key
for i in range(BC):
t.append((ord(plaintext[i * 4 ]) << 24 |
ord(plaintext[i * 4 + 1]) << 16 |
ord(plaintext[i * 4 + 2]) << 8 |
ord(plaintext[i * 4 + 3]) ) ^ Ke[0][i])
# apply round transforms
for r in range(1, ROUNDS):
for i in range(BC):
a[i] = (T1[(t[ i ] >> 24) & 0xFF] ^
T2[(t[(i + s1) % BC] >> 16) & 0xFF] ^
T3[(t[(i + s2) % BC] >> 8) & 0xFF] ^
T4[ t[(i + s3) % BC] & 0xFF] ) ^ Ke[r][i]
t = copy.copy(a)
# last round is special
result = []
for i in range(BC):
tt = Ke[ROUNDS][i]
result.append((S[(t[ i ] >> 24) & 0xFF] ^ (tt >> 24)) & 0xFF)
result.append((S[(t[(i + s1) % BC] >> 16) & 0xFF] ^ (tt >> 16)) & 0xFF)
result.append((S[(t[(i + s2) % BC] >> 8) & 0xFF] ^ (tt >> 8)) & 0xFF)
result.append((S[ t[(i + s3) % BC] & 0xFF] ^ tt ) & 0xFF)
return ''.join(map(chr, result))
def decrypt(self, ciphertext):
if len(ciphertext) != self.block_size:
raise ValueError('wrong block length, expected ' + str(self.block_size) + ' got ' + str(len(ciphertext)))
Kd = self.Kd
BC = self.block_size // 4
ROUNDS = len(Kd) - 1
if BC == 4:
SC = 0
elif BC == 6:
SC = 1
else:
SC = 2
s1 = shifts[SC][1][1]
s2 = shifts[SC][2][1]
s3 = shifts[SC][3][1]
a = [0] * BC
# temporary work array
t = [0] * BC
# ciphertext to ints + key
for i in range(BC):
t[i] = (ord(ciphertext[i * 4 ]) << 24 |
ord(ciphertext[i * 4 + 1]) << 16 |
ord(ciphertext[i * 4 + 2]) << 8 |
ord(ciphertext[i * 4 + 3]) ) ^ Kd[0][i]
# apply round transforms
for r in range(1, ROUNDS):
for i in range(BC):
a[i] = (T5[(t[ i ] >> 24) & 0xFF] ^
T6[(t[(i + s1) % BC] >> 16) & 0xFF] ^
T7[(t[(i + s2) % BC] >> 8) & 0xFF] ^
T8[ t[(i + s3) % BC] & 0xFF] ) ^ Kd[r][i]
t = copy.copy(a)
# last round is special
result = []
for i in range(BC):
tt = Kd[ROUNDS][i]
result.append((Si[(t[ i ] >> 24) & 0xFF] ^ (tt >> 24)) & 0xFF)
result.append((Si[(t[(i + s1) % BC] >> 16) & 0xFF] ^ (tt >> 16)) & 0xFF)
result.append((Si[(t[(i + s2) % BC] >> 8) & 0xFF] ^ (tt >> 8)) & 0xFF)
result.append((Si[ t[(i + s3) % BC] & 0xFF] ^ tt ) & 0xFF)
return ''.join(map(chr, result))
def encrypt(key, block):
return rijndael(key, len(block)).encrypt(block)
def decrypt(key, block):
return rijndael(key, len(block)).decrypt(block)
class zeropad:
def __init__(self, block_size):
assert 0 < block_size < 256
self.block_size = block_size
def pad(self, pt):
ptlen = len(pt)
padsize = self.block_size - ((ptlen + self.block_size - 1) % self.block_size + 1)
return pt + "\0" * padsize
def unpad(self, ppt):
assert len(ppt) % self.block_size == 0
offset = len(ppt)
if offset == 0:
return ''
end = offset - self.block_size + 1
while offset > end:
offset -= 1
if ppt[offset] != "\0":
return ppt[:offset + 1]
assert False
class cbc:
def __init__(self, padding, cipher, iv):
assert padding.block_size == cipher.block_size
assert len(iv) == cipher.block_size
self.padding = padding
self.cipher = cipher
self.iv = iv
def encrypt(self, pt):
ppt = self.padding.pad(pt)
offset = 0
ct = ''
v = self.iv
while offset < len(ppt):
block = ppt[offset:offset + self.cipher.block_size]
block = self.xorblock(block, v)
block = self.cipher.encrypt(block)
ct += block
offset += self.cipher.block_size
v = block
return ct
def decrypt(self, ct):
assert len(ct) % self.cipher.block_size == 0
ppt = ''
offset = 0
v = self.iv
while offset < len(ct):
block = ct[offset:offset + self.cipher.block_size]
decrypted = self.cipher.decrypt(block)
ppt += self.xorblock(decrypted, v)
offset += self.cipher.block_size
v = block
pt = self.padding.unpad(ppt)
return pt
def xorblock(self, b1, b2):
		# sorry, not very Pythonesque
i = 0
r = ''
while i < self.cipher.block_size:
r += chr(ord(b1[i]) ^ ord(b2[i]))
i += 1
return r
def decryptRinjdael(key, iv, data, areBase64 = False):
"""
	Where the magic happens: decrypt Rijndael (256-bit block) CBC data with zero padding
key -- AES key (string)
	iv -- initialization vector (string)
data -- data to decrypt (string)
areBase64 -- if True, iv and data are passed in base64
"""
if areBase64:
iv = base64.b64decode(iv).decode("latin_1")
data = base64.b64decode(data).decode("latin_1")
r = rijndael(key, 32)
p = zeropad(32)
c = cbc(p, r, iv)
return str(c.decrypt(data))
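
if __name__ == "__main__":
	# Illustrative round-trip check only (Python 3); the key, IV and plaintext
	# below are made-up placeholders, not values used by any real client.
	demo_key = "0123456789abcdef0123456789abcdef"  # 32-char key
	demo_iv = "fedcba9876543210fedcba9876543210"   # 32-char IV (block size is 32)
	demo_cipher = cbc(zeropad(32), rijndael(demo_key, 32), demo_iv)
	encrypted = base64.b64encode(demo_cipher.encrypt("hello world").encode("latin_1"))
	decrypted = decryptRinjdael(
		demo_key, base64.b64encode(demo_iv.encode("latin_1")), encrypted, areBase64=True
	)
	assert decrypted == "hello world"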
| agpl-3.0 | 8,741,889,376,303,064,000 | 23.200445 | 108 | 0.534511 | false |
googleapis/googleapis-gen | google/cloud/bigquery/datatransfer/v1/bigquery-datatransfer-v1-py/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/__init__.py | 1 | 1239 | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from collections import OrderedDict
from typing import Dict, Type
from .base import DataTransferServiceTransport
from .grpc import DataTransferServiceGrpcTransport
from .grpc_asyncio import DataTransferServiceGrpcAsyncIOTransport
# Compile a registry of transports.
_transport_registry = OrderedDict() # type: Dict[str, Type[DataTransferServiceTransport]]
_transport_registry['grpc'] = DataTransferServiceGrpcTransport
_transport_registry['grpc_asyncio'] = DataTransferServiceGrpcAsyncIOTransport
__all__ = (
'DataTransferServiceTransport',
'DataTransferServiceGrpcTransport',
'DataTransferServiceGrpcAsyncIOTransport',
)
| apache-2.0 | -2,402,536,470,973,751,000 | 36.545455 | 90 | 0.785311 | false |
martinjrobins/hobo | pints/tests/test_nested_ellipsoid_sampler.py | 1 | 7162 | #!/usr/bin/env python
#
# Tests ellipsoidal nested sampler.
#
# This file is part of PINTS.
# Copyright (c) 2017-2019, University of Oxford.
# For licensing information, see the LICENSE file distributed with the PINTS
# software package.
#
import unittest
import numpy as np
import pints
import pints.toy
# Unit testing in Python 2 and 3
try:
unittest.TestCase.assertRaisesRegex
except AttributeError:
unittest.TestCase.assertRaisesRegex = unittest.TestCase.assertRaisesRegexp
class TestNestedEllipsoidSampler(unittest.TestCase):
"""
Unit (not functional!) tests for :class:`NestedEllipsoidSampler`.
"""
@classmethod
def setUpClass(cls):
""" Prepare for the test. """
# Create toy model
model = pints.toy.LogisticModel()
cls.real_parameters = [0.015, 500]
times = np.linspace(0, 1000, 1000)
values = model.simulate(cls.real_parameters, times)
# Add noise
np.random.seed(1)
cls.noise = 10
values += np.random.normal(0, cls.noise, values.shape)
cls.real_parameters.append(cls.noise)
# Create an object with links to the model and time series
problem = pints.SingleOutputProblem(model, times, values)
# Create a uniform prior over both the parameters and the new noise
# variable
cls.log_prior = pints.UniformLogPrior(
[0.01, 400],
[0.02, 600]
)
# Create a log-likelihood
cls.log_likelihood = pints.GaussianKnownSigmaLogLikelihood(
problem, cls.noise)
def test_construction_errors(self):
# Tests if invalid constructor calls are picked up.
# First arg must be a log likelihood
self.assertRaisesRegex(
ValueError, 'must extend pints.LogPrior',
pints.NestedEllipsoidSampler, self.log_likelihood)
def test_hyper_params(self):
# Tests the hyper parameter interface is working.
sampler = pints.NestedEllipsoidSampler(self.log_prior)
self.assertEqual(sampler.n_hyper_parameters(), 6)
sampler.set_hyper_parameters([220, 130, 2.0, 133, 1, 0.8])
self.assertEqual(sampler.n_active_points(), 220)
self.assertEqual(sampler.n_rejection_samples(), 130)
self.assertEqual(sampler.enlargement_factor(), 2.0)
self.assertEqual(sampler.ellipsoid_update_gap(), 133)
self.assertTrue(sampler.dynamic_enlargement_factor())
self.assertTrue(sampler.alpha(), 0.8)
def test_getters_and_setters(self):
# Tests various get() and set() methods.
sampler = pints.NestedEllipsoidSampler(self.log_prior)
# Active points
x = sampler.n_active_points() + 1
self.assertNotEqual(sampler.n_active_points(), x)
sampler.set_n_active_points(x)
self.assertEqual(sampler.n_active_points(), x)
self.assertRaisesRegex(
ValueError, 'greater than 5', sampler.set_n_active_points, 5)
# Rejection samples
x = sampler.n_rejection_samples() + 1
self.assertNotEqual(sampler.n_rejection_samples(), x)
sampler.set_n_rejection_samples(x)
self.assertEqual(sampler.n_rejection_samples(), x)
self.assertRaisesRegex(
ValueError, 'negative', sampler.set_n_rejection_samples, -1)
# Enlargement factor
x = sampler.enlargement_factor() * 2
self.assertNotEqual(sampler.enlargement_factor(), x)
sampler.set_enlargement_factor(x)
self.assertEqual(sampler.enlargement_factor(), x)
self.assertRaisesRegex(
ValueError, 'exceed 1', sampler.set_enlargement_factor, 0.5)
self.assertRaisesRegex(
ValueError, 'exceed 1', sampler.set_enlargement_factor, 1)
# Ellipsoid update gap
x = sampler.ellipsoid_update_gap() * 2
self.assertNotEqual(sampler.ellipsoid_update_gap(), x)
sampler.set_ellipsoid_update_gap(x)
self.assertEqual(sampler.ellipsoid_update_gap(), x)
self.assertRaisesRegex(
ValueError, 'exceed 1', sampler.set_ellipsoid_update_gap, 0.5)
self.assertRaisesRegex(
ValueError, 'exceed 1', sampler.set_ellipsoid_update_gap, 1)
# dynamic enlargement factor
self.assertTrue(not sampler.dynamic_enlargement_factor())
sampler.set_dynamic_enlargement_factor(1)
self.assertTrue(sampler.dynamic_enlargement_factor())
# alpha
self.assertRaises(ValueError, sampler.set_alpha, -0.2)
self.assertRaises(ValueError, sampler.set_alpha, 1.2)
self.assertEqual(sampler.alpha(), 0.2)
sampler.set_alpha(0.4)
self.assertEqual(sampler.alpha(), 0.4)
# initial phase
self.assertTrue(sampler.needs_initial_phase())
self.assertTrue(sampler.in_initial_phase())
sampler.set_initial_phase(False)
self.assertTrue(not sampler.in_initial_phase())
self.assertEqual(sampler.name(), 'Nested ellipsoidal sampler')
def test_ask_tell(self):
# Tests ask and tell
# test that ellipses are estimated
sampler = pints.NestedEllipsoidSampler(self.log_prior)
A1 = np.copy(sampler._A)
c1 = sampler._centroid
sampler.set_n_rejection_samples(100)
sampler.set_ellipsoid_update_gap(10)
for i in range(5000):
pt = sampler.ask(1)
fx = self.log_likelihood(pt)
sampler.tell(fx)
A2 = sampler._A
c2 = sampler._centroid
self.assertTrue(not np.array_equal(A1, A2))
self.assertTrue(not np.array_equal(c1, c2))
# test multiple points being asked and tell'd
sampler = pints.NestedEllipsoidSampler(self.log_prior)
pts = sampler.ask(50)
self.assertEqual(len(pts), 50)
fx = [self.log_likelihood(pt) for pt in pts]
proposed = sampler.tell(fx)
self.assertTrue(len(proposed) > 1)
# test multiple ask points after rejection samples
sampler = pints.NestedEllipsoidSampler(self.log_prior)
sampler.set_n_rejection_samples(10)
for i in range(100):
self.assertEqual(len(sampler.ask(20)), 20)
def test_dynamic_enlargement_factor(self):
# tests dynamic enlargement factor runs
sampler = pints.NestedController(self.log_likelihood,
self.log_prior)
sampler._sampler.set_dynamic_enlargement_factor(1)
sampler.set_log_to_screen(False)
ef1 = sampler._sampler.enlargement_factor()
sampler.run()
ef2 = sampler._sampler.enlargement_factor()
self.assertTrue(ef2 < ef1)
def test_sensitivities(self):
# tests whether sensitivities bit runs
sampler = pints.NestedController(self.log_likelihood,
self.log_prior)
# hacky but currently no samplers need sensitivities
sampler._needs_sensitivities = True
sampler._initialise_callable()
if __name__ == '__main__':
print('Add -v for more debug output')
import sys
if '-v' in sys.argv:
debug = True
unittest.main()
| bsd-3-clause | -975,578,752,272,847,100 | 36.108808 | 78 | 0.638928 | false |
lpramuk/robottelo | tests/foreman/cli/test_role.py | 1 | 45035 | # -*- encoding: utf-8 -*-
"""Test for Roles CLI
:Requirement: Role
:CaseAutomation: Automated
:CaseLevel: Acceptance
:CaseComponent: UsersRoles
:TestType: Functional
:CaseImportance: High
:Upstream: No
"""
from math import ceil
from random import choice
from fauxfactory import gen_string
from robottelo.cli.base import CLIDataBaseError
from robottelo.cli.base import CLIReturnCodeError
from robottelo.cli.factory import make_filter
from robottelo.cli.factory import make_location
from robottelo.cli.factory import make_org
from robottelo.cli.factory import make_role
from robottelo.cli.factory import make_user
from robottelo.cli.filter import Filter
from robottelo.cli.role import Role
from robottelo.cli.settings import Settings
from robottelo.cli.user import User
from robottelo.constants import PERMISSIONS
from robottelo.constants import ROLES
from robottelo.datafactory import generate_strings_list
from robottelo.decorators import stubbed
from robottelo.decorators import tier1
from robottelo.decorators import tier2
from robottelo.decorators import tier3
from robottelo.decorators import upgrade
from robottelo.test import CLITestCase
class RoleTestCase(CLITestCase):
"""Test class for Roles CLI"""
@tier1
def test_positive_create_with_name(self):
"""Create new roles with provided name
:id: 6883177c-6926-428c-92ab-9effbe1372ae
:expectedresults: Role is created and has correct name
:BZ: 1138553
:CaseImportance: Critical
"""
for name in generate_strings_list(length=10):
with self.subTest(name):
role = make_role({'name': name})
self.assertEqual(role['name'], name)
@tier1
def test_positive_create_with_filter(self):
"""Create new role with a filter
:id: 6c99ee25-4e58-496c-af42-f8ad2da6cf07
:expectedresults: Role is created and correct filter is assigned
:CaseImportance: Critical
"""
role = make_role()
# Pick permissions by its resource type
permissions = [
permission['name']
for permission in Filter.available_permissions({'resource-type': 'Organization'})
]
# Assign filter to created role
filter_ = make_filter({'role-id': role['id'], 'permissions': permissions})
self.assertEqual(role['name'], filter_['role'])
@tier1
@upgrade
def test_positive_create_with_permission(self):
"""Create new role with a set of permission
:id: 7cb2b2e2-ad4d-41e9-b6b2-c0366eb09b9a
:expectedresults: Role is created and has correct set of permissions
:CaseImportance: Critical
"""
role = make_role()
# Pick permissions by its resource type
permissions = [
permission['name']
for permission in Filter.available_permissions({'resource-type': 'Organization'})
]
# Assign filter to created role
make_filter({'role-id': role['id'], 'permissions': permissions})
self.assertEqual(set(Role.filters({'id': role['id']})[0]['permissions']), set(permissions))
@tier1
def test_positive_delete_by_id(self):
"""Create a new role and then delete role by its ID
:id: 351780b4-697c-4f87-b989-dd9a9a2ad012
:expectedresults: Role is created and then deleted by its ID
:CaseImportance: Critical
"""
for name in generate_strings_list(length=10):
with self.subTest(name):
role = make_role({'name': name})
self.assertEqual(role['name'], name)
Role.delete({'id': role['id']})
with self.assertRaises(CLIReturnCodeError):
Role.info({'id': role['id']})
@tier1
def test_positive_update_name(self):
"""Create new role and update its name
:id: 3ce1b337-fd52-4460-b8a8-df49c94ffed1
:expectedresults: Role is created and its name is updated
:CaseImportance: Critical
"""
role = make_role({'name': gen_string('alpha', 15)})
for new_name in generate_strings_list(length=10):
with self.subTest(new_name):
Role.update({'id': role['id'], 'new-name': new_name})
role = Role.info({'id': role['id']})
self.assertEqual(role['name'], new_name)
@tier1
def test_positive_list_filters_by_id(self):
"""Create new role with a filter and list it by role id
:id: 6979ad8d-629b-481e-9d3a-8f3b3bca53f9
:expectedresults: Filter is listed for specified role
:CaseImportance: Critical
"""
role = make_role()
# Pick permissions by its resource type
permissions = [
permission['name']
for permission in Filter.available_permissions({'resource-type': 'Organization'})
]
# Assign filter to created role
filter_ = make_filter({'role-id': role['id'], 'permissions': permissions})
self.assertEqual(role['name'], filter_['role'])
self.assertEqual(Role.filters({'id': role['id']})[0]['id'], filter_['id'])
@tier1
def test_positive_list_filters_by_name(self):
"""Create new role with a filter and list it by role name
:id: bbcb3982-f484-4dde-a3ea-7145fd28ab1f
:expectedresults: Filter is listed for specified role
:CaseImportance: Critical
"""
role = make_role()
# Pick permissions by its resource type
permissions = [
permission['name']
for permission in Filter.available_permissions({'resource-type': 'Organization'})
]
# Assign filter to created role
filter_ = make_filter({'role': role['name'], 'permissions': permissions})
self.assertEqual(role['name'], filter_['role'])
self.assertEqual(Role.filters({'name': role['name']})[0]['id'], filter_['id'])
@tier1
def test_negative_list_filters_without_parameters(self):
"""Try to list filter without specifying role id or name
:id: 56cafbe0-d1cb-413e-8eac-0e01a3590fd2
:expectedresults: Proper error message is shown instead of SQL error
:CaseImportance: Critical
:BZ: 1296782
"""
with self.assertRaises(CLIReturnCodeError) as err:
with self.assertNotRaises(CLIDataBaseError):
Role.filters()
self.assertRegex(err.exception.msg, 'At least one of options .* is required')
@tier1
@upgrade
def test_positive_list_filters_with_pagination(self):
"""Make sure filters list can be displayed with different items per
page value
:id: b9c7c6c1-70c2-4d7f-8d36-fa8613acc865
:BZ: 1428516
:expectedresults: `per-page` correctly sets amount of items displayed
per page, different `per-page` values divide a list into correct
number of pages
:CaseImportance: Critical
"""
role = make_role()
res_types = iter(PERMISSIONS.keys())
permissions = []
# Collect more than 20 different permissions
while len(permissions) <= 20:
permissions += [
permission['name']
for permission in Filter.available_permissions({'resource-type': next(res_types)})
]
# Create a filter for each permission
for perm in permissions:
make_filter({'role': role['name'], 'permissions': perm})
# Test different `per-page` values
for per_page in (1, 5, 20):
with self.subTest(per_page):
# Verify the first page contains exactly the same items count
# as `per-page` value
filters = Role.filters({'name': role['name'], 'per-page': per_page})
self.assertEqual(len(filters), per_page)
# Verify pagination and total amount of pages by checking the
# items count on the last page
last_page = ceil(len(permissions) / per_page)
filters = Role.filters(
{'name': role['name'], 'page': last_page, 'per-page': per_page}
)
self.assertEqual(len(filters), len(permissions) % per_page or per_page)
@tier1
@upgrade
def test_positive_delete_cloned_builtin(self):
"""Clone a builtin role and attempt to delete it
:id: 1fd9c636-596a-4cb2-b100-de19238042cc
:BZ: 1426672
:expectedresults: role was successfully deleted
:CaseImportance: Critical
"""
role_list = Role.list({'search': 'name=\\"{}\\"'.format(choice(ROLES))})
self.assertEqual(len(role_list), 1)
cloned_role = Role.clone(
{'id': role_list[0]['id'], 'new-name': gen_string('alphanumeric')}
)
Role.delete({'id': cloned_role['id']})
with self.assertRaises(CLIReturnCodeError):
Role.info({'id': cloned_role['id']})
class CannedRoleTestCases(CLITestCase):
"""Implements Canned Roles tests from UI
:CaseAutomation: notautomated
"""
@stubbed()
@tier1
@upgrade
def test_positive_create_role_with_taxonomies(self):
"""create role with taxonomies
:id: 4ce9fd35-4d3d-47f7-8bc6-7cf0b3b2d2f5
:steps: Create new role with taxonomies
:expectedresults: New role is created with taxonomies
:CaseImportance: Critical
"""
@stubbed()
@tier1
def test_positive_create_role_without_taxonomies(self):
"""Create role without taxonomies
:id: 4dc80114-9629-487f-805c-c14241bdcde1
:steps: Create new role without any taxonomies
:expectedresults: New role is created without taxonomies
:CaseImportance: Critical
"""
@stubbed()
@tier1
def test_positive_create_filter_without_override(self):
"""Create filter in role w/o overriding it
:id: 247ab670-29e6-4c14-9140-51966f4632f4
:steps:
1. Create a role with taxonomies assigned
2. Create filter in role without overriding it
:expectedresults:
1. Filter w/o override is created in role
2. The taxonomies of role are inherited to filter
3. Override check is not marked by default in filters table
:CaseImportance: Critical
"""
@stubbed()
@tier1
def test_positive_create_non_overridable_filter(self):
"""Create non overridable filter in role
:id: c53713a3-d4b6-47a1-b19e-8d2020f98efd
:steps:
1. Create a filter to which taxonomies cannot be associated.
e.g Architecture filter
:expectedresults:
1. Filter is created without taxonomies
            2. Filter doesn't inherit taxonomies from role
:CaseImportance: Critical
"""
@stubbed()
@tier1
def test_negative_override_non_overridable_filter(self):
"""Override non overridable filter
:id: 163313eb-4401-4bb0-bf9a-58030251643b
:steps: Attempt to override a filter to which taxonomies cannot be
associated. e.g Architecture filter
        :expectedresults: Filter is not overridden as taxonomies cannot be
applied to that filter
:CaseImportance: Critical
"""
@stubbed()
@tier1
@upgrade
def test_positive_create_overridable_filter(self):
"""Create overridable filter in role
:id: 47816636-d215-45a8-9d21-495b1e193913
:steps:
1. Create a filter to which taxonomies can be associated.
e.g Domain filter
2. Override a filter with some taxonomies
:expectedresults:
1. Filter is created with taxonomies
2. Override check is set to true
            3. Filter doesn't inherit taxonomies from role
:CaseImportance: Critical
"""
@stubbed()
@tier1
def test_positive_update_role_taxonomies(self):
"""Update role taxonomies which applies to its non-overrided filters
:id: 988cf8c6-8f6e-49de-be54-d17085f260b6
:steps:
1. Create role with organization A and Location A
2. Add filter in above role without overriding
3. Update role set Organization to B and Location to B
4. List roles overridden filter taxonomies
:expectedresults: The taxonomies of filter should be updated with
role taxonomies
:CaseImportance: Critical
"""
@stubbed()
@tier1
def test_negative_update_role_taxonomies(self):
"""Update of role taxonomies doesnt applies on its overridden filters
:id: 43ae1561-4362-47e4-b964-c5e2be791927
:steps:
1. Create role with organization A and Location A
2. Add overridden filter in above role with Organization A and
Location A
3. Update role set Organization to B and Location to B
4. List roles overridden filter taxonomies
:expectedresults: The taxonomies of overridden filter should not be
updated with role taxonomies
"""
@stubbed()
@tier2
def test_positive_override_flag(self):
"""Overridden role filters flag
:id: 08925cb0-856e-48a6-ba88-eda21c8d3619
:steps:
1. Create role with an overridden filter
2. List above role filters
:expectedresults: The override flag should be displayed for
overridden filter in role filters table
:CaseLevel: Integration
"""
@stubbed()
@tier2
def test_positive_disable_filter_override(self):
"""Unsetting override flag resets filter taxonomies
:id: 985d87c1-3db7-40d1-9719-a7e7a85dce4d
:steps:
1. Create role organization A and Location A
2. Create an overridden filter in role with organization B
and Location B
3. Unset filter override flag in role
4. List above role filters
:expectedresults: On unsetting filter override, the override flag
should be set to false in role filters table
:CaseLevel: Integration
"""
@stubbed()
@tier1
def test_positive_create_org_admin_from_clone(self):
"""Create Org Admin role which has access to most of the resources
within organization
:id: a173f00b-60eb-4cc2-9a10-1ab3a18563a0
:steps:
1. create Org Admin role by cloning 'Organization admin' role which
has most resources permissions
:expectedresults: Org Admin role should be created successfully
"""
@stubbed()
@tier1
def test_positive_create_cloned_role_with_taxonomies(self):
"""Taxonomies can be assigned to cloned role
:id: 56d29da5-27e0-4855-974c-e4fa50a1631b
:steps:
1. Create Org Admin role by cloning 'Organization admin' role
2. Set new taxonomies (locations and organizations) to cloned role
:expectedresults:
1. While cloning, role allows to set taxonomies
2. New taxonomies should be applied to cloned role successfully
"""
@stubbed()
@tier3
@upgrade
def test_positive_access_entities_from_org_admin(self):
"""User can access resources within its taxonomies if assigned role
has permission for same taxonomies
:id: 555a4942-a4bb-499f-95a2-88e686518073
:steps:
1. Create Org Admin role and assign taxonomies to it
2. Create user with same taxonomies as role above
3. Assign cloned role to user above
4. Attempt to access resources with user
:expectedresults: User should be able to access all the resources and
permissions in taxonomies selected in Org Admin role
:CaseLevel: System
"""
@stubbed()
@tier3
def test_negative_access_entities_from_org_admin(self):
"""User can not access resources in taxonomies assigned to role if
its own taxonomies are not same as its role
:id: 2c0b6e8e-c8b7-4212-af79-d329bd803558
:steps:
1. Create Org Admin and assign taxonomies to it
2. Create user with different taxonomies than above Org Admin
taxonomies
3. Assign above cloned role to user
4. Attempt to access resources with user
:expectedresults: User should not be able to access any resources and
permissions in taxonomies selected in Org Admin role
:CaseLevel: System
"""
@stubbed()
@tier3
def test_negative_access_entities_from_user(self):
"""User can not access resources within its own taxonomies if assigned
role does not have permissions for user taxonomies
:id: 512d2758-2ca0-49c2-b80e-f8a7bffd35b4
:steps:
1. Create Org Admin and assign taxonomies to it
2. Create user with different taxonomies than above Org Admin
taxonomies
3. Assign above cloned role to user.
4. Attempt to access resources with user
:expectedresults: User should not be able to access any resources and
permissions in his own taxonomies
:CaseLevel: System
"""
@stubbed()
@tier2
def test_positive_override_cloned_role_filter(self):
"""Cloned role filter overrides
:id: 0711541f-1af6-4493-b1f2-552367541d99
:steps:
1. Create a role with overridden filter
2. Clone above role
3. Attempt to override the filter in cloned role
:expectedresults: Filter in cloned role should be overridden
:CaseLevel: Integration
"""
@stubbed()
@tier2
def test_positive_emptiness_of_filter_taxonomies_on_role_clone(self):
"""Taxonomies of filters in cloned role are set to None for filters that
are overridden in parent role
:id: 20179b43-9db7-4af4-beca-fecc7ff7490c
:steps:
1. Create a role with an overridden filter
2. Overridden filter should have taxonomies assigned
3. Clone above role
4. View cloned role filters
:expectedresults:
1. Taxonomies of the 'parent role overridden filters' are set to
None in cloned role
2. Override flag is set to True in filters table
:CaseLevel: Integration
"""
@stubbed()
@tier2
def test_positive_override_empty_filter_taxonomies_in_cloned_role(self):
"""Taxonomies of filters in cloned role can be overridden for filters that
are overridden in parent role
:id: 7a12aba4-565e-4a17-8952-132158d1e0aa
:steps:
1. Create a role with an overridden filter
2. Overridden filter should have taxonomies assigned
3. In cloned role, override 'parent role overridden filters' by
assigning some taxonomies to it
:expectedresults: In cloned role, The taxonomies should be able to
assign to 'parent role overridden filters'
:CaseLevel: Integration
"""
@stubbed()
@tier2
def test_positive_clone_role_having_overridden_filter_with_taxonomies(self): # noqa
"""When taxonomies assigned to cloned role, Unlimited and Override flag
sets on filter that is overridden in parent role
:id: 905f40ba-f6e7-45d3-a213-8deec9968374
:steps:
1. Create a role with organization A and Location A
2. Create overridden role filter in organization B
and Location B
3. Clone above role and assign Organization A and Location A
while cloning
4. List cloned role filter
:expectedresults: Unlimited and Override flags should be set to True on
filter that is overridden in parent role
:CaseLevel: Integration
"""
@stubbed()
@tier2
def test_positive_clone_role_with_taxonomies_having_non_overridden_filter(self): # noqa
"""When taxonomies assigned to cloned role, Neither unlimited nor
override sets on filter that is not overridden in parent role
:id: 6985358c-c666-4cf5-b6c8-9030de8cf27c
:steps:
1. Create a role with organization A and Location A
2. Create role filter without overriding
3. Clone above role and assign Organization A and Location A
while cloning
4. List cloned role filter
:expectedresults: Both unlimited and override flag should be set to
False on filter that is not overridden in parent role
:CaseLevel: Integration
"""
@stubbed()
@tier2
def test_positive_clone_role_having_unlimited_filter_with_taxonomies(self):
"""When taxonomies assigned to cloned role, Neither unlimited nor
override sets on filter that is unlimited in parent role
:id: 0774fca4-fa00-4067-8ac6-a77615b5651a
:steps:
1. Create a role with organization A and Location A
2. Create role filter with unlimited check
3. Clone above role and assign Organization A and Location A
while cloning
4. List cloned role filter
:expectedresults: Both unlimited and override flags should be set to
False on filter that is unlimited in parent role
:CaseLevel: Integration
"""
@stubbed()
@tier2
def test_positive_clone_role_having_overridden_filter_without_taxonomies(self): # noqa
"""When taxonomies not assigned to cloned role, Unlimited and override
flags sets on filter that is overridden in parent role
:id: c792fc37-503d-4a85-8bd6-a5506e70dd3e
:steps:
1. Create a role with organization A and Location A
2. Create overridden role filter in organization B
and Location B
3. Clone above role without assigning taxonomies
4. List cloned role filter
:expectedresults: Both unlimited and Override flags should be set to
True on filter that is overridden in parent role
:CaseLevel: Integration
"""
@stubbed()
@tier2
def test_positive_clone_role_without_taxonomies_non_overided_filter(self):
"""When taxonomies not assigned to cloned role, only unlimited but not
override flag sets on filter that is overridden in parent role
:id: 92264a5f-7cd8-4a91-8089-f2f546f556b3
:steps:
1. Create a role with organization A and Location A
2. Create role filter without overriding
3. Clone above role without assigning taxonomies
4. List cloned role filter
:expectedresults:
1. Unlimited flag should be set to True
2. Override flag should be set to False
:CaseLevel: Integration
"""
@stubbed()
@tier2
def test_positive_clone_role_without_taxonomies_unlimited_filter(self):
"""When taxonomies not assigned to cloned role, Unlimited and override
flags sets on filter that is unlimited in parent role
:id: 2f205923-f590-4797-b63b-adf389f802e6
:steps:
1. Create a role with organization A and Location A
2. Create role filter with unlimited check
3. Clone above role without assigning taxonomies
4. List cloned role filter
:expectedresults:
1. Unlimited flag should be set to True
2. Override flag should be set to False
:CaseLevel: Integration
"""
@stubbed()
@tier2
def test_positive_force_unlimited(self):
"""Unlimited flag forced sets to filter when no taxonomies are set to role
and filter
:id: 2de03e36-7f8d-4b17-819a-0d3b4468e32c
:steps:
1. Create a role with organization A and Location A
2. Create a role filter without override and unlimited check
3. Remove taxonomies assigned earlier to role and ensure
no taxonomies are assigned to role
4. List Role filter
:expectedresults: Unlimited flag should be forcefully set on filter
when no taxonomies are set to role and filter
:CaseLevel: Integration
"""
@stubbed()
@tier3
@upgrade
def test_positive_user_group_users_access_as_org_admin(self):
"""Users in usergroup can have access to the resources in taxonomies if
the taxonomies of Org Admin role is same
:id: 630fcd05-5c27-44a7-9bea-fcef1143b252
:steps:
1. Create an Org Admin role by cloning 'Organization admin' role
2. Assign an organization A and Location A to the Org Admin role
3. Create two users without assigning roles while creating them
4. Assign Organization A and Location A to both users
            5. Create a user group with the above two users
:expectedresults: Both users should have access to the resources of
organization A and Location A
:CaseLevel: System
"""
@stubbed()
@tier3
def test_positive_user_group_users_access_contradict_as_org_admins(self):
"""Users in usergroup can/cannot have access to the resources in
taxonomies depends on the taxonomies of Org Admin role is same/not_same
:id: 55099979-de11-4730-83ce-e190a3b8ecaa
:steps:
1. Create an Org Admin role by cloning 'Organization admin' role
2. Assign an organization A and Location A to the Org Admin role
            3. Create a user without assigning roles at creation time and
                assign Organization B and Location B
            4. Create another user without assigning roles at creation time
                and assign Organization A and Location A
            5. Create a user group and add the above two users to it
:expectedresults:
1. User assigned to Organization B and Location B shouldn't have
access to the resources of organization A,B and Location A,B
2. User assigned to Organization A and Location A should have
access to the resources of organization A and Location A
:CaseLevel: System
"""
@stubbed()
@tier3
def test_positive_assign_org_admin_to_user_group(self):
"""Users in usergroup can access to the resources in taxonomies if
the taxonomies of Org Admin role are same
:id: 07fa1bb4-1cce-4afa-a4f3-669704450947
:steps:
1. Create an Org Admin role by cloning 'Organization admin' role
2. Assign an organization A and Location A to the Org Admin role
3. Create two users without assigning roles while creating them
4. Assign Organization A and Location A to both users
            5. Create a user group and add the above two users to it
        :expectedresults: Both users should have access to the resources of
            organization A and Location A
:CaseLevel: System
"""
@stubbed()
@tier3
def test_negative_assign_org_admin_to_user_group(self):
"""Users in usergroup can not have access to the resources in
taxonomies if the taxonomies of Org Admin role is not same
:id: 81c076ba-d61c-4d03-96be-6db8458a2470
:steps:
1. Create an Org Admin role by cloning 'Organization admin' role
2. Assign an organization A and Location A to the Org Admin role
3. Create two users without assigning roles while creating them
4. Assign Organization B and Location B to both users
            5. Create a user group and add the above two users to it
        :expectedresults: Neither user should have access to the resources
            of organization A,B and Location A,B
:CaseLevel: System
"""
@stubbed()
@tier2
def test_negative_assign_taxonomies_by_org_admin(self):
"""Org Admin doesn't have permissions to assign org/loc to any of
its entities
:id: da44d206-e5d9-4353-bc8c-dda99299fae4
:steps:
1. Create Org Admin role by cloning 'Organization admin' role
2. Assign an organization A,B and Location A,B to the Org Admin
role
3. Create user and assign above Org Admin role
4. Assign Organization A,B and Location A,B to the user
5. Attempt to assign organization(s) and location(s) to any
                resource as the new user
:expectedresults: Org Admin should not be able to assign the taxonomies
to any of its resources
:CaseLevel: Integration
"""
@stubbed()
@tier1
@upgrade
def test_positive_remove_org_admin_role(self):
"""Super Admin user can remove Org Admin role
:id: 57ba763d-66b4-4f40-8e53-064141277960
:steps:
1. Create Org Admin by cloning 'Organization admin' role
2. Assign any taxonomies to it
            3. Create a user and assign the above role to it
4. Delete the Org Admin role
:expectedresults: Super Admin should be able to remove Org Admin role
"""
@stubbed()
@tier2
def test_positive_taxonomies_control_to_superadmin_with_org_admin(self):
"""Super Admin can access entities in taxonomies assigned to Org Admin
:id: bc5a3ad2-1f1f-4cda-a1ba-88b0f2e452c8
:steps:
1. Create Org Admin role and assign organization A and Location A
2. Create User and assign above Org Admin role
3. Attempt to access entities in Organization A and Location A from
superadmin user who created org admin
:expectedresults: Super admin should be able to access the entities in
taxonomies assigned to Org Admin
:CaseLevel: Integration
"""
@stubbed()
@tier2
def test_positive_taxonomies_control_to_superAdmin_without_org_admin(self):
"""Super Admin can access entities in taxonomies assigned to Org Admin
after deleting Org Admin role/user
:id: 2ed27587-d25a-4cd6-9baa-74de9e035bf5
:steps:
1. Create Org Admin role and assign organization A and Location A
2. Create User and assign above Org Admin role
            3. Delete the Org Admin role and also the user created above
4. Login with SuperAdmin who created the above Org Admin role and
access entities in Organization A and Location A
:expectedresults: Super admin should be able to access the entities in
taxonomies assigned to Org Admin after deleting Org Admin
:CaseLevel: Integration
"""
@stubbed()
@tier1
def test_negative_create_roles_by_org_admin(self):
"""Org Admin has no permissions to create new roles
:id: 13fb38b6-2e38-4031-a57c-8ce75b333960
:steps:
1. Create Org Admin role and assign any taxonomies to it
2. Create user and assign above org Admin role to it
3. Attempt to create a new role using Org Admin user
:expectedresults: Org Admin should not have permissions to create
new role
"""
@stubbed()
@tier1
def test_negative_modify_roles_by_org_admin(self):
"""Org Admin has no permissions to modify existing roles
:id: fa4a1b65-52b3-4920-9784-748dea8f51a0
:steps:
1. Create Org Admin role and assign any taxonomies to it
2. Create user and assign above Org Admin role to it
3. Attempt to update any existing role using Org Admin user
:expectedresults: Org Admin should not have permissions to update
existing roles
"""
@stubbed()
@tier2
def test_negative_admin_permissions_to_org_admin(self):
"""Org Admin has no access to Super Admin user
:id: 6903ed39-6e53-406e-abd9-634c7a749f1e
:steps:
1. Create Org Admin role and assign any taxonomies to it
2. Create user and assign above Org Admin role to it
3. Login with above Org Admin user
            4. Attempt to fetch the Super Admin user's details via the info command
        :expectedresults: Org Admin should not have access to the Admin user
:CaseLevel: Integration
"""
@stubbed()
@tier2
def test_positive_create_user_by_org_admin(self):
"""Org Admin can create new users
:id: 02f283ac-7d89-4622-be8e-640c775500c4
:steps:
1. Create Org Admin role and assign any taxonomies to it
2. Create user and assign above Org Admin role to it
3. Login with above Org Admin user
4. Attempt to create new users
:expectedresults:
1. Org Admin should be able to create new users
2. Only Org Admin role should be available to assign to its users
3. Org Admin should be able to assign Org Admin role to its users
:CaseLevel: Integration
"""
@stubbed()
@tier2
def test_positive_access_users_inside_org_admin_taxonomies(self):
"""Org Admin can access users inside its taxonomies
:id: e9efce12-a017-4100-8262-c9db666fd890
:steps:
1. Create Org Admin role and assign Org A and Location A
2. Create new user A and assign Org A and Location A
3. Assign Org Admin role to User A
4. Create another user B and assign Org A and Location A
5. Assign any role to user B that does have access to Org A and
Location A
6. Login with Org Admin user A and attempt to view user B
:expectedresults: Org Admin should be able to access users inside
its taxonomies
:CaseLevel: Integration
"""
@stubbed()
@tier2
def test_negative_access_users_outside_org_admin_taxonomies(self):
"""Org Admin can not access users outside its taxonomies
:id: 81081c75-031d-4aca-acd6-25868a492a84
:steps:
1. Create Org Admin role and assign Org A and Location A
2. Create new user A and assign Org A and Location A
3. Assign Org Admin role to User A
4. Create another user B and assign Org B and Location B
            5. Assign any role to user B that doesn't have access to Org A and
Location A
6. Attempt to view user B using Org Admin user A
:expectedresults: Org Admin should not be able to access users outside
its taxonomies
:CaseLevel: Integration
"""
@stubbed()
@tier1
def test_negative_create_taxonomies_by_org_admin(self):
"""Org Admin cannot define/create organizations and locations
:id: 115c46ea-f2fc-4be0-bbdb-26faf9246809
:steps:
1. Create Org Admin role and assign any taxonomies to it
2. Create user and assign above Org Admin role to it
3. Attempt to create Organizations and Locations using Org Admin
user
:expectedresults: Org Admin should not have access to create taxonomies
"""
@stubbed()
@tier1
def test_negative_access_all_global_entities_by_org_admin(self):
"""Org Admin can access all global entities in any taxonomies
regardless of its own assigned taxonomies
:id: 6ebccf86-1766-432a-ad7c-4f2f606e1604
:steps:
1. Create Org Admin role and assign Org A and Location A
2. Create new user and assign Org A,B and Location A,B
3. Assign Org Admin role to User
            4. Attempt to create all the global entities in org B and Loc B
                using the Org Admin user, e.g. Architectures, Operating System
:expectedresults: Org Admin should have access to all the global
entities in any taxonomies
"""
@stubbed()
@tier3
@upgrade
def test_positive_access_entities_from_ldap_org_admin(self):
"""LDAP User can access resources within its taxonomies if assigned
role has permission for same taxonomies
:id: 086c7e50-4db9-4422-a960-d9702976e4e6
:steps:
1. Create Org Admin and assign taxonomies to it
2. Create LDAP user with same taxonomies as role above
3. Assign Org Admin role to user above
4. Attempt to access resources from above LDAP user
:expectedresults: LDAP User should be able to access all the resources
and permissions in taxonomies selected in Org Admin role
:CaseLevel: System
"""
@stubbed()
@tier3
def test_negative_access_entities_from_ldap_org_admin(self):
"""LDAP User can not access resources in taxonomies assigned to role if
its own taxonomies are not same as its role
:id: 17a78a6d-d443-4700-8fd5-6a9336e96f91
:steps:
1. Create Org Admin and assign taxonomies to it
2. Create LDAP user with different taxonomies than above Org Admin
taxonomies
3. Assign above cloned role to LDAP user
4. Login with LDAP user and attempt to access resources
:expectedresults: LDAP User should not be able to access resources and
permissions in taxonomies selected in Org Admin role
:CaseLevel: System
"""
@stubbed()
@tier3
def test_negative_access_entities_from_ldap_user(self):
"""LDAP User can not access resources within its own taxonomies if
assigned role does not have permissions for same taxonomies
:id: e44614ab-7af3-40a1-a3a2-8d47041e0daa
:steps:
1. Create Org Admin and assign taxonomies to it
2. Create LDAP user with different taxonomies than above Org Admin
taxonomies
3. Assign above cloned role to LDAP user
4. Login with LDAP user and attempt to access resources
:expectedresults: LDAP User should not be able to access any resources
and permissions in its own taxonomies
:CaseLevel: System
"""
@stubbed()
@tier3
@upgrade
def test_positive_assign_org_admin_to_ldap_user_group(self):
"""Users in LDAP usergroup can access to the resources in taxonomies if
the taxonomies of Org Admin role are same
:id: 552737df-24ef-4eb9-8054-e261c3dbf2b3
:steps:
1. Create an Org Admin role by cloning 'Organization admin' role
2. Assign an organization A and Location A to the Org Admin role
3. Create an LDAP usergroup with two users
4. Assign Organization A and Location A to LDAP usergroup
5. Assign Org Admin role to LDAP usergroup
:expectedresults: Users in LDAP usergroup should have access to the
resources in taxonomies if the taxonomies of Org Admin role are
same
:CaseLevel: System
"""
@stubbed()
@tier3
def test_negative_assign_org_admin_to_ldap_user_group(self):
"""Users in LDAP usergroup can not have access to the resources in
taxonomies if the taxonomies of Org Admin role is not same
:id: c3385e14-f589-4101-b76a-59cd9d518cb8
:steps:
1. Create an Org Admin role by cloning 'Organization admin' role
2. Assign an organization A and Location A to the Org Admin role
3. Create an LDAP usergroup with two users
4. Assign Organization B and Location B to LDAP usergroup
5. Assign Org Admin role to LDAP usergroup
        :expectedresults: Users in the LDAP usergroup should not have access to
            the resources in taxonomies if the taxonomies of the Org Admin role
            are not the same
:CaseLevel: System
"""
class SystemAdminTestCases(CLITestCase):
"""Test class for System Admin role end to end CLI"""
def tearDown(self):
"""Will reset the changed value of settings"""
Settings.set({'name': "outofsync_interval", 'value': "30"})
@upgrade
@tier3
def test_system_admin_role_end_to_end(self):
"""Test System admin role with a end to end workflow
:id: da6b3549-d1cf-44fc-869f-08d15d407fa2
:steps:
1. Create a System admin role user1
            2. Login with user1 and change the global setting
                "Out of sync interval" to 32
3. Create user2 with system admin role
4. Login with user2 to create a Organization
5. Clone a Org-admin role
6. Edit the Architecture Filter and search name = x86_64
7. Create a User with Cloned Org admin
8. Login with user.
:expectedresults:
1. User should be assigned with System Admin role.
2. User with sys admin role should be able to update settings
3. User with sys admin role should be able to create users and
assign Organizations to them.
4. System Admin role should be able to create Organization admins
5. User with sys admin role should be able to edit filters on roles
:CaseLevel: System
"""
org = make_org()
location = make_location()
common_pass = gen_string('alpha')
role = Role.info({'name': 'System admin'})
system_admin_1 = make_user(
{
'password': common_pass,
'organization-ids': org['id'],
'location-ids': location['id'],
}
)
User.add_role({'id': system_admin_1['id'], 'role-id': role['id']})
Settings.with_user(username=system_admin_1['login'], password=common_pass).set(
{'name': "outofsync_interval", 'value': "32"}
)
sync_time = Settings.list({'search': 'name=outofsync_interval'})[0]
# Asserts if the setting was updated successfully
self.assertEqual('32', sync_time['value'])
# Create another System Admin user using the first one
system_admin = User.with_user(
username=system_admin_1['login'], password=common_pass
).create(
{
'auth-source-id': 1,
'firstname': gen_string('alpha'),
'lastname': gen_string('alpha'),
'login': gen_string('alpha'),
'mail': '{0}@example.com'.format(gen_string('alpha')),
'password': common_pass,
'organizations': org['name'],
'role-ids': role['id'],
'locations': location['name'],
}
)
# Create the Org Admin user
org_role = Role.with_user(username=system_admin['login'], password=common_pass).clone(
{
'name': 'Organization admin',
'new-name': gen_string('alpha'),
'organization-ids': org['id'],
'location-ids': location['id'],
}
)
org_admin = User.with_user(username=system_admin['login'], password=common_pass).create(
{
'auth-source-id': 1,
'firstname': gen_string('alpha'),
'lastname': gen_string('alpha'),
'login': gen_string('alpha'),
'mail': '{0}@example.com'.format(gen_string('alpha')),
'password': common_pass,
'organizations': org['name'],
'role-ids': org_role['id'],
'location-ids': location['id'],
}
)
# Assert if the cloning was successful
self.assertIsNotNone(org_role['id'])
org_role_filters = Role.filters({'id': org_role['id']})
search_filter = None
for arch_filter in org_role_filters:
if arch_filter['resource-type'] == 'Architecture':
search_filter = arch_filter
break
Filter.with_user(username=system_admin['login'], password=common_pass).update(
            {'role-id': org_role['id'], 'id': search_filter['id'], 'search': 'name=x86_64'}
)
# Asserts if the filter is updated
self.assertIn('name=x86_64', Filter.info({'id': search_filter['id']}).values())
org_admin = User.with_user(username=system_admin['login'], password=common_pass).info(
{'id': org_admin['id']}
)
# Asserts Created Org Admin
self.assertIn(org_role['name'], org_admin['roles'])
self.assertIn(org['name'], org_admin['organizations'])
| gpl-3.0 | -738,638,804,037,887,600 | 32.508185 | 99 | 0.620784 | false |
thinkWhere/Roadnet | street_browser/add.py | 1 | 15108 | # -*- coding: utf-8 -*-
import datetime
from PyQt4.QtSql import QSqlQuery, QSqlQueryModel
from PyQt4.QtGui import QMessageBox, QLineEdit, QComboBox
from PyQt4.QtCore import Qt, QDate
from qgis.core import QgsMapLayerRegistry
from ..generic_functions import SwitchStreetBrowserMode, ZoomSelectCanvas, ipdb_breakpoint
from ..roadnet_dialog import SaveRecordDlg
from edit import EditEsuLink, EditStartEndCoords, UpdateEsuSymbology
from mod_validation import ValidateDescription, ValidateStreetType
__author__ = 'matthew.walsh'
class AddRecord:
"""
Add a new street record to the model
"""
def __init__(self, iface, street_browser, model, mapper, db, params):
self.street_browser = street_browser
self.iface = iface
self.model = model
self.mapper = mapper
self.db = db
self.username = params['UserName']
self.modify = SwitchStreetBrowserMode(self.street_browser)
self.save_dlg = SaveRecordDlg()
self.save_dlg.ui.savePushButton.clicked.connect(self.save_new_record)
self.save_dlg.ui.revertPushButton.clicked.connect(self.cancel_new_record)
self.save_dlg.ui.cancelPushButton.clicked.connect(lambda: self.save_dlg.close())
self.esu_layer = QgsMapLayerRegistry.instance().mapLayersByName('ESU Graphic')[0]
self.lineedits = {1: self.street_browser.ui.usrnLineEdit,
8: self.street_browser.ui.startDateDateEdit,
7: self.street_browser.ui.updateDateLineEdit,
2: self.street_browser.ui.versionLineEdit,
6: self.street_browser.ui.entryDateLineEdit,
18: self.street_browser.ui.stateDateLineEdit,
11: self.street_browser.ui.startXLineEdit,
12: self.street_browser.ui.startYLineEdit,
13: self.street_browser.ui.endXLineEdit,
14: self.street_browser.ui.endYLineEdit,
15: self.street_browser.ui.tolLineEdit}
self.combos = {4: self.street_browser.ui.recordTypeComboBox,
20: self.street_browser.ui.localityComboBox,
22: self.street_browser.ui.townComboBox,
21: self.street_browser.ui.countyComboBox,
9: self.street_browser.ui.authorityComboBox,
17: self.street_browser.ui.stateComboBox,
19: self.street_browser.ui.classComboBox}
self.start_idx = None
self.start_desc = None
self.start_tol = None
self.edit_esu = None
self.new_usrn_no = None
self.esu_version = ZoomSelectCanvas(self.iface, self.street_browser, self.db)
def add(self):
"""
Main method to decide whether to setup for adding or complete/commit record
"""
add_text = str(self.street_browser.ui.addPushButton.text())
# Setup blank form
if add_text.lower() == "add":
self.street_browser.ui.editEsuPushButton.clicked.connect(self.create_esu_link)
self.street_browser.ui.editCoordsPushButton.clicked.connect(self.create_start_end_coords)
self.setup_sb_add()
# Completion event
else:
self.save_dlg.setWindowFlags(Qt.Window | Qt.WindowTitleHint | Qt.CustomizeWindowHint)
self.save_dlg.exec_()
def current_desc_tol_idx(self):
"""
Grab the current record index and desc
"""
self.start_idx = self.mapper.currentIndex()
self.start_desc = self.street_browser.ui.descriptionTextEdit.toPlainText()
self.start_tol = self.street_browser.ui.tolLineEdit.text()
def setup_sb_add(self):
"""
Setup the street browser for adding a new record
"""
# Grab current idx's desc, tol
self.current_desc_tol_idx()
n_usrn = self.new_usrn()
self.street_browser.ui.addPushButton.setText("Complete")
self.street_browser.ui.descriptionLabel.setStyleSheet("color : red")
self.modify.edit()
# Clear lineedits
all_lineedits = self.street_browser.findChildren(QLineEdit)
for lineedit in all_lineedits:
lineedit.setText("")
self.clear_xref_and_esu_tables()
self.set_combo_index()
self.set_current_dates()
self.street_browser.ui.tolLineEdit.setStyleSheet("background-color: white")
self.street_browser.ui.tolLineEdit.setReadOnly(False)
self.street_browser.ui.tolLineEdit.setText("10")
self.street_browser.ui.descriptionTextEdit.setText("")
# Set new usrn + version 1
self.street_browser.ui.byLineEdit.setText(self.username)
self.street_browser.ui.usrnLineEdit.setText(str(n_usrn))
self.street_browser.ui.versionLineEdit.setText("1")
# Set the ESU layer to read only
self.esu_layer.setReadOnly(True)
def revert_sb_add(self):
"""
Revert street browser back to read-only mode
"""
self.edit_esu = None
self.modify.read_only()
self.street_browser.ui.tolLineEdit.setReadOnly(True)
self.street_browser.ui.tolLineEdit.setStyleSheet("background-color: rgb(213,234,234)")
self.street_browser.ui.addPushButton.setText("Add")
self.esu_layer.setReadOnly(False)
def clear_xref_and_esu_tables(self):
"""
Blank model clears the xref table
"""
# Set xref to empty model
empty_model = QSqlQueryModel()
self.street_browser.ui.crossReferenceTableView.setModel(empty_model)
# Clear list widget
self.street_browser.ui.linkEsuListWidget.clear()
def set_combo_index(self):
"""
Set the index of the comboboxes
"""
all_combos = self.street_browser.findChildren(QComboBox)
for combo in all_combos:
combo.setCurrentIndex(0)
def set_current_dates(self):
"""
Set date lineedits/date picker to current date
"""
now_date = datetime.datetime.now()
now_formatted = now_date.strftime("%d/%m/%Y")
self.street_browser.ui.updateDateLineEdit.setText(now_formatted)
self.street_browser.ui.entryDateLineEdit.setText(now_formatted)
self.street_browser.ui.stateDateLineEdit.setText(now_formatted)
date_obj = QDate(now_date.year, now_date.month, now_date.day)
self.street_browser.ui.startDateDateEdit.setDate(date_obj)
def cancel_new_record(self):
"""
Revert street browser to read only
"""
self.revert_sb_add()
self.mapper.setCurrentIndex(self.mapper.currentIndex())
self.disconnect_esu_and_coords()
self.save_dlg.close()
def new_usrn(self):
"""
Returns a new usrn (max usrn + 1)
:rtype : int
:return: USRN
"""
query = QSqlQuery("SELECT MAX(usrn) from tblSTREET", self.db)
query.seek(0)
try:
usrn = int(query.value(0)) + 1
except TypeError:
# Throws if there are no USRNs yet. Example for demo db inserted here
# This must be set manually for a new local authority
usrn = 12700001
self.new_usrn_no = usrn
return usrn
def failed_validation_msg(self, mandatory, desc, esu_valid):
# TODO: Attach esu's to error message (see bad_esu = [] in validate_mandatory)
"""
Display appropriate error message for failed validation
:param mandatory: mand check bool
:param desc: desc present bool
:param esu_valid: valid esu links bool
"""
err = "Unable to save record:"
errors = []
if not mandatory:
errors.append("All mandatory fields must be complete")
if not desc:
errors.append("Description already exists within this town/locality")
if not esu_valid:
errors.append("Invalid ESU links")
for error in errors:
err = err + "\n" + str(error)
val_fail_msg_box = QMessageBox(QMessageBox.Warning, " ", err, QMessageBox.Ok, None)
val_fail_msg_box.setWindowFlags(Qt.CustomizeWindowHint | Qt.WindowTitleHint)
val_fail_msg_box.exec_()
def save_new_record(self):
"""
Insert new record if all validation is passed
"""
self._strip_whitespace_from_description()
usrn = self.street_browser.ui.usrnLineEdit.text()
mandatory = self.modify.mandatory_field_check()
if self._record_is_type_3_or_4():
unique_desc = True
else:
unique_desc = ValidateDescription(self.street_browser, self.db).validate()
if self.edit_esu:
final_sel = self.edit_esu.get_final_selection()[0]
esu_valid = ValidateStreetType(self.street_browser, self.db).validate(usrn, final_sel)
else:
esu_valid = True
if mandatory and unique_desc and esu_valid:
self.insert_record()
self.revert_sb_add()
self.disconnect_esu_and_coords()
# Update Esu Graphic symbology attribute for all linked Esu's
self.esu_layer = QgsMapLayerRegistry.instance().mapLayersByName('ESU Graphic')[0]
UpdateEsuSymbology(self.db, self.esu_layer).update(usrn)
else:
self.failed_validation_msg(mandatory, unique_desc, esu_valid)
self.save_dlg.close()
def _strip_whitespace_from_description(self):
"""
Strip whitespace from the text in the description field
"""
description = str(self.street_browser.ui.descriptionTextEdit.toPlainText())
description = description.strip()
self.street_browser.ui.descriptionTextEdit.setPlainText(description)
def _record_is_type_3_or_4(self):
"""
Check the combo box to see if record is Type 3 or 3
:return boolean:
"""
record_type_combo = self.street_browser.ui.recordTypeComboBox
record_type = int(record_type_combo.itemData(record_type_combo.currentIndex()))
if record_type in (3, 4):
return True
else:
return False
def disconnect_esu_and_coords(self):
try:
self.street_browser.ui.editEsuPushButton.clicked.disconnect()
self.street_browser.ui.editCoordsPushButton.clicked.disconnect()
except TypeError:
pass
def insert_record(self):
"""
Insert a record/row into the model + commit
"""
record = self.model.record()
record.setValue(1, str(self.street_browser.ui.usrnLineEdit.text())) # usrn
record.setValue(3, str(0)) # currency_flag 0
record.setValue(5, str(self.street_browser.ui.descriptionTextEdit.toPlainText()))
record.setValue(23, self.username)
# Set values from lineedits
date_cols = [6, 7, 8, 18]
for idx, lineedit in self.lineedits.iteritems():
txt = str(lineedit.text())
if txt:
# re-format dates for db
if idx in date_cols:
txt = self.database_dates(txt)
record.setValue(idx, txt)
# Set values from comboboxes
for idx, combo in self.combos.iteritems():
combo_idx = combo.currentIndex()
# if combo_idx != 0:
record.setValue(idx, str(combo.itemData(combo_idx)))
# Append record after last current record
self.model.insertRecord(-1, record)
# Commit to db + insert any esu links
self.model.submitAll()
self.commit_esu_link()
self.repopulate_model()
def repopulate_model(self):
"""
Repopulate the model to show the new model
"""
while self.model.canFetchMore():
self.model.fetchMore()
# jump to new record (appended to end)
self.mapper.toLast()
def database_dates(self, date):
"""
Format dates from lineedits for database (yyyymmdd)
:param date: Date string
:return: formattted date string
"""
date_obj = datetime.datetime.strptime(date, "%d/%m/%Y")
db_date = str(date_obj.strftime("%Y%m%d"))
return db_date
def create_esu_link(self):
"""
Add esu links to a street
"""
button = self.street_browser.ui.editEsuPushButton
layer = 'ESU Graphic'
display_attr = 'esu_id'
if self.edit_esu:
previous_unsaved = self.edit_esu.get_final_selection()[0]
self.edit_esu = EditEsuLink(self.iface, button, self.db, street_browser=self.street_browser,
layer_name=layer, dis_attr=display_attr, unsaved=previous_unsaved)
else:
self.edit_esu = EditEsuLink(self.iface, button, self.db, street_browser=self.street_browser,
layer_name=layer, dis_attr=display_attr)
self.edit_esu.show()
def commit_esu_link(self):
"""
        Update existing ESU links on edit and handle adding/removing links via editing
"""
usrn = str(self.new_usrn_no)
if self.edit_esu:
# get new set of esu links
esus = self.edit_esu.get_final_selection()
final = esus[0]
else:
# No esu edits made so query for existing esu links
final = self.esu_version.query_esu(usrn)
date = str(datetime.datetime.now().strftime("%Y%m%d"))
try:
for esu in final:
query_str = "SELECT version_no FROM tblESU WHERE esu_id = %s AND currency_flag = 0;" % esu
query = QSqlQuery(query_str, self.db)
seek = query.seek(0)
if seek:
esu_ver = query.value(0)
else:
esu_ver = str(1)
# Create new links
insert_sql = "INSERT INTO lnkESU_STREET (esu_id, usrn, esu_version_no, usrn_version_no, currency_flag," \
" entry_date, update_date) VALUES (%s, %s, %s, 1, 0, %s, %s)" \
% (esu, usrn, esu_ver, date, date)
new_lnk_query = QSqlQuery(insert_sql, self.db)
except TypeError:
# No esu's attached to record
pass
def create_start_end_coords(self):
"""
        Create an instance of the coordinate edit class
"""
coord_le = {"start_xref": self.street_browser.ui.startXLineEdit,
"start_yref": self.street_browser.ui.startYLineEdit,
"end_xref": self.street_browser.ui.endXLineEdit,
"end_yref": self.street_browser.ui.endYLineEdit}
button = self.street_browser.ui.editCoordsPushButton
usrn = self.street_browser.ui.usrnLineEdit.text()
coords = EditStartEndCoords(self.iface, coord_le, self.model, self.mapper, button, usrn=usrn, edit=False)
coords.show()
| gpl-2.0 | -7,157,423,312,518,393,000 | 39.943089 | 121 | 0.601403 | false |
josephyli/py-db-cluster | runDDL.py | 1 | 9526 | import argparse
import os
import pymysql.cursors
import re
import sys
from ConfigParser import SafeConfigParser
from StringIO import StringIO
from pymysql import OperationalError
# returns a list of sql commands as strings
def read_DDL(ddlfilename):
f = open(ddlfilename, 'r')
ddlfile = f.read()
f.close()
temp = filter(None, ddlfile.split(';'))
sql_commands = []
# filter out white space from file input
for c in temp:
if c != "\n":
sql_commands.append(c)
return sql_commands
# returns a dict with all nodes information
# responsible for parsing the config file
def get_node_config(configfilename):
config_dict = {}
if os.path.isfile(configfilename):
with open(configfilename) as stream:
# pass into string & add a header
stream = StringIO("[fakesection]\n" + stream.read())
# read/parse catalog data
cp = SafeConfigParser()
cp.readfp(stream)
config_dict['catalog.driver'] = cp.get('fakesection', 'catalog.driver')
config_dict['catalog.hostname'] = cp.get('fakesection', 'catalog.hostname')
config_dict['catalog.username'] = cp.get('fakesection', 'catalog.username')
config_dict['catalog.passwd'] = cp.get('fakesection', 'catalog.passwd')
config_dict['catalog.database'] = cp.get('fakesection', 'catalog.hostname').rsplit('/', 1)[-1]
# read the number of nodes
numnodes = cp.getint('fakesection', 'numnodes')
config_dict['catalog.numnodes'] = numnodes
# read node data and print out info
for node in range(1, numnodes + 1):
for candidate in ['driver', 'hostname', 'username', 'passwd', 'database']:
# test if candidate exists before adding to dictionary
if cp.has_option('fakesection', "node" + str(node) + "." + candidate):
# print cp.get('fakesection', "node" + str(node) + "." + candidate)
config_dict["node" + str(node) + "." + candidate] = cp.get('fakesection', "node" + str(node) + "." + candidate)
else:
if candidate == "database":
config_dict["node" + str(node) + ".database"] = cp.get('fakesection', "node" + str(node) + ".hostname").rsplit('/', 1)[-1]
else:
print "error: candidate not found"
return config_dict
else:
print("No config file found at", configfilename)
return null
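# Example of the expected config layout (illustrative only -- the addresses,
# credentials and node count below are made up; get_node_config() prepends a
# fake "[fakesection]" header so SafeConfigParser can read this headerless
# key=value file):
#
#   catalog.driver   = com.mysql.jdbc.Driver
#   catalog.hostname = jdbc:mysql://10.0.0.1/mycatalog
#   catalog.username = root
#   catalog.passwd   = secret
#   numnodes         = 2
#   node1.driver     = com.mysql.jdbc.Driver
#   node1.hostname   = jdbc:mysql://10.0.0.2/mydb
#   node1.username   = root
#   node1.passwd     = secret
#   node2.driver     = com.mysql.jdbc.Driver
#   node2.hostname   = jdbc:mysql://10.0.0.3/mydb
#   node2.username   = root
#   node2.passwd     = secret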
def check_dtables_exists(config_dict):
cat_hn = re.findall( r'[0-9]+(?:\.[0-9]+){3}', config_dict['catalog.hostname'] )[0]
cat_usr = config_dict['catalog.username']
cat_pw = config_dict['catalog.passwd']
cat_dr = config_dict['catalog.driver']
cat_db = config_dict['catalog.database']
sql = "SELECT * FROM information_schema.tables WHERE table_schema = '%s' AND table_name = 'dtables' LIMIT 1;" % cat_db
res = None;
try:
# connect and execute the sql statement
connection = pymysql.connect(host=cat_hn,
user=cat_usr,
password=cat_pw,
db=cat_db,
charset='utf8mb4',
cursorclass=pymysql.cursors.DictCursor)
print "[SUCCESSFUL CATALOG CONNECTION] <"+connection.host+" - "+connection.db+">", connection
print
with connection.cursor() as cursor:
res = cursor.execute(sql.strip() + ';')
connection.commit()
except pymysql.err.InternalError as d:
print "[FAILED TO CHECK IF CATALOG EXISTS]"
print d
if res:
return True
else:
return False
# stores metadata about the DDL in a catalog database
# using a list of tables that need to be created in the catalog
def update_catalog(config_dict, table_list):
cat_hn = re.findall( r'[0-9]+(?:\.[0-9]+){3}', config_dict['catalog.hostname'] )[0]
cat_usr = config_dict['catalog.username']
cat_pw = config_dict['catalog.passwd']
cat_dr = config_dict['catalog.driver']
cat_db = config_dict['catalog.database']
if check_dtables_exists(config_dict):
sql = []
else:
sql = ["CREATE TABLE IF NOT EXISTS dtables (tname char(32), nodedriver char(64), nodeurl char(128), nodeuser char(16), nodepasswd char(16), partmtd int, nodeid int, partcol char(32), partparam1 char(32), partparam2 char(32));"]
# prepares the sql statement to insert into catalog the tables in each node
for table in table_list:
for i in range(config_dict["catalog.numnodes"]):
hn = config_dict['node'+str(i + 1)+'.hostname']
usr = config_dict['node'+str(i + 1)+'.username']
pw = config_dict['node'+str(i + 1)+'.passwd']
dr = config_dict['node'+str(i + 1)+'.driver']
sql.append("INSERT INTO dtables VALUES (\'%s\', \'%s\', \'%s\', \'%s\',\'%s\', NULL,%d,NULL,NULL,NULL);" % (table,dr,hn,usr,pw,i+1))
try:
# connect and execute the sql statement
connection = pymysql.connect(host=cat_hn,
user=cat_usr,
password=cat_pw,
db=cat_db,
charset='utf8mb4',
cursorclass=pymysql.cursors.DictCursor)
print "[SUCCESSFUL CATALOG CONNECTION] <"+connection.host+" - "+connection.db+">", connection
print
with connection.cursor() as cursor:
# execute every sql command
for command in sql:
try:
print command
print
cursor.execute(command.strip() + ';')
connection.commit()
except OperationalError, msg:
print "Command skipped: ", msg
except pymysql.err.InternalError as d:
print "[FAILED TO UPDATE CATALOG]"
print d
# returns a list of connections to all nodes
def get_connections(config_dict):
connections = []
for i in range(config_dict["catalog.numnodes"]):
try:
hn = re.findall( r'[0-9]+(?:\.[0-9]+){3}', config_dict['node'+str(i + 1)+'.hostname'] )[0]
usr = config_dict['node'+str(i + 1)+'.username']
pw = config_dict['node'+str(i + 1)+'.passwd']
db = config_dict['node'+str(i + 1)+'.database']
connections.append(pymysql.connect(host=hn,
user=usr,
password=pw,
db=db,
charset='utf8mb4',
cursorclass=pymysql.cursors.DictCursor))
except pymysql.MySQLError as e:
print "[NODE", i + 1, "CONNECTION FAILED]:"
print "hostname:".rjust(12), re.findall( r'[0-9]+(?:\.[0-9]+){3}', config_dict['node'+str(i + 1)+'.hostname'] )[0]
print "username:".rjust(12), config_dict['node'+str(i + 1)+'.username']
print "password:".rjust(12), config_dict['node'+str(i + 1)+'.passwd']
print "database:".rjust(12), config_dict['node'+str(i + 1)+'.database']
print 'Got error {!r}, errno is {}'.format(e, e.args[0])
print
return connections
# runs the list of commands against the list of connections,
# spawning one thread per connection
def run_commmands_against_nodes(connections, sql_commands):
import time
from threading import Thread
from threading import active_count
# create a list of jobs
list_of_threads = []
for connection in connections:
print "[JOB CREATED] <"+ connection.host+ " - " + connection.db+ ">"
print connection
list_of_threads.append(Thread(target=run_sql_commands_against_node, args=(connection, sql_commands)))
print
# start up all jobs
for t in list_of_threads:
t.start()
# wait for all jobs to complete before moving on
while active_count() > 1:
time.sleep(1)
def run_sql_commands_against_node(connection, sql_commands):
with connection.cursor() as cursor:
try:
for c in sql_commands:
cursor.execute(c.strip() + ';')
connection.commit()
print "[JOB SUCCESSFUL] <"+connection.host+ " - " + connection.db+ ">"
connection.close()
except pymysql.MySQLError as e:
print "[JOB FAILED] <"+connection.host+ " - " + connection.db+ "> ERROR: {!r}, ERROR NUMBER: {}".format(e, e.args[0])
def print_pretty_dict(idict):
import json
print json.dumps(idict, indent=1)
def main():
parser = argparse.ArgumentParser()
parser.add_argument("configfile", help="Location of Config File, See the README for more information")
parser.add_argument("ddlfile", help="Location of DDL File, See the README for more information")
args = parser.parse_args()
print
print "=" * 80
print
# read configuration and return a dictionary -------------------------------
temp = "PARSING " + str(args.configfile) + "..."
print
print temp.center(80, " ")
nodes_dict = get_node_config(args.configfile)
print_pretty_dict(nodes_dict)
print
print "-" * 80
print
# return a list of connections to all nodes --------------------------------
print "CREATING CONNECTIONS...".center(80, " ")
print
node_connections = get_connections(nodes_dict)
# if no connections were made, terminate the program, comment this out for testing
if len(node_connections) == 0:
print "Terminating due to connection failures..."
sys.exit()
print "# of connections:", str(len(node_connections))
print
for c in node_connections:
print "HOST: " + c.host + " DB: " + c.db + " " + str(c)
print
print "-" * 80
print
# read DDL and return a list of sql commands -------------------------------
print "PARSING SQL COMMANDS...".center(80, " ")
print
sql_commands = read_DDL(args.ddlfile)
# list of tables is used to update catalog with metadata
table_list = []
for command in sql_commands:
if command.split()[0].upper() == "CREATE":
table_list.append((re.split('\s|\(',command)[2]))
print "[SQL COMMANDS]:"
for s in sql_commands:
print s.strip()
print
print "TABLES:"
print table_list
print
print "-" * 80
print
# update catalog ----------------------------------------------------------
print "UPDATING CATALOG...".center(80, " ")
print
update_catalog(nodes_dict,table_list)
print
print "-" * 80
print
# run the commands against the nodes ---------------------------------------
print "EXECUTING SQL COMMANDS ON NODES...".center(80, " ")
print
run_commmands_against_nodes(node_connections, sql_commands)
print
print "=" * 80
print
if __name__ == "__main__":
main()
| gpl-3.0 | 3,556,770,068,061,387,300 | 32.780142 | 229 | 0.655364 | false |
alibozorgkhan/django-boilerplate | django_boilerplate/settings/base.py | 1 | 4882 | """
Django settings for django_boilerplate project.
Generated by 'django-admin startproject' using Django 1.10.5.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.10/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.10/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '+!=wvvf$f^jytsaol8_50@)+xw*7m4@v&9=xm!()b(n_731dhm'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
LOCAL_APPS = [
'accounts',
'app',
]
EXTERNAL_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'social_django',
]
INSTALLED_APPS = LOCAL_APPS + EXTERNAL_APPS
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'social_django.middleware.SocialAuthExceptionMiddleware'
]
ROOT_URLCONF = 'django_boilerplate.urls'
TEMPLATES_DIR = ["{}/templates".format(app) for app in LOCAL_APPS] + ['django_boilerplate/templates']
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': TEMPLATES_DIR,
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'django_boilerplate.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.10/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.10/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.10/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.10/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = 'assets/'
STATICFILES_DIRS = (
os.path.join(BASE_DIR, 'static'),
)
# Social Auth
AUTHENTICATION_BACKENDS = (
'social_core.backends.google.GoogleOAuth2',
'social_core.backends.facebook.FacebookOAuth2',
'social_core.backends.linkedin.LinkedinOAuth2',
'django.contrib.auth.backends.ModelBackend',
)
LOGIN_URL = 'login'
LOGOUT_URL = 'logout'
LOGIN_REDIRECT_URL = 'index'
SOCIAL_AUTH_FACEBOOK_KEY = os.environ.get('SOCIAL_AUTH_FACEBOOK_KEY')
SOCIAL_AUTH_FACEBOOK_SECRET = os.environ.get('SOCIAL_AUTH_FACEBOOK_SECRET')
SOCIAL_AUTH_FACEBOOK_SCOPE = ['email']
SOCIAL_AUTH_FACEBOOK_PROFILE_EXTRA_PARAMS = {
'fields': 'id,name,email',
}
SOCIAL_AUTH_GOOGLE_OAUTH2_KEY = os.environ.get('SOCIAL_AUTH_GOOGLE_OAUTH2_KEY')
SOCIAL_AUTH_GOOGLE_OAUTH2_SECRET = os.environ.get('SOCIAL_AUTH_GOOGLE_OAUTH2_SECRET')
SOCIAL_AUTH_LINKEDIN_OAUTH2_KEY = os.environ.get('SOCIAL_AUTH_LINKEDIN_OAUTH2_KEY')
SOCIAL_AUTH_LINKEDIN_OAUTH2_SECRET = os.environ.get('SOCIAL_AUTH_LINKEDIN_OAUTH2_SECRET')
SOCIAL_AUTH_LINKEDIN_OAUTH2_SCOPE = ['r_basicprofile', 'r_emailaddress']
SOCIAL_AUTH_LINKEDIN_OAUTH2_FIELD_SELECTORS = ['email-address']
SOCIAL_AUTH_LINKEDIN_OAUTH2_OAUTH2_EXTRA_DATA = [('id', 'id'),
('firstName', 'first_name'),
('lastName', 'last_name'),
('emailAddress', 'email_address')]
| mit | 2,478,633,743,751,343,000 | 28.409639 | 101 | 0.681073 | false |
djangraw/PsychoPyParadigms | Reading/DistractionTask_eyelink_d6.py | 1 | 30141 | #!/usr/bin/env python2
"""Display multi-page text with simultaneous auditory distractions, recording eye position data using the EyeLink eye tracker."""
# DistractionTask_eyelink_d6.py
# Created 3/16/15 by DJ based on VidLecTask.py
# Updated 3/31/15 by DJ - renamed from ReadingTask_dict_d2.py.
# Updated 4/1-16/15 by DJ - incorporated eyelink fully, renamed ReadingTask_eyelink_d1.py.
# Updated 4/16/15 by DJ - removed questions, added randomized thought probes and automatic pick up where you left off.
# Updated 4/17/15 by DJ - removed Eyelink again to have same behavioral version
# Updated 6/29/15 by DJ - removed random session length ranges and probe times - page ranges specified in params.
# Updated 7/7/15 by DJ - Renamed from ReadingImageTask_dict_d4, added audio.
# Updated 7/15/15 by DJ - added sound time limits
# Updated 7/20/15 by DJ - switched to premade sound files, switched back to eyelink version, debugged
# Updated 7/24/15 by DJ - added quiz files list, imagePrefix list, readingQuiz list and audioQuiz list
# Updated 7/28/15 by DJ - made sounds play on page-by-page basis, sound is randomized,
# Updated 8/18/15 by DJ - added serial port (and changed name from _behavior to _serial), but haven't tried it yet.
# Updated 8/21/15 by DJ - tested in 3T-C and debugged as necessary
# Updated 9/17/15 by DJ - added logging of each message sent
# Updated 10/22/15 by DJ - added saving
# Updated 10/29/15 by DJ - cleaned up slightly, edited PromptTools to ask subjects not to skip around.
# Updated 11/11/15 by DJ - added additional calibration parameters (changed name to _d6)
# Updated 11/12/15 by DJ - switched to 1024x768 (max res of rear projector)
# Updated 12/2/15 by DJ - adapted serial version back to EyeLink version
# Import packages
from psychopy import core, gui, data, event, sound, logging #, visual # visual causes a bug in the guis, so I moved it down.
from psychopy.tools.filetools import fromFile, toFile
import time as ts, numpy as np
import AppKit, os # for monitor size detection, files
import PromptTools
import random
"""
# Import SMI libraries
import serial
from LibSmi_PsychoPy import LibSmi_PsychoPy
"""
#"""
# Import eyelink libraries
from pylink import *
from EyeLinkCoreGraphicsPsychoPy import EyeLinkCoreGraphicsPsychoPy
#"""
# ====================== #
# ===== PARAMETERS ===== #
# ====================== #
# Save the parameters declared below?
saveParams = True
newParamsFilename = 'DistractionParams_eyelink_d6.pickle'
expInfoFilename = 'lastDistractionInfo_eyelink_d6.pickle'
# Declare primary task parameters.
params = {
# FOR INITIAL PILOTS
'imagePrefixList': ['Greeks_Lec02_stretch_gray','Greeks_Lec02_stretch_gray','Greeks_Lec02_stretch_gray','Greeks_Lec02_stretch_gray','Greeks_Lec07_stretch_gray','Greeks_Lec07_stretch_gray'],
'startPageList': [1,31,61,91,1,31], # page where each session should start
'endPageList': [30,60,90,120,30,60], # inclusive
'readingQuizList':['Lecture02Questions_d4_read1.txt','Lecture02Questions_d4_read2.txt','Lecture02Questions_d4_read3.txt','Lecture02Questions_d4_read4.txt','Lecture07Questions_d3_read1.txt','Lecture07Questions_d3_read2.txt',],
'soundFileList': ['Lecture10_40min.wav']*6,
# 'imagePrefixList': ['Greeks_Lec07_stretch_gray','Greeks_Lec07_stretch_gray','Greeks_Lec10_stretch_gray','Greeks_Lec10_stretch_gray','Greeks_Lec02_stretch_gray','Greeks_Lec02_stretch_gray'],
# 'startPageList': [1,31,1,31,61,91], # page where each session should start
# 'endPageList': [30,60,30,60,90,120], # inclusive
# 'soundFileList': ['Lecture02_40min.wav']*6,
# 'readingQuizList':['Lecture07Questions_d3_read1.txt','Lecture07Questions_d3_read2.txt','Lecture10Questions_d4_read1.txt','Lecture10Questions_d4_read2.txt','Lecture02Questions_d4_read3.txt','Lecture02Questions_d4_read4.txt'],
# 'soundFileList': ['Lecture02_40min.wav']*6,
'promptTypeList': ['AttendReading','AttendBoth_short','AttendReading_short','AttendBoth_short','AttendBoth_short','AttendReading_short'],
'soundQuizList':['BLANK.txt']*6,
'quizPromptList':['TestReading_box']*6,
'probSoundList':[0.5]*6,
# REST OF PARAMS
'skipPrompts': False, # go right to the scanner-wait page
'maxPageTime': 14, # max time the subject is allowed to read each page (in seconds)
'pageFadeDur': 3, # for the last pageFadeDur seconds, the text will fade to white.
'IPI': 2, # time between when one page disappears and the next appears (in seconds)
'probSound': 0.5, # probability that sound will be played on any given page
'IBI': 1, # time between end of block/probe and beginning of next block (in seconds)
'tStartup': 2, # pause time before starting first page
'probeDur': 60, # max time subjects have to answer a Probe Q
'keyEndsProbe': True, # will a keypress end the probe?
'pageKey': 'b',#'space', # key to turn page
'respKeys': ['g','r','b','y'], # keys to be used for responses (clockwise from 9:00) - "DIAMOND" RESPONSE BOX
'wanderKey': 'z', # key to be used to indicate mind-wandering
'triggerKey': 't', # key from scanner that says scan is starting
# declare image and question files
'imageDir': 'ReadingImages/',
'imagePrefix': '', # images must start with this and end with _page<number>.jpg
'soundDir': 'sounds/',
'soundFile': '', # fill in later
'promptType': '', # fill in later
'soundVolume': 0.5,
'whiteNoiseFile': 'Lecture10_40min_phasescrambled.wav', #'WhiteNoise-7m30s.wav', # this plays when the lecture doesn't.
'pageRange': [1, 1], # pages (starting from 1) at which reading should start and stop in each block
'textDir': 'questions/', # directory containing questions and probes
'probesFile': 'BLANK.txt', #'ReadingProbes_d2.txt', #'ReadingProbes_behavior.txt', #
'readingQuiz':'', # fill in later
'soundQuiz':'', # fill in later
'quizPrompt':'', # fill in later
'questionOrder':[], # fill in later
# declare other stimulus parameters
'fullScreen': True, # run in full screen mode?
'screenToShow': 1, # display on primary screen (0) or secondary (1)?
'screenColor':(128,128,128), # in rgb255 space
'imageSize': (960,709), # (FOR 1024x768 SCREEN) # in pixels... set to None for exact size of screen #(1201,945), # (FOR 1280x1024 SCREEN)
'fixCrossSize': 10, # size of cross, in pixels
'fixCrossPos': (-480,354), # (x,y) pos of fixation cross displayed before each page (for drift correction) #[-600, 472],
'usePhotodiode': False, # add sync square in corner of screen
#"""
'isEyeLinkConnected': False # is there an EyeLink tracker connected via ethernet?
}
#"""
"""
# declare serial port & calibration parameters for SMI (remove bracket and add comma to lines just above)
'portName': '/dev/tty.usbserial',
'portBaud': 115200,
'calNPoints': 13, # number of points in the calibration (and validation)The number of points to be used for the validation (standard=9)
'calAutoAccept': False, # Let SMI pick when to accept a point (True [default]) or accept manually (False).
'calGoFast': False, # Go quickly from point to point (True) or slower and more precise (False [default]).
'calCheckLevel': 3 #calibration check level (0=none,1=weak,2=medium,3=strong [default])
}
"""
# save parameters
if saveParams:
print("Opening save dialog:")
dlgResult = gui.fileSaveDlg(prompt='Save Params...',initFilePath = os.getcwd() + '/params', initFileName = newParamsFilename,
allowed="PICKLE files (.pickle)|.pickle|All files (.*)|")
newParamsFilename = dlgResult
print("dlgResult: %s"%dlgResult)
if newParamsFilename is None: # keep going, but don't save
saveParams = False
print("Didn't save params.")
else:
toFile(newParamsFilename, params)# save it!
print("Saved params to %s."%newParamsFilename)
# toFile(newParamsFilename, params)
# print("saved params to %s."%newParamsFilename)
# ========================== #
# ===== SET UP LOGGING ===== #
# ========================== #
try:#try to get a previous parameters file
expInfo = fromFile(expInfoFilename)
expInfo['session'] +=1 # automatically increment session number
expInfo['paramsFile'] = [expInfo['paramsFile'],'Load...']
except:#if not there then use a default set
expInfo = {'subject':'1', 'session':1, 'skipPrompts':False, 'tSound':0.0, 'paramsFile':['DEFAULT','Load...']}
# overwrite if you just saved a new parameter set
if saveParams:
expInfo['paramsFile'] = [newParamsFilename,'Load...']
dateStr = ts.strftime("%b_%d_%H%M", ts.localtime()) # add the current time
#present a dialogue to change params
dlg = gui.DlgFromDict(expInfo, title='Distraction task', order=['subject','session','skipPrompts','paramsFile'])
if not dlg.OK:
core.quit()#the user hit cancel so exit
# find parameter file
if expInfo['paramsFile'] == 'Load...':
dlgResult = gui.fileOpenDlg(prompt='Select parameters file',tryFilePath=os.getcwd(),
allowed="PICKLE files (.pickle)|.pickle|All files (.*)|")
expInfo['paramsFile'] = dlgResult[0]
# load parameter file
if expInfo['paramsFile'] not in ['DEFAULT', None]: # otherwise, just use defaults.
# load params file
params = fromFile(expInfo['paramsFile'])
# GET NEW START AND STOP PAGES
params['pageRange'][0] = params['startPageList'][expInfo['session']-1] # use session-1 as index of list
params['pageRange'][1] = params['endPageList'][expInfo['session']-1] # use session-1 as index of list
# GET SOUND FILE AND OTHER SESSION-DEPENDENT INFO
params['soundFile'] = params['soundFileList'][expInfo['session']-1]
params['promptType'] = params['promptTypeList'][expInfo['session']-1]
params['imagePrefix'] = params['imagePrefixList'][expInfo['session']-1]
params['readingQuiz'] = params['readingQuizList'][expInfo['session']-1]
params['soundQuiz'] = params['soundQuizList'][expInfo['session']-1]
params['quizPrompt'] = params['quizPromptList'][expInfo['session']-1]
params['probSound'] = params['probSoundList'][expInfo['session']-1]
tSound = expInfo['tSound']
# transfer skipPrompts
params['skipPrompts'] = expInfo['skipPrompts']
# read questions and answers from text files
[questions_reading,options_reading,answers_reading] = PromptTools.ParseQuestionFile(params['textDir']+params['readingQuiz'])
print('%d questions loaded from %s'%(len(questions_reading),params['readingQuiz']))
[questions_sound,options_sound,answers_sound] = PromptTools.ParseQuestionFile(params['textDir']+params['soundQuiz'])
print('%d questions loaded from %s'%(len(questions_sound),params['soundQuiz']))
# append the two
questions_all = questions_reading + questions_sound
options_all = options_reading + options_sound
answers_all = answers_reading + answers_sound
# shuffle the order
newOrder = range(0,len(questions_all))
random.shuffle(newOrder)
questions_all = [questions_all[i] for i in newOrder]
options_all = [options_all[i] for i in newOrder]
answers_all = [answers_all[i] for i in newOrder]
params['questionOrder'] = newOrder
# ========================== #
# ===== GET SCREEN RES ===== #
# ========================== #
# kluge for secondary monitor
if params['fullScreen']:
screens = AppKit.NSScreen.screens()
screenRes = (int(screens[params['screenToShow']].frame().size.width), int(screens[params['screenToShow']].frame().size.height))
# screenRes = (1920, 1200)
if params['screenToShow']>0:
params['fullScreen'] = False
else:
screenRes = (1024,768)
# save screen size to params struct
params['screenSize'] = screenRes
# adjust image size if one was not entered.
if params['imageSize'] is None:
params['imageSize'] = (screenRes[0], screenRes[1])
# ========================== #
# ===== LOG PARAMETERS ===== #
# ========================== #
# print params to Output
print 'params = {'
for key in sorted(params.keys()):
print " '%s': %s"%(key,params[key]) # print each value as-is (no quotes)
print '}'
#make a log file to save parameter/event data
filename = 'DistractionTask-%s-%d-%s'%(expInfo['subject'], expInfo['session'], dateStr) #'Sart-' + expInfo['subject'] + '-' + expInfo['session'] + '-' + dateStr
logging.LogFile((filename+'.log'), level=logging.INFO)#, mode='w') # w=overwrite
logging.log(level=logging.INFO, msg='---START PARAMETERS---')
logging.log(level=logging.INFO, msg='filename: %s'%filename)
logging.log(level=logging.INFO, msg='subject: %s'%expInfo['subject'])
logging.log(level=logging.INFO, msg='session: %s'%expInfo['session'])
logging.log(level=logging.INFO, msg='date: %s'%dateStr)
logging.log(level=logging.INFO, msg='tSound: %s'%expInfo['tSound'])
for key in sorted(params.keys()): # in alphabetical order
logging.log(level=logging.INFO, msg='%s: %s'%(key,params[key]))
logging.log(level=logging.INFO, msg='---END PARAMETERS---')
# ========================== #
# ===== SET UP TRACKER ===== #
# ========================== #
"""
# Set up SMI's serial port by declaring LibSmi object
myTracker = LibSmi_PsychoPy(experiment='DistractionTask_serial_d4',port=params['portName'], baudrate=params['portBaud'], useSound=True, w=screenRes[0], h=screenRes[1], bgcolor=params['screenColor'],fullScreen=params['fullScreen'],screenToShow=params['screenToShow'])
print "Port %s isOpen = %d"%(myTracker.tracker.name,myTracker.tracker.isOpen())
"""
#"""
# Set up EyeLink tracker
# Declare constants
LEFT_EYE = 0
RIGHT_EYE = 1
BINOCULAR = 2
# Set up tracker
if params['isEyeLinkConnected']:
eyelinktracker = EyeLink()
else:
eyelinktracker = EyeLink(None)
# Check for successful connection
if not eyelinktracker:
print('=== ERROR: Eyelink() returned None.')
core.quit()
#Initialize the graphics
genv = EyeLinkCoreGraphicsPsychoPy(screenRes[0],screenRes[1],eyelinktracker,bgcolor=params['screenColor'])
openGraphicsEx(genv)
#Opens the EDF file.
edfFileName = filename + '.EDF'
edfHostFileName = 'TEST.EDF'
getEYELINK().openDataFile(edfHostFileName)
pylink.flushGetkeyQueue(); # used to be below openDataFile
getEYELINK().setOfflineMode();
#Gets the display surface and sends a mesage to EDF file;
screenRes = genv.win.size
getEYELINK().sendCommand("screen_pixel_coords = 0 0 %d %d" %(screenRes[0] - 1, screenRes[1] - 1))
getEYELINK().sendMessage("DISPLAY_COORDS 0 0 %d %d" %(screenRes[0] - 1, screenRes[1] - 1))
# send software version
tracker_software_ver = 0
eyelink_ver = getEYELINK().getTrackerVersion()
if eyelink_ver == 3:
tvstr = getEYELINK().getTrackerVersionString()
vindex = tvstr.find("EYELINK CL")
tracker_software_ver = int(float(tvstr[(vindex + len("EYELINK CL")):].strip()))
if eyelink_ver>=2:
getEYELINK().sendCommand("select_parser_configuration 0")
if eyelink_ver == 2: #turn off scenelink camera stuff
getEYELINK().sendCommand("scene_camera_gazemap = NO")
else:
getEYELINK().sendCommand("saccade_velocity_threshold = 35")
getEYELINK().sendCommand("saccade_acceleration_threshold = 9500")
# set EDF file contents
getEYELINK().sendCommand("file_event_filter = LEFT,RIGHT,FIXATION,SACCADE,BLINK,MESSAGE,BUTTON,INPUT")
if tracker_software_ver>=4:
getEYELINK().sendCommand("file_sample_data = LEFT,RIGHT,GAZE,AREA,GAZERES,STATUS,HTARGET,INPUT")
else:
getEYELINK().sendCommand("file_sample_data = LEFT,RIGHT,GAZE,AREA,GAZERES,STATUS,INPUT")
# set link data (used for gaze cursor)
getEYELINK().sendCommand("link_event_filter = LEFT,RIGHT,FIXATION,SACCADE,BLINK,BUTTON,INPUT")
if tracker_software_ver>=4:
getEYELINK().sendCommand("link_sample_data = LEFT,RIGHT,GAZE,GAZERES,AREA,STATUS,HTARGET,INPUT")
else:
getEYELINK().sendCommand("link_sample_data = LEFT,RIGHT,GAZE,GAZERES,AREA,STATUS,INPUT")
#getEYELINK().sendCommand("button_function 5 'accept_target_fixation'");
#
#eye_used = getEYELINK().eyeAvailable() #determine which eye(s) are available
#if eye_used == RIGHT_EYE:
# getEYELINK().sendMessage("EYE_USED 1 RIGHT")
#elif eye_used == LEFT_EYE:
# getEYELINK().sendMessage("EYE_USED 0 LEFT")
#elif eye_used == BINOCULAR:
# getEYELINK().sendMessage("EYE_USED 2 BOTH")
#else:
# print("ERROR in getting the eye information!")
# Set calibration parameters
#pylink.setCalibrationColors((0, 0, 0), (192, 192, 192)); #Sets the calibration target and background color
#pylink.setTargetSize(int(screenRes[0]/70), int(screenRes[0]/300)); #select best size for calibration target
#pylink.setCalibrationSounds("", "", "");
#pylink.setDriftCorrectSounds("", "off", "off");
# Ensure that the eye(s) selected during calibration is the one that gets used in the experiment.
getEYELINK().sendCommand("select_eye_after_validation = NO")
# Check if we should exit
if (eyelinktracker is not None and (not getEYELINK().isConnected() or getEYELINK().breakPressed())):
CoolDown()
#"""
# ========================== #
# ===== SET UP STIMULI ===== #
# ========================== #
from psychopy import visual
# Initialize deadline for displaying next frame
tNextFlip = [0.0] # wrapped in a list so the nested helper functions below can modify it in place
#create clocks and window
globalClock = core.Clock()#to keep track of time
trialClock = core.Clock()#to keep track of time
#win = visual.Window(screenRes, fullscr=params['fullScreen'], allowGUI=False, monitor='testMonitor', screen=params['screenToShow'], units='deg', name='win',rgb=[1,1,1])
"""
win = myTracker.win # SMI version
"""
#"""
win = genv.win # eyelink version
#"""
# create stimuli
fCS = params['fixCrossSize'] # size (for brevity)
fCP = params['fixCrossPos'] # position (for brevity)
fixation = visual.ShapeStim(win,lineColor='#000000',lineWidth=3.0,vertices=((fCP[0]-fCS/2,fCP[1]),(fCP[0]+fCS/2,fCP[1]),(fCP[0],fCP[1]),(fCP[0],fCP[1]+fCS/2),(fCP[0],fCP[1]-fCS/2)),units='pix',closeShape=False,name='fixCross');
message1 = visual.TextStim(win, pos=[0,+.5], wrapWidth=1.5, color='#000000', alignHoriz='center', name='topMsg', text="aaa",units='norm')
message2 = visual.TextStim(win, pos=[0,-.5], wrapWidth=1.5, color='#000000', alignHoriz='center', name='bottomMsg', text="bbb",units='norm')
# initialize main text stimulus
imageName = '%s%s/%s_page%d.jpg'%(params['imageDir'],params['imagePrefix'],params['imagePrefix'],1)
textImage = visual.ImageStim(win, pos=[0,0], name='Text',image=imageName, units='pix', size=params['imageSize'])
# initialize photodiode stimulus
squareSize = 0.4
diodeSquare = visual.Rect(win,pos=[squareSize/4-1,squareSize/4-1],lineColor='white',fillColor='black',size=[squareSize,squareSize],units='norm',name='diodeSquare')
# declare probe parameters
[probe_strings, probe_options,_] = PromptTools.ParseQuestionFile(params['textDir']+params['probesFile'])
print('%d probes loaded from %s'%(len(probe_strings),params['probesFile']))
# Look up prompts
[topPrompts,bottomPrompts] = PromptTools.GetPrompts(os.path.basename(__file__),params['promptType'],params)
print('%d prompts loaded from %s'%(len(topPrompts),'PromptTools.py'))
# Look up question prompts
[topQuizPrompts,bottomQuizPrompts] = PromptTools.GetPrompts(os.path.basename(__file__),params['quizPrompt'],params)
print('%d prompts loaded from %s'%(len(topQuizPrompts),'PromptTools.py'))
# declare sound!
# fullSound = sound.Sound(value='%s%s'%(params['soundDir'], params['soundFile']), volume=params['soundVolume'], name='fullSound')
pageSound = sound.Sound(value='%s%s'%(params['soundDir'], params['soundFile']), volume=params['soundVolume'], start=tSound, stop=tSound+params['maxPageTime'], name='pageSound')
whiteNoiseSound = sound.Sound(value='%s%s'%(params['soundDir'], params['whiteNoiseFile']), volume=params['soundVolume'], start=0, stop=params['maxPageTime'], name='whiteNoiseSound')
# ============================ #
# ======= SUBFUNCTIONS ======= #
# ============================ #
# increment time of next window flip
def AddToFlipTime(tIncrement=1.0):
tNextFlip[0] += tIncrement
# print("%1.3f --> %1.3f"%(globalClock.getTime(),tNextFlip[0]))
def SetFlipTimeToNow():
tNextFlip[0] = globalClock.getTime()
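# Illustrative use of the two helpers above (the numbers are made up; the real
# schedule comes from params): SetFlipTimeToNow() anchors the deadline at the
# current clock time and AddToFlipTime() pushes it forward, e.g.
#   SetFlipTimeToNow()
#   AddToFlipTime(2.5)   # next flip due 2.5 s from now
#   while globalClock.getTime() < tNextFlip[0]:
#       pass             # ShowPage() below waits on the deadline the same way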
def SendMessage(message):
"""
# send message preceded by SMI code ET_REM (generic remark) and surround multi-word remarks by quotes(?)
myTracker.log(message)
logging.log(level=logging.INFO,msg=message)
# pass
"""
#"""
# Send EyeLink message
if eyelinktracker is None:
print('MSG: %s'%message)
else:
getEYELINK().sendMessage(message)
#"""
def ShowPage(iPage, maxPageTime=float('Inf'), pageFadeDur=0, soundToPlay=None):
print('Showing Page %d'%iPage)
#"""
# Start EyeLink's RealTime mode
pylink.beginRealTimeMode(100)
#"""
# Display text
imageName = '%s%s/%s_page%d.jpg'%(params['imageDir'],params['imagePrefix'],params['imagePrefix'],iPage)
textImage.setImage(imageName)
textImage.opacity = 1
textImage.draw()
while (globalClock.getTime()<tNextFlip[0]):
pass
# win.flip(clearBuffer=False)
# draw & flip
win.logOnFlip(level=logging.EXP, msg='Display Page%d'%iPage)
# win.callOnFlip(SendMessage,'Display Page%d'%iPage)
"""
win.callOnFlip(SendMessage,'DisplayPage%d'%iPage) # SPACE REMOVED FOR SMI
"""
win.callOnFlip(SendMessage,'Display Page%d'%iPage) # Regular for EyeLink
AddToFlipTime(maxPageTime)
# win.callOnFlip(SendPortEvent,mod(page,256))
if params['usePhotodiode']:
diodeSquare.draw()
win.flip()
# erase diode square and re-draw
textImage.draw()
win.flip()
# get time at which page was displayed
pageStartTime = globalClock.getTime()
# Play sound just after window flips
if soundToPlay is not None:
soundToPlay.play()
# Flush the key buffer and mouse movements
event.clearEvents()
# Wait for relevant key press or 'maxPageTime' seconds
fadeTime = tNextFlip[0]-pageFadeDur
respKey = None
while (globalClock.getTime()<tNextFlip[0]) and respKey==None:
newKeys = event.getKeys(keyList=[params['pageKey'],params['wanderKey'],'q','escape'],timeStamped=globalClock)
if len(newKeys)>0:
for thisKey in newKeys:
if thisKey[0] in ['q','escape']:
CoolDown()
elif thisKey[0] == params['pageKey']:
respKey = thisKey
SetFlipTimeToNow() # reset flip time
now = globalClock.getTime()
if now > fadeTime:
textImage.opacity = (tNextFlip[0]-now)/pageFadeDur
textImage.draw()
win.flip()
#"""
# Stop EyeLink's RealTime mode
pylink.endRealTimeMode()
#"""
# Display the fixation cross
if params['IPI']>0:
fixation.draw()
win.logOnFlip(level=logging.EXP, msg='Display Fixation')
win.callOnFlip(SendMessage,'DisplayFixation')
if params['usePhotodiode']:
diodeSquare.draw()
win.flip()
# erase diode square and re-draw
fixation.draw()
win.flip()
# return time for which page was shown
pageDur = tNextFlip[0] - pageStartTime
return pageDur
# Handle end of EyeLink session
def CoolDown():
# display cool-down message
message1.setText("That's the end! ")
message2.setText("Press 'q' or 'escape' to end the session.")
win.logOnFlip(level=logging.EXP, msg='Display TheEnd')
win.callOnFlip(SendMessage,'DisplayTheEnd')
message1.draw()
message2.draw()
win.flip()
thisKey = event.waitKeys(keyList=['q','escape'])
"""
# stop recording SMI via serial port
myTracker.stop_recording()
# save result
myTracker.save_data(path=(filename+'.idf'))
# close serial port
myTracker.cleanup()
"""
#"""
# End EyeLink recording: add 100 msec of data to catch final events
pylink.endRealTimeMode()
pumpDelay(100)
getEYELINK().stopRecording()
while getEYELINK().getkey(): # drain any keypresses still queued on the tracker
pass
# File transfer and cleanup!
getEYELINK().setOfflineMode()
msecDelay(500)
message1.setText("Sending EyeLink File...")
message2.setText("Please Wait.")
win.logOnFlip(level=logging.EXP, msg='Display SendingFile')
message1.draw()
message2.draw()
win.flip()
#Close the file and transfer it to Display PC
getEYELINK().closeDataFile()
getEYELINK().receiveDataFile(edfHostFileName, edfFileName)
getEYELINK().close();
#Close the experiment graphics
pylink.closeGraphics()
#"""
# stop sound
# fullSound.stop()
whiteNoiseSound.stop()
pageSound.stop()
# save experimental info (if we reached here, we didn't have an error)
expInfo['tSound'] = tSound
toFile(expInfoFilename, expInfo) # save params to file for next time
# exit
core.quit()
# =========================== #
# ======= RUN PROMPTS ======= #
# =========================== #
"""
# Run SMI calibration and validation
myTracker.run_calibration(nr_of_pts=params['calNPoints'], auto_accept=params['calAutoAccept'], go_fast=params['calGoFast'], calib_level=params['calCheckLevel'])
"""
#"""
#Do the EyeLink tracker setup at the beginning of the experiment.
getEYELINK().doTrackerSetup()
# START EyeLink RECORDING
error = getEYELINK().startRecording(1, 1, 1, 1)
if error:
print("===WARNING: eyelink startRecording returned %s"%error)
#"""
# display prompts
if not params['skipPrompts']:
PromptTools.RunPrompts(topPrompts,bottomPrompts,win,message1,message2)
# wait for scanner
message1.setText("Waiting for scanner to start...")
message2.setText("(Press '%c' to override.)"%params['triggerKey'].upper())
message1.draw()
message2.draw()
win.logOnFlip(level=logging.EXP, msg='Display WaitingForScanner')
win.callOnFlip(SendMessage,'DisplayWaitingForScanner')
win.flip()
event.waitKeys(keyList=params['triggerKey'])
tStartSession = globalClock.getTime()
AddToFlipTime(tStartSession+params['tStartup'])
"""
# START SMI RECORDING via serial port
myTracker.start_recording(stream=False)
"""
# wait before first stimulus
fixation.draw()
win.logOnFlip(level=logging.EXP, msg='Display Fixation')
win.callOnFlip(SendMessage,'DisplayFixation')
win.flip()
# =========================== #
# ===== MAIN EXPERIMENT ===== #
# =========================== #
# set up other stuff
logging.log(level=logging.EXP, msg='---START EXPERIMENT---')
nBlocks = 1
# start sound
#fullSound.play()
# Run trials
for iBlock in range(0,nBlocks): # for each block of pages
# log new block
logging.log(level=logging.EXP, msg='Start Block %d'%iBlock)
# display pages
for iPage in range(params['pageRange'][0],params['pageRange'][1]+1): # +1 to include final page
# decide on sound
if random.random()<=params['probSound']:
playSound = True
soundToPlay = pageSound
else:
playSound = False
soundToPlay = whiteNoiseSound
# display text
pageDur = ShowPage(iPage=iPage,maxPageTime=params['maxPageTime'],pageFadeDur=params['pageFadeDur'],soundToPlay=soundToPlay)
# update sound
soundToPlay.stop()
if playSound:
tSound += pageDur #params['maxPageTime']
logging.log(level=logging.INFO, msg='tSound: %.3f'%tSound)
pageSound = sound.Sound(value='%s%s'%(params['soundDir'], params['soundFile']), volume=params['soundVolume'], start=tSound, stop=tSound+params['maxPageTime'], name='pageSound')
if iPage < params['pageRange'][1]:
# pause
AddToFlipTime(params['IPI'])
# Mute Sounds
pageSound.setVolume(0) # mute but don't stop... save stopping for CoolDown!
whiteNoiseSound.setVolume(0) # mute but don't stop... save stopping for CoolDown!
"""
# Pause SMI recording via serial port
myTracker.pause_recording() # save stop command for CoolDown.
"""
# fullSound.setVolume(0)
# run probes
allKeys = PromptTools.RunQuestions(probe_strings,probe_options,win,message1,message2,'Probe',questionDur=params['probeDur'], isEndedByKeypress=params['keyEndsProbe'])
# check for escape keypresses
for thisKey in allKeys:
if len(thisKey)>0 and thisKey[0] in ['q', 'escape']: # check for quit keys
CoolDown()#abort experiment
# tell the subject if the lecture is over.
message1.setText("It's time for some questions! Then, after a short break, we'll continue reading where you left off.")
message2.setText("Press any key to end this recording.")
win.logOnFlip(level=logging.EXP, msg='Display TakeABreak')
win.callOnFlip(SendMessage,'DisplayTakeABreak')
message1.draw()
message2.draw()
# change the screen
win.flip()
thisKey = event.waitKeys() # any keypress will end the session
# ============================ #
# ========= RUN QUIZ ========= #
# ============================ #
# display prompts
if not params['skipPrompts']:
PromptTools.RunPrompts(topQuizPrompts,bottomQuizPrompts,win,message1,message2)
# set up other stuff
logging.log(level=logging.EXP, msg='---START QUIZ---')
# ------- Run the questions ------- #
allKeys = PromptTools.RunQuestions(questions_all,options_all,win,message1,message2,'Question',respKeys=params['respKeys'])
# --------------------------------- #
isResponse = np.zeros(len(allKeys),dtype=bool) # was any response given?
isCorrect = np.zeros(len(allKeys)) # was the response correct?
RT = np.zeros(len(allKeys)) # how long did it take them to press a key?
#print(allKeys)
for iKey in range(0,len(allKeys)):
if len(allKeys[iKey])>0:
isResponse[iKey] = 1
RT[iKey] = allKeys[iKey][1] # keep in seconds
if float(allKeys[iKey][0]) == answers_all[iKey]:
isCorrect[iKey] = 1
#give some performance output to user
print('Performance:')
print('%d/%d = %.2f%% correct' %(np.sum(isCorrect), len(isCorrect), 100*np.average(isCorrect)))
print('RT: mean = %f, std = %f' %(np.average(RT[isResponse]),np.std(RT[isResponse])))
# exit experiment
CoolDown()
| mit | 7,485,308,004,066,379,000 | 41.572034 | 266 | 0.673103 | false |
stanford-gfx/Horus | Code/HorusApp/app/views.py | 1 | 19731 | from app import server, db, trajectoryAPI
from pylab import *
import flask
from flask import jsonify, request, url_for, redirect, render_template, abort
from flask.ext import restful
import requests
import math, time
import urllib2
import json
import os
from os import path
starting_lat = 0
starting_lng = 0
vehicle_millis = 0
current_lat = 0
current_lng = 0
armed = False
mode = "NOT CONNECTED"
real_elapsed_time = -1
TIMEOUT_MILLIS = 5000
# TEMPLATED HTML ROUTE
@server.route('/')
@server.route('/index')
def index():
shots = db.get_shots()
return render_template('index.html', shots=shots)
# TEMPLATED HTML ROUTE
@server.route('/easing_curve')
def easing_curve():
shots = db.get_shots()
return render_template('easing_curve.html', shots=shots)
# TEXT ROUTE
@server.route('/edit')
def edit():
return render_template('edit.html')
@server.route('/api/get_keyframes.json', methods = ['POST'])
def get_keyframes():
print request.get_json()
return jsonify(request.json)
# Save a shot
@server.route('/api/set_shot', methods = ['POST'])
def set_shot():
parsed_json = request.get_json()
data = request.data
shotname = parsed_json['shotName']
db.set_shot(shotname, data)
return jsonify({
'test':1
})
# Load a shot
@server.route('/api/get_shot', methods = ['GET'])
def get_shot():
shotname = request.args.get('shot')
rev = request.args.get('rev')
if not shotname:
return abort(404)
data = None
revCount = 1
if rev:
data, revCount = db.get_shot(shotname, int(rev))
else:
data, revCount = db.get_shot(shotname)
if data:
return flask.Response(response = data,
status=200,
mimetype="application/json")
else:
abort(404)
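# Illustrative request for this endpoint (shot name and revision are made up):
#   GET /api/get_shot?shot=demo_shot&rev=2
# returns the stored JSON for revision 2 of 'demo_shot', or a 404 if it is missing.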
# checks if name is unique
@server.route('/api/is_name_available', methods = ['GET'])
def is_name_available():
shotname = request.args.get('name')
valid = not db.shot_exists(shotname)
print("shotname: " + shotname + " is free? : %s" % (valid))
data = jsonify({"valid": valid})
return data
@server.route('/api/get_log', methods = ['GET'])
def get_log():
shotname = request.args.get('shot')
if not shotname:
return abort(404)
data = db.get_log(shotname)
if data:
return jsonify(data)
else:
abort(404)
@server.route('/api/get_easing_curve', methods = ['POST'])
def get_easing_curve():
js = request.get_json()
tvals = array(js['t'])
dlist = array(js['d'])
P = c_[dlist]
T = c_[tvals]
C,T,sd = trajectoryAPI.compute_easing_curve(P, T)
data = {
'C':C.tolist(),
'T':T.tolist(),
}
return jsonify(data)
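# Illustrative POST body for this endpoint (keyframe values are made up):
#   {"t": [0.0, 0.5, 1.0], "d": [0.0, 0.25, 1.0]}
# The response carries the easing-curve coefficients 'C' and knot times 'T'
# produced by trajectoryAPI.compute_easing_curve.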
# Get a spline
@server.route('/api/get_spline', methods = ['POST'])
def get_spline():
parsed_json = request.get_json()
#data = request.data
#camera lla, lookat lla
cameraPose_lat_list = parsed_json['cameraPoseLats']
cameraPose_lng_list = parsed_json['cameraPoseLngs']
cameraPose_alt_list = parsed_json['cameraPoseAlts']
lookAt_lat_list = parsed_json['lookAtLats']
lookAt_lng_list = parsed_json['lookAtLngs']
lookAt_alt_list = parsed_json['lookAtAlts']
P_cameraPose = c_[cameraPose_lat_list, cameraPose_lng_list, cameraPose_alt_list]
C_cameraPose,T_cameraPose,sd_cameraPose,dist_cameraPose = trajectoryAPI.compute_spatial_trajectory_and_arc_distance(P_cameraPose, inNED=False)
P_lookAt = c_[lookAt_lat_list, lookAt_lng_list, lookAt_alt_list]
C_lookAt,T_lookAt,sd_lookAt,dist_lookAt = trajectoryAPI.compute_spatial_trajectory_and_arc_distance(P_lookAt, inNED=False)
#P_eval, T_eval, dT = splineutils.evaluate_catmull_rom_spline(C, T, sd, num_samples=200);
data = {
'cameraPoseCoeff': C_cameraPose.tolist(),
'cameraPoseTvals': T_cameraPose.tolist(),
'cameraPoseDist' : dist_cameraPose.tolist(),
'lookAtCoeff': C_lookAt.tolist(),
'lookAtTvals': T_lookAt.tolist(),
'lookAtDist' : dist_lookAt.tolist()
}
return jsonify(data)
# Get a spline
@server.route('/api/get_spline_ned', methods = ['POST'])
def get_spline_ned():
js = request.get_json()
lookAtN = js['lookAtN']
lookAtE = js['lookAtE']
lookAtD = js['lookAtD']
lookFromN = js['lookFromN']
lookFromE = js['lookFromE']
lookFromD = js['lookFromD']
P_lookFromNED = c_[lookFromN, lookFromE, lookFromD]
C_lookFromNED,T_lookFromNED,sd_lookFromNED,dist_lookFromNED = trajectoryAPI.compute_spatial_trajectory_and_arc_distance(P_lookFromNED)
P_lookAtNED = c_[lookAtN, lookAtE, lookAtD]
C_lookAtNED,T_lookAtNED,sd_lookAtNED,dist_lookAtNED = trajectoryAPI.compute_spatial_trajectory_and_arc_distance(P_lookAtNED)
data = {
'C_lookFromNED': C_lookFromNED.tolist(),
'T_lookFromNED': T_lookFromNED.tolist(),
'dist_lookFromNED': dist_lookFromNED.tolist(),
'C_lookAtNED': C_lookAtNED.tolist(),
'T_lookAtNED': T_lookAtNED.tolist(),
'dist_lookAtNED': dist_lookAtNED.tolist()
}
return jsonify(data)
@server.route('/api/reparameterize_spline_ned', methods = ['POST'])
def reparameterize_spline_ned():
js = request.get_json()
lookAtN = js['lookAtN']
lookAtE = js['lookAtE']
lookAtD = js['lookAtD']
lookFromN = js['lookFromN']
lookFromE = js['lookFromE']
lookFromD = js['lookFromD']
P_lookFromNED = c_[lookFromN, lookFromE, lookFromD]
T_lookFromNED = c_[js['lookFromT'], js['lookFromT'], js['lookFromT']]
P_easingLookFrom = c_[array(js['lookFromEasingD'])]
T_easingLookFrom = c_[array(js['lookFromEasingT'])]
P_lookAtNED = c_[lookAtN, lookAtE, lookAtD]
T_lookAtNED = c_[js['lookAtT'], js['lookAtT'], js['lookAtT']]
P_easingLookAt = c_[array(js['lookAtEasingD'])]
T_easingLookAt = c_[array(js['lookAtEasingT'])]
T_linspace_norm_lookAt, T_user_progress_lookAt, P_user_progress_lookAt, ref_llh_lookAt = trajectoryAPI.reparameterize_spline(P_lookAtNED, T_lookAtNED, P_easingLookAt, T_easingLookAt)
T_linspace_norm_cameraPose, T_user_progress_lookFrom, P_user_progress_lookFrom, ref_llh_lookFrom = trajectoryAPI.reparameterize_spline(P_lookFromNED, T_lookFromNED, P_easingLookFrom, T_easingLookFrom)
data = {
'lookAtReparameterizedT': T_user_progress_lookAt.tolist(),
'reparameterizedTime': T_linspace_norm_lookAt.tolist(),
'lookFromReparameterizedT': T_user_progress_lookFrom.tolist(),
}
return jsonify(data)
@server.route('/api/reparameterize_spline', methods = ['POST'])
def reparameterize_spline():
js = request.get_json()
cameraPose_lat_list = js['cameraPoseLats']
cameraPose_lng_list = js['cameraPoseLngs']
cameraPose_alt_list = js['cameraPoseAlts']
lookAt_lat_list = js['lookAtLats']
lookAt_lng_list = js['lookAtLngs']
lookAt_alt_list = js['lookAtAlts']
T_cameraPose = c_[js['cameraPoseTvals'], js['cameraPoseTvals'], js['cameraPoseTvals']]
T_lookAt = c_[js['lookAtTvals'], js['lookAtTvals'], js['lookAtTvals']]
lookAt_easing_tvals = array(js['lookAtEasingT'])
lookAt_easing_dlist = array(js['lookAtEasingD'])
cameraPose_easing_tvals = array(js['cameraPoseEasingT'])
cameraPose_easing_dlist = array(js['cameraPoseEasingD'])
P_easingCameraPose = c_[cameraPose_easing_dlist]
T_easingCameraPose = c_[cameraPose_easing_tvals]
P_easingLookAt = c_[lookAt_easing_dlist]
T_easingLookAt = c_[lookAt_easing_tvals]
P_cameraPose = c_[cameraPose_lat_list, cameraPose_lng_list, cameraPose_alt_list]
P_lookAt = c_[lookAt_lat_list, lookAt_lng_list, lookAt_alt_list]
T_linspace_norm_lookAt, T_user_progress_lookAt, P_user_progress_lookAt, ref_llh_lookAt = trajectoryAPI.reparameterize_spline(P_lookAt, T_lookAt, P_easingLookAt, T_easingLookAt)
T_linspace_norm_cameraPose, T_user_progress_lookFrom, P_user_progress_lookFrom, ref_llh_lookFrom = trajectoryAPI.reparameterize_spline(P_cameraPose, T_cameraPose, P_easingCameraPose, T_easingCameraPose)
data = {
'lookAtReparameterizedT': T_user_progress_lookAt.tolist(),
'reparameterizedTime': T_linspace_norm_lookAt.tolist(),
'lookFromReparameterizedT': T_user_progress_lookFrom.tolist(),
}
return jsonify(data)
@server.route('/api/export_spline_to_quad_representation_ned', methods = ['POST'])
def export_spline_to_quad_representation_ned():
#which one is getting fvalled? FIGURE OUT WHAT'S GOING ON HERE
shot = request.args.get('shot', 0)
if not shot:
return
js = request.get_json()
lookAtN = js['lookAtN']
lookAtE = js['lookAtE']
lookAtD = js['lookAtD']
lookFromN = js['lookFromN']
lookFromE = js['lookFromE']
lookFromD = js['lookFromD']
# Exported Values
P_lookFromNED_spline = c_[lookFromN, lookFromE, lookFromD]
T_lookFromNED_spline = c_[js['lookFromT'], js['lookFromT'], js['lookFromT']]
P_lookFromNED_ease = c_[array(js['lookFromEasingD'])]
T_lookFromNED_ease = c_[array(js['lookFromEasingT'])]
P_lookAtNED_spline = c_[lookAtN, lookAtE, lookAtD]
T_lookAtNED_spline = c_[js['lookAtT'], js['lookAtT'], js['lookAtT']]
P_lookAtNED_ease = c_[array(js['lookAtEasingD'])]
T_lookAtNED_ease = c_[array(js['lookAtEasingT'])]
startAltitude = js['startAltitude']
lastTime = js['lastTime'];
rev = js['rev'];
refLLH = array([js['refLLH']['lat'], js['refLLH']['lng'], js['refLLH']['altitude']])
P = np.array([
P_lookFromNED_spline,
T_lookFromNED_spline,
P_lookFromNED_ease,
T_lookFromNED_ease,
P_lookAtNED_spline,
T_lookAtNED_spline,
P_lookAtNED_ease,
T_lookAtNED_ease,
[lastTime],
[startAltitude],
[refLLH]
])
# First Save, for later analysis!!!
millis = int(round(time.time() * 1000))
np.savez(("shot-%s-rev%s-%d" % (shot, rev, millis)),
P_lookFromNED_spline=P_lookFromNED_spline,
T_lookFromNED_spline=T_lookFromNED_spline,
P_lookFromNED_ease=P_lookFromNED_ease,
T_lookFromNED_ease=T_lookFromNED_ease,
P_lookAtNED_spline=P_lookAtNED_spline,
T_lookAtNED_spline=T_lookAtNED_spline,
P_lookAtNED_ease=P_lookAtNED_ease,
T_lookAtNED_ease=T_lookAtNED_ease,
lastTime=[lastTime],
startAltitude=[startAltitude],
refLLH=[refLLH])
export_data = {
"command" : js['command'],
"P_lookFromNED_spline": P_lookFromNED_spline.tolist(),
"T_lookFromNED_spline": T_lookFromNED_spline.tolist(),
"P_lookFromNED_ease": P_lookFromNED_ease.tolist(),
"T_lookFromNED_ease": T_lookFromNED_ease.tolist(),
"P_lookAtNED_spline": P_lookAtNED_spline.tolist(),
"T_lookAtNED_spline": T_lookAtNED_spline.tolist(),
"P_lookAtNED_ease": P_lookAtNED_ease.tolist(),
"T_lookAtNED_ease": T_lookAtNED_ease.tolist(),
"lastTime": [lastTime],
"startAltitude": [startAltitude],
"refLLH": c_[refLLH].tolist()
}
req = urllib2.Request("http://localhost:9000", json.dumps(js), {'Content-Type': 'application/json'})
f = urllib2.urlopen(req)
res = f.read()
f.close()
return jsonify({'result':'ok'})
@server.route('/api/export_spline_to_quad_representation', methods = ['POST'])
def export_spline_to_quad_representation():
js = request.get_json()
cameraPose_lat_list = js['cameraPoseLats']
cameraPose_lng_list = js['cameraPoseLngs']
cameraPose_alt_list = js['cameraPoseAlts']
lookAt_lat_list = js['lookAtLats']
lookAt_lng_list = js['lookAtLngs']
lookAt_alt_list = js['lookAtAlts']
lookAt_easing_tvals = array(js['lookAtEasingT'])
lookAt_easing_dlist = array(js['lookAtEasingD'])
cameraPose_easing_tvals = array(js['cameraPoseEasingT'])
cameraPose_easing_dlist = array(js['cameraPoseEasingD'])
# Exported Values
P_lookFrom_spline = c_[cameraPose_lat_list, cameraPose_lng_list, cameraPose_alt_list]
T_lookFrom_spline = c_[js['cameraPoseTvals'], js['cameraPoseTvals'], js['cameraPoseTvals']]
P_lookFrom_ease = c_[cameraPose_easing_dlist]
T_lookFrom_ease = c_[cameraPose_easing_tvals]
P_lookAt_spline = c_[lookAt_lat_list, lookAt_lng_list, lookAt_alt_list]
T_lookAt_spline = c_[js['lookAtTvals'], js['lookAtTvals'], js['lookAtTvals']]
P_lookAt_ease = c_[lookAt_easing_dlist]
T_lookAt_ease = c_[lookAt_easing_tvals]
lastTime = js['lastTime'];
millis = int(round(time.time() * 1000))
np.savez(("shot-%d" % millis),
P_lookFrom_spline=P_lookFrom_spline,
T_lookFrom_spline=T_lookFrom_spline,
P_lookFrom_ease=P_lookFrom_ease,
T_lookFrom_ease=T_lookFrom_ease,
P_lookAt_spline=P_lookAt_spline,
T_lookAt_spline=T_lookAt_spline,
P_lookAt_ease=P_lookAt_ease,
T_lookAt_ease=T_lookAt_ease,
lastTime=[lastTime])
P = np.array([
P_lookFrom_spline,
T_lookFrom_spline,
P_lookFrom_ease,
T_lookFrom_ease,
P_lookAt_spline,
T_lookAt_spline,
P_lookAt_ease,
T_lookAt_ease,
[lastTime]
])
export_data = {
"command" : js['command'],
"P_lookFrom_spline" :P_lookFrom_spline,
"T_lookFrom_spline" :T_lookFrom_spline,
"P_lookFrom_ease" :P_lookFrom_ease,
"T_lookFrom_ease" :T_lookFrom_ease,
"P_lookAt_spline" :P_lookAt_spline,
"T_lookAt_spline" :T_lookAt_spline,
"P_lookAt_ease" :P_lookAt_ease,
"T_lookAt_ease" :T_lookAt_ease,
"lastTime" :[lastTime]}
print export_data
headers = {'content-type': 'application/json'}
r = requests.post("http://localhost:9000", data = jsonify(export_data), headers = headers);
return jsonify({'result':'ok'})
@server.route('/api/calculate_feasibility_ned', methods = ['POST'])
def calculate_feasibility_ned():
js = request.get_json()
lookAtN = js['lookAtN']
lookAtE = js['lookAtE']
lookAtD = js['lookAtD']
lookFromN = js['lookFromN']
lookFromE = js['lookFromE']
lookFromD = js['lookFromD']
# Exported Values
P_lookFromNED_spline = c_[lookFromN, lookFromE, lookFromD]
T_lookFromNED_spline = c_[js['lookFromT'], js['lookFromT'], js['lookFromT']]
P_lookFromNED_ease = c_[array(js['lookFromEasingD'])]
T_lookFromNED_ease = c_[array(js['lookFromEasingT'])]
P_lookAtNED_spline = c_[lookAtN, lookAtE, lookAtD]
T_lookAtNED_spline = c_[js['lookAtT'], js['lookAtT'], js['lookAtT']]
P_lookAtNED_ease = c_[array(js['lookAtEasingD'])]
T_lookAtNED_ease = c_[array(js['lookAtEasingT'])]
refLLH = js['refLLH']
total_time = js['totalShotTime']
# make a call to the trajectoryAPI
u_nominal, p_body_nominal, p_body_dot_nominal, p_body_dot_dot_nominal, theta_body_nominal, phi_body_nominal, theta_cam_nominal, theta_cam_dot_nominal, psi_cam_nominal, phi_cam_nominal, phi_cam_dot_nominal = trajectoryAPI.calculate_feasibility_ned(P_lookFromNED_spline, T_lookFromNED_spline, P_lookAtNED_spline, T_lookAtNED_spline, P_lookFromNED_ease, T_lookFromNED_ease, P_lookAtNED_ease, T_lookAtNED_ease, total_time, refLLH);
data = {
'u_nominal': u_nominal.tolist(),
'p_body_nominal': p_body_nominal.tolist(),
'p_body_dot_nominal': p_body_dot_nominal.tolist(),
'p_body_dot_dot_nominal': p_body_dot_dot_nominal.tolist(),
'theta_body_nominal': theta_body_nominal.tolist(),
'phi_body_nominal': phi_body_nominal.tolist(),
'theta_cam_nominal': theta_cam_nominal.tolist(),
'theta_cam_dot_nominal': theta_cam_dot_nominal.tolist(),
'psi_cam_nominal': psi_cam_nominal.tolist(),
'phi_cam_nominal': phi_cam_nominal.tolist(),
'phi_cam_dot_nominal': phi_cam_dot_nominal.tolist(),
}
return jsonify(data)
@server.route('/api/calculate_feasibility', methods = ['POST'])
def calculate_feasibility():
js = request.get_json()
cameraPose_lat_list = js['cameraPoseLats']
cameraPose_lng_list = js['cameraPoseLngs']
cameraPose_alt_list = js['cameraPoseAlts']
lookAt_lat_list = js['lookAtLats']
lookAt_lng_list = js['lookAtLngs']
lookAt_alt_list = js['lookAtAlts']
T_cameraPose = c_[js['cameraPoseTvals'], js['cameraPoseTvals'], js['cameraPoseTvals']]
T_lookAt = c_[js['lookAtTvals'], js['lookAtTvals'], js['lookAtTvals']]
lookAt_easing_tvals = array(js['lookAtEasingT'])
lookAt_easing_dlist = array(js['lookAtEasingD'])
cameraPose_easing_tvals = array(js['cameraPoseEasingT'])
cameraPose_easing_dlist = array(js['cameraPoseEasingD'])
P_easingCameraPose = c_[cameraPose_easing_dlist]
T_easingCameraPose = c_[cameraPose_easing_tvals]
P_easingLookAt = c_[lookAt_easing_dlist]
T_easingLookAt = c_[lookAt_easing_tvals]
P_cameraPose = c_[cameraPose_lat_list, cameraPose_lng_list, cameraPose_alt_list]
P_lookAt = c_[lookAt_lat_list, lookAt_lng_list, lookAt_alt_list]
total_time = js['totalShotTime']
# make a call to the trajectoryAPI
u_nominal, p_body_nominal, p_body_dot_nominal, p_body_dot_dot_nominal, theta_body_nominal, phi_body_nominal, theta_cam_nominal, theta_cam_dot_nominal, psi_cam_nominal, phi_cam_nominal, phi_cam_dot_nominal = trajectoryAPI.calculate_feasibility(P_cameraPose, T_cameraPose, P_lookAt, T_lookAt, P_easingCameraPose, T_easingCameraPose, P_easingLookAt, T_easingLookAt, total_time)
data = {
'u_nominal': u_nominal.tolist(),
'p_body_nominal': p_body_nominal.tolist(),
'p_body_dot_nominal': p_body_dot_nominal.tolist(),
'p_body_dot_dot_nominal': p_body_dot_dot_nominal.tolist(),
'theta_body_nominal': theta_body_nominal.tolist(),
'phi_body_nominal': phi_body_nominal.tolist(),
'theta_cam_nominal': theta_cam_nominal.tolist(),
'theta_cam_dot_nominal': theta_cam_dot_nominal.tolist(),
'psi_cam_nominal': psi_cam_nominal.tolist(),
'phi_cam_nominal': phi_cam_nominal.tolist(),
'phi_cam_dot_nominal': phi_cam_dot_nominal.tolist(),
}
return jsonify(data)
@server.route('/api/get_fov.kml', methods = ['GET'])
def get_fov():
GoProView = request.args.get('GoProView')
GoProFOV = {'NARROW':64.4, 'MEDIUM':94.4, 'WIDE':118.2}
if GoProView not in GoProFOV:
GoProView = 'WIDE'
fov = GoProFOV[GoProView]
lat = request.args.get('lat') or 37.42726975867168
lng = request.args.get('lng') or -122.16676019825722
altitude = request.args.get('altitude') or 125
heading = request.args.get('heading') or -31.127314342134174
tilt = request.args.get('tilt') or 51.24538395621526
view = {'lng':lng, 'lat':lat, 'altitude':altitude, 'heading': heading, 'tilt': tilt, 'fov':fov}
return render_template('fov.kml', view=view)
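# Illustrative request (coordinates are made up; GoProView falls back to WIDE
# when the value is not NARROW/MEDIUM/WIDE):
#   GET /api/get_fov.kml?GoProView=MEDIUM&lat=37.427&lng=-122.166&altitude=125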
@server.route('/api/set_vehicle_location', methods = ['GET'])
def set_vehicle_location():
global starting_lat
global starting_lng
global vehicle_millis
global current_lat
global current_lng
global mode
global armed
vehicle_millis = int(round(time.time() * 1000))
armed = (request.args.get('armed') == 'True')
mode = request.args.get('mode')
if armed:
current_lat = request.args.get('lat', 0)
current_lng = request.args.get('lng', 0)
else:
starting_lat = request.args.get('lat', 0)
starting_lng = request.args.get('lng', 0)
return "OK"
@server.route('/api/get_vehicle_pos', methods= ['GET'])
def get_vehicle_pos():
global vehicle_millis
global starting_lat
global starting_lng
global current_lat
global current_lng
global mode
global armed
current_millis = int(round(time.time() * 1000))
success = "success"
if current_millis - vehicle_millis > TIMEOUT_MILLIS:
mode = "NOT CONNECTED"
armed = False
starting_lat = starting_lng = 0
success = 'no data'
data = {'status':success, 'starting_lat':starting_lat, 'starting_lng':starting_lng, 'current_lat':current_lat, 'current_lng':current_lng, 'mode':mode}
return jsonify(data)
@server.route('/api/set_elapsed_time', methods = ['GET'])
def set_elapsed_time():
global real_elapsed_time
real_elapsed_time = request.args.get('elapsed', -1)
return "OK"
@server.route('/api/get_elapsed_time', methods= ['GET'])
def get_elapsed_time():
data = {'status':'no data'}
if real_elapsed_time != -1:
data = {'status':'success', 'elapsed':real_elapsed_time}
return jsonify(data)
| bsd-3-clause | 2,205,061,663,268,043,300 | 33.07772 | 431 | 0.67959 | false |
DOV-Vlaanderen/pydov | tests/test_types_grondwatervergunning.py | 1 | 1531 | """Module grouping tests for the pydov.types.boring module."""
from pydov.types.grondwatervergunning import GrondwaterVergunning
from tests.abstract import AbstractTestTypes
location_wfs_getfeature = \
'tests/data/types/grondwatervergunning/wfsgetfeature.xml'
location_wfs_feature = 'tests/data/types/grondwatervergunning/feature.xml'
location_dov_xml = None
class TestGrondwaterVergunning(AbstractTestTypes):
"""Class grouping tests for the
pydov.types.grondwatervergunning.GrondwaterVergunning class."""
datatype_class = GrondwaterVergunning
namespace = 'http://dov.vlaanderen.be/grondwater/gw_vergunningen'
pkey_base = None
field_names = [
'id_vergunning', 'pkey_installatie', 'x', 'y',
'diepte', 'exploitant_naam', 'watnr', 'vlaremrubriek',
'vergund_jaardebiet', 'vergund_dagdebiet',
'van_datum_termijn', 'tot_datum_termijn',
'aquifer_vergunning', 'inrichtingsklasse', 'nacebelcode',
'actie_waakgebied', 'cbbnr', 'kbonr']
field_names_subtypes = None
field_names_nosubtypes = [
'id_vergunning', 'pkey_installatie', 'x', 'y',
'diepte', 'exploitant_naam', 'watnr', 'vlaremrubriek',
'vergund_jaardebiet', 'vergund_dagdebiet',
'van_datum_termijn', 'tot_datum_termijn',
'aquifer_vergunning', 'inrichtingsklasse', 'nacebelcode',
'actie_waakgebied', 'cbbnr', 'kbonr']
valid_returnfields = ('id_vergunning', 'diepte')
valid_returnfields_subtype = None
inexistent_field = 'onbestaand'
| mit | -3,558,128,204,813,029,400 | 38.25641 | 74 | 0.692358 | false |
fake-name/ReadableWebProxy | amqpstorm/channel0.py | 1 | 6351 | """AMQPStorm Connection.Channel0."""
import logging
import platform
from pamqp import specification
from pamqp.heartbeat import Heartbeat
from amqpstorm import __version__
from amqpstorm.base import AUTH_MECHANISM
from amqpstorm.base import FRAME_MAX
from amqpstorm.base import LOCALE
from amqpstorm.base import MAX_CHANNELS
from amqpstorm.base import Stateful
from amqpstorm.compatibility import try_utf8_decode
from amqpstorm.exception import AMQPConnectionError
LOGGER = logging.getLogger(__name__)
class Channel0(object):
"""Internal Channel0 handler."""
def __init__(self, connection):
super(Channel0, self).__init__()
self.is_blocked = False
self.server_properties = {}
self._connection = connection
self._heartbeat = connection.parameters['heartbeat']
self._parameters = connection.parameters
def on_frame(self, frame_in):
"""Handle frames sent to Channel0.
:param frame_in: Amqp frame.
:return:
"""
LOGGER.debug('Frame Received: %s', frame_in.name)
if frame_in.name == 'Heartbeat':
return
elif frame_in.name == 'Connection.Close':
self._close_connection(frame_in)
elif frame_in.name == 'Connection.CloseOk':
self._close_connection_ok()
elif frame_in.name == 'Connection.Blocked':
self._blocked_connection(frame_in)
elif frame_in.name == 'Connection.Unblocked':
self._unblocked_connection()
elif frame_in.name == 'Connection.OpenOk':
self._set_connection_state(Stateful.OPEN)
elif frame_in.name == 'Connection.Start':
self.server_properties = frame_in.server_properties
self._send_start_ok(frame_in)
elif frame_in.name == 'Connection.Tune':
self._send_tune_ok()
self._send_open_connection()
else:
LOGGER.error('[Channel0] Unhandled Frame: %s', frame_in.name)
def send_close_connection(self):
"""Send Connection Close frame.
:return:
"""
self._write_frame(specification.Connection.Close())
def send_heartbeat(self):
"""Send Heartbeat frame.
:return:
"""
if not self._connection.is_open:
return
self._write_frame(Heartbeat())
def _close_connection(self, frame_in):
"""Connection Close.
:param specification.Connection.Close frame_in: Amqp frame.
:return:
"""
self._set_connection_state(Stateful.CLOSED)
if frame_in.reply_code != 200:
reply_text = try_utf8_decode(frame_in.reply_text)
message = (
'Connection was closed by remote server: %s' % reply_text
)
exception = AMQPConnectionError(message,
reply_code=frame_in.reply_code)
self._connection.exceptions.append(exception)
def _close_connection_ok(self):
"""Connection CloseOk frame received.
:return:
"""
self._set_connection_state(Stateful.CLOSED)
def _blocked_connection(self, frame_in):
"""Connection is Blocked.
:param frame_in:
:return:
"""
self.is_blocked = True
LOGGER.warning(
'Connection is blocked by remote server: %s',
try_utf8_decode(frame_in.reason)
)
def _unblocked_connection(self):
"""Connection is Unblocked.
:return:
"""
self.is_blocked = False
LOGGER.info('Connection is no longer blocked by remote server')
def _plain_credentials(self):
"""AMQP Plain Credentials.
:rtype: str
"""
return '\0%s\0%s' % (self._parameters['username'],
self._parameters['password'])
def _send_start_ok(self, frame_in):
"""Send Start OK frame.
:param specification.Connection.Start frame_in: Amqp frame.
:return:
"""
if 'PLAIN' not in try_utf8_decode(frame_in.mechanisms):
exception = AMQPConnectionError(
'Unsupported Security Mechanism(s): %s' %
frame_in.mechanisms
)
self._connection.exceptions.append(exception)
return
credentials = self._plain_credentials()
start_ok_frame = specification.Connection.StartOk(
mechanism=AUTH_MECHANISM,
client_properties=self._client_properties(),
response=credentials,
locale=LOCALE
)
self._write_frame(start_ok_frame)
def _send_tune_ok(self):
"""Send Tune OK frame.
:return:
"""
tune_ok_frame = specification.Connection.TuneOk(
channel_max=MAX_CHANNELS,
frame_max=FRAME_MAX,
heartbeat=self._heartbeat)
self._write_frame(tune_ok_frame)
def _send_open_connection(self):
"""Send Open Connection frame.
:return:
"""
open_frame = specification.Connection.Open(
virtual_host=self._parameters['virtual_host']
)
self._write_frame(open_frame)
def _set_connection_state(self, state):
"""Set Connection state.
:param state:
:return:
"""
self._connection.set_state(state)
def _write_frame(self, frame_out):
"""Write a pamqp frame from Channel0.
:param frame_out: Amqp frame.
:return:
"""
self._connection.write_frame(0, frame_out)
LOGGER.debug('Frame Sent: %s', frame_out.name)
@staticmethod
def _client_properties():
"""AMQPStorm Client Properties.
:rtype: dict
"""
return {
'product': 'AMQPStorm',
'platform': 'Python %s (%s)' % (platform.python_version(),
platform.python_implementation()),
'capabilities': {
'basic.nack': True,
'connection.blocked': True,
'publisher_confirms': True,
'consumer_cancel_notify': True,
'authentication_failure_close': True,
},
'information': 'See https://github.com/eandersson/amqpstorm',
'version': __version__
}
| bsd-3-clause | -8,175,721,914,685,432,000 | 29.830097 | 78 | 0.571406 | false |
benjolitz/trollius-redis | trollius_redis/encoders.py | 1 | 2145 | """
The redis protocol only knows about bytes, but we like to have strings inside
Python. This file contains some helper classes for decoding the bytes to
strings and encoding the other way around. We also have a `BytesEncoder`, which
provides raw access to the redis server.
"""
__all__ = (
'BaseEncoder',
'BytesEncoder',
'UTF8Encoder',
)
import six
class BaseEncoder(object):
"""
Abstract base class for all encoders.
"""
#: The native Python type from which we encode, or to which we decode.
native_type = None
def encode_from_native(self, data):
"""
Encodes the native Python type to network bytes.
Usually this will encode a string object to bytes using the UTF-8
encoding. You can either override this function, or set the
`encoding` attribute.
"""
raise NotImplementedError
def decode_to_native(self, data):
"""
Decodes network bytes to a Python native type.
It should always be the reverse operation of `encode_from_native`.
"""
raise NotImplementedError
class BytesEncoder(BaseEncoder):
"""
For raw access to the Redis database.
"""
#: The native Python type from which we encode, or to which we decode.
native_type = six.binary_type
def encode_from_native(self, data):
return data
def decode_to_native(self, data):
return data
class StringEncoder(BaseEncoder):
"""
Abstract base class for all string encoding encoders.
"""
#: Redis keeps all values in binary. Set the encoding to be used to
#: decode/encode Python string values from and to binary.
encoding = None
#: The native Python type from which we encode, or to which we decode.
native_type = six.text_type
def encode_from_native(self, data):
""" string to bytes """
return data.encode(self.encoding)
def decode_to_native(self, data):
""" bytes to string """
return data.decode(self.encoding)
class UTF8Encoder(StringEncoder):
"""
Encode strings to and from utf-8 bytes.
"""
encoding = 'utf-8'
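# Minimal usage sketch (illustrative only, not part of the library docs):
#   encoder = UTF8Encoder()
#   raw = encoder.encode_from_native(u'hello') # -> b'hello'
#   text = encoder.decode_to_native(raw) # -> u'hello'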
| bsd-2-clause | -7,322,729,814,872,177,000 | 26.5 | 79 | 0.653613 | false |
kHarshit/DAT210x_Microsoft | Module2/assignment3.py | 1 | 1178 | import pandas as pd
# TODO: Load up the dataset Ensuring you set the appropriate header column names
df = pd.read_csv('Datasets/servo.data', names=['motor', 'screw', 'pgain', 'vgain', 'class'])
print(df.describe())
# TODO: Create a slice that contains all entries having a vgain equal to 5. Then print the length of (# of samples in) that slice:
k = df[df.iloc[:, 3] == 5]
print(k.describe())
print(len(k))
# TODO: Create a slice that contains all entries having a motor equal to E and screw equal
# to E. Then print the length of (# of samples in) that slice:
print(df[(df.iloc[:, 0] == 'E') & (df.iloc[:, 1] == 'E')])
l = df[(df['motor'] == 'E') & (df['screw'] == 'E')]
print(l.describe())
print(len(l)) # the answer should be 6; checkout read_csv() api documentation that will fix your issue!
# TODO: Create a slice that contains all entries having a pgain equal to 4. Use one of the various methods of finding
# the mean vgain value for the samples in that slice. Once you've found it, print it:
m = df[df.pgain == 4]
print(m.mean())
print(m.vgain.mean())
# TODO: (Bonus) See what happens when you run the .dtypes method on your dataframe!
print(df.dtypes)
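# Sanity check (illustrative): because the column names are passed to read_csv
# above, no data row is consumed as a header, so the motor=='E' & screw=='E'
# slice should contain the expected 6 samples:
#   assert len(df[(df.motor == 'E') & (df.screw == 'E')]) == 6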
| mit | 6,821,107,749,193,167,000 | 34.69697 | 129 | 0.684211 | false |
upptalk/uppsell | uppsell/migrations/0002_auto__add_unique_store_code__add_field_listing_price__chg_field_listin.py | 1 | 19453 | # -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding unique constraint on 'Store', fields ['code']
db.create_unique('stores', ['code'])
# Adding field 'Listing.price'
db.add_column('listings', 'price',
self.gf('django.db.models.fields.DecimalField')(default=0.0, max_digits=8, decimal_places=2),
keep_default=False)
# Changing field 'Listing.subtitle'
db.alter_column('listings', 'subtitle', self.gf('django.db.models.fields.CharField')(max_length=200, null=True))
# Changing field 'Listing.description'
db.alter_column('listings', 'description', self.gf('django.db.models.fields.CharField')(max_length=10000, null=True))
# Changing field 'Listing.title'
db.alter_column('listings', 'title', self.gf('django.db.models.fields.CharField')(max_length=200, null=True))
# Changing field 'Listing.name'
db.alter_column('listings', 'name', self.gf('django.db.models.fields.CharField')(max_length=200, null=True))
# Adding field 'Product.provisioning_codes'
db.add_column('products', 'provisioning_codes',
self.gf('django.db.models.fields.CharField')(max_length=255, null=True, blank=True),
keep_default=False)
def backwards(self, orm):
# Removing unique constraint on 'Store', fields ['code']
db.delete_unique('stores', ['code'])
# Deleting field 'Listing.price'
db.delete_column('listings', 'price')
# Changing field 'Listing.subtitle'
db.alter_column('listings', 'subtitle', self.gf('django.db.models.fields.CharField')(default='', max_length=200))
# Changing field 'Listing.description'
db.alter_column('listings', 'description', self.gf('django.db.models.fields.CharField')(default='', max_length=10000))
# Changing field 'Listing.title'
db.alter_column('listings', 'title', self.gf('django.db.models.fields.CharField')(default='', max_length=200))
# Changing field 'Listing.name'
db.alter_column('listings', 'name', self.gf('django.db.models.fields.CharField')(default='', max_length=200))
# Deleting field 'Product.provisioning_codes'
db.delete_column('products', 'provisioning_codes')
models = {
u'uppsell.address': {
'Meta': {'object_name': 'Address', 'db_table': "'addresses'"},
'city': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'country': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'country_code': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'customer': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['uppsell.Customer']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_used': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'line1': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'line2': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'line3': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'other': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'province': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '10'}),
'zip': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'})
},
u'uppsell.cart': {
'Meta': {'object_name': 'Cart', 'db_table': "'carts'"},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'customer': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['uppsell.Listing']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'store': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['uppsell.Store']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'uppsell.cartitem': {
'Meta': {'object_name': 'CartItem', 'db_table': "'cart_items'"},
'cart': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['uppsell.Cart']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['uppsell.Listing']"}),
'quantity': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'})
},
u'uppsell.coupon': {
'Meta': {'object_name': 'Coupon', 'db_table': "'coupons'"},
'code': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'customer': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['uppsell.Customer']", 'null': 'True', 'blank': 'True'}),
'discount_amount': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '8', 'decimal_places': '2', 'blank': 'True'}),
'discount_pct': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'max_uses': ('django.db.models.fields.PositiveIntegerField', [], {}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['uppsell.Listing']", 'null': 'True', 'blank': 'True'}),
'product_group': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['uppsell.ProductGroup']", 'null': 'True', 'blank': 'True'}),
'relation': ('django.db.models.fields.CharField', [], {'max_length': '16'}),
'remaining': ('django.db.models.fields.PositiveIntegerField', [], {}),
'store': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['uppsell.Store']"}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '16'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'valid_from': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'valid_until': ('django.db.models.fields.DateTimeField', [], {})
},
u'uppsell.couponspend': {
'Meta': {'unique_together': "(('customer', 'coupon'),)", 'object_name': 'CouponSpend', 'db_table': "'coupon_spends'"},
'coupon': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['uppsell.Coupon']"}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'customer': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['uppsell.Customer']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
u'uppsell.customer': {
'Meta': {'object_name': 'Customer', 'db_table': "'customers'"},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'db_index': 'True', 'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '30', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_logged_in_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'last_name': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '30', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'uppsell.invoice': {
'Meta': {'object_name': 'Invoice', 'db_table': "'invoices'"},
'billing_address': ('django.db.models.fields.CharField', [], {'max_length': '1000'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'currency': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'order_id': ('django.db.models.fields.IntegerField', [], {'unique': 'True'}),
'order_shipping_total': ('django.db.models.fields.DecimalField', [], {'max_digits': '8', 'decimal_places': '2'}),
'order_total': ('django.db.models.fields.DecimalField', [], {'max_digits': '8', 'decimal_places': '2'}),
'payment_made_ts': ('django.db.models.fields.DateTimeField', [], {}),
'product_id': ('django.db.models.fields.IntegerField', [], {}),
'psp_id': ('django.db.models.fields.IntegerField', [], {}),
'psp_response_code': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'psp_response_text': ('django.db.models.fields.CharField', [], {'max_length': '10000'}),
'psp_type': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'quantity': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'shipping_address': ('django.db.models.fields.CharField', [], {'max_length': '1000'}),
'store_id': ('django.db.models.fields.IntegerField', [], {}),
'transaction_id': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'user_email': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'user_fullname': ('django.db.models.fields.CharField', [], {'max_length': '1000'}),
'user_jid': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'user_mobile_msisdn': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
u'uppsell.linkedaccount': {
'Meta': {'object_name': 'LinkedAccount', 'db_table': "'linked_accounts'"},
'account_id': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'customer': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['uppsell.Customer']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '2000'}),
'linked_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'provider': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['uppsell.LinkedAccountType']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'uppsell.linkedaccounttype': {
'Meta': {'object_name': 'LinkedAccountType', 'db_table': "'linked_account_types'"},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '32'})
},
u'uppsell.listing': {
'Meta': {'object_name': 'Listing', 'db_table': "'listings'"},
'description': ('django.db.models.fields.CharField', [], {'max_length': '10000', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'price': ('django.db.models.fields.DecimalField', [], {'default': '0.0', 'max_digits': '8', 'decimal_places': '2'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['uppsell.Product']"}),
'sales_tax_rate': ('django.db.models.fields.FloatField', [], {'null': 'True'}),
'state': ('django.db.models.fields.CharField', [], {'max_length': '10'}),
'store': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['uppsell.Store']"}),
'subtitle': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'})
},
u'uppsell.order': {
'Meta': {'object_name': 'Order', 'db_table': "'orders'"},
'billing_address': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'billing_address'", 'null': 'True', 'to': u"orm['uppsell.Address']"}),
'coupon': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['uppsell.Coupon']", 'null': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'currency': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
'customer': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['uppsell.Customer']"}),
'fraud_state': ('django.db.models.fields.CharField', [], {'max_length': '30'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'order_shipping_total': ('django.db.models.fields.DecimalField', [], {'max_digits': '8', 'decimal_places': '2'}),
'order_state': ('django.db.models.fields.CharField', [], {'default': "'init'", 'max_length': '30'}),
'order_total': ('django.db.models.fields.DecimalField', [], {'max_digits': '8', 'decimal_places': '2'}),
'payment_made_ts': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'payment_state': ('django.db.models.fields.CharField', [], {'default': "'init'", 'max_length': '30'}),
'shipping_address': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'shipping_address'", 'null': 'True', 'to': u"orm['uppsell.Address']"}),
'store': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['uppsell.Store']"}),
'transaction_id': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'uppsell.orderevent': {
'Meta': {'object_name': 'OrderEvent', 'db_table': "'order_events'"},
'action_type': ('django.db.models.fields.CharField', [], {'max_length': '30'}),
'comment': ('django.db.models.fields.CharField', [], {'max_length': '2000', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'event': ('django.db.models.fields.CharField', [], {'max_length': '30'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'order': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['uppsell.Order']"}),
'state_after': ('django.db.models.fields.CharField', [], {'max_length': '30'}),
'state_before': ('django.db.models.fields.CharField', [], {'max_length': '30'})
},
u'uppsell.orderitem': {
'Meta': {'object_name': 'OrderItem', 'db_table': "'order_items'"},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'order': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['uppsell.Order']"}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['uppsell.Listing']"}),
'quantity': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'})
},
u'uppsell.product': {
'Meta': {'object_name': 'Product', 'db_table': "'products'"},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '10000'}),
'group': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['uppsell.ProductGroup']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'provisioning_codes': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'sku': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'stock_units': ('django.db.models.fields.FloatField', [], {}),
'subtitle': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'uppsell.productcode': {
'Meta': {'object_name': 'ProductCode', 'db_table': "'product_codes'"},
'code': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['uppsell.ProductGroup']"}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '20'})
},
u'uppsell.productgroup': {
'Meta': {'object_name': 'ProductGroup', 'db_table': "'product_groups'"},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'uppsell.store': {
'Meta': {'object_name': 'Store', 'db_table': "'stores'"},
'code': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '200'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'default_currency': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
'default_lang': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'sales_tax_rate': ('django.db.models.fields.FloatField', [], {}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
}
}
complete_apps = ['uppsell'] | mit | 2,520,933,076,126,869,500 | 74.403101 | 189 | 0.55575 | false |
alexanderfefelov/nav | python/nav/web/status/sections.py | 1 | 29405 | # -*- coding: utf-8 -*-
#
# Copyright (C) 2009, 2012 UNINETT AS
#
# This file is part of Network Administration Visualized (NAV).
#
# NAV is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License version 2 as published by
# the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details.
# You should have received a copy of the GNU General Public License along with
# NAV. If not, see <http://www.gnu.org/licenses/>.
#
"""Status sections.
Used to build up different sections for display.
"""
from datetime import datetime
from django.db.models import Q
from django.core.urlresolvers import reverse
from nav.metrics.templates import metric_prefix_for_device
from nav.models.profiles import StatusPreference, StatusPreferenceCategory
from nav.models.profiles import StatusPreferenceOrganization
from nav.models.event import AlertHistory, AlertHistoryVariable
from nav.models.manage import Netbox, Category, Organization
from nav.models.thresholds import ThresholdRule
from nav.web import servicecheckers
from nav.web.status.forms import SectionForm, NetboxForm
from nav.web.status.forms import NetboxMaintenanceForm, ServiceForm
from nav.web.status.forms import ServiceMaintenanceForm, ModuleForm
from nav.web.status.forms import ThresholdForm, LinkStateForm, SNMPAgentForm
MAINTENANCE_STATE = 'maintenanceState'
BOX_STATE = 'boxState'
SERVICE_STATE = 'serviceState'
MODULE_STATE = 'moduleState'
THRESHOLD_STATE = 'thresholdState'
LINK_STATE = 'linkState'
SNMP_STATE = 'snmpAgentState'
PSU_STATE = 'psuState'
def get_section_model(section_type):
"""Dispatch table"""
dtable = {
StatusPreference.SECTION_NETBOX: NetboxSection,
StatusPreference.SECTION_NETBOX_MAINTENANCE: NetboxMaintenanceSection,
StatusPreference.SECTION_MODULE: ModuleSection,
StatusPreference.SECTION_SERVICE: ServiceSection,
StatusPreference.SECTION_SERVICE_MAINTENANCE: ServiceMaintenanceSection,
StatusPreference.SECTION_THRESHOLD: ThresholdSection,
StatusPreference.SECTION_LINKSTATE: LinkStateSection,
StatusPreference.SECTION_SNMPAGENT: SNMPAgentSection,
StatusPreference.SECTION_PSU: PSUSection,
}
return dtable[section_type]
def get_user_sections(account):
'''Fetches all status sections for account in one swoop.
'''
sections = []
preferences = StatusPreference.objects.filter(
account=account
).order_by('position')
# Pre-fetching all categories and organisations
all_cats = Category.objects.values_list('pk', flat=True)
all_orgs = Organization.objects.values_list('pk', flat=True)
categories = {}
organizations = {}
cats = StatusPreferenceCategory.objects.filter(
statuspreference__in=preferences
)
orgs = StatusPreferenceOrganization.objects.filter(
statuspreference__in=preferences
)
    # Build dicts with statuspreference_id as keys.
for cat in cats:
if not cat.statuspreference_id in categories:
categories[cat.statuspreference_id] = []
categories[cat.statuspreference_id].append(cat.category_id)
for org in orgs:
if not org.statuspreference_id in organizations:
organizations[org.statuspreference_id] = []
organizations[org.statuspreference_id].append(org.organization_id)
# Add pre fetched categories and organisations to section preferences.
# Adds all categories and organisations if nothing is found in database.
for pref in preferences:
if pref.id in categories:
pref.fetched_categories = categories[pref.id]
pref.all_categories = False
else:
pref.fetched_categories = all_cats
pref.all_categories = True
if pref.id in organizations:
pref.fetched_organizations = organizations[pref.id]
pref.all_organizations = False
else:
pref.fetched_organizations = all_orgs
pref.all_organizations = True
for pref in preferences:
section_model = get_section_model(pref.type)
section = section_model(prefs=pref)
section.fetch_history()
sections.append(section)
return sections
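# Typical consumption of the returned list (sketch with assumed names, not
# taken from this file): a status view iterates the sections and renders
# section.type_title, section.columns and section.history for each preference.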
class _Section(object):
'''Base class for sections.
Attributes:
columns - tuples of the wanted columns. First part gives the displayed
name of the column, while the second defines the field that
              is looked up in the database.
history - the query used to look up the history
type_title - readable type name of this section
devicehistory_type - used in links to devicehistory
'''
columns = []
history = []
type_title = ''
devicehistory_type = ''
def __init__(self, prefs=None):
self.prefs = prefs
self.categories = self.prefs.fetched_categories
self.organizations = self.prefs.fetched_organizations
self.states = self.prefs.states.split(',')
for key, title in StatusPreference.SECTION_CHOICES:
if self.prefs.type == key:
self.type_title = title
break
def fetch_history(self):
"""Empty method,- should get overridden in
sub-classes"""
self.history = []
def devicehistory_url(self):
"""Make history urls for this device"""
url = reverse('devicehistory-view')
url += "?eventtype=%s" % self.devicehistory_type
url += "&group_by=datetime"
if not self.prefs.all_organizations:
for org in self.organizations:
url += "&org=%s" % org
if not self.prefs.all_categories:
for cat in self.categories:
url += "&cat=%s" % cat
# If custom orgs and cats, use AND search
if not self.prefs.all_categories and not self.prefs.all_organizations:
url += "&mode=and"
return url
@staticmethod
def form_class():
"""Return the chosen form"""
return SectionForm
@staticmethod
def form_data(status_prefs):
"""Insert data in the form for the view"""
data = {
'id': status_prefs.id,
'name': status_prefs.name,
'type': status_prefs.type,
'organizations': list(status_prefs.organizations.values_list(
'id', flat=True)) or [''],
}
data['categories'] = list(status_prefs.categories.values_list(
'id', flat=True)) or ['']
data['states'] = status_prefs.states.split(",")
return data
@classmethod
def form(cls, status_prefs):
"""Get the appropriate form"""
form_model = cls.form_class()
data = cls.form_data(status_prefs)
return form_model(data)
class NetboxSection(_Section):
columns = [
'Sysname',
'IP',
        'Start',
        'Duration',
        'History',
'',
]
devicehistory_type = 'a_boxDown'
@staticmethod
def form_class():
return NetboxForm
def fetch_history(self):
maintenance = self._maintenance()
alert_types = self._alerttype()
netbox_history = AlertHistory.objects.select_related(
'netbox'
).filter(
~Q(netbox__in=maintenance),
Q(netbox__up='n') | Q(netbox__up='s'),
alert_type__name__in=alert_types,
end_time__gte=datetime.max,
netbox__category__in=self.categories,
netbox__organization__in=self.organizations,
).extra(
select={'downtime': "date_trunc('second', NOW() - start_time)"}
).order_by('-start_time', 'end_time')
history = []
for h in netbox_history:
row = {'netboxid': h.netbox.id,
'tabrow': (
(
h.netbox.sysname,
reverse('ipdevinfo-details-by-name',
args=[h.netbox.sysname])
),
(h.netbox.ip, None),
(h.start_time, None),
(h.downtime, None),
(
'history',
reverse('devicehistory-view') +
'?netbox=%(id)s&eventtype=a_boxDown&group_by=datetime' % {
'id': h.netbox.id,
}
),
),
}
history.append(row)
self.history = history
def _maintenance(self):
return AlertHistory.objects.filter(
event_type=MAINTENANCE_STATE,
end_time__gte=datetime.max,
netbox__isnull=False,
).values('netbox').query
def _alerttype(self):
states = []
if 'y' in self.states:
states.append('boxUp')
if 'n' in self.states:
states.append('boxDown')
if 's' in self.states:
states.append('boxShadow')
return states
class NetboxMaintenanceSection(_Section):
columns = [
'Sysname',
'IP',
        'Start',
        'Duration',
'',
]
devicehistory_type = 'e_maintenanceState'
@staticmethod
def form_class():
return NetboxMaintenanceForm
def fetch_history(self):
maintenance = self._maintenance()
boxes_down = self._boxes_down()
history = []
for m in maintenance:
# Find out if the box is down as well as on maintenance
down = boxes_down.get(m.alert_history.netbox.id, None)
if m.alert_history.netbox.up == 'y':
down_since = 'Up'
downtime = ''
else:
if down:
down_since = down['start_time']
downtime = down['downtime']
else:
down_since = 'N/A'
downtime = 'N/A'
row = {'netboxid': m.alert_history.netbox.id,
'tabrow': (
(
m.alert_history.netbox.sysname,
reverse('ipdevinfo-details-by-name',
args=[m.alert_history.netbox.sysname])
),
(m.alert_history.netbox.ip, None),
(down_since, None),
(downtime, None),
(
'history',
reverse('devicehistory-view') +
('?netbox=%(id)s&eventtype=e_maintenanceState'
'&group_by=datetime' %
{'id': m.alert_history.netbox.id})
),
),
}
history.append(row)
self.history = history
def _maintenance(self):
return AlertHistoryVariable.objects.select_related(
'alert_history', 'alert_history__netbox'
).filter(
alert_history__netbox__category__in=self.categories,
alert_history__netbox__organization__in=self.organizations,
alert_history__netbox__up__in=self.states,
alert_history__end_time__gte=datetime.max,
alert_history__event_type=MAINTENANCE_STATE,
variable='maint_taskid',
).order_by('-alert_history__start_time')
def _boxes_down(self):
history = AlertHistory.objects.select_related(
'netbox'
).filter(
end_time__gte=datetime.max,
event_type=BOX_STATE,
).extra(
select={'downtime': "date_trunc('second', NOW() - start_time)"}
).order_by('-start_time').values(
'netbox', 'start_time', 'downtime'
)
ret = {}
for h in history:
ret[h['netbox']] = h
return ret
class ServiceSection(_Section):
columns = [
'Sysname',
'Handler',
        'Start',
        'Duration',
'',
]
devicehistory_type = 'e_serviceState'
@staticmethod
def form_class():
return ServiceForm
@staticmethod
def form_data(status_prefs):
data = {
'id': status_prefs.id,
'name': status_prefs.name,
'type': status_prefs.type,
'organizations': list(status_prefs.organizations.values_list(
'id', flat=True)) or [''],
}
data['services'] = status_prefs.services.split(",") or ['']
data['states'] = status_prefs.states.split(",")
return data
def __init__(self, prefs=None):
super(ServiceSection, self).__init__(prefs=prefs)
if self.prefs.services:
self.services = self.prefs.services.split(',')
else:
self.services = [s for s in servicecheckers.get_checkers()]
def fetch_history(self):
maintenance = AlertHistory.objects.filter(
end_time__gte=datetime.max,
event_type=MAINTENANCE_STATE,
).values('netbox').query
services = AlertHistory.objects.select_related(
'netbox'
).filter(
~Q(netbox__in=maintenance),
end_time__gte=datetime.max,
event_type=SERVICE_STATE,
netbox__organization__in=self.organizations,
).extra(
select={
'downtime': "date_trunc('second', NOW() - start_time)",
'handler': 'service.handler',
},
tables=['service'],
where=[
'alerthist.subid = service.serviceid::text',
'service.handler IN %s',
],
params=[tuple(self.services)]
)
history = []
for s in services:
row = {'netboxid': s.netbox.id,
'tabrow': (
(
s.netbox.sysname,
reverse('ipdevinfo-details-by-name', args=[
s.netbox.sysname
])
),
(
s.handler,
reverse('ipdevinfo-service-list-handler', args=[
s.handler
])
),
(s.start_time, None),
(s.downtime, None),
(
'history',
reverse('devicehistory-view') +
('?netbox=%(id)s&eventtype=e_serviceState'
'&group_by=datetime' %
{'id': s.netbox.id})
)
),
}
history.append(row)
self.history = history
def devicehistory_url(self):
url = reverse('devicehistory-view')
url += "?eventtype=%s" % self.devicehistory_type
url += "&group_by=datetime"
if not self.prefs.all_organizations:
# FIXME filter service
# Service is joined in on the alerthist.subid field, which is not a
# part of this query. Yay
netboxes = Netbox.objects.filter(
organization__in=self.organizations,
).values('id')
for n in netboxes:
url += "&netbox=%s" % n['id']
return url
class ServiceMaintenanceSection(ServiceSection):
devicehistory_type = 'e_maintenanceState'
@staticmethod
def form_class():
return ServiceMaintenanceForm
def fetch_history(self):
maintenance = AlertHistoryVariable.objects.select_related(
'alert_history', 'alert_history__netbox'
).filter(
alert_history__end_time__gte=datetime.max,
alert_history__event_type=MAINTENANCE_STATE,
variable='maint_taskid',
).extra(
select={
'downtime': "date_trunc('second', NOW() - start_time)",
'handler': 'service.handler',
'up': 'service.up',
},
tables=['service'],
where=['subid = serviceid::text'],
).order_by('-alert_history__start_time')
service_history = AlertHistory.objects.filter(
end_time__gte=datetime.max,
event_type=SERVICE_STATE,
).extra(
select={'downtime': "date_trunc('second', NOW() - start_time)"}
).values('netbox', 'start_time', 'downtime')
service_down = {}
for s in service_history:
service_down[s['netbox']] = s
history = []
for m in maintenance:
down = service_down.get(m.alert_history.netbox.id, None)
if m.up == 'y':
down_since = 'Up'
downtime = ''
else:
if down:
down_since = down['start_time']
downtime = down['downtime']
else:
down_since = 'N/A'
downtime = 'N/A'
row = {'netboxid': m.alert_history.netbox.id,
'tabrow': (
(
m.alert_history.netbox.sysname,
reverse('ipdevinfo-details-by-name',
args=[m.alert_history.netbox.sysname])
),
(m.handler, reverse('ipdevinfo-service-list-handler',
args=[m.handler])),
(down_since, None),
(downtime, None),
(
'history',
reverse('devicehistory-view') +
('?netbox=%(id)s&eventtype=e_maintenanceState'
'&group_by=datetime' %
{'id': m.alert_history.netbox.id})
),
),
}
history.append(row)
self.history = history
class ModuleSection(_Section):
columns = [
'Sysname',
'IP',
'Module',
        'Start',
        'Duration',
'',
]
devicehistory_type = 'a_moduleDown'
@staticmethod
def form_class():
return ModuleForm
def fetch_history(self, module_history=None):
module_history = AlertHistory.objects.select_related(
'netbox', 'device'
).filter(
end_time__gte=datetime.max,
event_type=MODULE_STATE,
alert_type__name='moduleDown',
netbox__organization__in=self.organizations,
netbox__category__in=self.categories,
).extra(
select={
'downtime': "date_trunc('second', NOW() - start_time)",
'module_id': 'module.moduleid',
'module_name': 'module.name',
},
tables=['module'],
where=[
'alerthist.deviceid = module.deviceid',
'module.up IN %s',
],
params=[tuple(self.states)]
).order_by('-start_time') if module_history is None else module_history
history = []
for module in module_history:
row = {'netboxid': module.netbox.id,
'tabrow': (
(
module.netbox.sysname,
reverse('ipdevinfo-details-by-name',
args=[module.netbox.sysname])
),
(module.netbox.ip, None),
(
module.module_name,
reverse('ipdevinfo-module-details', args=[
module.netbox.sysname,
module.module_name
]) if module.module_name else None
),
(module.start_time, None),
(module.downtime, None),
(
'history',
reverse('devicehistory-view') +
'?module=%(id)s&eventtype=a_moduleDown&group_by=datetime' % {
'id': module.module_id,
}
),
),
}
history.append(row)
self.history = history
class ThresholdSection(_Section):
columns = [
'Sysname',
        'Description',
        'Start',
        'Duration',
'',
]
devicehistory_type = 'a_exceededThreshold'
@staticmethod
def form_class():
return ThresholdForm
@staticmethod
def form_data(status_prefs):
data = {
'id': status_prefs.id,
'name': status_prefs.name,
'type': status_prefs.type,
'organizations': list(status_prefs.organizations.values_list(
'id', flat=True)) or [''],
'categories': list(status_prefs.categories.values_list(
'id', flat=True)) or ['']
}
return data
def fetch_history(self):
thresholds = AlertHistory.objects.select_related(
'netbox'
).filter(
end_time__gte=datetime.max,
event_type=THRESHOLD_STATE,
alert_type__name='exceededThreshold',
netbox__organization__in=self.organizations,
netbox__category__in=self.categories,
).extra(
select={
'downtime': "date_trunc('second', NOW() - start_time)",
},
).order_by('-start_time')
history = []
for alert in thresholds:
description = self._description_from_alert(alert)
row = {'netboxid': alert.netbox.id,
'tabrow': (
(alert.netbox.sysname,
reverse('ipdevinfo-details-by-name',
args=[alert.netbox.sysname])),
(description, None),
(alert.start_time, None),
(alert.downtime, None),
('history',
reverse('devicehistory-view') +
'?netbox=%(id)s&eventtype=a_exceededThreshold'
'&group_by=datetime' % {
'id': alert.netbox.id,
}),
),
}
history.append(row)
self.history = history
@staticmethod
def _description_from_alert(alert):
try:
ruleid, metric = alert.subid.split(':', 1)
except ValueError:
description = None
else:
try:
rule = ThresholdRule.objects.get(id=ruleid)
except ThresholdRule.DoesNotExist:
limit = ''
else:
limit = rule.alert
prefix = metric_prefix_for_device(alert.netbox.sysname)
if metric.startswith(prefix):
metric = metric[len(prefix)+1:]
description = "{0} {1}".format(metric, limit)
return description
class LinkStateSection(_Section):
columns = [
'Sysname',
'IP',
'Interface',
        'Start',
        'Duration',
        'History',
'',
]
devicehistory_type = 'a_linkDown'
@staticmethod
def form_class():
return LinkStateForm
def fetch_history(self):
netbox_history = AlertHistory.objects.select_related(
'netbox'
).filter(
event_type=LINK_STATE,
end_time__gte=datetime.max,
netbox__category__in=self.categories,
netbox__organization__in=self.organizations,
).extra(
select={
'downtime': "date_trunc('second', NOW() - start_time)",
'interfaceid': 'interface.interfaceid',
'ifname': 'interface.ifname',
},
where=['subid = interfaceid::text'],
tables=['interface']
).order_by('-start_time', 'end_time')
history = []
for h in netbox_history:
row = {
'netboxid': h.netbox.id,
'alerthistid': h.id,
'tabrow': (
(
h.netbox.sysname,
reverse('ipdevinfo-details-by-name',
args=[h.netbox.sysname])
),
(h.netbox.ip, None),
(
h.ifname,
reverse('ipdevinfo-interface-details',
args=[h.netbox.sysname, h.interfaceid])
),
(h.start_time, None),
(h.downtime, None),
('history', reverse('devicehistory-view') +
'?netbox=%(id)s&eventtype=a_linkDown&group_by=datetime' % {
'id': h.netbox.id, }
),
),
}
history.append(row)
self.history = history
class SNMPAgentSection(_Section):
columns = [
'Sysname',
'IP',
        'Start',
        'Duration',
'',
]
devicehistory_type = 'a_snmpAgentDown'
@staticmethod
def form_class():
return SNMPAgentForm
@staticmethod
def form_data(status_prefs):
data = {
'id': status_prefs.id,
'name': status_prefs.name,
'type': status_prefs.type,
'organizations': list(status_prefs.organizations.values_list(
'id', flat=True)) or [''],
}
data['categories'] = list(status_prefs.categories.values_list(
'id', flat=True)) or ['']
return data
def fetch_history(self):
netbox_history = AlertHistory.objects.select_related(
'netbox'
).filter(
event_type=SNMP_STATE,
end_time__gte=datetime.max,
netbox__category__in=self.categories,
netbox__organization__in=self.organizations,
).extra(
select={
'downtime': "date_trunc('second', NOW() - start_time)",
}
).order_by('-start_time', 'end_time')
history = []
for h in netbox_history:
row = {'netboxid': h.netbox.id,
'tabrow': (
(
h.netbox.sysname,
reverse('ipdevinfo-details-by-name',
args=[h.netbox.sysname])
),
(h.netbox.ip, None),
(h.start_time, None),
(h.downtime, None),
(
'history',
reverse('devicehistory-view') +
('?netbox=%(id)s&eventtype=a_snmpAgentDown'
'&group_by=datetime' % {'id': h.netbox.id})
),
),
}
history.append(row)
self.history = history
class PSUSection(_Section):
columns = [
'Sysname',
'IP',
'PSU',
        'Start',
        'Duration',
'',
]
devicehistory_type = 'a_psuNotOK'
@staticmethod
def form_class():
return ModuleForm
def fetch_history(self, psu_history=None):
psu_history = AlertHistory.objects.select_related(
'netbox', 'device'
).filter(
end_time__gte=datetime.max,
event_type=PSU_STATE,
alert_type__name='psuNotOK',
netbox__organization__in=self.organizations,
netbox__category__in=self.categories,
).extra(
select={
'downtime': "date_trunc('second', NOW() - start_time)",
'powersupply_id': 'powersupply_or_fan.powersupplyid',
'powersupply_name': 'powersupply_or_fan.name',
},
tables=['powersupply_or_fan'],
where=[
'alerthist.subid = powersupply_or_fan.powersupplyid::TEXT',
],
).order_by('-start_time') if psu_history is None else psu_history
self.history = [self._psu_to_table_row(psu) for psu in psu_history]
@staticmethod
def _psu_to_table_row(psu):
return {'netboxid': psu.netbox.id,
'tabrow': (
(psu.netbox.sysname,
reverse('ipdevinfo-details-by-name', args=[psu.netbox.sysname])),
(psu.netbox.ip, None),
(psu.powersupply_name, None),
(psu.start_time, None),
(psu.downtime, None),
('history',
(reverse('devicehistory-view') + '?powersupply=%s'
'&eventtype=a_psuNotOK'
'&group_by=datetime' %
psu.powersupply_id)),
)}
| gpl-2.0 | -2,737,846,190,334,382,000 | 32.294185 | 85 | 0.508648 | false |
projectarkc/arkc-server-gae | fetchfrom/goagent.py | 1 | 9320 | #!/usr/bin/env python
# coding:utf-8
__version__ = '3.2.0'
__password__ = ''
__hostsdeny__ = () # __hostsdeny__ = ('.youtube.com', '.youku.com')
import os
import re
import time
import struct
import zlib
import base64
import logging
import urlparse
import httplib
import io
import string
import json
from BaseHTTPServer import BaseHTTPRequestHandler
from StringIO import StringIO
from google.appengine.api import urlfetch
from google.appengine.api.taskqueue.taskqueue import MAX_URL_LENGTH
from google.appengine.runtime import apiproxy_errors
URLFETCH_MAX = 2
URLFETCH_MAXSIZE = 4 * 1024 * 1024
URLFETCH_DEFLATE_MAXSIZE = 4 * 1024 * 1024
URLFETCH_TIMEOUT = 30
class NotFoundKey(Exception):
pass
class GAEfail(Exception):
pass
class Nonsense(Exception):
pass
class PermanentFail(Exception):
pass
class TimeoutFail(Exception):
pass
class HTTPRequest(BaseHTTPRequestHandler):
def __init__(self, request_text):
self.rfile = StringIO(request_text)
self.raw_requestline = self.rfile.readline()
self.error_code = self.error_message = None
self.parse_request()
def send_error(self, code, message):
self.error_code = code
self.error_message = message
def message_html(title, banner, detail=''):
MESSAGE_TEMPLATE = '''
<html><head>
<meta http-equiv="content-type" content="text/html;charset=utf-8">
<title>$title</title>
<style><!--
body {font-family: arial,sans-serif}
div.nav {margin-top: 1ex}
div.nav A {font-size: 10pt; font-family: arial,sans-serif}
span.nav {font-size: 10pt; font-family: arial,sans-serif; font-weight: bold}
div.nav A,span.big {font-size: 12pt; color: #0000cc}
div.nav A {font-size: 10pt; color: black}
A.l:link {color: #6f6f6f}
A.u:link {color: green}
//--></style>
</head>
<body text=#000000 bgcolor=#ffffff>
<table border=0 cellpadding=2 cellspacing=0 width=100%>
<tr><td bgcolor=#3366cc><font face=arial,sans-serif color=#ffffff><b>Message From FetchServer</b></td></tr>
<tr><td> </td></tr></table>
<blockquote>
<H1>$banner</H1>
$detail
<p>
</blockquote>
<table width=100% cellpadding=0 cellspacing=0><tr><td bgcolor=#3366cc><img alt="" width=1 height=4></td></tr></table>
</body></html>
'''
return string.Template(MESSAGE_TEMPLATE).substitute(title=title, banner=banner, detail=detail)
try:
from Crypto.Cipher.ARC4 import new as RC4Cipher
except ImportError:
logging.warn('Load Crypto.Cipher.ARC4 Failed, Use Pure Python Instead.')
class RC4Cipher(object):
def __init__(self, key):
x = 0
box = range(256)
for i, y in enumerate(box):
x = (x + y + ord(key[i % len(key)])) & 0xff
box[i], box[x] = box[x], y
self.__box = box
self.__x = 0
self.__y = 0
def encrypt(self, data):
out = []
out_append = out.append
x = self.__x
y = self.__y
box = self.__box
for char in data:
x = (x + 1) & 0xff
y = (y + box[x]) & 0xff
box[x], box[y] = box[y], box[x]
out_append(chr(ord(char) ^ box[(box[x] + box[y]) & 0xff]))
self.__x = x
self.__y = y
return ''.join(out)
def inflate(data):
return zlib.decompress(data, -zlib.MAX_WBITS)
def deflate(data):
return zlib.compress(data)[2:-4]
def format_response(status, headers, content):
if content:
headers.pop('content-length', None)
headers['Content-Length'] = str(len(content))
data = 'HTTP/1.1 %d %s\r\n%s\r\n\r\n%s' % (status, httplib.responses.get(
status, 'Unknown'), '\r\n'.join('%s: %s' % (k.title(), v) for k, v in headers.items()), content)
data = deflate(data)
assert len(data) <= 65536
return "%04x" % len(data) + data
def application(headers, body, method, url):
kwargs = {}
any(kwargs.__setitem__(x[len('x-urlfetch-'):].lower(), headers.pop(x))
for x in headers.keys() if x.lower().startswith('x-urlfetch-'))
if 'Content-Encoding' in headers and body:
if headers['Content-Encoding'] == 'deflate':
body = inflate(body)
headers['Content-Length'] = str(len(body))
del headers['Content-Encoding']
# logging.info(
# '%s "%s %s %s" - -', environ['REMOTE_ADDR'], method, url, 'HTTP/1.1')
if __password__ and __password__ != kwargs.get('password', ''):
raise GAEfail
netloc = urlparse.urlparse(url).netloc
if __hostsdeny__ and netloc.endswith(__hostsdeny__):
raise GAEfail
if len(url) > MAX_URL_LENGTH:
raise GAEfail
if netloc.startswith(('127.0.0.', '::1', 'localhost')):
raise GAEfail
fetchmethod = getattr(urlfetch, method, None)
if not fetchmethod:
raise GAEfail
timeout = int(kwargs.get('timeout', URLFETCH_TIMEOUT))
validate_certificate = bool(int(kwargs.get('validate', 0)))
maxsize = int(kwargs.get('maxsize', 0))
# https://www.freebsdchina.org/forum/viewtopic.php?t=54269
accept_encoding = headers.get(
'Accept-Encoding', '') or headers.get('Bccept-Encoding', '')
errors = []
for i in xrange(int(kwargs.get('fetchmax', URLFETCH_MAX))):
try:
response = urlfetch.fetch(url, body, fetchmethod, headers, allow_truncated=False,
follow_redirects=False, deadline=timeout, validate_certificate=validate_certificate)
break
except apiproxy_errors.OverQuotaError as e:
time.sleep(5)
except urlfetch.DeadlineExceededError as e:
errors.append('%r, timeout=%s' % (e, timeout))
logging.error(
'DeadlineExceededError(timeout=%s, url=%r)', timeout, url)
time.sleep(1)
timeout *= 2
except urlfetch.DownloadError as e:
errors.append('%r, timeout=%s' % (e, timeout))
logging.error('DownloadError(timeout=%s, url=%r)', timeout, url)
time.sleep(1)
timeout *= 2
except urlfetch.ResponseTooLargeError as e:
errors.append('%r, timeout=%s' % (e, timeout))
response = e.response
logging.error(
'ResponseTooLargeError(timeout=%s, url=%r) response(%r)', timeout, url, response)
m = re.search(
r'=\s*(\d+)-', headers.get('Range') or headers.get('range') or '')
if m is None:
headers['Range'] = 'bytes=0-%d' % (maxsize or URLFETCH_MAXSIZE)
else:
headers.pop('Range', '')
headers.pop('range', '')
start = int(m.group(1))
headers[
'Range'] = 'bytes=%s-%d' % (start, start + (maxsize or URLFETCH_MAXSIZE))
timeout *= 2
except urlfetch.SSLCertificateError as e:
errors.append('%r, should validate=0 ?' % e)
logging.error('%r, timeout=%s', e, timeout)
except Exception as e:
errors.append(str(e))
if i == 0 and method == 'GET':
timeout *= 2
else:
raise PermanentFail
#logging.debug('url=%r response.status_code=%r response.headers=%r response.content[:1024]=%r', url, response.status_code, dict(response.headers), response.content[:1024])
status_code = int(response.status_code)
data = response.content
response_headers = response.headers
content_type = response_headers.get('content-type', '')
if status_code == 200 and maxsize and len(data) > maxsize and response_headers.get('accept-ranges', '').lower() == 'bytes' and int(response_headers.get('content-length', 0)):
status_code = 206
response_headers[
'Content-Range'] = 'bytes 0-%d/%d' % (maxsize - 1, len(data))
data = data[:maxsize]
if status_code == 200 and 'content-encoding' not in response_headers and 512 < len(data) < URLFETCH_DEFLATE_MAXSIZE and content_type.startswith(('text/', 'application/json', 'application/javascript')):
if 'gzip' in accept_encoding:
response_headers['Content-Encoding'] = 'gzip'
compressobj = zlib.compressobj(
zlib.Z_DEFAULT_COMPRESSION, zlib.DEFLATED, -zlib.MAX_WBITS, zlib.DEF_MEM_LEVEL, 0)
dataio = io.BytesIO()
dataio.write('\x1f\x8b\x08\x00\x00\x00\x00\x00\x02\xff')
dataio.write(compressobj.compress(data))
dataio.write(compressobj.flush())
dataio.write(
struct.pack('<LL', zlib.crc32(data) & 0xFFFFFFFFL, len(data) & 0xFFFFFFFFL))
data = dataio.getvalue()
elif 'deflate' in accept_encoding:
response_headers['Content-Encoding'] = 'deflate'
data = deflate(data)
response_headers['Content-Length'] = str(len(data))
#logging.info("Goagent:: Get %d data and sent.", len(data))
return format_response(status_code, response_headers, '') + data
def process(data):
req = HTTPRequest(data)
p = json.loads(''.join(req.rfile.readlines()))
#logging.info("Access URL: " + p["url"])
return application(p["headers"], p["body"], p["method"], p["url"])
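# process() expects a serialized HTTP request whose payload is a JSON object
# with 'headers', 'body', 'method' and 'url' keys; it simply replays that
# request through application() above.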
| gpl-2.0 | 4,217,351,065,966,016,500 | 33.64684 | 205 | 0.591094 | false |
kratorius/ads | python/interviewquestions/longest_sequence.py | 1 | 1730 | """
Given a list of distinct numbers, find the longest monotonically increasing
subsequence within that list.
For example:
S = [2, 4, 3, 5, 1, 7, 6, 9, 8] -> [2, 3, 5, 6, 8]
or [2, 4, 5, 7, 8]
or [2, 4, 5, 7, 9]
If there's more than one solution, just return one of them.
"""
import unittest
def longest_sequence(lst):
if not lst:
return []
lengths = [0] * len(lst)
predecessors = [None] * len(lst)
max_idx = 0
for idx, item in enumerate(lst):
# what's the longest subsequence until this point?
# (whose last item < current item)
max_length = 1
lengths[idx] = 1
predecessors[idx] = None
for i, length in enumerate(lengths[:idx]):
if length >= max_length and lst[i] < item:
max_length = length + 1
lengths[idx] = max_length
predecessors[idx] = i
        # keep max_idx pointing at the end of the longest run found so far;
        # updating it only inside the inner loop could return a shorter run
        # when the best subsequence does not end near the tail of the list
        if lengths[idx] >= lengths[max_idx]:
            max_idx = idx
# proceed backward and rebuild the list
longest = []
while max_idx is not None:
item = lst[max_idx]
longest.append(item)
max_idx = predecessors[max_idx]
return list(reversed(longest))
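# The pass above is the standard O(n^2) dynamic programme for this problem:
# lengths[i] holds the length of the best increasing run ending at lst[i] and
# predecessors[i] records which index it extends. For example (worked by hand,
# not part of the original file), [2, 4, 3, 5] produces lengths=[1, 2, 2, 3]
# and predecessors=[None, 0, 0, 1], so backtracking from index 3 yields
# [2, 4, 5].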
class LongestSequenceTest(unittest.TestCase):
def test_sequence_find(self):
self.assertEqual([], longest_sequence([]))
self.assertEqual([10], longest_sequence([10]))
self.assertEqual([2, 4, 5, 7, 8], longest_sequence([2, 4, 3, 5, 1, 7, 6, 9, 8]))
self.assertEqual([1, 2, 3], longest_sequence([1, 2, 3, 1, 2, 3, 1, 2, 3]))
self.assertEqual([1, 2, 3], longest_sequence([1, 2, 3]))
self.assertEqual([10, 20, 30], longest_sequence([10, 5, 4, 20, 3, 2, 30]))
| mit | 2,014,271,797,512,256,300 | 29.892857 | 88 | 0.546821 | false |
Yannig/ansible | lib/ansible/plugins/action/net_base.py | 1 | 7262 | # (c) 2015, Ansible Inc,
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import sys
import copy
from ansible import constants as C
from ansible.plugins.action import ActionBase
from ansible.module_utils.network_common import load_provider
from imp import find_module, load_module
try:
from __main__ import display
except ImportError:
from ansible.utils.display import Display
display = Display()
class ActionModule(ActionBase):
def run(self, tmp=None, task_vars=None):
if self._play_context.connection != 'local':
return dict(
failed=True,
msg='invalid connection specified, expected connection=local, '
'got %s' % self._play_context.connection
)
play_context = copy.deepcopy(self._play_context)
play_context.network_os = self._get_network_os(task_vars)
# we should be able to stream line this a bit by creating a common
# provider argument spec in module_utils/network_common.py or another
# option is that there isn't a need to push provider into the module
# since the connection is started in the action handler.
f, p, d = find_module('ansible')
f2, p2, d2 = find_module('module_utils', [p])
f3, p3, d3 = find_module(play_context.network_os, [p2])
module = load_module('ansible.module_utils.' + play_context.network_os, f3, p3, d3)
self.provider = load_provider(module.get_provider_argspec(), self._task.args)
if play_context.network_os == 'junos':
play_context.connection = 'netconf'
play_context.port = int(self.provider['port'] or self._play_context.port or 830)
else:
play_context.connection = 'network_cli'
play_context.port = int(self.provider['port'] or self._play_context.port or 22)
play_context.remote_addr = self.provider['host'] or self._play_context.remote_addr
play_context.remote_user = self.provider['username'] or self._play_context.connection_user
play_context.password = self.provider['password'] or self._play_context.password
play_context.private_key_file = self.provider['ssh_keyfile'] or self._play_context.private_key_file
play_context.timeout = int(self.provider['timeout'] or C.PERSISTENT_COMMAND_TIMEOUT)
if 'authorize' in self.provider.keys():
play_context.become = self.provider['authorize'] or False
play_context.become_pass = self.provider['auth_pass']
socket_path = self._start_connection(play_context)
task_vars['ansible_socket'] = socket_path
if 'fail_on_missing_module' not in self._task.args:
self._task.args['fail_on_missing_module'] = False
result = super(ActionModule, self).run(tmp, task_vars)
module = self._get_implementation_module(play_context.network_os, self._task.action)
if not module:
if self._task.args['fail_on_missing_module']:
result['failed'] = True
else:
result['failed'] = False
result['msg'] = ('Could not find implementation module %s for %s' %
(self._task.action, play_context.network_os))
else:
new_module_args = self._task.args.copy()
# perhaps delete the provider argument here as well since the
# module code doesn't need the information, the connection is
# already started
if 'network_os' in new_module_args:
del new_module_args['network_os']
del new_module_args['fail_on_missing_module']
display.vvvv('Running implementation module %s' % module)
result.update(self._execute_module(module_name=module,
module_args=new_module_args, task_vars=task_vars,
wrap_async=self._task.async))
display.vvvv('Caching network OS %s in facts' % play_context.network_os)
result['ansible_facts'] = {'network_os': play_context.network_os}
return result
def _start_connection(self, play_context):
display.vvv('using connection plugin %s' % play_context.connection, play_context.remote_addr)
connection = self._shared_loader_obj.connection_loader.get('persistent',
play_context, sys.stdin)
socket_path = connection.run()
display.vvvv('socket_path: %s' % socket_path, play_context.remote_addr)
if not socket_path:
return {'failed': True,
'msg': 'unable to open shell. Please see: ' +
'https://docs.ansible.com/ansible/network_debug_troubleshooting.html#unable-to-open-shell'}
# make sure we are in the right cli context which should be
# enable mode and not config module
rc, out, err = connection.exec_command('prompt()')
if str(out).strip().endswith(')#'):
display.vvvv('wrong context, sending exit to device', self._play_context.remote_addr)
connection.exec_command('exit')
if self._play_context.become_method == 'enable':
self._play_context.become = False
self._play_context.become_method = None
return socket_path
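    # Note: the value returned above is the filesystem path of the persistent
    # connection's local socket; run() publishes it as
    # task_vars['ansible_socket'] so the platform module can reuse the
    # already-open session instead of reconnecting.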
def _get_network_os(self, task_vars):
if ('network_os' in self._task.args and self._task.args['network_os']):
display.vvvv('Getting network OS from task argument')
network_os = self._task.args['network_os']
elif (self._play_context.network_os):
display.vvvv('Getting network OS from inventory')
network_os = self._play_context.network_os
elif ('network_os' in task_vars['ansible_facts'] and
task_vars['ansible_facts']['network_os']):
display.vvvv('Getting network OS from fact')
network_os = task_vars['ansible_facts']['network_os']
else:
# this will be replaced by the call to get_capabilities() on the
# connection
display.vvvv('Getting network OS from net discovery')
network_os = None
return network_os
def _get_implementation_module(self, network_os, platform_agnostic_module):
implementation_module = network_os + '_' + platform_agnostic_module.partition('_')[2]
if implementation_module not in self._shared_loader_obj.module_loader:
implementation_module = None
return implementation_module
| gpl-3.0 | -8,083,114,629,988,565,000 | 43.280488 | 118 | 0.630956 | false |
F5Networks/f5-common-python | f5/bigiq/cm/device/licensing/pool/initial_activation.py | 1 | 1773 | # coding=utf-8
#
# Copyright 2017 F5 Networks Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""BIG-IQ® license pool regkeys.
REST URI
``http://localhost/mgmt/cm/device/licensing/pool/initial-activation``
REST Kind
``cm:device:licensing:pool:initial-activation:*``
"""
from f5.bigiq.resource import Collection
from f5.bigiq.resource import Resource
class Initial_Activations(Collection):
def __init__(self, pool):
super(Initial_Activations, self).__init__(pool)
self._meta_data['required_json_kind'] = \
'cm:device:licensing:pool:initial-activation:initialactivationworkercollectionstate' # NOQA
self._meta_data['allowed_lazy_attributes'] = [Initial_Activation]
self._meta_data['attribute_registry'] = {
'cm:device:licensing:pool:initial-activation:initialactivationworkeritemstate': Initial_Activation # NOQA
}
class Initial_Activation(Resource):
def __init__(self, initial_activations):
super(Initial_Activation, self).__init__(initial_activations)
self._meta_data['required_creation_parameters'] = {'name', 'regKey'}
self._meta_data['required_json_kind'] = \
'cm:device:licensing:pool:initial-activation:initialactivationworkeritemstate'
| apache-2.0 | 1,947,570,729,455,576,800 | 36.702128 | 118 | 0.713883 | false |
CSD-Public/stonix | src/MacBuild/ramdisk/lib/environment.py | 1 | 30726 | #!/usr/bin/env python3
###############################################################################
# #
# Copyright 2019. Triad National Security, LLC. All rights reserved. #
# This program was produced under U.S. Government contract 89233218CNA000001 #
# for Los Alamos National Laboratory (LANL), which is operated by Triad #
# National Security, LLC for the U.S. Department of Energy/National Nuclear #
# Security Administration. #
# #
# All rights in the program are reserved by Triad National Security, LLC, and #
# the U.S. Department of Energy/National Nuclear Security Administration. The #
# Government is granted for itself and others acting on its behalf a #
# nonexclusive, paid-up, irrevocable worldwide license in this material to #
# reproduce, prepare derivative works, distribute copies to the public, #
# perform publicly and display publicly, and to permit others to do so. #
# #
###############################################################################
# ============================================================================#
# Filename $RCSfile: stonix/environment.py,v $
# Description Security Configuration Script
# OS Linux, OS X, Solaris, BSD
# Author Dave Kennel
# Last updated by $Author: $
# Notes Based on CIS Benchmarks, NSA RHEL
# Guidelines, NIST and DISA STIG/Checklist
# Release $Revision: 1.0 $
# Modified Date $Date: 2010/8/24 14:00:00 $
# ============================================================================#
'''
Created on Aug 24, 2010
@author: dkennel
@change: 2014/05/29 - ekkehard j. koch - pep8 and comment updates
'''
#--- Native python libraries
import os
import re
import sys
import socket
import subprocess
import types
import platform
import pwd
import time
class Environment:
'''The Environment class collects commonly used information about the
execution platform and makes it available to the rules.
:version: 1.0
:author: D. Kennel
'''
def __init__(self):
self.operatingsystem = ''
self.osreportstring = ''
self.osfamily = ''
self.hostname = ''
self.ipaddress = ''
self.macaddress = ''
self.osversion = ''
self.numrules = 0
self.euid = os.geteuid()
currpwd = pwd.getpwuid(self.euid)
try:
self.homedir = currpwd[5]
except(IndexError):
self.homedir = '/dev/null'
self.installmode = False
self.verbosemode = False
self.debugmode = False
self.runtime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
self.collectinfo()
def setinstallmode(self, installmode):
'''Set the install mode bool value. Should be true if the prog should run
in install mode.
        :param installmode: bool - True if the program should run in install mode
:returns: void
@author: D. Kennel
'''
try:
if type(installmode) is bool:
self.installmode = installmode
except (NameError):
# installmode was undefined
pass
def getinstallmode(self):
'''Return the current value of the install mode bool. Should be true if
the program is to run in install mode.
:returns: bool : installmode
@author: D. Kennel
'''
return self.installmode
def setverbosemode(self, verbosemode):
'''Set the verbose mode bool value. Should be true if the prog should run
in verbose mode.
        :param verbosemode: bool - True if the program should run in verbose mode
:returns: void
@author: D. Kennel
'''
try:
if type(verbosemode) is bool:
self.verbosemode = verbosemode
except (NameError):
# verbosemode was undefined
pass
def getverbosemode(self):
'''Return the current value of the verbose mode bool. Should be true if
the program is to run in verbose mode.
:returns: bool : verbosemode
@author: D. Kennel
'''
return self.verbosemode
def setdebugmode(self, debugmode):
        '''Set the debug mode bool value. Should be true if the prog should run
        in debug mode.
        :param debugmode: bool - True if the program should run in debug mode
:returns: void
@author: D. Kennel
'''
try:
if type(debugmode) is bool:
self.debugmode = debugmode
except (NameError):
# debugmode was undefined
pass
def getdebugmode(self):
'''Return the current value of the debug mode bool. Should be true if the
program is to run in debug mode.
:returns: bool : debugmode
@author: D. Kennel
'''
return self.debugmode
def getostype(self):
'''Return the detailed operating system type.
:returns: string :
@author D. Kennel
'''
return self.operatingsystem
def getosreportstring(self):
'''Return the detailed operating system type with full version info.
:returns: string :
@author D. Kennel
'''
return self.osreportstring
def getosfamily(self):
'''Return the value of self.osfamily which should be linux, darwin,
solaris or freebsd.
:returns: string :
@author: D. Kennel
'''
return self.osfamily
def getosver(self):
'''Return the OS version as a string.
:returns: string :
@author D. Kennel
'''
return self.osversion
def gethostname(self):
'''Return the hostname of the system.
:returns: string
@author: dkennel
'''
return self.hostname
def getipaddress(self):
'''Return the IP address associated with the host name.
:returns: string :
@author D. Kennel
'''
return self.ipaddress
def getmacaddr(self):
'''Return the mac address in native format.
:returns: string :
@author D. Kennel
'''
return self.macaddress
def geteuid(self):
'''Return the effective user ID
:returns: int :
@author D. Kennel
'''
return self.euid
def geteuidhome(self):
'''Returns the home directory of the current effective user ID.
:returns: string
@author: D. Kennel
'''
return self.homedir
def collectinfo(self):
'''Private method to populate data.
:returns: void
@author D. Kennel
'''
# print 'Environment Running discoveros'
self.discoveros()
# print 'Environment running setosfamily'
self.setosfamily()
# print 'Environment running guessnetwork'
self.guessnetwork()
self.collectpaths()
def discoveros(self):
'''Discover the operating system type and version
:returns: void
@author: D. Kennel
'''
# Alternative (better) implementation for Linux
if os.path.exists('/usr/bin/lsb_release'):
proc = subprocess.Popen('/usr/bin/lsb_release -dr',
shell=True, stdout=subprocess.PIPE,
close_fds=True)
            # subprocess pipes return bytes under Python 3; decode before use
            description = proc.stdout.readline().decode('utf-8')
            release = proc.stdout.readline().decode('utf-8')
description = description.split()
# print description
del description[0]
description = " ".join(description)
self.operatingsystem = description
self.osreportstring = description
release = release.split()
release = release[1]
self.osversion = release
elif os.path.exists('/etc/redhat-release'):
relfile = open('/etc/redhat-release')
release = relfile.read()
relfile.close()
release = release.split()
opsys = ''
for element in release:
if re.search('release', element):
break
else:
opsys = opsys + " " + element
self.operatingsystem = opsys
self.osreportstring = opsys
index = 0
for element in release:
if re.search('release', element):
index = index + 1
osver = release[index]
else:
index = index + 1
self.osversion = osver
elif os.path.exists('/etc/gentoo-release'):
relfile = open('/etc/gentoo-release')
release = relfile.read()
relfile.close()
release = release.split()
opsys = ''
for element in release:
if re.search('release', element):
break
else:
opsys = opsys + " " + element
self.operatingsystem = opsys
self.osreportstring = opsys
index = 0
for element in release:
if re.search('release', element):
index = index + 1
osver = release[index]
else:
index = index + 1
self.osversion = osver
elif os.path.exists('/usr/bin/sw_vers'):
proc1 = subprocess.Popen('/usr/bin/sw_vers -productName',
shell=True, stdout=subprocess.PIPE,
close_fds=True)
            description = proc1.stdout.readline().decode('utf-8')
description = description.strip()
proc2 = subprocess.Popen('/usr/bin/sw_vers -productVersion',
shell=True, stdout=subprocess.PIPE,
close_fds=True)
            release = proc2.stdout.readline().decode('utf-8')
release = release.strip()
self.operatingsystem = description
self.osversion = release
proc3 = subprocess.Popen('/usr/bin/sw_vers -buildVersion',
shell=True, stdout=subprocess.PIPE,
close_fds=True)
            build = proc3.stdout.readline().decode('utf-8')
build = build.strip()
opsys = str(description) + ' ' + str(release) + ' ' + str(build)
self.osreportstring = opsys
def setosfamily(self):
'''Private method to detect and set the self.osfamily property. This is a
fuzzy classification of the OS.
'''
uname = sys.platform
if uname == 'linux2':
self.osfamily = 'linux'
elif uname == 'darwin':
self.osfamily = 'darwin'
elif uname == 'sunos5':
self.osfamily = 'solaris'
elif uname == 'freebsd9':
self.osfamily = 'freebsd'
def guessnetwork(self):
'''This private method checks the configured interfaces and tries to
make an educated guess as to the correct network data. self.ipaddress
and self.macaddress will be updated by this method.
'''
# regex to match mac addresses
macre = '(([0-9A-Fa-f]{2}[:-]){5}[0-9A-Fa-f]{2})'
ipaddress = ''
macaddress = '00:00:00:00:00:00'
hostname = socket.getfqdn()
try:
ipdata = socket.gethostbyname_ex(hostname)
iplist = ipdata[2]
try:
iplist.remove('127.0.0.1')
except (ValueError):
# tried to remove loopback when it's not present, continue
pass
if len(iplist) >= 1:
ipaddress = iplist[0]
else:
ipaddress = '127.0.0.1'
except(socket.gaierror):
# If we're here it's because socket.getfqdn did not in fact return
# a valid hostname and gethostbyname errored.
ipaddress = self.getdefaultip()
# In ifconfig output macaddresses are always one line before the ip
# address.
if sys.platform == 'linux2':
cmd = '/sbin/ifconfig'
elif os.path.exists('/usr/sbin/ifconfig'):
cmd = '/usr/sbin/ifconfig -a'
else:
cmd = '/sbin/ifconfig -a'
proc = subprocess.Popen(cmd, shell=True,
stdout=subprocess.PIPE, close_fds=True)
netdata = proc.stdout.readlines()
for line in netdata:
# print "processing: " + line
match = re.search(macre, line.decode('utf-8'))
if match is not None:
# print 'Matched MAC address'
macaddress = match.group()
if re.search(ipaddress, line.decode('utf-8')):
# print 'Found ipaddress'
break
self.hostname = hostname
self.ipaddress = ipaddress
self.macaddress = macaddress
def getdefaultip(self):
'''This method will return the ip address of the interface
associated with the current default route.
:returns: string - ipaddress
@author: dkennel
'''
ipaddr = '127.0.0.1'
gateway = ''
if sys.platform == 'linux2':
try:
routecmd = subprocess.Popen('/sbin/route -n', shell=True,
stdout=subprocess.PIPE,
close_fds=True)
routedata = routecmd.stdout.readlines()
except(OSError):
return ipaddr
for line in routedata:
if re.search('^default', line.decode('utf-8')):
line = line.split()
try:
gateway = line[1]
except(IndexError):
return ipaddr
else:
try:
if os.path.exists('/usr/sbin/route'):
cmd = '/usr/sbin/route -n get default'
else:
cmd = '/sbin/route -n get default'
routecmd = subprocess.Popen(cmd, shell=True,
stdout=subprocess.PIPE,
close_fds=True)
routedata = routecmd.stdout.readlines()
except(OSError):
return ipaddr
for line in routedata:
if re.search('gateway:', line.decode('utf-8')):
line = line.decode('utf-8').split()
try:
gateway = line[1]
except(IndexError):
return ipaddr
if gateway:
iplist = self.getallips()
for level in [1, 2, 3, 4]:
matched = self.matchip(gateway, iplist, level)
if len(matched) == 1:
ipaddr = matched[0]
break
return ipaddr
def matchip(self, target, iplist, level=1):
        '''This method will, when given an IP, try to find matching ips
        from a list of IP addresses. Matching will work from left to right
        according to the level param. If no match is found
        the loopback address will be returned.
        :param target: string - ip address to match against
        :param iplist: list - ip addresses to search for a match
        :param level: int - number of leading octets that must match
                      (Default value = 1)
:returns: list - ipaddresses
@author: dkennel
'''
quad = target.split('.')
if level == 1:
network = quad[0]
elif level == 2:
network = quad[0] + '.' + quad[1]
elif level == 3:
network = quad[0] + '.' + quad[1] + '.' + quad[2]
elif level == 4:
return ['127.0.0.1']
matchlist = []
for addr in iplist:
if re.search(network, addr.decode('utf-8')):
matchlist.append(addr)
if len(matchlist) == 0:
matchlist.append('127.0.0.1')
return matchlist
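    # Illustrative reading of the level parameter (example values assumed, not
    # from the original source): with level=2 only the first two octets of the
    # target must match, so a gateway of 192.168.10.1 keeps 192.168.10.55 in
    # the candidate list but drops 10.1.2.3.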
def getallips(self):
'''This method returns all ip addresses on all interfaces on the system.
:returns: list of strings
@author: dkennel
'''
iplist = []
if sys.platform == 'linux2':
try:
ifcmd = subprocess.Popen('/sbin/ifconfig', shell=True,
stdout=subprocess.PIPE,
close_fds=True)
ifdata = ifcmd.stdout.readlines()
except(OSError):
return iplist
for line in ifdata:
if re.search('inet addr:', line.decode('utf-8')):
try:
line = line.split()
addr = line[1]
addr = addr.split(':')
addr = addr[1]
iplist.append(addr)
except(IndexError):
continue
else:
try:
if os.path.exists('/usr/sbin/ifconfig'):
cmd = '/usr/sbin/ifconfig -a'
else:
cmd = '/sbin/ifconfig -a'
ifcmd = subprocess.Popen(cmd, shell=True,
stdout=subprocess.PIPE,
close_fds=True)
ifdata = ifcmd.stdout.readlines()
except(OSError):
return iplist
for line in ifdata:
if re.search('inet ', line.decode('utf-8')):
try:
line = line.split()
addr = line[1]
iplist.append(addr)
except(IndexError):
continue
return iplist
def get_property_number(self):
'''Find and return the
Property number of the local machine
@author: scmcleni
@author: D. Kennel
:returns: int
'''
propnum = 0
try:
if os.path.exists('/etc/property-number'):
propertynumberfile = open('/etc/property-number', 'r')
propnum = propertynumberfile.readline()
propnum = propnum.strip()
propertynumberfile.close()
if platform.system() == 'Darwin':
pnfetch = '/usr/sbin/nvram asset_id 2>/dev/null'
cmd = subprocess.Popen(pnfetch, shell=True,
stdout=subprocess.PIPE,
close_fds=True)
cmdout = cmd.stdout.readline()
cmdout = cmdout.split()
try:
propnum = cmdout[1]
except(IndexError, KeyError):
propnum = 0
except:
pass
# Failed to obtain property number
return propnum
def get_system_serial_number(self):
'''Find and return the
Serial number of the local machine
@author: dkennel
:returns: string
'''
systemserial = '0'
if os.path.exists('/usr/sbin/system_profiler'):
profilerfetch = '/usr/sbin/system_profiler SPHardwareDataType'
cmd3 = subprocess.Popen(profilerfetch, shell=True,
stdout=subprocess.PIPE,
close_fds=True)
cmd3output = cmd3.stdout.readlines()
for line in cmd3output:
if re.search('Serial Number (system):', line.decode('utf-8')):
line = line.split(':')
try:
systemserial = line[1]
except(IndexError, KeyError):
pass
systemserial = systemserial.strip()
return systemserial
def get_sys_uuid(self):
'''Find and return a unique identifier for the system. On most systems
this will be the UUID of the system. On Solaris SPARC this will be
a number that is _hopefully_ unique as that platform doesn't have
UUID numbers.
@author: D. Kennel
:returns: string
'''
uuid = '0'
if os.path.exists('/usr/sbin/smbios'):
smbiosfetch = '/usr/sbin/smbios -t SMB_TYPE_SYSTEM 2>/dev/null'
cmd2 = subprocess.Popen(smbiosfetch, shell=True,
stdout=subprocess.PIPE,
close_fds=True)
cmdoutput = cmd2.stdout.readlines()
for line in cmdoutput:
if re.search('UUID:', line.decode('utf-8')):
line = line.split()
try:
uuid = line[1]
except(IndexError, KeyError):
pass
elif os.path.exists('/usr/sbin/system_profiler'):
profilerfetch = '/usr/sbin/system_profiler SPHardwareDataType'
cmd3 = subprocess.Popen(profilerfetch, shell=True,
stdout=subprocess.PIPE,
close_fds=True)
cmd3output = cmd3.stdout.readlines()
for line in cmd3output:
if re.search('UUID:', line.decode('utf-8')):
line = line.split()
try:
uuid = line[2]
except(IndexError, KeyError):
pass
elif platform.system() == 'SunOS':
fetchhostid = '/usr/bin/hostid'
cmd1 = subprocess.Popen(fetchhostid, shell=True,
stdout=subprocess.PIPE,
close_fds=True)
uuid = cmd1.stdout.readline()
uuid = uuid.strip()
return uuid
def ismobile(self):
'''Returns a bool indicating whether or not the system in question is a
        laptop. The ismobile method is used by some rules that have alternate
        settings for laptops.
        @author: dkennel
        @return: bool - true if system is a laptop
'''
ismobile = False
dmitypes = ['LapTop', 'Portable', 'Notebook', 'Hand Held',
'Sub Notebook']
if os.path.exists('/usr/sbin/system_profiler'):
profilerfetch = '/usr/sbin/system_profiler SPHardwareDataType'
cmd3 = subprocess.Popen(profilerfetch, shell=True,
stdout=subprocess.PIPE,
close_fds=True)
cmd3output = cmd3.stdout.readlines()
for line in cmd3output:
if re.search('Book', line.decode('utf-8')):
ismobile = True
break
return ismobile
def issnitchactive(self):
'''Returns a bool indicating whether or not the little snitch program is
active. Little snitch is a firewall utility used on Mac systems and can
interfere with STONIX operations.
@author: ekkehard
:returns: bool - true if little snitch is running
'''
issnitchactive = False
if self.osfamily == 'darwin':
cmd = 'ps axc -o comm | grep lsd'
littlesnitch = 'lsd'
proc = subprocess.Popen(cmd, shell=True,
stdout=subprocess.PIPE, close_fds=True)
netdata = proc.stdout.readlines()
for line in netdata:
print("processing: " + line.decode('utf-8'))
match = re.search(littlesnitch, line.decode('utf-8'))
if match is not None:
print('LittleSnitch Is Running')
issnitchactive = True
break
return issnitchactive
def collectpaths(self):
'''Determine how stonix is run and return appropriate paths for:
icons
rules
conf
logs
@author: Roy Nielsen
'''
script_path_zero = os.path.realpath(sys.argv[0])
try:
script_path_one = os.path.realpath(sys.argv[1])
except:
script_path_one = ""
self.test_mode = False
#####
# Check which argv variable has the script name -- required to allow
# for using the eclipse debugger.
if re.search("stonix.py$", script_path_zero) or re.search("stonix$", script_path_zero):
#####
# Run normally
self.script_path = os.path.dirname(os.path.realpath(sys.argv[0]))
else:
#####
# Run with Eclipse debugger -- Eclipse debugger will never try to run
# the "stonix" binary blob created by pyinstaller, so don't include
# here.
#print "DEBUG: Environment.collectpaths: unexpected argv[0]: " + str(sys.argv[0])
if re.search("stonix.py$", script_path_one) or re.search("stonixtest.py$", script_path_one):
script = script_path_one.split("/")[-1]
script_path = "/".join(script_path_one.split("/")[:-1])
if re.match("^stonixtest.py$", script) and \
os.path.exists(script_path_one) and \
os.path.exists(os.path.join(script_path, "stonixtest.py")) and \
os.path.exists(os.path.join(script_path, "stonix.py")):
self.test_mode = True
self.script_path = os.path.dirname(os.path.realpath(sys.argv[1]))
else:
print("ERROR: Cannot run using this method")
else:
#print "DEBUG: Cannot find appropriate path, building paths for current directory"
self.script_path = os.getcwd()
#####
# Set the rules & stonix_resources paths
if re.search("stonix.app/Contents/MacOS$", self.script_path):
#####
# Find the stonix.conf file in the stonix.app/Contents/Resources
# directory
macospath = self.script_path
self.resources_path = os.path.join(self.script_path,
"stonix_resources")
self.rules_path = os.path.join(self.resources_path,
"rules")
else:
# ##
# create the self.resources_path
self.resources_path = os.path.join(self.script_path,
"stonix_resources")
# ##
# create the self.rules_path
self.rules_path = os.path.join(self.script_path,
"stonix_resources",
"rules")
#####
# Set the log file path
if self.geteuid() == 0:
self.log_path = '/var/log'
else:
userpath = self.geteuidhome()
self.log_path = os.path.join(userpath, '.stonix')
if userpath == '/dev/null':
self.log_path = '/tmp'
#####
# Set the icon path
self.icon_path = os.path.join(self.resources_path, 'gfx')
#####
# Set the configuration file path
if re.search("stonix.app/Contents/MacOS/stonix$", os.path.realpath(sys.argv[0])):
#####
# Find the stonix.conf file in the stonix.app/Contents/Resources
# directory
macospath = self.script_path
parents = macospath.split("/")
parents.pop()
parents.append("Resources")
resources_dir = "/".join(parents)
self.conf_path = os.path.join(resources_dir, "stonix.conf")
elif os.path.exists(os.path.join(self.script_path, "etc", "stonix.conf")):
self.conf_path = os.path.join(self.script_path, "etc", "stonix.conf")
elif re.search('pydev', script_path_zero) and re.search('stonix_resources', script_path_one):
print("INFO: Called by unit test")
srcpath = script_path_one.split('/')[:-2]
srcpath = '/'.join(srcpath)
self.conf_path = os.path.join(srcpath, 'etc', 'stonix.conf')
print((self.conf_path))
else:
self.conf_path = "/etc/stonix.conf"
def get_test_mode(self):
'''Getter test mode flag
@author: Roy Nielsen
'''
return self.test_mode
def get_script_path(self):
'''Getter for the script path
@author: Roy Nielsen
'''
return self.script_path
def get_icon_path(self):
'''Getter for the icon path
@author: Roy Nielsen
'''
return self.icon_path
def get_rules_path(self):
'''Getter for rules path
@author: Roy Nielsen
'''
return self.rules_path
def get_config_path(self):
'''Getter for conf file path
@author: Roy Nielsen
'''
return self.conf_path
def get_log_path(self):
'''Getter for log path
@author: Roy Nielsen
'''
return self.log_path
def get_resources_path(self):
'''Getter for stonix resources directory
@author: Roy Nielsen
'''
return self.resources_path
def getruntime(self):
        '''Getter for the run time recorded for this run.
        :returns: self.runtime
        @author: dkennel
        '''
return self.runtime
def setnumrules(self, num):
'''Set the number of rules that apply to the system. This information is
used by the log dispatcher in the run metadata.
:param num: int - number of rules that apply to this host
@author: dkennel
'''
if type(num) is not int:
raise TypeError('Number of rules must be an integer')
elif num < 0:
raise ValueError('Number of rules must be a positive integer')
else:
self.numrules = num
def getnumrules(self):
        '''Getter for the number of rules that apply to this host.
        :returns: int - self.numrules
        @author: dkennel
        '''
return self.numrules
| gpl-2.0 | 8,765,713,703,017,385,000 | 32.145631 | 104 | 0.499837 | false |
ajbouh/tfi | src/tfi/main.py | 1 | 13541 | #!/usr/bin/env python
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import os
import os.path
import sys
import tempfile
import tfi
import tfi.driver
import tfi.driverconfig
from tfi.resolve.model import _detect_model_file_kind, _model_module_for_kind, _load_model_from_path_fn
from tfi.cli import resolve as _resolve_model
from tfi.tensor.codec import encode as _tfi_tensor_codec_encode
from tfi.format.iterm2 import imgcat as _tfi_format_iterm2_imgcat
def _detect_model_object_kind(model):
klass = model if isinstance(model, type) else type(model)
for c in klass.mro():
if c.__name__ != "Model":
continue
if c.__module__ == "tfi.driver.pytorch":
return "pytorch"
if c.__module__ == "tfi.driver.prophet":
return "prophet"
if c.__module__ == "tfi.driver.tf":
return "tensorflow"
if c.__module__ == "tfi.driver.msp":
return "msp"
if c.__module__ == "tfi.driver.spacy":
return "spacy"
raise Exception("Unknown model type %s" % klass)
def _model_export(path, model):
kind = _detect_model_object_kind(model)
mod = _model_module_for_kind(kind)
return mod.export(path, model)
def _model_publish(f):
from tfi.publish import publish as _publish
kind = _detect_model_file_kind(f)
_publish(kind, f)
class ModelSpecifier(argparse.Action):
def __init__(self,
option_strings,
dest,
**kwargs):
super(ModelSpecifier, self).__init__(
option_strings=option_strings,
dest=dest,
**kwargs)
def __call__(self, parser, namespace, values, option_string=None):
if values is None:
setattr(namespace, self.dest, None)
return
if values:
leading_value, *rest = values
else:
leading_value = None
rest = []
resolution = _resolve_model(leading_value, rest)
setattr(namespace, self.dest, resolution['model'])
setattr(namespace, "%s_module_fn" % self.dest, resolution.get('module_fn', lambda x: None))
setattr(namespace, "%s_can_refresh" % self.dest, resolution.get('can_refresh', None))
setattr(namespace, "%s_refresh_fn" % self.dest, resolution.get('refresh_fn', None))
setattr(namespace, "%s_method_fn" % self.dest, resolution['model_method_fn'])
setattr(namespace, "%s_source" % self.dest, resolution.get('source', None))
setattr(namespace, "%s_source_sha1hex" % self.dest, resolution.get('source_sha1hex', None))
setattr(namespace, "%s_via_python" % self.dest, resolution.get('via_python', None))
setattr(namespace, "%s_raw" % self.dest, resolution.get('leading_value', None))
parser = argparse.ArgumentParser(prog='tfi', add_help=False)
parser.add_argument('--serve', default=False, action='store_true', help='Start REST API on given port')
parser.add_argument('--tracing-host', type=str, default=os.environ.get('JAEGER_HOST', None), help='Jaeger host to submit traces to while serving')
parser.add_argument('--tracing-tags', type=str, default=os.environ.get('JAEGER_TAGS', ''), help='Jaeger tags to include in traces to while serving')
parser.add_argument('--internal-config', type=str, default=os.environ.get("TFI_INTERNAL_CONFIG", ""), help='For internal use.')
parser.add_argument('--publish', default=False, action='store_true', help='Publish model')
parser.add_argument('--bind', type=str, help='Set address:port to serve model on. Default behavior is 127.0.0.1 if available, otherwise 127.0.0.1:0')
parser.add_argument('--bind-default', type=str, default='127.0.0.1:5000')
parser.add_argument('--export', type=str, help='path to export to')
parser.add_argument('--export-doc', type=str, help='path to export doc to')
parser.add_argument('--watch', default=False, action='store_true', help='Watch given model and reload when it changes')
parser.add_argument('--interactive', '-i', default=None, action='store_true', help='Start interactive session')
parser.add_argument('--tf-tensorboard-bind-default', type=str, default='127.0.0.1:6007')
parser.add_argument('--tf-tensorboard-bind', type=str, help='Set address:port to serve TensorBoard on. Default behavior is 127.0.0.1:6007 if available, otherwise 127.0.0.1:0')
parser.add_argument('--tf-logdir',
default=os.path.expanduser('~/.tfi/tf/log/%F_%H-%M-%S/%04i'),
        help='Set TensorFlow log dir to write to. Renders any %% placeholders with strftime, runs TensorBoard from parent dir. %%04i is replaced by a 0-padded run_id count')
parser.add_argument('specifier', type=str, default=None, nargs=argparse.REMAINDER, action=ModelSpecifier, help='fully qualified class name to instantiate')
# TODO(adamb)
# And let's add basic text --doc output.
# Then we'll add support for training a model locally ... (which?)
# Then we'll add support for training a model ELSEWHERE.
def run(argns, remaining_args):
model = None
module = None
exporting = argns.export is not None or argns.export_doc is not None
serving = argns.serve is not False
publishing = argns.publish is not False
batch = False
if argns.interactive is None:
argns.interactive = not batch and not exporting and not serving and not publishing
def tf_make_logdir_fn(datetime):
import re
base_logdir = datetime.strftime(argns.tf_logdir)
def logdir_fn(run_id=None):
if run_id is None:
return re.sub('(%\d*)i', '', base_logdir)
base_logdir_formatstr = re.sub('(%\d*)i', '\\1d', base_logdir)
return base_logdir_formatstr % run_id
return logdir_fn
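    # Illustrative behaviour of the logdir helper above (assuming the default
    # --tf-logdir template): logdir_fn(7) renders the '%04i' placeholder as
    # '%04d' % 7, i.e. a path ending in '/0007', while logdir_fn(None) strips
    # the placeholder entirely.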
import tfi
import tfi.driverconfig
tfi.driverconfig.tf.make_logdir_fn = tf_make_logdir_fn
if argns.specifier:
model = argns.specifier
module = argns.specifier_module_fn()
if argns.specifier_method_fn:
result = argns.specifier_method_fn()
accept_mimetypes = {"image/png": _tfi_format_iterm2_imgcat, "text/plain": lambda x: x}
result_val = _tfi_tensor_codec_encode(accept_mimetypes, result)
if result_val is None:
result_val = result
result_str = '%r\n' % (result_val, )
print(result_str)
batch = True
internal_config = argns.internal_config or (model and _detect_model_object_kind(model))
if internal_config == 'tensorflow':
import tensorflow
tensorboard = internal_config == 'tensorflow' and argns.interactive
if tensorboard:
import tfi.driver.tf.tensorboard_server
import threading
tb_logdir = argns.tf_logdir
while '%' in tb_logdir:
tb_logdir = os.path.dirname(tb_logdir)
if argns.tf_tensorboard_bind:
tb_host, tb_port = argns.tf_tensorboard_bind.split(':', 1)
tb_port = int(tb_port)
else:
tb_host, tb_port = argns.tf_tensorboard_bind_default.split(':', 1)
tb_port = int(tb_port)
import socket
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
try:
s.bind((tb_host, tb_port))
except socket.error as e:
if e.errno == 98:
tb_port = 0
# Use some fancy footwork to delay continuing until TensorBoard has started.
tb_cv = threading.Condition()
def tb_run():
def on_ready_fn(url):
if url:
print('TensorBoard at %s now serving %s' % (url, tb_logdir))
sys.stdout.flush()
with tb_cv:
tb_cv.notify_all()
tfi.driver.tf.tensorboard_server.main(tb_logdir, tb_host=tb_host, tb_port=tb_port, tb_on_ready_fn=on_ready_fn)
with tb_cv:
tb_thread = threading.Thread(target=tb_run, daemon=True)
tb_thread.start()
tb_cv.wait()
if internal_config == 'spacy':
import tfi.driver.spacy
if serving:
segment_js = """
<script>
!function(){var analytics=window.analytics=window.analytics||[];if(!analytics.initialize)if(analytics.invoked)window.console&&console.error&&console.error("Segment snippet included twice.");else{analytics.invoked=!0;analytics.methods=["trackSubmit","trackClick","trackLink","trackForm","pageview","identify","reset","group","track","ready","alias","debug","page","once","off","on"];analytics.factory=function(t){return function(){var e=Array.prototype.slice.call(arguments);e.unshift(t);analytics.push(e);return analytics}};for(var t=0;t<analytics.methods.length;t++){var e=analytics.methods[t];analytics[e]=analytics.factory(e)}analytics.load=function(t){var e=document.createElement("script");e.type="text/javascript";e.async=!0;e.src=("https:"===document.location.protocol?"https://":"http://")+"cdn.segment.com/analytics.js/v1/"+t+"/analytics.min.js";var n=document.getElementsByTagName("script")[0];n.parentNode.insertBefore(e,n)};analytics.SNIPPET_VERSION="4.0.0";
analytics.load("GaappI2dkNZV4PLVdiJ8pHQ7Hofbf6Vz");
analytics.page();
}}();
</script>
"""
segment_js = ""
def on_bind(url):
print("Serving at %s" % url)
tracing_tags = {}
if argns.tracing_tags:
for tag_entry in argns.tracing_tags.split(' '):
tag_k, tag_v = tag_entry.split('=', 1)
tracing_tags[tag_k] = tag_v
if argns.bind:
host, port = argns.bind.split(':')
port = int(port)
else:
host, initial_port = argns.bind_default.split(':')
initial_port = int(initial_port)
port = 0
for possible_port in range(initial_port, initial_port + 32):
import socket
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
try:
s.bind((host, possible_port))
port = possible_port
break
except socket.error as e:
if e.errno == 98 or e.errno == 48:
pass
if model is None:
from tfi.serve import run_deferred as serve_deferred
serve_deferred(
host=host, port=port, on_bind=on_bind,
load_model_from_path_fn=_load_model_from_path_fn,
extra_scripts=segment_js,
jaeger_host=argns.tracing_host,
jaeger_tags=tracing_tags)
else:
from tfi.serve import run as serve
def model_file_fn():
if argns.specifier_source and not argns.specifier_via_python:
return argns.specifier_source
with tempfile.NamedTemporaryFile(mode='rb', delete=False) as f:
print("Exporting ...", end='', flush=True)
_model_export(f.name, model)
print(" done", flush=True)
return f.name
serve(model,
host=host,
port=port,
on_bind=on_bind,
extra_scripts=segment_js,
jaeger_host=argns.tracing_host,
jaeger_tags=tracing_tags,
model_file_fn=model_file_fn)
if argns.watch:
if not argns.specifier_can_refresh:
print("WARN: Can't watch unrefreshable model.")
else:
import tfi.watch
ar = tfi.watch.AutoRefresher()
def do_refresh():
def refresh_progress(model, ix, total):
print("Refreshing %d/%d: %s" % (ix, total, model))
argns.specifier_refresh_fn(refresh_progress)
ar.watch(argns.specifier_source, argns.specifier_source_sha1hex, do_refresh)
ar.start()
if argns.interactive:
from tfi.repl import run as run_repl
run_repl(
globals=globals(),
locals=None,
history_filename=os.path.expanduser('~/.tfihistory'),
model=model,
module=module)
if argns.export_doc:
tfi.doc.save(argns.export_doc, model)
if argns.export:
if argns.specifier_source and not argns.specifier_via_python:
import shutil
shutil.copyfile(argns.specifier_source, argns.export)
else:
_model_export(argns.export, model)
if argns.publish:
if argns.specifier_source and not argns.specifier_via_python:
with open(argns.specifier_source, 'rb') as f:
# TODO(adamb) Should actually autodetect which environment to use.
url = _model_publish(f)
else:
with tempfile.NamedTemporaryFile(mode='rb') as f:
# TODO(adamb) Should actually autodetect which environment to use.
print("Exporting ...", end='', flush=True)
_model_export(f.name, model)
print(" done", flush=True)
url = _model_publish(f)
print(url)
def cli(args):
argns, remaining_args = parser.parse_known_args(args)
argns.load_model_from_path_fn = _load_model_from_path_fn
run(argns, remaining_args)
def main():
cli(sys.argv[1:])
if __name__ == '__main__':
main() | mit | 230,525,589,146,237,150 | 41.990476 | 972 | 0.600251 | false |
kzvyahin/cfme_tests | sprout/appliances/api.py | 1 | 17913 | # -*- coding: utf-8 -*-
import inspect
import json
import re
from celery import chain
from celery.result import AsyncResult
from datetime import datetime
from django.core.exceptions import ObjectDoesNotExist
from django.db import transaction
from django.http import HttpResponse
from django.shortcuts import render
from appliances.models import (
Appliance, AppliancePool, Provider, Group, Template, User, GroupShepherd)
from appliances.tasks import (
appliance_power_on, appliance_power_off, appliance_suspend, appliance_rename,
connect_direct_lun, disconnect_direct_lun, mark_appliance_ready, wait_appliance_ready)
from sprout.log import create_logger
def json_response(data):
return HttpResponse(json.dumps(data), content_type="application/json")
def json_exception(e):
return json_response({
"status": "exception",
"result": {
"class": type(e).__name__,
"message": str(e)
}
})
def json_autherror(message):
return json_response({
"status": "autherror",
"result": {
"message": str(message)
}
})
def json_success(result):
return json_response({
"status": "success",
"result": result
})
class JSONMethod(object):
def __init__(self, method, auth=False):
self._method = method
if self._method.__doc__:
try:
head, body = self._method.__doc__.split("\n\n", 1)
head = head.strip()
self._doc = head
except ValueError:
self._doc = self._method.__doc__.strip()
else:
self._doc = ""
self.auth = auth
@property
def __name__(self):
return self._method.__name__
def __call__(self, *args, **kwargs):
return self._method(*args, **kwargs)
@property
def description(self):
f_args = inspect.getargspec(self._method).args
f_defaults = inspect.getargspec(self._method).defaults
defaults = {}
if f_defaults is not None:
for key, value in zip(f_args[-len(f_defaults):], f_defaults):
defaults[key] = value
return {
"name": self._method.__name__,
"args": f_args if not self.auth else f_args[1:],
"defaults": defaults,
"docstring": self._doc,
"needs_authentication": self.auth,
}
class JSONApi(object):
def __init__(self):
self._methods = {}
def method(self, f):
self._methods[f.__name__] = JSONMethod(f)
def authenticated_method(self, f):
self._methods[f.__name__] = JSONMethod(f, auth=True)
def doc(self, request):
return render(request, 'appliances/apidoc.html', {})
def __call__(self, request):
if request.method != 'POST':
return json_success({
"available_methods": sorted(
map(lambda m: m.description, self._methods.itervalues()),
key=lambda m: m["name"]),
})
try:
data = json.loads(request.body)
method_name = data["method"]
args = data["args"]
kwargs = data["kwargs"]
try:
method = self._methods[method_name]
except KeyError:
raise NameError("Method {} not found!".format(method_name))
create_logger(method).info(
"Calling with parameters {}{}".format(repr(tuple(args)), repr(kwargs)))
if method.auth:
if "auth" in data:
username, password = data["auth"]
try:
user = User.objects.get(username=username)
except ObjectDoesNotExist:
return json_autherror("User {} does not exist!".format(username))
if not user.check_password(password):
return json_autherror("Wrong password for user {}!".format(username))
create_logger(method).info(
"Called by user {}/{}".format(user.id, user.username))
return json_success(method(user, *args, **kwargs))
else:
return json_autherror("Method {} needs authentication!".format(method_name))
else:
return json_success(method(*args, **kwargs))
except Exception as e:
create_logger(method).error(
"Exception raised during call: {}: {}".format(type(e).__name__, str(e)))
return json_exception(e)
else:
create_logger(method).info("Call finished")
jsonapi = JSONApi()
def jsonapi_doc(*args, **kwargs):
return jsonapi.doc(*args, **kwargs)
@jsonapi.method
def has_template(template_name, preconfigured):
"""Check if Sprout tracks a template with a particular name.
Can check both fresh templates and preconfigured ones. It will only take the ones that are:
* Ready
* Existing
* Usable
Args:
template_name: Name of the *original* template.
preconfigured: Whether to check the fresh templates or preconfigured ones.
"""
query = Template.objects.filter(
ready=True, exists=True, usable=True, preconfigured=bool(preconfigured),
original_name=template_name)
return query.count() > 0
@jsonapi.method
def list_appliances(used=False):
"""Returns list of appliances.
Args:
used: Whether to report used or unused appliances
"""
query = Appliance.objects
if used:
query = query.exclude(appliance_pool__owner=None)
else:
query = query.filter(appliance_pool__owner=None)
result = []
for appliance in query:
result.append(appliance.serialized)
return result
@jsonapi.authenticated_method
def num_shepherd_appliances(user, group, version=None, date=None, provider=None):
"""Provides number of currently available shepherd appliances."""
group = Group.objects.get(id=group)
if provider is not None:
provider = Provider.objects.get(id=provider)
if version is None:
if provider is None:
try:
version = Template.get_versions(template_group=group)[0]
except IndexError:
# No version
pass
else:
try:
version = Template.get_versions(template_group=group, provider=provider)[0]
except IndexError:
# No version
pass
if date is None:
filter_kwargs = {"template_group": group}
if provider is not None:
filter_kwargs["provider"] = provider
if version is not None:
filter_kwargs["version"] = version
try:
date = Template.get_dates(**filter_kwargs)[0]
except IndexError:
# No date
pass
filter_kwargs = {"template__template_group": group, "ready": True, "appliance_pool": None}
if version is not None:
filter_kwargs["template__version"] = version
if date is not None:
filter_kwargs["template__date"] = date
if provider is not None:
filter_kwargs["template__provider"] = provider
return len(Appliance.objects.filter(**filter_kwargs))
@jsonapi.authenticated_method
def request_appliances(
user, group, count=1, lease_time=60, version=None, date=None, provider=None,
preconfigured=True, yum_update=False, container=False):
"""Request a number of appliances."""
if date:
date = datetime.strptime(date, "%y%m%d")
return AppliancePool.create(
user, group, version, date, provider, count, lease_time, preconfigured, yum_update,
container).id
@jsonapi.authenticated_method
def request_check(user, request_id):
"""Return status of the appliance pool"""
request = AppliancePool.objects.get(id=request_id)
if user != request.owner and not user.is_staff:
raise Exception("This pool belongs to a different user!")
return {
"fulfilled": request.fulfilled,
"finished": request.finished,
"preconfigured": request.preconfigured,
"yum_update": request.yum_update,
"progress": int(round(request.percent_finished * 100)),
"appliances": [
appliance.serialized
for appliance
in request.appliances
],
}
@jsonapi.authenticated_method
def prolong_appliance_lease(user, id, minutes=60):
"""Prolongs the appliance's lease time by specified amount of minutes from current time."""
appliance = Appliance.objects.get(id=id)
if appliance.owner is not None and user != appliance.owner and not user.is_staff:
raise Exception("This pool belongs to a different user!")
appliance.prolong_lease(time=minutes)
@jsonapi.authenticated_method
def prolong_appliance_pool_lease(user, id, minutes=60):
"""Prolongs the appliance pool's lease time by specified amount of minutes from current time."""
pool = AppliancePool.objects.get(id=id)
if user != pool.owner and not user.is_staff:
raise Exception("This pool belongs to a different user!")
pool.prolong_lease(time=minutes)
@jsonapi.authenticated_method
def destroy_pool(user, id):
"""Destroy the pool. Kills all associated appliances."""
pool = AppliancePool.objects.get(id=id)
if user != pool.owner and not user.is_staff:
raise Exception("This pool belongs to a different user!")
pool.kill()
@jsonapi.method
def pool_exists(id):
"""Check whether pool does exist"""
try:
AppliancePool.objects.get(id=id)
return True
except ObjectDoesNotExist:
return False
@jsonapi.authenticated_method
def get_number_free_appliances(user, group):
"""Get number of available appliances to keep in the pool"""
with transaction.atomic():
g = Group.objects.get(id=group)
return {
sg.user_group.name: sg.template_pool_size
for sg in
GroupShepherd.objects.filter(user_group__in=user.groups.all(), template_group=g)}
@jsonapi.authenticated_method
def set_number_free_appliances(user, group, n):
"""Set number of available appliances to keep in the pool"""
if not user.is_staff:
raise Exception("You don't have enough rights!")
if n < 0:
return False
with transaction.atomic():
g = Group.objects.get(id=group)
g.template_pool_size = n
g.save()
return True
@jsonapi.method
def available_cfme_versions(preconfigured=True):
"""Lists all versions that are available"""
return Template.get_versions(preconfigured=preconfigured)
@jsonapi.method
def available_groups():
return map(lambda group: group.id, Group.objects.all())
@jsonapi.method
def available_providers():
return map(lambda group: group.id, Provider.objects.all())
@jsonapi.authenticated_method
def add_provider(user, provider_key):
if not user.is_staff:
raise Exception("You don't have enough rights!")
try:
provider_o = Provider.objects.get(id=provider_key)
return False
except ObjectDoesNotExist:
provider_o = Provider(id=provider_key)
provider_o.save()
return True
def get_appliance(appliance, user=None):
"""'Multimethod' that receives an object and tries to guess by what field the appliance
    should be retrieved. Then it retrieves the appliance."""
if isinstance(appliance, int):
appliance = Appliance.objects.get(id=appliance)
elif re.match(r"^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$", appliance) is not None:
appliance = Appliance.objects.get(ip_address=appliance)
else:
appliance = Appliance.objects.get(name=appliance)
if user is None:
return appliance
else:
if appliance.owner is None:
if not user.is_staff:
raise Exception("Only staff can operate with nonowned appliances")
elif appliance.owner != user:
raise Exception("This appliance belongs to a different user!")
return appliance
@jsonapi.authenticated_method
def appliance_data(user, appliance):
"""Returns data about the appliance serialized as JSON.
You can specify appliance by IP address, id or name.
"""
appliance = get_appliance(appliance, user)
return appliance.serialized
@jsonapi.authenticated_method
def destroy_appliance(user, appliance):
"""Destroy the appliance. If the kill task was called, id is returned, otherwise None
You can specify appliance by IP address, id or name.
"""
appliance = get_appliance(appliance, user)
try:
return Appliance.kill(appliance).task_id
except AttributeError: # None was returned
return None
@jsonapi.method
def power_state(appliance):
"""Return appliance's current power state.
You can specify appliance by IP address, id or name.
"""
return get_appliance(appliance).power_state
@jsonapi.authenticated_method
def power_on(user, appliance, wait_ready=True):
"""Power on the appliance. If task is called, an id is returned, otherwise None.
You can specify appliance by IP address, id or name.
"""
appliance = get_appliance(appliance, user)
if appliance.power_state != Appliance.Power.ON:
tasks = [appliance_power_on.si(appliance.id)]
if wait_ready:
tasks.append(wait_appliance_ready.si(appliance.id))
else:
tasks.append(mark_appliance_ready.si(appliance.id))
return chain(*tasks)().task_id
@jsonapi.authenticated_method
def power_off(user, appliance):
"""Power off the appliance. If task is called, an id is returned, otherwise None.
You can specify appliance by IP address, id or name.
"""
appliance = get_appliance(appliance, user)
if appliance.power_state != Appliance.Power.OFF:
return appliance_power_off.delay(appliance.id).task_id
@jsonapi.authenticated_method
def suspend(user, appliance):
"""Suspend the appliance. If task is called, an id is returned, otherwise None.
You can specify appliance by IP address, id or name.
"""
appliance = get_appliance(appliance, user)
if appliance.power_state == Appliance.Power.OFF:
return False
elif appliance.power_state != Appliance.Power.SUSPENDED:
return appliance_suspend.delay(appliance.id).task_id
@jsonapi.authenticated_method
def set_pool_description(user, pool_id, description):
"""Set the pool's description"""
pool = AppliancePool.objects.get(id=pool_id)
if pool.owner is None:
if not user.is_staff:
raise Exception("Only staff can operate with nonowned appliances")
elif pool.owner != user:
raise Exception("This appliance belongs to a different user!")
pool.description = description
pool.save()
return True
@jsonapi.authenticated_method
def get_pool_description(user, pool_id):
"""Get the pool's description"""
pool = AppliancePool.objects.get(id=pool_id)
if pool.owner is None:
if not user.is_staff:
raise Exception("Only staff can operate with nonowned appliances")
elif pool.owner != user:
raise Exception("This appliance belongs to a different user!")
return pool.description
@jsonapi.authenticated_method
def find_pools_by_description(user, description, partial=False):
"""Searches pools to find a pool with matching descriptions. When partial, `in` is used"""
pools = []
for pool in AppliancePool.objects.all():
if not pool.description:
continue
if partial:
if description in pool.description:
pools.append(pool)
else:
if pool.description == description:
pools.append(pool)
def _filter(pool):
return (pool.owner is None and user.is_staff) or (pool.owner == user)
return map(lambda pool: pool.id, filter(_filter, pools))
@jsonapi.authenticated_method
def rename_appliance(user, appliance, new_name):
"""Rename the appliance. Returns task id.
You can specify appliance by IP address, id or name.
"""
appliance = get_appliance(appliance, user)
return appliance_rename.delay(appliance.id, new_name).task_id
@jsonapi.method
def task_finished(task_id):
"""Returns whether specified task has already finished"""
result = AsyncResult(task_id)
return result.ready()
@jsonapi.method
def task_result(task_id):
"""Returns result of the task. Returns None if no result yet"""
result = AsyncResult(task_id)
if not result.ready():
return None
return result.get(timeout=1)
@jsonapi.authenticated_method
def appliance_provider_type(user, appliance):
"""Return appliance's provider class.
Corresponds to the mgmtsystem class names.
You can specify appliance by IP address, id or name.
"""
api_class = type(get_appliance(appliance, user).provider_api)
return api_class.__name__
@jsonapi.authenticated_method
def appliance_provider_key(user, appliance):
"""Return appliance's provider key.
You can specify appliance by IP address, id or name.
"""
return get_appliance(appliance, user).provider.id
@jsonapi.authenticated_method
def appliance_connect_direct_lun(user, appliance):
"""Connects direct LUN disk to the appliance (RHEV only).
You can specify appliance by IP address, id or name.
"""
appliance = get_appliance(appliance, user)
return connect_direct_lun(appliance.id).task_id
@jsonapi.authenticated_method
def appliance_disconnect_direct_lun(user, appliance):
"""Disconnects direct LUN disk from the appliance (RHEV only).
You can specify appliance by IP address, id or name.
"""
appliance = get_appliance(appliance, user)
return disconnect_direct_lun(appliance.id).task_id
| gpl-2.0 | -574,617,057,527,283,840 | 31.333935 | 100 | 0.639089 | false |
abramconnelly/genevieve | file_process/tasks.py | 1 | 5314 | """Tasks for analyzing genome/genetic data files"""
# absolute_import prevents conflicts between project celery.py file
# and the celery package.
from __future__ import absolute_import
import bz2
import csv
import gzip
import os
from random import randint
from celery import shared_task
from django.conf import settings
from django.core.files import File
from genomes.models import GenomeAnalysis, GenomeAnalysisVariant
from variants.models import Variant, ClinVarRecord
from .utils import vcf_parsing_tools as vcftools
from .utils.twentythree_and_me import (api23andme_full_gen_data,
api23andme_full_gen_infer_sex,
api23andme_to_vcf)
from .utils.cgivar_to_vcf import convert as convert_cgivar_to_vcf
CLINVAR_FILENAME = "clinvar-latest.vcf"
@shared_task
def analyze_23andme_from_api(access_token, profile_id, user):
genome_data = api23andme_full_gen_data(access_token, profile_id)
sex = api23andme_full_gen_infer_sex(genome_data)
vcf_data = api23andme_to_vcf(genome_data, sex)
targetdir = '/tmp'
filename = '23andme-api-' + profile_id + '.vcf.gz'
if os.path.exists(os.path.join(targetdir, filename)):
inc = 2
while os.path.exists(os.path.join(targetdir, filename)):
filename = '23andme-api-' + profile_id + '-' + str(inc) + '.vcf.gz'
inc += 1
filepath = os.path.join(targetdir, filename)
output_file = gzip.open(filepath, mode='wb')
output_file.writelines(vcf_data)
# Close to ensure it's *really* closed before using File.
output_file.close()
# Reopen as binary so we don't lose compression.
vcf_file = open(filepath)
django_file = File(vcf_file)
new_analysis = GenomeAnalysis(uploadfile=django_file,
user=user, name=filename)
new_analysis.save()
vcf_file.close()
os.remove(filepath)
read_input_genome(analysis_in=new_analysis, genome_format='vcf')
@shared_task
def read_input_genome(analysis_in, genome_format='vcf'):
"""Read genome, VCF or Complete Genomics, and match against ClinVar"""
name = os.path.basename(analysis_in.uploadfile.path)
print genome_format
if genome_format == 'cgivar':
print "Treating as CGI var to be translated"
genome_file = convert_cgivar_to_vcf(
analysis_in.uploadfile.path,
os.path.join(settings.DATA_FILE_ROOT, 'hg19.2bit'))
elif name.endswith('.gz'):
print "reading directly as gzip"
genome_file = gzip.open(analysis_in.uploadfile.path, 'rb')
elif name.endswith('.bz2'):
print 'reading directly as bz2'
genome_file = bz2.BZ2File(analysis_in.uploadfile.path, 'rb')
# GzipFile(mode='rb', compresslevel=9,
# fileobj=analysis_in.uploadfile)
read_vcf(analysis_in, genome_file)
@shared_task
def read_vcf(analysis_in, genome_file):
"""Takes two .vcf files and returns matches"""
clinvar_filepath = os.path.join(settings.DATA_FILE_ROOT, CLINVAR_FILENAME)
clin_file = open(clinvar_filepath, 'r')
# Creates a tmp file to write the .csv
tmp_output_file_path = os.path.join(
'/tmp', 'django_celery_fileprocess-' +
str(randint(10000000, 99999999)) + '-' +
os.path.basename(analysis_in.uploadfile.path))
tmp_output_file = open(tmp_output_file_path, 'w')
csv_out = csv.writer(tmp_output_file)
header = ("Chromosome", "Position", "Name", "Significance", "Frequency",
"Zygosity", "ACC URL")
csv_out.writerow(header)
matched_variants = vcftools.match_to_clinvar(genome_file, clin_file)
for var in matched_variants:
print var
chrom = var[0]
pos = var[1]
ref_allele = var[2]
alt_allele = var[3]
name_acc = var[4]
freq = var[5]
zygosity = var[6]
variant, _ = Variant.objects.get_or_create(chrom=chrom,
pos=pos,
ref_allele=ref_allele,
alt_allele=alt_allele)
if not variant.freq:
variant.freq = freq
variant.save()
genomeanalysisvariant = GenomeAnalysisVariant.objects.create(
genomeanalysis=analysis_in, variant=variant, zyg=zygosity)
genomeanalysisvariant.save()
for spec in name_acc:
# for online report
url = "http://www.ncbi.nlm.nih.gov/clinvar/" + str(spec[0])
name = spec[1]
clnsig = spec[2]
record, _ = ClinVarRecord.objects.get_or_create(
accnum=spec[0], variant=variant, condition=name, clnsig=clnsig)
record.save()
# analysis_in.variants.add(variant)
# for CSV output
data = (chrom, pos, name, clnsig, freq, zygosity, url)
csv_out.writerow(data)
# closes the tmp file
tmp_output_file.close()
# opens the tmp file and creates an output processed file"
csv_filename = os.path.basename(analysis_in.uploadfile.path) + '.csv'
with open(tmp_output_file_path, 'rb') as file_out:
output_file = File(file_out)
analysis_in.processedfile.save(csv_filename, output_file)
| mit | 4,513,550,672,840,500,000 | 37.230216 | 79 | 0.621754 | false |
centricular/meson | tools/cmake2meson.py | 1 | 10941 | #!/usr/bin/env python3
# Copyright 2014 Jussi Pakkanen
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys, os
import re
class Token:
def __init__(self, tid, value):
self.tid = tid
self.value = value
self.lineno = 0
self.colno = 0
class Statement():
def __init__(self, name, args):
self.name = name
self.args = args
class Lexer:
def __init__(self):
self.token_specification = [
# Need to be sorted longest to shortest.
('ignore', re.compile(r'[ \t]')),
('string', re.compile(r'"([^\\]|(\\.))*?"', re.M)),
('varexp', re.compile(r'\${[-_0-9a-z/A-Z.]+}')),
('id', re.compile('''[,-><${}=+_0-9a-z/A-Z|@.*]+''')),
('eol', re.compile(r'\n')),
('comment', re.compile(r'\#.*')),
('lparen', re.compile(r'\(')),
('rparen', re.compile(r'\)')),
]
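    # Illustrative tokenisation (hypothetical input): lexing "add_executable(foo foo.c)"
    # yields id('add_executable'), lparen, id('foo'), id('foo.c'), rparen; spaces are
    # consumed by the 'ignore' rule.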
def lex(self, code):
lineno = 1
line_start = 0
        loc = 0
col = 0
while(loc < len(code)):
matched = False
for (tid, reg) in self.token_specification:
mo = reg.match(code, loc)
if mo:
col = mo.start()-line_start
matched = True
loc = mo.end()
match_text = mo.group()
if tid == 'ignore':
continue
if tid == 'comment':
yield(Token('comment', match_text))
elif tid == 'lparen':
yield(Token('lparen', '('))
elif tid == 'rparen':
yield(Token('rparen', ')'))
elif tid == 'string':
yield(Token('string', match_text[1:-1]))
elif tid == 'id':
yield(Token('id', match_text))
elif tid == 'eol':
#yield('eol')
lineno += 1
col = 1
line_start = mo.end()
pass
elif tid == 'varexp':
yield(Token('varexp', match_text[2:-1]))
else:
raise RuntimeError('Wharrgarbl')
break
if not matched:
raise RuntimeError('Lexer got confused line %d column %d' % (lineno, col))
class Parser():
def __init__(self, code):
self.stream = Lexer().lex(code)
self.getsym()
def getsym(self):
try:
self.current = next(self.stream)
except StopIteration:
self.current = Token('eof', '')
def accept(self, s):
if self.current.tid == s:
self.getsym()
return True
return False
def expect(self, s):
if self.accept(s):
return True
raise RuntimeError('Expecting %s got %s.' % (s, self.current.tid), self.current.lineno, self.current.colno)
def statement(self):
cur = self.current
if self.accept('comment'):
return Statement('_', [cur.value])
self.accept('id')
self.expect('lparen')
args = self.arguments()
self.expect('rparen')
return Statement(cur.value, args)
def arguments(self):
args = []
if self.accept('lparen'):
args.append(self.arguments())
self.expect('rparen')
arg = self.current
if self.accept('string') or self.accept('varexp') or\
self.accept('id'):
args.append(arg)
rest = self.arguments()
args += rest
return args
def parse(self):
while not self.accept('eof'):
yield(self.statement())
class Converter:
ignored_funcs = {'cmake_minimum_required' : True,
'enable_testing' : True,
'include' : True}
def __init__(self, cmake_root):
self.cmake_root = cmake_root
self.indent_unit = ' '
self.indent_level = 0
self.options = []
def convert_args(self, args, as_array=True):
res = []
if as_array:
start = '['
end = ']'
else:
start = ''
end = ''
for i in args:
if i.tid == 'id':
res.append("'%s'" % i.value)
elif i.tid == 'varexp':
res.append('%s' % i.value)
elif i.tid == 'string':
res.append("'%s'" % i.value)
else:
print(i)
raise RuntimeError('Unknown arg type.')
if len(res) > 1:
return start + ', '.join(res) + end
if len(res) == 1:
return res[0]
return ''
def write_entry(self, outfile, t):
if t.name in Converter.ignored_funcs:
return
preincrement = 0
postincrement = 0
if t.name == '_':
line = t.args[0]
elif t.name == 'add_subdirectory':
line = "subdir('" + t.args[0].value + "')"
elif t.name == 'pkg_search_module' or t.name == 'pkg_search_modules':
varname = t.args[0].value.lower()
mods = ["dependency('%s')" % i.value for i in t.args[1:]]
if len(mods) == 1:
line = '%s = %s' % (varname, mods[0])
else:
line = '%s = [%s]' % (varname, ', '.join(["'%s'" % i for i in mods]))
elif t.name == 'find_package':
line = "%s_dep = dependency('%s')" % (t.args[0].value, t.args[0].value)
elif t.name == 'find_library':
line = "%s = find_library('%s')" % (t.args[0].value.lower(), t.args[0].value)
elif t.name == 'add_executable':
line = '%s_exe = executable(%s)' % (t.args[0].value, self.convert_args(t.args, False))
elif t.name == 'add_library':
if t.args[1].value == 'SHARED':
libcmd = 'shared_library'
args = [t.args[0]] + t.args[2:]
elif t.args[1].value == 'STATIC':
libcmd = 'static_library'
args = [t.args[0]] + t.args[2:]
else:
libcmd = 'static_library'
args = t.args
line = '%s_lib = %s(%s)' % (t.args[0].value, libcmd, self.convert_args(args, False))
elif t.name == 'add_test':
line = 'test(%s)' % self.convert_args(t.args, False)
elif t.name == 'option':
optname = t.args[0].value
description = t.args[1].value
if len(t.args) > 2:
default = t.args[2].value
else:
default = None
self.options.append((optname, description, default))
return
elif t.name == 'project':
pname = t.args[0].value
args = [pname]
for l in t.args[1:]:
l = l.value.lower()
if l == 'cxx':
l = 'cpp'
args.append(l)
args = ["'%s'" % i for i in args]
line = 'project(' + ', '.join(args) + ')'
elif t.name == 'set':
varname = t.args[0].value.lower()
line = '%s = %s\n' % (varname, self.convert_args(t.args[1:]))
elif t.name == 'if':
postincrement = 1
line = 'if %s' % self.convert_args(t.args, False)
elif t.name == 'elseif':
preincrement = -1
postincrement = 1
line = 'elif %s' % self.convert_args(t.args, False)
elif t.name == 'else':
preincrement = -1
postincrement = 1
line = 'else'
elif t.name == 'endif':
preincrement = -1
line = 'endif'
else:
line = '''# %s(%s)''' % (t.name, self.convert_args(t.args))
self.indent_level += preincrement
indent = self.indent_level*self.indent_unit
outfile.write(indent)
outfile.write(line)
if not(line.endswith('\n')):
outfile.write('\n')
self.indent_level += postincrement
def convert(self, subdir=''):
if subdir == '':
subdir = self.cmake_root
cfile = os.path.join(subdir, 'CMakeLists.txt')
try:
with open(cfile) as f:
cmakecode = f.read()
except FileNotFoundError:
print('\nWarning: No CMakeLists.txt in', subdir, '\n')
return
p = Parser(cmakecode)
with open(os.path.join(subdir, 'meson.build'), 'w') as outfile:
for t in p.parse():
if t.name == 'add_subdirectory':
# print('\nRecursing to subdir',
# os.path.join(self.cmake_root, t.args[0].value),
# '\n')
self.convert(os.path.join(subdir, t.args[0].value))
# print('\nReturning to', self.cmake_root, '\n')
self.write_entry(outfile, t)
if subdir == self.cmake_root and len(self.options) > 0:
self.write_options()
def write_options(self):
filename = os.path.join(self.cmake_root, 'meson_options.txt')
with open(filename, 'w') as optfile:
for o in self.options:
(optname, description, default) = o
if default is None:
defaultstr = ''
else:
if default == 'OFF':
typestr = ' type : \'boolean\','
default = 'false'
elif default == 'ON':
default = 'true'
typestr = ' type : \'boolean\','
else:
typestr = ' type : \'string\','
defaultstr = ' value : %s,' % default
line = "option(%r,%s%s description : '%s')\n" % (optname,
typestr,
defaultstr,
description)
optfile.write(line)
if __name__ == '__main__':
if len(sys.argv) != 2:
print(sys.argv[0], '<CMake project root>')
sys.exit(1)
c = Converter(sys.argv[1])
c.convert()
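# Usage sketch (matching the argument check above): run this script with the root
# of a CMake project; a meson.build is written next to each CMakeLists.txt found,
# plus meson_options.txt at the root when option() calls were seen.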
| apache-2.0 | 8,318,193,074,904,967,000 | 35.348837 | 115 | 0.452792 | false |
adyliu/mysql-connector-python | python2/tests/test_examples.py | 1 | 6610 | # -*- coding: utf-8 -*-
# MySQL Connector/Python - MySQL driver written in Python.
# Copyright (c) 2009, 2013, Oracle and/or its affiliates. All rights reserved.
# MySQL Connector/Python is licensed under the terms of the GPLv2
# <http://www.gnu.org/licenses/old-licenses/gpl-2.0.html>, like most
# MySQL Connectors. There are special exceptions to the terms and
# conditions of the GPLv2 as it is applied to this software, see the
# FOSS License Exception
# <http://www.mysql.com/about/legal/licensing/foss-exception.html>.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
"""Unittests for examples
"""
import sys
import logging
import mysql.connector
import tests
logger = logging.getLogger(tests.LOGGER_NAME)
class TestExamples(tests.MySQLConnectorTests):
def setUp(self):
config = self.getMySQLConfig()
self.cnx = mysql.connector.connect(**config)
def tearDown(self):
self.cnx.close()
def _exec_main(self, example):
try:
return example.main(self.getMySQLConfig())
except StandardError as e:
self.fail(e)
def test_dates(self):
"""examples/dates.py"""
try:
import examples.dates as example
except StandardError as e:
self.fail(e)
output = example.main(self.getMySQLConfig())
exp = [' 1 | 1977-06-14 | 1977-06-14 21:10:00 | 21:10:00 |',
' 2 | None | None | 0:00:00 |',
' 3 | None | None | 0:00:00 |']
self.assertEqual(output, exp)
example.DATA.append(('0000-00-00',None,'00:00:00'),)
self.assertRaises(mysql.connector.errors.IntegrityError,
example.main, self.getMySQLConfig())
def test_engines(self):
"""examples/engines.py"""
try:
import examples.engines as example
except:
self.fail()
output = self._exec_main(example)
# Can't check output as it might be different per MySQL instance
# We check only if MyISAM is present
found = False
for s in output:
if s.find('MyISAM') > -1:
found = True
break
self.assertTrue(found,'MyISAM engine not found in output')
def test_inserts(self):
"""examples/inserts.py"""
try:
import examples.inserts as example
except StandardError as e:
self.fail(e)
output = self._exec_main(example)
exp = [u'1 | Geert | 30\nInfo: c..\n',
u'2 | Jan | 30\nInfo: c..\n', u'3 | Michel | 30\nInfo: c..\n']
self.assertEqual(output,exp,'Output was not correct')
def test_transactions(self):
"""examples/transactions.py"""
db = mysql.connector.connect(**self.getMySQLConfig())
r = self.haveEngine(db,'InnoDB')
db.close()
if not r:
return
try:
import examples.transaction as example
except StandardError as e:
self.fail(e)
output = self._exec_main(example)
exp = ['Inserting data', 'Rolling back transaction',
'No data, all is fine.', 'Data before commit:',
u'4 | Geert', u'5 | Jan', u'6 | Michel', 'Data after commit:',
u'4 | Geert', u'5 | Jan', u'6 | Michel']
self.assertEqual(output,exp,'Output was not correct')
def test_unicode(self):
"""examples/unicode.py"""
try:
import examples.unicode as example
except StandardError as e:
self.fail(e)
output = self._exec_main(example)
exp = ['Unicode string: \xc2\xbfHabla espa\xc3\xb1ol?',
'Unicode string coming from db: \xc2\xbfHabla espa\xc3\xb1ol?']
self.assertEqual(output,exp,'Output was not correct')
def test_warnings(self):
"""examples/warnings.py"""
try:
import examples.warnings as example
except StandardError as e:
self.fail(e)
output = self._exec_main(example)
exp = ["Executing 'SELECT 'abc'+1'",
u"1292: Truncated incorrect DOUBLE value: 'abc'"]
self.assertEqual(output,exp,'Output was not correct')
example.STMT = "SELECT 'abc'"
self.assertRaises(StandardError, example.main, self.getMySQLConfig())
def test_multi_resultsets(self):
"""examples/multi_resultsets.py"""
try:
import examples.multi_resultsets as example
except StandardError as e:
self.fail(e)
output = self._exec_main(example)
exp = ['Inserted 1 row', 'Number of rows: 1', 'Inserted 2 rows',
u'Names in table: Geert Jan Michel']
self.assertEqual(output,exp,'Output was not correct')
def test_microseconds(self):
"""examples/microseconds.py"""
try:
import examples.microseconds as example
except StandardError as e:
self.fail(e)
output = self._exec_main(example)
if self.cnx.get_server_version() < (5,6,4):
exp = "does not support fractional precision for timestamps."
self.assertTrue(output[0].endswith(exp))
else:
exp = [
' 1 | 1 | 0:00:47.510000',
' 1 | 2 | 0:00:47.020000',
' 1 | 3 | 0:00:47.650000',
' 1 | 4 | 0:00:46.060000',
]
self.assertEqual(output, exp, 'Output was not correct')
def test_prepared_statements(self):
"""examples/prepared_statements.py"""
try:
import examples.prepared_statements as example
except StandardError as e:
self.fail(e)
output = self._exec_main(example)
exp = [
'Inserted data',
'1 | Geert',
'2 | Jan',
'3 | Michel',
]
self.assertEqual(output, exp, 'Output was not correct')
| gpl-2.0 | -8,095,006,123,662,199,000 | 34.72973 | 78 | 0.579879 | false |
clearcare/cc_dynamodb | tests/conftest.py | 1 | 1033 | from decimal import Decimal
import os.path
import pytest
AWS_DYNAMODB_CONFIG_PATH = os.path.join(os.path.dirname(__file__), 'dynamodb.yml')
@pytest.fixture
def fake_config():
import cc_dynamodb
cc_dynamodb.set_config(
table_config=AWS_DYNAMODB_CONFIG_PATH,
aws_access_key_id='<KEY>',
aws_secret_access_key='<SECRET>',
namespace='dev_')
DYNAMODB_FIXTURES = {
'nps_survey': [
{
'agency_id': Decimal('1669'),
'change': "I can't think of any...",
'comments': 'No comment',
'created': '2014-12-19T22:10:42.705243+00:00',
'favorite': 'I like all of ClearCare!',
'profile_id': Decimal('2616346'),
'recommend_score': '9'
},
{
'agency_id': Decimal('1669'),
'change': 'Most of the features, please',
'created': '2014-12-19T22:10:42.705243+00:00',
'profile_id': Decimal('2616347'),
'recommend_score': '3'
},
],
}
| mit | 1,849,978,970,764,546,300 | 25.487179 | 82 | 0.53243 | false |
felixbb/forseti-security | google/cloud/security/scanner/audit/buckets_rules_engine.py | 1 | 8802 |
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Rules engine for Bucket acls"""
from collections import namedtuple
import itertools
import re
# pylint: disable=line-too-long
from google.cloud.security.common.gcp_type import bucket_access_controls as bkt_acls
# pylint: enable=line-too-long
from google.cloud.security.common.util import log_util
from google.cloud.security.scanner.audit import base_rules_engine as bre
from google.cloud.security.scanner.audit import errors as audit_errors
LOGGER = log_util.get_logger(__name__)
# TODO: move this to utils since it's used in more than one engine
def escape_and_globify(pattern_string):
"""Given a pattern string with a glob, create actual regex pattern.
To require > 0 length glob, change the "*" to ".+". This is to handle
strings like "*@company.com". (THe actual regex would probably be
".*@company.com", except that we don't want to match zero-length
usernames before the "@".)
Args:
pattern_string: The pattern string of which to make a regex.
Returns:
The pattern string, escaped except for the "*", which is
transformed into ".+" (match on one or more characters).
"""
return '^{}$'.format(re.escape(pattern_string).replace('\\*', '.+'))
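# Illustrative: escape_and_globify('*@company.com') produces a pattern equivalent to
# '^.+@company\.com$' (the exact escaping of the literal part depends on re.escape),
# so it matches any non-empty username followed by '@company.com'.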
class BucketsRulesEngine(bre.BaseRulesEngine):
"""Rules engine for bucket acls"""
def __init__(self, rules_file_path):
"""Initialize.
Args:
rules_file_path: file location of rules
"""
super(BucketsRulesEngine,
self).__init__(rules_file_path=rules_file_path)
self.rule_book = None
def build_rule_book(self):
"""Build BucketsRuleBook from the rules definition file."""
self.rule_book = BucketsRuleBook(self._load_rule_definitions())
# pylint: disable=arguments-differ
def find_policy_violations(self, buckets_acls,
force_rebuild=False):
"""Determine whether bucket acls violates rules."""
violations = itertools.chain()
if self.rule_book is None or force_rebuild:
self.build_rule_book()
resource_rules = self.rule_book.get_resource_rules()
for rule in resource_rules:
violations = itertools.chain(violations,
rule.\
find_policy_violations(buckets_acls))
return violations
def add_rules(self, rules):
"""Add rules to the rule book."""
if self.rule_book is not None:
self.rule_book.add_rules(rules)
class BucketsRuleBook(bre.BaseRuleBook):
"""The RuleBook for bucket acls resources."""
def __init__(self, rule_defs=None):
"""Initialization.
Args:
            rule_defs: rule definitions
"""
super(BucketsRuleBook, self).__init__()
self.resource_rules_map = {}
if not rule_defs:
self.rule_defs = {}
else:
self.rule_defs = rule_defs
self.add_rules(rule_defs)
def add_rules(self, rule_defs):
"""Add rules to the rule book"""
for (i, rule) in enumerate(rule_defs.get('rules', [])):
self.add_rule(rule, i)
def add_rule(self, rule_def, rule_index):
"""Add a rule to the rule book.
Args:
rule_def: A dictionary containing rule definition properties.
rule_index: The index of the rule from the rule definitions.
Assigned automatically when the rule book is built.
        Raises:
            InvalidRulesSchemaError: If a resource is missing resource ids or
                any of the required rule fields is absent.
        """
resources = rule_def.get('resource')
for resource in resources:
resource_ids = resource.get('resource_ids')
if not resource_ids or len(resource_ids) < 1:
raise audit_errors.InvalidRulesSchemaError(
'Missing resource ids in rule {}'.format(rule_index))
bucket = rule_def.get('bucket')
entity = rule_def.get('entity')
email = rule_def.get('email')
domain = rule_def.get('domain')
role = rule_def.get('role')
if (bucket is None) or (entity is None) or (email is None) or\
(domain is None) or (role is None):
raise audit_errors.InvalidRulesSchemaError(
'Faulty rule {}'.format(rule_def.get('name')))
rule_def_resource = bkt_acls.BucketAccessControls(
escape_and_globify(bucket),
escape_and_globify(entity),
escape_and_globify(email),
escape_and_globify(domain),
escape_and_globify(role.upper()))
rule = Rule(rule_name=rule_def.get('name'),
rule_index=rule_index,
rules=rule_def_resource)
resource_rules = self.resource_rules_map.get(rule_index)
if not resource_rules:
self.resource_rules_map[rule_index] = rule
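    # Illustrative rule definition consumed by add_rule above (keys inferred from
    # the lookups in the method; the values shown are only an example):
    # {'name': 'sample rule', 'resource': [{'resource_ids': ['*']}], 'bucket': '*',
    #  'entity': '*', 'email': '*', 'domain': '*', 'role': 'OWNER'}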
def get_resource_rules(self):
"""Get all the resource rules for (resource, RuleAppliesTo.*).
Args:
resource: The resource to find in the ResourceRules map.
Returns:
A list of ResourceRules.
"""
resource_rules = []
for resource_rule in self.resource_rules_map:
resource_rules.append(self.resource_rules_map[resource_rule])
return resource_rules
class Rule(object):
"""Rule properties from the rule definition file.
Also finds violations.
"""
def __init__(self, rule_name, rule_index, rules):
"""Initialize.
Args:
rule_name: Name of the loaded rule
rule_index: The index of the rule from the rule definitions
rules: The rules from the file
"""
self.rule_name = rule_name
self.rule_index = rule_index
self.rules = rules
def find_policy_violations(self, bucket_acl):
"""Find bucket policy acl violations in the rule book.
Args:
bucket_acl: Bucket ACL resource
Returns:
Returns RuleViolation named tuple
"""
if self.rules.bucket != '^.+$':
bucket_bool = re.match(self.rules.bucket, bucket_acl.bucket)
else:
bucket_bool = True
if self.rules.entity != '^.+$':
entity_bool = re.match(self.rules.entity, bucket_acl.entity)
else:
entity_bool = True
if self.rules.email != '^.+$':
email_bool = re.match(self.rules.email, bucket_acl.email)
else:
email_bool = True
if self.rules.domain != '^.+$':
domain_bool = re.match(self.rules.domain, bucket_acl.domain)
else:
domain_bool = True
if self.rules.role != '^.+$':
role_bool = re.match(self.rules.role, bucket_acl.role)
else:
role_bool = True
should_raise_violation = (
(bucket_bool is not None and bucket_bool) and
(entity_bool is not None and entity_bool) and
(email_bool is not None and email_bool) and
(domain_bool is not None and domain_bool) and
(role_bool is not None and role_bool))
if should_raise_violation:
yield self.RuleViolation(
resource_type='project',
resource_id=bucket_acl.project_number,
rule_name=self.rule_name,
rule_index=self.rule_index,
violation_type='BUCKET_VIOLATION',
role=bucket_acl.role,
entity=bucket_acl.entity,
email=bucket_acl.email,
domain=bucket_acl.domain,
bucket=bucket_acl.bucket)
# Rule violation.
# resource_type: string
# resource_id: string
# rule_name: string
# rule_index: int
# violation_type: BUCKET_VIOLATION
# role: string
# entity: string
# email: string
# domain: string
# bucket: string
RuleViolation = namedtuple('RuleViolation',
['resource_type', 'resource_id', 'rule_name',
'rule_index', 'violation_type', 'role',
'entity', 'email', 'domain', 'bucket'])
| apache-2.0 | -2,150,819,857,089,099,300 | 33.249027 | 84 | 0.586571 | false |
cyncyncyn/evette | languagefiles/language_irish_1.3.2.py | 1 | 68626 | #!/usr/bin/python
# -*- coding: UTF-8 -*-
#Copyright (C) 2007 Adam Spencer - Free Veterinary Management Suite
#This program is free software; you can redistribute it and/or
#modify it under the terms of the GNU General Public License
#as published by the Free Software Foundation; either version 2
#of the License, or (at your option) any later version.
#This program is distributed in the hope that it will be useful,
#but WITHOUT ANY WARRANTY; without even the implied warranty of
#MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
#GNU General Public License for more details.
#You should have received a copy of the GNU General Public License
#along with this program; if not, write to the Free Software
#Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
##Contact: [email protected]
####Irish####
def GetDictionary():
dictionary = {}
##Misc
dictionary["usernamelabel"] = (
"Username",
"Username"
)
dictionary["passwordlabel"] = (
"Password",
"Password"
)
dictionary["submitlabel"] = (
"Submit",
"Submit"
)
dictionary["totallabel"] = (
"Total",
"Total"
)
dictionary["fromlabel"] = (
"From",
"From"
)
dictionary["tolabel"] = (
"To",
"To"
)
dictionary["pricelabel"] = (
"Price",
"Price"
)
dictionary["descriptionlabel"] = (
"Description",
"Decsription"
)
dictionary["yeslabel"] = (
"Yes",
"Yes"
)
dictionary["nolabel"] = (
"No",
"No"
)
dictionary["editlabel"] = (
"Edit",
"Edit"
)
dictionary["deletelabel"] = (
"Delete",
"Delete"
)
dictionary["searchlabel"] = (
"Search",
"Search"
)
dictionary["resetlabel"] = (
"Reset",
"Reset"
)
dictionary["movelabel"] = (
"Move",
"Move"
)
dictionary["unitlabel"] = (
"Unit",
"Unit"
)
dictionary["onlabel"] = (
"on",
"on"
)
dictionary["namelabel"] = (
"Name",
"Name"
)
dictionary["headertext1"] = (
"The complete FREE veterinary practice management package",
"The complete open-source veterinary practice management package"
)
dictionary["headertext2"] = (
"You can change this header to anything you like by editing",
"You can change this header to anything you like by editing"
)
dictionary["generatedbylabel"] = (
"Generated by",
"Generated by"
)
dictionary["timelabel"] = (
"Time",
"Time"
)
dictionary["operationslabel"] = (
"Operations",
"Operations"
)
dictionary["operatinglabel"] = (
"Operating",
"Operating"
)
dictionary["consultinglabel"] = (
"Consulting",
"Consulting"
)
dictionary["vetlabel"] = (
"Vet",
"Vet"
)
dictionary["animaldetailslabel"] = (
"Animal Details",
"Animal Details"
)
dictionary["ownerdetailslabel"] = (
"Owner Details",
"Owner Details"
)
dictionary["receiptlabel"] = (
"Receipt",
"Receipt"
)
dictionary["problemlabel"] = (
"Problem",
"Problem"
)
dictionary["noteslabel"] = (
"Notes",
"Notes"
)
dictionary["planlabel"] = (
"Plan",
"Plan"
)
dictionary["userdeleted"] = (
"User deleted",
"User Deleted"
)
dictionary["changelog"] = (
"Change Log",
"Change Log"
)
dictionary["positionlabel"] = (
"Position",
"Position"
)
dictionary["datelabel"] = (
"Date",
"Date"
)
dictionary["invalidtimemessage"] = (
"Invalid Time",
"Invalid Time"
)
dictionary["containslabel"] = (
"Contains",
"Contains"
)
dictionary["nextduelabel"] = (
"Next Due",
"Next Due"
)
dictionary["nonelabel"] = (
"None",
"None"
)
##Menus
dictionary["clientmenu"] = (
"&Clients",
"&Clients"
)
dictionary["appointmentsmenu"] = (
"&Appointments",
"&Appointments"
)
dictionary["medicationmenu"] = (
"&Medication",
"&Medication"
)
dictionary["proceduresmenu"] = (
"&Procedures",
"&Procedures"
)
dictionary["lookupsmenu"] = (
"&Lookups",
"&Lookups"
)
dictionary["formsmenu"] = (
"&Forms",
"&Forms"
)
dictionary["staffmenu"] = (
"&Staff",
"&Staff"
)
dictionary["settingsmenu"] = (
"Se&ttings",
"Se&ttings"
)
dictionary["helpmenu"] = (
"&Help",
"&Help"
)
dictionary["entirelabel"] = (
"Entire",
"Entire"
)
dictionary["neuteredlabel"] = (
"Neutered",
"Neutered"
)
##Menu items
dictionary["addclientmenu"] = (
("Add Client", "Create a new client record"),
("Add Client", "Create a new client record")
)
dictionary["findclientmenu"] = (
("Find Clients", "Find client and animal records"),
("Find Clients", "Find client and animal records")
)
dictionary["viewappointmentsmenu"] = (
("Todays Appointments", "View todays appointments"),
("Todays Appointments", "View todays appointments")
)
dictionary["viewoperationsmenu"] = (
("Todays Operations", "View todays operations"),
("Todays Operations", "View todays operations")
)
dictionary["editusersmenu"] = (
("Edit Users", "Add and edit Evette users"),
("Edit Users", "Add and edit Evette users")
)
dictionary["editrotamenu"] = (
("Edit Rota", "Edit the rota"),
("Edit Rota", "Edit the rota")
)
dictionary["editmedicationmenu"] = (
("Edit Medication", "Edit Medication"),
("Edit Medication", "Edit Medication")
)
dictionary["editvaccinationsmenu"] = (
("Edit Vaccinations", "Edit Vaccinations"),
("Edit Vaccinations", "Edit Vaccinations")
)
dictionary["editproceduresmenu"] = (
("Edit Procedures", "Edit Procedures"),
("Edit Procedures", "Edit Procedures")
)
dictionary["editcoloursmenu"] = (
("Edit Colours", "Edit Colours"),
("Edit Colours", "Edit Colours")
)
dictionary["editbreedsmenu"] = (
("Edit Breeds", "Edit Breeds"),
("Edit Breeds", "Edit Breeds")
)
dictionary["editspeciesmenu"] = (
("Edit Species", "Edit Species"),
("Edit Species", "Edit Species")
)
dictionary["editformsmenu"] = (
("Edit Forms", "Edit Forms"),
("Edit Forms", "Edit Forms")
)
dictionary["editsettingsmenu"] = (
("Edit Settings", "Edit settings unique to this practice"),
("Edit Settings", "Edit settings unique to this practice")
)
dictionary["randomdatamenu"] = (
("Random Data", "Generate random sample data to experiment with"),
("Random Data", "Generate random sample data to experiment with")
)
dictionary["resettablesmenu"] = (
("Reset Database", "Completely reset the evette database"),
("Reset Database", "Completely reset the Evette database. Be careful!")
)
dictionary["gethelpmenu"] = (
("Help", "Get help on using Evette"),
("Help", "Get help on using Evette")
)
dictionary["aboutmenu"] = (
("About", "Information about this program"),
("About", "Information about Evette")
)
##Toolbar
dictionary["addclienttoolbar"] = (
(" Add Client ", "Create a new client record"),
(" Add Client ", "Create a new client record")
)
dictionary["findclienttoolbar"] = (
(" Client Search ", "Find clients and animals"),
(" Client Search ", "Find clients and their animals")
)
dictionary["viewappointmentstoolbar"] = (
(" Todays Appointments ", "View todays appointments"),
(" Todays Appointments ", "View todays appointments")
)
dictionary["viewoperationstoolbar"] = (
(" Todays Operations ", "View todays operations"),
(" Todays Operations ", "View todays operations")
)
##Client Panel
dictionary["newclientpagetitle"] = (
"New Client",
"New Client"
)
dictionary["clienttitlelabel"] = (
"Title",
"Title"
)
dictionary["clientforenameslabel"] = (
"First Name",
"First Names"
)
dictionary["clientsurnamelabel"] = (
"Last Name",
"Last Name"
)
dictionary["clientaddresslabel"] = (
"Address",
"Address"
)
dictionary["clientpostcodelabel"] = (
"Post Code",
"Post Code"
)
dictionary["clienthomephonelabel"] = (
"Home Phone",
"Home Phone"
)
dictionary["clientmobilephonelabel"] = (
"Mobile Phone",
"Mobile Phone"
)
dictionary["clientworkphonelabel"] = (
"Work Phone",
"Work Phone"
)
dictionary["clientemailaddresslabel"] = (
"Email",
"Email"
)
dictionary["clientcommentslabel"] = (
"Comments",
"Comments"
)
dictionary["clientanimalslabel"] = (
"Animals",
"Animals"
)
dictionary["clientaddanimaltooltip"] = (
"Create a new animal",
"Create a new animal"
)
dictionary["clienteditanimaltooltip"] = (
"Edit the selected animal record",
"Edit the selected animal record"
)
dictionary["clientdeleteanimaltooltip"] = (
"Delete the selected animal record",
"Delete the selected animal record"
)
dictionary["clientrefreshanimalstooltip"] = (
"Refresh the list of animals",
"Refresh the list of animals"
)
dictionary["clientcreateappointmenttooltip"] = (
"Create an appointment for the selected animal",
"Create an appointment for the selected animal"
)
dictionary["clientbalancelabel"] = (
"Balance",
"Balance"
)
dictionary["clientdetailedbilllabel"] = (
"Detailed Bill",
"Detailed Bill"
)
dictionary["clientsavetooltip"] = (
"Save changes to client record",
"Save changes to client record"
)
dictionary["clientunsavedchangesmessage"] = (
"This client record has unsaved changes, are you sure you want to close?",
"This client record has unsaved changes, are you sure you want to close?"
)
dictionary["clientdeleteanimalmessage"] = (
"Really delete animal?",
"Really delete animal?"
)
dictionary["clientrefreshbilltooltip"] = (
"Refresh bill",
"Refresh bill"
)
dictionary["clientrecentbillitems"] = (
(" Recent Items", "Adjust the date range of the bill items displayed"),
(" Recent Items", "Adjust the date range of the bill items displayed")
)
dictionary["clientcleardetailedbillentriestooltip"] = (
"Unselect the current bill item and clear the price and description entries",
"Unselect the current bill item and clear the price and description entries"
)
dictionary["clientsubmitdetailedbillentriestooltip"] = (
"Submit changes to the selected bill item",
"Submit changes to the selected bill item"
)
dictionary["clientdeletedetailedbillentriestooltip"] = (
"Delete the selected bill item",
"Delete the selected bill item"
)
##Animal Panel
dictionary["newanimalpagetitle"] = (
"New Animal",
"New Animal"
)
dictionary["animalownerlabel"] = (
"Owner",
"Owner"
)
dictionary["animaleditownertooltip"] = (
"Edit this animals owner",
"Edit this animals owner"
)
dictionary["animalnamelabel"] = (
"Name",
"Name"
)
dictionary["animalsexlabel"] = (
"Sex",
"Sex"
)
dictionary["animalspecieslabel"] = (
"Species",
"Species"
)
dictionary["animalbreedlabel"] = (
"Breed",
"Breed"
)
dictionary["animalcolourlabel"] = (
"Colour",
"Colour"
)
dictionary["animaldoblabel"] = (
"DOB",
"DOB"
)
dictionary["animalchipnolabel"] = (
"Chip #",
"Chip #"
)
dictionary["animalcommentslabel"] = (
"Comments",
"Comments"
)
dictionary["animalneuteredtooltip"] = (
"Check if the animal is neutered",
"Check if the animal is neutered"
)
dictionary["animalprintentirerecordtooltip"] = (
"Generate printable output of this entire animal record",
"Generate printable output of this entire animal record"
)
dictionary["animalgenerateformtooltip"] = (
"Generate a form using this animals details",
"Generate a form using this animals details"
)
dictionary["animalappointmentslabel"] = (
"Appointments",
"Appointments"
)
dictionary["animalcreateappointmenttooltip"] = (
"Create an appointment for this animal",
"Create an appointment for this animal"
)
dictionary["animaleditappointmenttooltip"] = (
"Edit the selected appointment",
"Edit the selected appointment"
)
dictionary["animalrefreshappointmentstooltip"] = (
"Refresh the list of appointments",
"Refresh the list of appointments"
)
dictionary["animaldeleteappointmenttooltip"] = (
"Delete the selected appointment",
"Delete the selected appointment"
)
dictionary["animalprintappointmenttooltip"] = (
"Generate printable output for the selected appointment",
"Generate printable output for the selected appointment"
)
dictionary["animalvetformbutton"] = (
("Vet Form", "Edit the vet form for the selected appointment"),
("Vet Form", "Edit the vet form for the selected appointment")
)
dictionary["animalappointmentdetailslabel"] = (
"Appointment Details",
"Appointment Details"
)
dictionary["animalvaccinationslabel"] = (
"Vaccinations",
"Vaccinations"
)
dictionary["animalsavebuttontooltip"] = (
"Save any changes made to this animal record",
"Save any changes made to this animal record"
)
dictionary["animalunsavedchangesmessage"] = (
"This animal record has unsaved changes, are you sure you want to close?",
"This animal record has unsaved changes, are you sure you want to close?"
)
dictionary["animalconfirmdeleteappointmentmessage"] = (
"Really delete appointment?",
"Really delete appointment?"
)
dictionary["animalresetvaccinationentries"] = (
"Reset vaccination entries",
"Reset vaccination entries"
)
dictionary["animalvaccinelabel"] = (
" Vaccine: ",
" Vaccine: "
)
dictionary["animalgivenlabel"] = (
"Given: ",
"Given: "
)
dictionary["animalnextlabel"] = (
" Next: ",
" Next: "
)
dictionary["animaldeletevaccinationtooltip"] = (
"Delete the selected vaccination",
"Delete the selected vaccination"
)
dictionary["animalsubmitvaccinationtooltip"] = (
"Submit this vaccination",
"Submit this vaccination"
)
dictionary["animalconfirmdeletevaccinationmessage"] = (
"Are you sure that you want to delete this vaccination?",
"Are you sure that you want to delete this vaccination?"
)
dictionary["animalvaccinationbatchlabel"] = (
" Batch: ",
" Batch: "
)
##Appointments
dictionary["appointmentappointmentforlabel"] = (
"Appointment for",
"Appointment for"
)
dictionary["appointmentoperationforlabel"] = (
"Operation for",
"Operation for"
)
dictionary["appointmententervettooltip"] = (
"If this appointment is for a specific vet, enter the vet's name here",
"If this appointment is for a specific vet, enter the vet's name here"
)
dictionary["appointmentrefreshtooltip"] = (
"Refresh the list of appointments",
"Refresh the list of appointments"
)
dictionary["appointmentreasonlabel"] = (
"Reason For Appointment",
"Reason For Appointment"
)
dictionary["appointmenttimelabel"] = (
"Appointment time",
"Appointment time"
)
dictionary["appointmentisopcheckbox"] = (
("Operation?", "Check this box if you would like to book an operation"),
("Operation?", "Check this box if you would like to book an operation")
)
dictionary["appointmentsubmittooltip"] = (
"Submit this appointment",
"Submit this appointment"
)
dictionary["appointmentdeletetooltip"] = (
"Delete this appointment",
"Delete this appointment"
)
dictionary["appointmentstatuslabel"] = (
"Status",
"Status"
)
dictionary["appointmentnotarrivedlabel"] = (
"Not Arrived",
"Not Arrived"
)
dictionary["appointmentwaitinglabel"] = (
"Waiting",
"Waiting"
)
dictionary["appointmentwithvetlabel"] = (
"With Vet",
"With Vet"
)
dictionary["appointmentdonelabel"] = (
"Done",
"Done"
)
dictionary["appointmenteditownerbutton"] = (
("Edit Owner", "Edit client record"),
("Edit Owner", "Edit client record")
)
dictionary["appointmenteditanimalbutton"] = (
("Edit Animal", "Edit animal record"),
("Edit Animal", "Edit animal record")
)
dictionary["appointmentappointmentsforlabel"] = (
"Appointments for",
"Appointments for"
)
dictionary["appointmentoperationsforlabel"] = (
"Operations for",
"Operations for"
)
dictionary["appointmenttimetooearlymessage"] = (
"Appointment time is before the practice opens!",
"Appointment time is before the practice opens!"
)
dictionary["appointmenttimetoolatemessage"] = (
"Appointment time is after the practice closes!",
"Appointment time is after the practice closes!"
)
dictionary["appointmentinvalidtimemessage"] = (
"Invalid time - times must be HH:MM!",
"Invalid time - times must be HH:MM!"
)
##Client search panel
dictionary["clientsearchpagetitle"] = (
"Client Search",
"Client Search"
)
dictionary["clientsearchstitlelabel"] = (
"Clients",
"Clients"
)
dictionary["clientsearchsurnamelabel"] = (
"Last Name",
"Last Name"
)
dictionary["clientsearchphonelabel"] = (
"Phone",
"Phone"
)
dictionary["clientsearchaddresslabel"] = (
"Address",
"Address"
)
dictionary["clientsearchpostcodelabel"] = (
"Post Code",
"Zip Code"
)
dictionary["clientsearchemaillabel"] = (
"Email",
"Email"
)
dictionary["clientsearchclearbutton"] = (
("Clear", "Clear all entries"),
("Clear", "Clear all entries")
)
dictionary["clientsearchsearchbutton"] = (
("Search", "Perform the search"),
("Search", "Perform the search")
)
dictionary["clientsearcheditclienttooltip"] = (
"Edit the selected client record",
"Edit the selected client record"
)
dictionary["clientsearchdeleteclienttooltip"] = (
"Delete the selected client record",
"Delete the selected client record"
)
dictionary["clientsearchanimallabel"] = (
"Animals",
"Animals"
)
dictionary["clientsearchanimalnamelabel"] = (
"Name",
"Name"
)
dictionary["clientsearchanimalsexlabel"] = (
"Sex",
"Sex"
)
dictionary["clientsearchanimalspecieslabel"] = (
"Species",
"Species"
)
dictionary["clientsearchanimalbreedlabel"] = (
"Breed",
"Breed"
)
dictionary["clientsearchanimalchipnolabel"] = (
"Chip #",
"Chip #"
)
dictionary["clientsearchanimalcommentslabel"] = (
"Comments",
"Comments"
)
dictionary["clientsearcheditanimaltooltip"] = (
"Edit the selected animal record",
"Edit the selected animal record"
)
dictionary["clientsearchdeleteanimaltooltip"] = (
"Delete the selected animal record",
"Delete the selected animal record"
)
dictionary["clientreceiptchangeloglabel"] = (
"Receipt item - ",
"Receipt item - "
)
dictionary["clientreceiptdeletemessage"] = (
"Really delete this receipt entry?",
"Really delete this receipt entry?"
)
dictionary["clientclearpaymenttooltip"] = (
"Empty the payment entry",
"Empty the payment entry"
)
dictionary["clientpaymentlabel"] = (
"Payment",
"Payment"
)
dictionary["clientsubmitpaymenttooltip"] = (
"Submit Payment",
"Submit Payment"
)
dictionary["clientpaymentinreceiptlabel"] = (
"Payment",
"Payment"
)
##Launch panels
dictionary["launchcreateconffilemessage"] = (
"Conf file not found! Create one now?",
"Configuration file not found! Create one now?"
)
dictionary["launchevettefoldermessage"] = (
"Evette folder not found! Create it now?",
"Evette folder not found! Create it now?"
)
dictionary["launchnodatabaseservermessage"] = (
"Unable to connect to database server! Please check that it is installed and running. Would you like to adjust your local settings?",
"Unable to connect to database server! Please check that it is installed and running. Would you like to adjust your local settings?"
)
dictionary["launchnoevettedatabasemessage"] = (
"Unable to locate evette database! Would you like to create one now?",
"Unable to locate Evette database! Would you like to create one now?"
)
dictionary["launchconffilecreatedmessage"] = (
"Conf file created",
"Configuration file created"
)
dictionary["launchevettefoldercreatedmessage"] = (
"Evette folder created",
"Evette folder created"
)
dictionary["launchdbiplabel"] = (
"DB IP",
"Database IP Address"
)
dictionary["launchdbuserlabel"] = (
"DB User",
"Database User"
)
dictionary["launchdbpasslabel"] = (
"DB Pass",
"Database Password"
)
dictionary["launchunabletocreatedatabasemessage"] = (
"Unable to create database, please check your mysql server config!",
"Unable to create database, please check your MySQL server configuration!"
)
dictionary["launchdatabasecreatedmessage"] = (
"Database created successfully!",
"Database created successfully!"
)
dictionary["launchlogintooltip"] = (
"Log in",
"Log in"
)
##Lookups
dictionary["lookupscolourpagetitle"] = (
"Edit Colour Lookups",
"Edit Colour Lookups"
)
dictionary["lookupsspeciespagetitle"] = (
"Edit Species Lookups",
"Edit Species Lookups"
)
dictionary["lookupsbreedpagetitle"] = (
"Edit Breed Lookups",
"Edit Breed Lookups"
)
dictionary["lookupsrefreshtooltip"] = (
"Refresh the list",
"Refresh the list"
)
dictionary["lookupsdeletetooltip"] = (
"Delete the selected lookup",
"Delete the selected lookup"
)
dictionary["lookupssubmittooltip"] = (
"Submit lookup",
"Submit lookup"
)
dictionary["lookupsduplicatemessage"] = (
"That lookup already exists, it's pointless putting it in again!",
"That lookup already exists, it's pointless putting it in again!"
)
dictionary["lookupsnonamemessage"] = (
"You must give a name for this lookup!",
"You must give a name for this lookup!"
)
dictionary["lookupsdeletemessage"] = (
"Are you sure that you want to delete this lookup?",
"Are you sure that you want to delete this lookup?"
)
##Medication
dictionary["medicationeditmedicationpagetitle"] = (
"Edit Medication",
"Edit Medication"
)
dictionary["medicationrefreshtooltip"] = (
"Refresh Medication List",
"Refresh Medication List"
)
dictionary["medicationdeletetooltip"] = (
"Delete the selected medication",
"Delete the selected medication"
)
dictionary["medicationbatchnolabel"] = (
"Batch #",
"Batch #"
)
dictionary["medicationbatchmovementreporttooltip"] = (
"Generate a report showing all movements of this batch",
"Generate a report showing all movements of this batch"
)
dictionary["medicationstocklisttooltip"] = (
"Print a list of your current stock",
"Print a list of your current stock"
)
dictionary["medicationmovementsoflabel"] = (
"Movements of ",
"Movements of "
)
dictionary["medicationconfirmdeletemessage"] = (
"Are you sure you want to delete ",
"Are you sure you want to delete "
)
dictionary["medicationconfirmoverwritemessage"] = (
"Are you sure you want to overwrite this medication?",
"Are you sure you want to overwrite this medication?"
)
dictionary["medicationmovementsofbatchnumberlabel"] = (
"Movements of Batch Number ",
"Movements of Batch Number "
)
dictionary["medicationexpireslabel"] = (
"Expires",
"Expires"
)
dictionary["medicationrefreshdetailstooltip"] = (
"Refresh the details of this medication",
"Refresh the details of this medication"
)
dictionary["medicationdeletemovementtooltip"] = (
"Delete this medication movement",
"Delete this medication movement"
)
dictionary["movementmovementlabel"] = (
"Movement",
"Movement"
)
dictionary["movementoverwritemovementmessage"] = (
"Are you sure that you want to overwrite this movement?",
"Are you sure that you want to overwrite this movement?"
)
dictionary["movementconfirmdeletemovementmessage"] = (
"Are you sure that you want to delete this movement?",
"Are you sure that you want to delete this movement?"
)
dictionary["movementrefreshmovementsmessage"] = (
"Refresh the details of this medication",
"Refresh the details of this medication"
)
dictionary["movementresetsearchentriestooltip"] = (
"Reset search entries",
"Reset search entries"
)
dictionary["medicationcurrentbatchlabel"] = (
"Current Batch",
"Current Batch"
)
dictionary["medicationunitpricelabel"] = (
"Unit Price",
"Unit Price"
)
##Weekdays
dictionary["monday"] = (
"Monday",
"Monday"
)
dictionary["tuesday"] = (
"Tuesday",
"Tuesday"
)
dictionary["wednesday"] = (
"Wednesday",
"Wednesday"
)
dictionary["thursday"] = (
"Thursday",
"Thursday"
)
dictionary["friday"] = (
"Friday",
"Friday"
)
dictionary["saturday"] = (
"Saturday",
"Saturday"
)
dictionary["sunday"] = (
"Sunday",
"Sunday"
)
##Procedures
dictionary["editprocedurespagetitle"] = (
"Edit Procedures",
"Edit Procedures"
)
dictionary["proceduresrefreshprocedurestooltip"] = (
"Refresh the list of procedures",
"Refresh the list of procedures"
)
dictionary["proceduresdeleteproceduretooltip"] = (
"Delete the selected procedure",
"Delete the selected procedure"
)
dictionary["proceduresunnamedproceduremessage"] = (
"You must give this procedure a name!",
"You must give this procedure a name!"
)
dictionary["proceduresoverwritemessage"] = (
"Are you sure that you want to edit this procedure?",
"Are you sure that you want to edit this procedure?"
)
dictionary["proceduresdeletemessage"] = (
"Are you sure that you want to delete this procedure?",
"Are you sure that you want to delete this procedure?"
)
##Random data
dictionary["randomdatapagetitle"] = (
"Random Data",
"Random Data"
)
dictionary["randomdatanoofclientslabel"] = (
"No of clients",
"Number of clients"
)
dictionary["randomdatanoofanimalslabel"] = (
"No of animals",
"Number of animals"
)
dictionary["randomdatanoofappointmentslabel"] = (
"No of appointments",
"Number of appointments"
)
dictionary["randomdatanoofoperationslabel"] = (
"No of operations",
"Number of operations"
)
dictionary["randomdatanoofmedicationslabel"] = (
"No of medications",
"Number of medications"
)
dictionary["randomdataclientslabel"] = (
"Clients",
"Clients"
)
dictionary["randomdataanimalslabel"] = (
"Animals",
"Animals"
)
dictionary["randomdataappointmentslabel"] = (
"Appointments",
"Appointments"
)
dictionary["randomdataoperationslabel"] = (
"Operations",
"Operations"
)
dictionary["randomdatamedicationlabel"] = (
"Medication",
"Medication"
)
dictionary["randomdatasubmittooltip"] = (
"Create random data",
"Create random data"
)
##Settings Panel
dictionary["settingspracticenamelabel"] = (
"Practice Name",
"Practice Name"
)
dictionary["settingsopenfromlabel"] = (
"Open from",
"Open from"
)
dictionary["settingsopentolabel"] = (
"Open to",
"Open to"
)
dictionary["settingsoperatingtimelabel"] = (
"Operating time",
"Operating time"
)
dictionary["settingshtmlviewerlabel"] = (
"HTML viewer",
"HTML viewer"
)
dictionary["settingsfindhtmlviewertooltip"] = (
"HTML viewer",
"HTML viewer"
)
dictionary["settingslanguagelabel"] = (
"Language",
"American English"
)
##Staff settings
dictionary["editvetformlabel"] = (
"Edit Vet Form",
"Edit Vet Form"
)
dictionary["editfinanceslabel"] = (
"Edit Finances",
"Edit Finances"
)
dictionary["showtoolbarlabel"] = (
"Show Toolbar",
"Show Toolbar"
)
dictionary["viewchangeloglabel"] = (
"View Changelogs",
"View Changelogs"
)
dictionary["editsettingslabel"] = (
"Edit Settings",
"Edit Settings"
)
dictionary["editrotalabel"] = (
"Edit Rota",
"Edit Rota"
)
dictionary["editstaffpagetitle"] = (
"Edit Rota",
"Edit Rota"
)
dictionary["staffmemberlabel"] = (
"Staff Member",
"Staff Member"
)
dictionary["deleteusertooltip"] = (
"Delete the selected user",
"Delete the selected user"
)
dictionary["clientslabel"] = (
"Clients",
"Clients"
)
dictionary["animalslabel"] = (
"Animals",
"Animals"
)
dictionary["appointmentslabel"] = (
"Appointments",
"Appointments"
)
dictionary["medicationlabel"] = (
"Medication",
"Medication"
)
dictionary["procedureslabel"] = (
"Clients",
"Clients"
)
dictionary["lookupslabel"] = (
"Lookups",
"Lookups"
)
dictionary["formslabel"] = (
"Forms",
"Forms"
)
dictionary["userslabel"] = (
"Users",
"Users"
)
dictionary["misclabel"] = (
"Misc",
"Misc"
)
dictionary["tickalllabel"] = (
"Check All",
"Check All"
)
dictionary["tickalltooltip"] = (
"Give the user permission to use ALL areas of the system. Use with care!",
"Give the user permission to use ALL areas of the system. Use with care!"
)
dictionary["useroverwritemessage"] = (
"Are you sure that you want to overwrite this user?",
"Are you sure that you want to overwrite this user?"
)
dictionary["userdeletemessage"] = (
"Are you sure that you want to delete this user?",
"Are you sure that you want to delete this user?"
)
##Edit Rota
dictionary["editrotapagetitle"] = (
"Edit Rota",
"Edit Rota"
)
dictionary["timeonlabel"] = (
"Time On",
"Time On"
)
dictionary["timeofflabel"] = (
"Time Off",
"Time Off"
)
dictionary["operatinglabel"] = (
"Operating",
"Operating"
)
dictionary["staffsummarylabel"] = (
"Staff Summary",
"Staff Summary"
)
dictionary["dayplanlabel"] = (
"Day Plan",
"Day Plan"
)
dictionary["novetnamemessage"] = (
"You must enter a vets name!",
"You must enter a vets name!"
)
dictionary["vetfinishedbeforestartingmessage"] = (
"The vet cannot finish before starting!",
"The vet cannot finish before starting!"
)
dictionary["vettwoplacesatoncemessage"] = (
"This vet cannot be in two places at once!",
"This vet cannot be in two places at once!"
)
##Vaccinations
dictionary["vaccinationseditvaccinationspagetitle"] = (
"Edit Vaccinations",
"Edit Vaccinations"
)
dictionary["vaccinationsvaccinelabel"] = (
"Vaccine",
"Vaccine"
)
dictionary["vaccinationsrefreshvaccinationstooltip"] = (
"Refresh the list of vaccinations",
"Refresh the list of vaccinations"
)
dictionary["vaccinationsdeletevaccinationstooltip"] = (
"Delete the selected vaccination",
"Delete the selected vaccination"
)
dictionary["vaccinationsprintstocklisttooltip"] = (
"Print a list of your current stock",
"Print a list of your current stock"
)
dictionary["vaccinationsconfirmdeletevaccinationmessage"] = (
"Are you sure you want to delete this vaccination?",
"Are you sure you want to delete this vaccination?"
)
dictionary["vaccinationsconfirmoverwritevaccinationmessage"] = (
"Are you sure you want to overwrite this vaccination?",
"Are you sure you want to overwrite this vaccination?"
)
dictionary["vaccinationsrefreshmovementstooltip"] = (
"Refresh the details of this vaccination",
"Refresh the details of this vaccination"
)
dictionary["vaccinationsdeletemovementtooltip"] = (
"Delete this vaccination movement",
"Delete this vaccination movement"
)
dictionary["vaccinationsoverwritemovementmessage"] = (
"Are you sure that you want to edit this movement?",
"Are you sure that you want to edit this movement?"
)
dictionary["vaccinationsdeletemovementmessage"] = (
"Are you sure that you want to delete this movement?",
"Are you sure that you want to delete this movement?"
)
##Vet Form
dictionary["vetformpagetitle"] = (
"Vet Form",
"Vet Form"
)
dictionary["vetformotherappointmentslabel"] = (
"Appointment History",
"Appointment History"
)
dictionary["vetformappointmentdetailslabel"] = (
"Appointment Details",
"Appointment Details"
)
dictionary["vetformmedlabel"] = (
"Med",
"Med"
)
dictionary["vetformvacclabel"] = (
"Vacc",
"Vacc"
)
dictionary["vetformproclabel"] = (
"Proc",
"Proc"
)
dictionary["vetformmanlabel"] = (
"Man",
"Man"
)
dictionary["vetformdeletereceipttooltip"] = (
"Delete the selected item from the receipt",
"Delete the selected item from the receipt"
)
dictionary["vetformdonetooltip"] = (
"Mark this appointment as complete and close",
"Mark this appointment as complete and close"
)
dictionary["vetformsavetooltip"] = (
"Save any changes made to this vet form",
"Save any changes made to this vet form"
)
dictionary["vetformreceiptitemlabel"] = (
"Receipt Item",
"Receipt Item"
)
dictionary["vetformdeletereceiptmessage"] = (
"Are you sure you want to delete this receipt item?",
"Are you sure you want to delete this receipt item?"
)
dictionary["vetformmedicationclearcontainstooltip"] = (
"Clear the \"Contains\" entry",
"Clear the \"Contains\" entry"
)
dictionary["vetformrefreshmedicationtooltip"] = (
"Refresh the medication list",
"Refresh the medication list"
)
dictionary["vetformnoofunitstooltip"] = (
"Enter the number of units that you are dispensing here",
"Enter the number of units that you are dispensing here"
)
dictionary["vetforminstructionslabel"] = (
"Instructions",
"Instructions"
)
dictionary["vetforminstructionstooltip"] = (
"Enter instructions on how to administer this medication here",
"Enter instructions on how to administer this medication here"
)
dictionary["vetformprintlabeltooltip"] = (
"Print a label for this medication",
"Print a label for this medication"
)
dictionary["vetformbatchnotooltip"] = (
"Enter the batch number here",
"Enter the batch number here"
)
dictionary["vetformrefreshvaccinationtooltip"] = (
"Refresh the vaccination list",
"Refresh the vaccination list"
)
dictionary["vetformrefreshprocedurestooltip"] = (
"Refresh the procedures list",
"Refresh the procedures list"
)
dictionary["vetformnodescriptionmessage"] = (
"You must give a description!",
"You must give a description!"
)
##View Appointments
dictionary["viewappointmentspagetitle"] = (
"View Appointments",
"View Appointments"
)
dictionary["viewoperationsspagetitle"] = (
"View Operations",
"View Operations"
)
dictionary["viewappointmentsmarkwithvettooltip"] = (
"Mark this appointment as with the vet",
"Mark this appointment as with the vet"
)
dictionary["viewappointmentschoosevettooltip"] = (
"Choose a vet",
"Choose a vet"
)
dictionary["viewappointmentsvetformtooltip"] = (
"Carry out the vet visit for this appointment",
"Carry out the vet visit for this appointment"
)
dictionary["viewappointmentsmarkarrivedtooltip"] = (
"Mark this appointment as arrived",
"Mark this appointment as arrived"
)
dictionary["viewappointmentsmarkdonetooltip"] = (
"Mark this appointment as done",
"Mark this appointment as done"
)
dictionary["viewappointmentseditclientbuttonlabel"] = (
"Edit Client",
"Edit Client"
)
dictionary["viewappointmentseditclientbuttontooltip"] = (
"Edit this clients record (so they can pay their bill)",
"Edit this clients record (so they can pay their bill)"
)
dictionary["viewappointmentsvetsonlabel"] = (
"Vets On",
"Vets On"
)
dictionary["appointmentsearchpagetitle"] = (
"Appointment Search",
"Appointment Search"
)
dictionary["appointmentsearchmenu"] = (
("Appointment Search", "Find an appointment"),
("Appointment Search", "Find an appointment")
)
dictionary["appointmentsearchanimalnamelabel"] = (
"Animal Name",
"Animal Name"
)
dictionary["reasonlabel"] = (
"Reason",
"Reason"
)
dictionary["viewoperationspagetitle"] = (
"View Operations",
"View Operations"
)
dictionary["dateformat"] = (
"DDMMYYYY",
"DDMMYYYY"
)
dictionary["currency"] = (
"£",
"EUR "
)
dictionary["mailshotmenu"] = (
("Mail Shot", "Compile a list of clients to contact"),
("Mail Shot", "Compile a list of clients to contact")
)
dictionary["mailshotpagetitle"] = (
"Mail Shot",
"Mail Shot"
)
dictionary["anyvaccine"] = (
"Any Vaccine",
"Any Vaccine"
)
dictionary["anyspecies"] = (
"Any Species",
"Any Species"
)
dictionary["deceasedlabel"] = (
"Deceased",
"Deceased"
)
dictionary["causeofdeathlabel"] = (
"Cause of Death",
"Cause of Death"
)
dictionary["includedeceasedlabel"] = (
"Include Deceased",
"Include Deceased"
)
dictionary["createvaccinationappointmentbutton"] = (
("Create Appointment", "Create an appointment for this vaccination"),
("Create Appointment", "Create an appointment for this vaccination")
)
dictionary["generatevaccinationcsvbutton"] = (
("Create CSV File", "Create and save a CSV file to disc. This can be used by most word processors to create mail shots"),
("Create CSV File", "Create and save a CSV file to disc. This can be used by most word processors to create mail shots")
)
dictionary["csvsavedtolabel"] = (
"CSV file saved to",
"CSV file saved to"
)
dictionary["versiontablenotfoundquestion"] = (
"Version table not found, create it now?",
"Version table not found, create it now?"
)
dictionary["versionupdatequestion1"] = (
"You are attempting to run evette",
"ou are attempting to run evette"
)
dictionary["versionupdatequestion2"] = (
"your database is version",
"your database is version"
)
dictionary["versionupdatequestion3"] = (
"Would you like to upgrade your database?",
"Would you like to upgrade your database?"
)
dictionary["resetdatabasequestion"] = (
"Are you sure that you want to reset all tables? ALL DATA WILL BE LOST!",
"Are you sure that you want to reset all tables? ALL DATA WILL BE LOST!"
)
dictionary["alltablesresetmessage"] = (
"All tables have been reset!",
"All tables have been reset!"
)
dictionary["addstafflabel"] = (
"Add staff?",
"Add staff?"
)
dictionary["vetslabel"] = (
"Vets",
"Vets"
)
dictionary["nurseslabel"] = (
"Nurses",
"Nurses"
)
dictionary["otherslabel"] = (
"Others",
"Others"
)
dictionary["nextmonthtooltip"] = (
"Show next month",
"Show next month"
)
dictionary["previousmonthtooltip"] = (
"Show previous month",
"Show previous month"
)
dictionary["backtocalendartooltip"] = (
"Back to calendar",
"Back to calendar"
)
dictionary["addstafftodailyrotatooltip"] = (
"Add a member of staff to this days rota",
"Add a member of staff to this days rota"
)
dictionary["deleterotaitemtooltip"] = (
"Delete this rota entry",
"Delete this rota entry"
)
dictionary["submitrotaitemtooltip"] = (
"Submit this rota entry",
"Submit this rota entry"
)
dictionary["vetpositiontitle"] = (#Note: If a user is given this position, Evette will assume that the user is a vet
"Vet",
"Vet"
)
dictionary["vetnursepositiontitle"] = (#Note: If a user is given this position, Evette will assume that the user is a vet nurse
"Nurse",
"Nurse"
)
dictionary["managerpositiontitle"] = (#Note: If a user is given this position, Evette will assume that the user is a manager
"Manager",
"Manager"
)
dictionary["errorlabel"] = (
"Sorry, the following error has occured",
"Sorry, the following error has occured"
)
dictionary["editdiarytoolbar"] = (
("Edit Diary", "Edit the diary"),
("Edit Diary", "Edit the diary")
)
dictionary["editdiarypagetitle"] = (
"Edit Diary",
"Edit Diary"
)
dictionary["notesuptolabel"] = (
"Up to",
"Up to"
)
dictionary["subjectcontainslabel"] = (
"Subject contains",
"Subject contains"
)
dictionary["notecontainslabel"] = (
"Note contains",
"Note contains"
)
dictionary["showremovedlabel"] = (
"Include removed?",
"Include removed?"
)
dictionary["subjectlabel"] = (
"Subject",
"Subject"
)
dictionary["notelabel"] = (
"Note",
"Note"
)
dictionary["removedlabel"] = (
"Removed",
"Removed"
)
dictionary["linklabel"] = (
"Link",
"Link"
)
dictionary["clientlabel"] = (
"Client",
"Client"
)
dictionary["animallabel"] = (
"Animal",
"Animal"
)
dictionary["opentargetrecordtooltip"] = (
"Open the record linked to this diary note",
"Open the record linked to this diary note"
)
dictionary["diarynotelabel"] = (
"Diary Note",
"Diary Note"
)
dictionary["confirmdeletediarynotemessage"] = (
"Are you sure that you want to delete this diary note?",
"Are you sure that you want to delete this diary note?"
)
dictionary["nolinklabel"] = (
"No Link",
"No Link"
)
dictionary["createassociateddiarynotetooltip"] = (
"Create a diary note associated with this record",
"Create a diary note associated with this record"
)
dictionary["newdiarynotetooltip"] = (
"Create a new diary note",
"Create a new diary note"
)
dictionary["editdiarynotetooltip"] = (
"Edit the selected diary note",
"Edit the selected diary note"
)
dictionary["deletediarynotetooltip"] = (
"Delete the selected diary note",
"Delete the selected diary note"
)
dictionary["refreshdiarytooltip"] = (
"Refresh the list of diary notes",
"Refresh the list of diary notes"
)
dictionary["cleardiarytooltip"] = (
"Clear the diary filters",
"Clear the diary filters"
)
dictionary["clientolderthanservermessage"] = (
"You are trying to run an out-of-date client, please upgrade then try again",
"You are trying to run an out-of-date client, please upgrade then try again"
)
dictionary["adddiarynotes"] = (
"Add to diary",
"Add to diary"
)
dictionary["editdiarynotes"] = (
"Edit diary",
"Edit diary"
)
dictionary["deletediarynotes"] = (
"Delete from diary",
"Delete from diary"
)
dictionary["diarylabel"] = (
"Diary",
"Diary"
)
dictionary["viewlicensemenu"] = (
("View License", "View the license for this software."),
("View License", "View the license for this software.")
)
dictionary["fileaccosiationmenu"] = (
("File Associations", "Edit the external applications associated with attached files"),
("File Associations", "Edit the external applications associated with attached files")
)
dictionary["licenselabel"] = (
"License",
"License"
)
dictionary["aboutlabel"] = (
"About",
"About"
)
dictionary["attachedfileslabel"] = (
"Attached Files",
"Attached Files"
)
dictionary["deleteattachedfileconfirm"] = (
"Are you sure that you want to delete this file?",
"Are you sure that you want to delete this file?"
)
dictionary["addnewmediatooltip"] = (
"Add a new external file to this record",
"Add a new external file to this record"
)
dictionary["replacemediatooltip"] = (
"Update the description of the selected file",
"Update the description of the selected file"
)
dictionary["deletemediatooltip"] = (
"Delete the selected file",
"Delete the selected file"
)
dictionary["savemediatooltip"] = (
"Save the selected file to disk",
"Save the selected file to disk"
)
dictionary["fileassociationspagetitle"] = (
"File Associations",
"File Associations"
)
dictionary["extensionlabel"] = (
"Extension",
"Extension"
)
dictionary["programlabel"] = (
"Program",
"Program"
)
dictionary["fileassociationexistsmessage"] = (
"There is already a program associated with this file extension!",
"There is already a program associated with this file extension!"
)
dictionary["deleteassociationconfirm"] = (
"Are you sure that you want to delete this file association?",
"Are you sure that you want to delete this file association?"
)
dictionary["noprogramassociatedmessage"] = (
"There is no program associated with this file type!",
"There is no program associated with this file type!"
)
dictionary["mediatoolargemessage"] = (
"This file is too large to attach!",
"This file is too large to attach!"
)
############################## 1.1.9 ###############################################
dictionary["weightpanelpagetitle"] = (
"Weight",
"Weight"
)
dictionary["deleteweighttooltip"] = (
"Delete the selected weight",
"Delete the selected weight"
)
dictionary["deleteweightconfirm"] = (
"Are you sure that you want to delete this weight?",
"Are you sure that you want to delete this weight?"
)
dictionary["samelabel"] = (
"Same",
"Same"
)
dictionary["reorderlabel"] = (
"Minimum",
"Minimum"
)
dictionary["runninglowlabel"] = (
"Running Low?",
"Running Low?"
)
dictionary["diarymenu"] = (
"&Diary",
"&Diary"
)
############################## 1.2 ###############################################
dictionary["clientanimalsearchtooltip"] = (
"If you wish to filter the animals by name, enter the name here",
"If you wish to filter the animals by name, enter the name here"
)
dictionary["browseappointmentsmenu"] = (
( "Browse Appointments", "Browse all appointments" ),
( "Browse Appointments", "Browse all appointments" )
)
dictionary["browseappointmentspagetitle"] = (
"Browse Appointments",
"Browse Appointments"
)
dictionary["appointmentlabel"] = (
"appointment",
"appointment"
)
dictionary["januarylabel"] = (
"January",
"January"
)
dictionary["februarylabel"] = (
"February",
"February"
)
dictionary["marchlabel"] = (
"March",
"March"
)
dictionary["aprillabel"] = (
"April",
"April"
)
dictionary["maylabel"] = (
"May",
"May"
)
dictionary["junelabel"] = (
"June",
"June"
)
dictionary["julylabel"] = (
"July",
"July"
)
dictionary["augustlabel"] = (
"August",
"August"
)
dictionary["septemberlabel"] = (
"September",
"September"
)
dictionary["octoberlabel"] = (
"October",
"October"
)
dictionary["novemberlabel"] = (
"November",
"November"
)
dictionary["decemberlabel"] = (
"December",
"December"
)
dictionary["readfileassociationhelpmessage"] = (
"To learn about file associations - visit the help section.",
"To learn about file associations - visit the help section."
)
dictionary["websitelabel"] = (
"Website",
u"Website"
)
dictionary["generateinvoicelabel"] = (
"Generate a printable invoice for this client",
u"Generate a printable invoice for this client"
)
dictionary["animalformsmenu"] = (
("Animal Forms", "Create or edit forms that be generated using an animal's details"),
(u"Animal Forms", u"Create or edit forms that be generated using an animal's details")
)
dictionary["clientformsmenu"] = (
("Client Forms", "Create or edit forms that be generated using an client's details"),
(u"Client Forms", u"Create or edit forms that be generated using an client's details")
)
dictionary["animalformspagetitle"] = (
"Animal Forms",
u"Animal Forms"
)
dictionary["clientformspagetitle"] = (
"Client Forms",
u"Client Forms"
)
dictionary["previewlabel"] = (
"Preview",
u"Preview"
)
dictionary["wordkeyslabel"] = (
"Wordkeys",
u"Wordkeys"
)
dictionary["invoiceformsmenu"] = (
("Invoice Forms", "Edit the invoice templates"),
(u"Invoice Forms", u"Edit the invoice templates")
)
dictionary["editinvoicepagetitle"] = (
"Edit Invoices",
u"Edit Invoices"
)
dictionary["medicationformsmenu"] = (
("Medication Forms", "Edit the medication templates"),
(u"Medication Forms", u"Edit the medication templates")
)
dictionary["editmedicationtformspagetitle"] = (
"Medication Forms",
u"Medication Forms"
)
dictionary["invoicespagetitle"] = (
"Invoices",
u"Invoices"
)
dictionary["newinvoicetooltip"] = (
"Create a new invoice",
u"Create a new invoice"
)
dictionary["editinvoicetooltip"] = (
"Edit the selected invoice",
u"Edit the selected invoice"
)
dictionary["deleteinvoicetooltip"] = (
"Delete the selected invoice",
u"Delete the selected invoice"
)
dictionary["invoiceoverlapmessage"] = (
"Invoices are not allowed to overlap, please adjust the dates",
u"Invoices are not allowed to overlap, please adjust the dates"
)
dictionary["clientgenerateformtooltip"] = (
"Generate a form using this clients details",
u"Generate a form using this clients details"
)
dictionary["randomdatawarningmessage"] = (
"Note: Evette will need close when this process has completed,\nplease start Evette again to see the results.",
u"Note: Evette will need close when this process has completed,\nplease start Evette again to see the results."
)
dictionary["invoiceidlabel"] = (
"Invoice ID",
u"Invoice ID"
)
dictionary["paidlabel"] = (
"paid",
u"paid"
)
dictionary["unpaidlabel"] = (
"unpaid",
u"unpaid"
)
dictionary["invoiceidchoicetooltip"] = (
"Choose an invoice ID to mark an invoice as paid.",
u"Choose an invoice ID to mark an invoice as paid."
)
dictionary["editpaymentinvoicetooltip"] = (
"Edit the amount paid on the selected invoice.",
u"Edit the amount paid on the selected invoice."
)
dictionary["editinvoicepaymenttitle"] = (
"Edit payment",
u"Edit payment"
)
dictionary["editanimaltooltip"] = (
"Edit Animal",
u"Edit Animal"
)
###################1.2.2#####################
dictionary["stocklabel"] = (
"Stock",
u"Stock"
)
dictionary["editstockmenu"] = (
("Edit Stock", "Edit Stock"),
("Edit Stock", "Edit Stock")
)
dictionary["batchsearchmenu"] = (
("Batch Search", "Show movements for a specific batch number"),
("Batch Search", "Show movements for a specific batch number")
)
dictionary["batchbreakdowntooltip"] = (
"View a breakdown of the current stock by batch number",
u"View a breakdown of the current stock by batch number"
)
dictionary["editmovementlabel"] = (
"Edit Movement",
u"Edit Movement"
)
dictionary["createmovementlabel"] = (
"Create Movement",
u"Create Movement"
)
dictionary["consumablelabel"] = (
"Consumable",
u"Consumable"
)
dictionary["shoplabel"] = (
"Shop",
u"Shop"
)
dictionary["procedurelabel"] = (
"Procedure",
u"Procedure"
)
dictionary["manuallabel"] = (
"Manual",
u"Manual"
)
dictionary["prescribemedicationlabel"] = (
"Prescribe Medication",
u"Prescribe Medication"
)
dictionary["quantitylabel"] = (
"Quantity",
u"Quantity"
)
dictionary["quantityerrormessage"] = (
"Invalid quantity",
u"Invalid quantity"
)
dictionary["viewinvoicetooltip"] = (
"View Invoice",
u"View Invoice"
)
dictionary["diagnosislabel"] = (
"Diagnosis",
u"Diagnosis"
)
dictionary["createreceiptitemtooltip"] = (
"Create a receipt item",
u"Create a receipt item"
)
############################## 1.2.3 ###############################################
dictionary["editkennelsmenu"] = (
("Edit Kennels", "Edit kennels available"),
("Edit Kennels", "Edit kennels available")
)
dictionary["viewkennelsmenu"] = (
("View Kennels", "View Kennels"),
("View Kennels", "View Kennels")
)
dictionary["kennelsmenu"] = (
"&Kennels",
"&Kennels"
)
dictionary["kennelblocktitlelabel"] = (
"Kennel Blocks",
"Kennel Blocks"
)
dictionary["kennelstitlelabel"] = (
"Kennels",
"Kennels"
)
dictionary["editkennelblocktitle"] = (
"Edit kennel block",
"Edit kennel block"
)
dictionary["deletekennelblockconfirmation"] = (
"Are you sure that you want to delete this kennel block?",
"Are you sure that you want to delete this kennel block?"
)
dictionary["deletekennelconfirmation"] = (
"Are you sure that you want to delete this kennel?",
"Are you sure that you want to delete this kennel?"
)
dictionary["editkenneltitle"] = (
"Edit kennel",
"Edit kennel"
)
dictionary["stayinglabel"] = (
"Staying",
"Staying"
)
dictionary["occupiedlabel"] = (
"occupied",
"occupied"
)
dictionary["vacantlabel"] = (
"vacant",
"vacant"
)
dictionary["changeownershiptooltip"] = (
"Transfer ownership of this animal",
"Transfer ownership of this animal"
)
dictionary["choosenewownerdialogtitle"] = (
"Choose new owner",
"Choose new owner"
)
dictionary["doubleclicktoselecttooltip"] = (
"Double click to select",
"Double click to select"
)
dictionary["importasmanimaltooltip"] = (
"Create an animal record from an ASM record",
"Create an animal record from an ASM record"
)
dictionary["chooseananimaltitle"] = (
"Choose an animal",
"Choose an animal"
)
dictionary["clientrefnolabel"] = (
"Reference Number",
"Reference Number"
)
dictionary["toomanyresultsmessage"] = (
"Your search produced too many results to display, please narrow down your search",
"Your search produced too many results to display, please narrow down your search"
)
dictionary["idlelabel"] = (
"Idle",
"Idle"
)
dictionary["connectinglabel"] = (
"Connecting",
"Connecting"
)
dictionary["connectedlabel"] = (
"Connected",
"Connected"
)
dictionary["errorlabel"] = (
"Error",
"Error"
)
dictionary["usernamepassworderrormessage"] = (
"Unsuccessful Login",
"Unsuccessful Login"
)
dictionary["successfulloginmessage"] = (
"Successful Login",
"Successful Login"
)
dictionary["creatingevettefolder"] = (
"Creating Evette folder",
"Creating Evette folder"
)
dictionary["evettedatabasecreatedmessage"] = (
"Created Evette database",
"Created Evette database"
)
dictionary["errorcreatingdatabasemessage"] = (
"Error creating Evette database",
"Error creating Evette database"
)
dictionary["asmimportmenu"] = (
("ASM Import", "Import an animal from ASM"),
("ASM Import", "Import an animal from ASM")
)
dictionary["errorobtainingownermessage"] = (
"Unable to find owner",
"Unable to find owner"
)
dictionary["alreadyimportedmessage"] = (
"This animal has already been imported. Would you like to view it?",
"This animal has already been imported. Would you like to view it?"
)
dictionary["addweighttooltip"] = (
"Add Weight",
"Add Weight"
)
dictionary["editweightlabel"] = (
"Edit Weight",
"Edit Weight"
)
dictionary["adduserlabel"] = (
"Add User",
"Add User"
)
dictionary["edituserlabel"] = (
"Edit User",
"Edit User"
)
dictionary["editreasonsmenu"] = (
("Edit Reasons", "Edit common appointment reasons"),
("Edit Reasons", "Edit common appointment reasons")
)
dictionary["lookupsreasonpagetitle"] = (
"Appointment Reason Lookups",
"Appointment Reason Lookups"
)
dictionary["doubleclickforreasonstooltip"] = (
"Double click for a choice of common appointment reasons",
"Double click for a choice of common appointment reasons"
)
dictionary["filemenu"] = (
"File",
"File"
)
dictionary["fileexitmenu"] = (
("Exit", "Exit Evette"),
("Exit", "Exit Evette")
)
dictionary["fileclosewindowsmenu"] = (
("Close All Panels", "Close all open panels"),
("Close All Panels", "Close all open panels")
)
dictionary["confirmcloseallwindowsmessage"] = (
("Are you sure that you want to close all open panels? Any unsaved data will be lost."),
("Are you sure that you want to close all open panels? Any unsaved data will be lost.")
)
dictionary["locationlabel"] = (
"Location",
"Location"
)
dictionary["editprocedurelabel"] = (
"Edit Procedure",
"Edit Procedure"
)
############################## 1.2.4 ###############################################
dictionary["addlookuptooltip"] = (
"Create a new lookup",
u"Create a new lookup"
)
dictionary["malelabel"] = (
"Male",
u"Male"
)
dictionary["femalelabel"] = (
"Female",
u"Female"
)
dictionary["unknownlabel"] = (
"Unknown",
u"Unknown"
)
dictionary["dayslabel"] = (
"days",
u"days"
)
dictionary["weekslabel"] = (
"weeks",
u"weeks"
)
dictionary["monthslabel"] = (
"months",
u"months"
)
dictionary["yearslabel"] = (
"years",
u"years"
)
dictionary["invaliddobtooltip"] = (
"Invalid DOB",
u"Invalid DOB"
)
dictionary["addkennelblocktooltip"] = (
"Create a new kennel block",
u"Create a new kennel block"
)
dictionary["addkenneltooltip"] = (
"Create a new kennel",
u"Create a new kennel"
)
############################## 1.2.5 ###############################################
dictionary["asmclientimportmenu"] = (
("ASM Client Import", "Import a client from ASM"),
(u"ASM Client Import", u"Import a client from ASM")
)
dictionary["chooseclientlabel"] = (
"Choose client",
u"Choose client"
)
dictionary["datectrltooltip"] = (
"Double click to choose from a calendar",
u"Double click to choose from a calendar"
)
dictionary["choosedatetitle"] = (
"Choose a date",
u"Choose a date"
)
dictionary["editappointmentlabel"] = (
"Edit Appointment",
u"Edit Appointment"
)
dictionary["agelabel"] = (
"Age",
u"Age"
)
dictionary["addvaccinationtooltip"] = (
"Add Vaccination",
u"Add Vaccination"
)
dictionary["printtooltip"] = (
"Print",
u"Print"
)
############################## 1.2.6 ###############################################
dictionary["filealteredmessage"] = (
"Another user has altered this file since you opened it. Please close this record and try again.",
u"Another user has altered this file since you opened it. Please close this record and try again."
)
dictionary["asmreflabel"] = (
"ASM Ref",
u"ASM Ref"
)
dictionary["deselectlabel"] = (
"Deselect",
u"Deselect"
)
dictionary["createappointmentlabel"] = (
"Create Appointment",
u"Create Appointment"
)
dictionary["multiplepanellabel"] = (
"Allow multiple panels open",
u"Allow multiple panels open"
)
dictionary["filealteredchoice"] = (
"Another user has altered this file since you opened it. Would you like to force through your changes?",
u"Another user has altered this file since you opened it. Would you like to force through your changes?"
)
dictionary["latelabel"] = (
"Late",
u"Late"
)
dictionary["minslabel"] = (#Abrreviation of minutes - it is advisable to keep this as short as possible.
"mins",
u"mins"
)
dictionary["microchiplabel"] = (
"Microchip",
u"Microchip"
)
dictionary["microchippedlabel"] = (
"Microchip implanted",
u"Microchip implanted"
)
dictionary["costpricelabel"] = (
"Cost Price",
u"Cost Price"
)
dictionary["viewvetnoteslabel"] = (
"View Vet Notes",
u"View Vet Notes"
)
dictionary["appointmentsummarylistboxtooltip"] = (
"Right click to view available vets\nDouble click to choose time slot",
u"Right click to view available vets\nDouble click to choose time slot"
)
############################## 1.2.7 ###############################################
dictionary["shopsalemenuitem"] = (
"Shop Sale",
u"Shop Sale"
)
dictionary["shopitemstitle"] = (
"Shop Items",
u"Shop Items"
)
dictionary["basketlabel"] = (
"Basket",
u"Basket"
)
dictionary["putbacktooltip"] = (
"Put back",
u"Put back"
)
dictionary["addtobaskettooltip"] = (
"Add to basket",
u"Add to basket"
)
dictionary["clientmergetooltip"] = (
"Merge another client into this one",
u"Merge another client into this one"
)
dictionary["clientsmergedmessage"] = (
"Clients merged",
u"Clients merged"
)
dictionary["addlabel"] = (
"Add",
u"Add"
)
dictionary["subtractlabel"] = (
"Subtract",
u"Subtract"
)
dictionary["editmarkupmenu"] = (
("Define Markup Rules", "Define Markup Rules"),
(u"Define Markup Rules", u"Define Markup Rules")
)
dictionary["multiplybylabel"] = (
"Multiply by",
u"Multiply by"
)
dictionary["roundtolabel"] = (
"Round up to",
u"Round up to"
)
dictionary["costpriceentrytooltip"] = (
"This value is not included in your settings, it is here simply to allow you to try your settings out on some real figures.",
u"This value is not included in your settings, it is here simply to allow you to try your settings out on some real figures."
)
dictionary["invalidpricemessage"] = (
"Invalid Price!",
u"Invalid Price!"
)
dictionary["priceinpenniestooltip"] = (
"Please enter price in pennies eg. \"50\" to round to the nearest 50p, \"100\" to round to the nearest pound.",
u"Please enter price in cents eg. \"50\" to round to the nearest 50 cents, \"100\" to round to the nearest euro."
)
dictionary["customerpricelabel"] = (
"Customer Price",
u"Customer Price"
)
dictionary["submitsettingstooltip"] = (
"Submit settings",
u"Submit settings"
)
dictionary["applymarkuptostocktooltip"] = (
"Apply the current markup settings to all stock.",
u"Apply the current markup settings to all stock."
)
dictionary["markupappliedtoallmessage"] = (
"Markup applied to all prices",
u"Markup applied to all prices"
)
dictionary["automarkupconfirmmessage"] = (
"Continuing will alter all of your public prices, are you sure that you want to continue?",
u"Continuing will alter all of your public prices, are you sure that you want to continue?"
)
dictionary["unitpricentrytooltip"] = (
"Type \"a\" to autogenerate a price from markup rules.",
u"Type \"a\" to autogenerate a price from markup rules."
)
dictionary["costpricentrytooltip"] = (
"Type \"c\" for help calculating the cost price.",
u"Type \"c\" for help calculating the cost price."
)
dictionary["calculatecostpricetitle"] = (
"Calculate Cost Price",
u"Calculate Cost Price"
)
dictionary["packpricelabel"] = (
"Price per pack",
u"Price per pack"
)
dictionary["unitsperpacklabel"] = (
"Units per pack",
u"Units per pack"
)
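	# Hedged worked example (illustrative comment only, not part of the original
	# file): the markup and cost-price strings above suggest that a customer
	# price is derived by multiplying the cost price and rounding up to the
	# nearest "round up to" step given in pence/cents. Assuming that reading,
	# with made-up sample values (pack price 57.00, 12 units per pack,
	# multiplier 1.6, rounding step 50):
	#   cost per unit  = 57.00 / 12          = 4.75
	#   marked up      = 4.75 * 1.6          = 7.60
	#   customer price = rounded up to 0.50  = 8.00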
############################## 1.2.8 ###############################################
dictionary["phonenumbertooltip"] = (
"CTRL + P to toggle public availability.",
u"CTRL + P to toggle public availability."
)
dictionary["lostanimallabel"] = (
"Lost Animal",
u"Lost Animal"
)
dictionary["foundanimallabel"] = (
"Found Animal",
u"Found Animal"
)
dictionary["lostandfoundmenu"] = (
("Lost and Found", "View/Edit Lost and Found"),
(u"Lost and Found", u"View/Edit Lost and Found")
)
dictionary["lostlabel"] = (
"Lost",
u"Lost"
)
dictionary["foundlabel"] = (
"Found",
u"Found"
)
dictionary["datelostlabel"] = (
"Date Lost",
u"Date Lost"
)
dictionary["datefoundlabel"] = (
"Date Found",
u"Date Found"
)
dictionary["furlengthlabel"] = (
"Fur Length",
u"Fur Length"
)
dictionary["longlabel"] = (
"Long",
u"Long"
)
dictionary["shortlabel"] = (
"Short",
u"Short"
)
dictionary["fluffylabel"] = (
"Fluffy",
u"Fluffy"
)
dictionary["hairlesslabel"] = (
"Hairless",
u"Hairless"
)
dictionary["sizelabel"] = (
"Size",
u"Size"
)
dictionary["largelabel"] = (
"Large",
u"Large"
)
dictionary["mediumlabel"] = (
"Medium",
u"Medium"
)
dictionary["smalllabel"] = (
"Small",
u"Small"
)
dictionary["juvenilelabel"] = (
"Juvenile",
u"Juvenile"
)
dictionary["adultlabel"] = (
"Adult",
u"Adult"
)
dictionary["elderlylabel"] = (
"Elderly",
u"Elderly"
)
dictionary["temperamentlabel"] = (
"Temperament",
u"Temperament"
)
dictionary["friendlylabel"] = (
"Friendly",
u"Friendly"
)
dictionary["timidlabel"] = (
"Timid",
u"Timid"
)
dictionary["aggressivelabel"] = (
"Aggressive",
u"Aggressive"
)
dictionary["collarlabel"] = (
"Collar",
u"Collar"
)
dictionary["collardescriptiontooltip"] = (
"Collar description",
u"Collar description"
)
dictionary["arealabel"] = (
"Area",
u"Area"
)
dictionary["areatooltip"] = (
"Please put in likely areas by postcode if possible as well as the city/state, separated by spaces.",
u"Please put in likely areas by postcode if possible as well as the city/state, separated by spaces."
)
dictionary["datecompletelabel"] = (
"Date complete",
u"Date complete"
)
dictionary["savetooltip"] = (
"Save",
u"Save"
)
dictionary["contacttooltip"] = (
"Contact",
u"Contact"
)
dictionary["completelabel"] = (
"Complete",
u"Complete"
)
dictionary["idlabel"] = (
"ID",
u"ID"
)
dictionary["rightclickformenutooltip"] = (
"Right click for available options.",
u"Right click for available options."
)
dictionary["lostandfoundsearchtooltip"] = (
"Search for a match",
u"Search for a match"
)
dictionary["searchuptolabel"] = (
"Search ceiling",
u"Search ceiling"
)
dictionary["searchfromlabel"] = (
"Search floor",
u"Search floor"
)
dictionary["alreadyonlostandfoundmessage"] = (
"This animal is already on the lost and found!",
u"This animal is already on the lost and found!"
)
dictionary["includecompletelabel"] = (
"Include complete?",
u"Include complete?"
)
dictionary["closelabel"] = (
"Close",
u"Close"
)
dictionary["scorelabel"] = (
"Score",
u"Score"
)
dictionary["lostandfoundsearchresultspagetitle"] = (
"Lost and Found Search Results",
u"Lost and Found Search Results"
)
dictionary["systemlabel"] = (
"System",
u"System"
)
dictionary["versionlabel"] = (
"Version",
u"Version"
)
############################## 1.3 ###############################################
dictionary["addlostmenu"] = (
("Add Lost", "Add a lost animal"),
(u"Add Lost", u"Add a lost animal")
)
dictionary["addfoundmenu"] = (
("Add Found", "Add a found animal"),
(u"Add Found", u"Add a found animal")
)
dictionary["alllabel"] = (
"All",
u"All"
)
dictionary["refreshlabel"] = (
"Refresh",
u"Refresh"
)
dictionary["filteranimalslabel"] = (
"Filter Animals",
u"Filter Animals"
)
dictionary["markaspaidlabel"] = (
"Mark as paid?",
u"Mark as paid?"
)
############################## 1.3.1 ###############################################
dictionary["asmshelterlabel"] = (
"ASM Shelter",
u"ASM Shelter"
)
dictionary["asmsheltertooltip"] = (
"If you use the Animal Shelter Manager system you can mark a client as \"The Shelter\" allowing you to import animal records from ASM who do not have an owner.",
u"If you use the Animal Shelter Manager system you can mark a client as \"The Shelter\" allowing you to import animal records from ASM who do not have an owner."
)
dictionary["appointmentrefreshlabel"] = (
"Appointment Refresh Interval",
u"Appointment Refresh Interval"
)
############################## 1.3.2 ###############################################
dictionary["asmvaccinationlabel"] = (
"ASM Vaccination",
u"ASM Vaccination"
)
dictionary["asmvaccinationtooltip"] = (
"Choose which ASM vaccine you would like Evette to use when updating animal records.",
u"Choose which ASM vaccine you would like Evette to use when updating animal records."
)
dictionary["asmerrormessage"] = (
"Unable to update ASM record!",
u"Unable to update ASM record!"
)
dictionary["asmsynctooltip"] = (
"Sync with ASM record",
u"Sync with ASM record"
)
dictionary["fieldlabel"] = (
"Field",
u"Field"
)
dictionary["asmsyncbuttontooltip"] = (
"Sync this field on Evette and ASM records",
u"Sync this field on Evette and ASM records"
)
dictionary["synctoasmlabel"] = (
"Sync to ASM",
u"Sync to ASM"
)
dictionary["synctoevettelabel"] = (
"Sync to Evette",
u"Sync to Evette"
)
dictionary["asmconnectionerrormessage"] = (
"Unable to connect to ASM.",
u"Unable to connect to ASM."
)
dictionary["asmdeathreasonlabel"] = (
"Record updated via ASM.",
u"Record updated via ASM."
)
dictionary["evettedeathreasonlabel"] = (
"Record updated via Evette.",
u"Record updated via Evette."
)
dictionary["importnewasmownermenuitem"] = (
"Import new ASM owner",
u"New Language"
)
dictionary["updateownermenuitem"] = (
"Update current owner",
u"Update current owner"
)
dictionary["1.3.2updatemessage"] = (
"Note: when you run the evette client following this upgrade you will need to re-input your database settings.",
u"Note: when you run the evette client following this upgrade you will need to re-input your database settings."
)
dictionary["tabbetweenentriestooltip"] = (
"You can switch between the user and password entries with the TAB key.",
u"You can switch between the user and password entries with the TAB key."
)
dictionary["dischargelabel"] = (
"Discharge",
u"Discharge"
)
dictionary["overnightstaylabel"] = (
"Overnight Stay",
u"Overnight Stay"
)
dictionary["animalstayedmessage"] = (
"This animal has stayed overnight, creating a new vet form.",
u"This animal has stayed overnight, creating a new vet form."
)
dictionary["prescriptionfeelabel"] = (
"Prescription Fee",
u"Prescription Fee"
)
dictionary["ontimelabel"] = (
"On time",
u"On time"
)
dictionary["dnalabel"] = (
"Did not arrive",
u"Did not arrive"
)
dictionary["viewlabel"] = (
"View",
u"View"
)
dictionary["renamelabel"] = (
"Rename",
u"Rename"
)
dictionary["filterlabel"] = (
"Filter",
u"Filter"
)
dictionary["programbrowsertooltip"] = (
"Browse to find an appropriate program.",
u"Browse to find an appropriate program."
)
dictionary["agelabel"] = (
"Age",
u"Age"
)
dictionary["batchbreakdownlabel"] = (
"Batch No Breakdown",
u"Batch No Breakdown"
)
dictionary["returntoshelterlabel"] = (
"Return to shelter",
u"Return to shelter"
)
dictionary["possibleduplicateownermessage"] = (
"This owner may already be known to the system. Would you like to view the list of similar clients?",
u"This owner may already be known to the system. Would you like to view the list of similar clients?"
)
dictionary["asmimportlabel"] = (
"Imported from ASM",
u"Imported from ASM"
)
return dictionary
| gpl-2.0 | -7,974,533,037,425,782,000 | 23.938619 | 163 | 0.665987 | false |
vernhart/flickr-moderate | common.py | 1 | 25143 | #!/usr/bin/env python3
from flickrapi import FlickrAPI, exceptions # Flickr API library
import os # get directory of script for config loading
import yaml # config file format
from pprint import pprint # for debugging
import re # for topic reply searching
import redis # redis db library
from time import sleep,time # for pauses
from functools import wraps # for decorator functions
import tempfile # for lock files
import os # for lock files
import fcntl # for lock files
import requests # for error handling
from datetime import datetime # for elasped time
global __config
global __config_loaded
__config = {}
__config_loaded = 0
def loadConfig(debug=False):
"Get configuration from yaml file"
global __config
global __config_loaded
script_dir = os.path.dirname(__file__)
config_file = script_dir + "/flickr.yaml"
modtime = os.path.getmtime(config_file)
if modtime > __config_loaded:
if __config_loaded > 0:
print("NOTICE: Reloading Config")
__config_loaded = modtime
with open(config_file, 'r') as yamlfile:
__config = yaml.safe_load(yamlfile)
if debug:
print("DEBUG: %s Loaded Configuration:" % datetime.now())
pprint(__config)
return(__config)
def handler(func):
@wraps(func)
def handle_exceptions(*args, **kwargs):
try:
resp = func(*args, **kwargs)
except exceptions.FlickrError as err:
extra=[]
for arg in args:
if not 'class' in str(type(arg)):
extra.append(str(arg))
for kw, arg in kwargs.items():
extra.append('%s=%s' % (kw, arg))
print('WARNING: flickrapi.exception.FlickrError: %s %s(%s)' %
(err, func.__name__, ', '.join(extra)))
except requests.exceptions.RequestException as err:
print('WARNING: Request Exception during %s, retrying...' % func.__name__)
sleep(10)
try:
resp = func(*args, **kwargs)
except exceptions.FlickrError as err:
print('WARNING: flickrapi.exception.FlickrError: %s %s' % (err, func.__name__))
else:
return(resp)
else:
return(resp)
return(handle_exceptions)
def retry(func, retries=3, failurefatal=True):
retries = int(retries)
@wraps(func)
def retry_function(*args, **kwargs):
for attempt in range(retries+1):
try:
resp = func(*args, **kwargs)
except:
if attempt == retries:
if failurefatal:
raise
else:
print('ERROR: Call to %s failed.' % func.__name__)
else:
print('WARNING: Call to %s failed. Retrying...' % func.__name__)
# pause before continuing
sleep(10)
else:
return(resp)
else:
extra=[]
for arg in args:
if not 'class' in str(type(arg)):
extra.append(arg)
for kw, arg in kwargs.items():
                extra.append('%s=%s' % (kw, arg))
print('ERROR: Tried too many times (%s). Giving up on %s(%s).' %
(retries+1, func.__name__, ', '.join(extra)))
return(retry_function)
class myflickrapi(FlickrAPI):
# here's where we define handlers for the flickr api methods we use
@retry
def myGetGroups(self, *args, **kvargs): return(self.people.getGroups(*args, **kvargs))
@retry
def myGetPhotos(self, *args, **kvargs): return(self.groups.pools.getPhotos(*args, **kvargs))
@handler
def myRemove(self, *args, **kvargs): return(self.groups.pools.remove(*args, **kvargs))
@retry
def myGetTopics(self, *args, **kvargs): return(self.groups.discuss.topics.getList(*args, **kvargs))
@retry
def myAddTopic(self, *args, **kvargs): return(self.groups.discuss.topics.add(*args, **kvargs))
@retry
def myGetReplies(self, *args, **kvargs): return(self.groups.discuss.replies.getList(*args, **kvargs))
@handler
def myAddReply(self, *args, **kvargs): return(self.groups.discuss.replies.add(*args, **kvargs))
@handler
def myDeleteReply(self, *args, **kvargs): return(self.groups.discuss.replies.delete(*args, **kvargs))
@handler
def myInvite(self, *args, **kvargs): return(self.groups.invite.photo.invite(*args, **kvargs))
def auth(api_key, api_secret, debug=False):
"Initialize API connection"
if debug: print("DEBUG: %s Before Auth" % datetime.now())
flickr = myflickrapi(api_key, api_secret, format='parsed-json')
if debug: print("DEBUG: %s Object created" % datetime.now())
# authorization tokens are cached so this should only need to be run once on any server
if not flickr.token_valid(perms='delete'):
if debug: print("DEBUG: %s token not valid, requesting another" % datetime.now())
flickr.get_request_token(oauth_callback='oob')
authorize_url = flickr.auth_url(perms='delete')
print("Enter this URL in your browser: %s" % authorize_url)
verifier = str(input('Verifier code: '))
flickr.get_access_token(verifier)
if debug: print("DEBUG: %s After Auth" % datetime.now())
return flickr
def isInt(v):
"Returns true if the string represents an integer"
v = str(v).strip()
return v=='0' or (v if v.find('..') > -1 else v.lstrip('-+').rstrip('0').rstrip('.')).isdigit()
def intOrString (string):
"If the string represents an integer, returns an integer, otherwise returns the string"
if isInt(string): return int(string)
else: return string
def charFilter(instring, allowed):
"return a string with all the un-allowed characters removed"
output = ''
for c in instring:
if c in allowed:
output += c
return output
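# Illustrative examples of the helpers above (not part of the original module,
# shown only to document the behaviour of the code as written):
#   isInt("42")        -> True       isInt("abc")         -> False
#   intOrString("42")  -> 42         intOrString("abc")   -> "abc"
#   charFilter("ab1c2", "0123456789") -> "12"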
def get_groups (flickr, user_id, debug=False):
"Get all Fav/View groups that we are a member of"
if debug: print("DEBUG: %s Before GetGroups" % datetime.now())
groups = flickr.myGetGroups(user_id=user_id, format='etree')
if debug: print("DEBUG: %s After GetGroups" % datetime.now())
views = {}
favs = {}
for node in groups.iter():
group = node.items()
if len(group) > 1:
info = {'icon': 'https://www.flickr.com/images/buddyicon.gif'}
for pair in group:
info[pair[0]] = intOrString(pair[1])
if info['iconserver'] > 0:
info['icon'] = 'http://farm%d.staticflickr.com/%d/buddyicons/%s.jpg' % (info['iconfarm'], info['iconserver'], info['nsid'])
if 'Views:' in info['name']:
mincount = int(info['name'][6:].replace(',', ''))
info['mincount'] = mincount
views[mincount] = info
if 'Favorites:' in info['name']:
if '<5' in info['name']: mincount = 1
else: mincount = int(info['name'][10:].replace(',', ''))
info['mincount'] = mincount
favs[mincount] = info
return {'views': views, 'favs': favs}
def scanGroups(flickr, groups, vieworfav, testrun=False, checkcounts=None, removeNow=False, maxpages=-1, redisStore=False):
"Scans view/fav groups and enforces rules"
checkViews = False
checkFavs = False
if vieworfav == 'views':
checkViews = True
elif vieworfav == 'favs':
checkFavs = True
    assert checkViews or checkFavs, 'scanGroups second parameter must be "views" or "favs"'
viewsLimit = 0
favsLimit = 0
# checkcounts is a list of mincounts that we'll check
# if it's None, initialize it with all the counts
if checkcounts is None:
checkcounts = groups[vieworfav].keys()
if maxpages == -1:
viewsLimit = 200
else:
# if the mincounts are provided, only go down to the lowest specified
if checkFavs:
favsLimit = sorted(checkcounts)[0]
else:
viewsLimit = sorted(checkcounts)[0]
# no view or fav group will ever have more than this mincount
prevmin = 9999999999999
seenphotos = []
for mincount, info in sorted(groups[vieworfav].items(), reverse=True):
starttime = datetime.now()
# save what we're checking in the group object
info['vieworfav'] = vieworfav
# if mincount is not in the list to check, skip the delete actions at the end
if mincount not in checkcounts:
skipactions = True
else:
skipactions = False
if checkFavs and mincount < favsLimit: return
if checkViews and mincount < viewsLimit: return
if redisStore and mincount not in checkcounts:
print("DEBUG: %s Skipping %s" % (datetime.now(), mincount))
continue
if not (testrun or skipactions):
scanlock = lockScan(vieworfav + str(mincount))
if not scanlock['locked']:
print("Someone is already scanning %s%s, skipping actions" % (vieworfav, mincount))
skipactions = True
else:
scanlock = {'locked': False}
graduates = {}
removephotos = {}
seenthisgroup = []
# only work with groups we can administer
if info['admin']:
print('----- %s -----' % " ".join(info['name'].split()))
pages = 1
page_size = 500
timeout = 300 # 5 min
i = 0
while i < pages:
i=i+1
photos = flickr.myGetPhotos(group_id=info['nsid'], page=i, extras='views,count_faves,url_n', per_page=page_size, timeout=timeout)
# use the actual page limit if max is -1 or if the actual is less than the max
if maxpages == -1 or photos['photos']['pages'] < maxpages:
#print("~~~~~~ old max: %s" % maxpages)
pages = photos['photos']['pages'] + 1
else:
#print(":::::: override pages")
pages = maxpages
#print('page: %s pages: %s actual pages: %s' % (i, pages, photos['photos']['pages']))
for photo in photos['photos']['photo']:
# sometimes the url_n url doesn't get set for some reason
# let's construct it manually
if not 'url_n' in photo:
photo['url_n'] = 'https://farm%s.staticflickr.com/%s/%s_%s_n.jpg' % (photo['farm'], photo['server'], photo['id'], photo['secret'])
photo['url'] = "https://www.flickr.com/photos/%s/%s" % (photo['owner'], photo['id'])
if checkFavs:
# set favs
photo['favs'] = intOrString(photo['count_faves'])
photo['counts'] = photo['favs']
# later we'll use 'counts' instead of views or favs
if checkViews:
photo['counts'] = intOrString(photo['views'])
removed = False
# if it doesn't have high enough count, mark for removal
if photo['counts'] < mincount:
print("Should not be in this group!! %s %s" % (photo['counts'], photo['url']))
if removeNow:
if not (testrun or skipactions):
resp = flickr.myRemove(photo_id=photo['id'], group_id=info['nsid'])
else:
removephotos[photo['id']] = info['nsid']
removed = True
if checkFavs and photo['counts'] > 0:
if allowInvites(photo['owner']):
if not (testrun or skipactions):
# only invite to lower group if it is within 50% of current group
if photo['counts'] >= mincount*0.5:
bestgroup = bestGroup(groups, **{vieworfav: photo['counts']})
print('Inviting %s to %s' %(photo['url'], bestgroup['name']))
resp = flickr.myInvite(group_id=bestgroup['nsid'], photo_id=photo['id'])
if redisStore:
# check to see if the photo is already listed in a higher group
if not removed and photoInHigherGroup(photo['id'],vieworfav,mincount):
print("Already in a higher group: %s %s" % (photo['counts'],photo['url']))
if removeNow:
if not (testrun or skipactions):
resp = flickr.myRemove(photo_id=photo['id'], group_id=info['nsid'])
else:
removephotos[photo['id']] = info['nsid']
removed = True
else:
# if we've seen this photo before, it must already be in a higher group
if not removed and photo['id'] in seenphotos:
print('Already in a higher group: %s %s' % (photo['counts'],photo['url']))
if removeNow:
if not (testrun or skipactions):
resp = flickr.myRemove(photo_id=photo['id'], group_id=info['nsid'])
else:
removephotos[photo['id']] = info['nsid']
removed = True
# skip this for now...
if not (testrun or skipactions) and False:
# if we haven't seen it before but it has a high count, add to graduates list
if not removed and photo['counts'] >= prevmin:
# only add the data we need to reduce memory usage
graduates[photo['id']] = {}
graduates[photo['id']]['owner'] = photo['owner']
graduates[photo['id']]['url'] = photo['url']
graduates[photo['id']]['url_n'] = photo['url_n']
if checkFavs:
graduates[photo['id']]['favs'] = photo['favs']
if checkViews:
graduates[photo['id']]['views'] = photo['views']
# if we haven't removed the photo, keep track of the ID
if not redisStore and not removed:
seenthisgroup.append(photo['id'])
# if we've got more than 95% of the page_size in removephotos:
# remove them and turn back the page iterator
if len(removephotos) > (page_size * 0.95):
if not (testrun or skipactions):
print("Removing %d photos..." % len(removephotos))
                        # remove those in the list
for photo_id, group_id in removephotos.items():
resp = flickr.myRemove(photo_id=photo_id, group_id=group_id)
# turn back the page iterator
i = i - 1
# clear the list
removephotos = {}
if not (testrun or skipactions):
if len(removephotos) > 0:
print("Removing %d photos..." % len(removephotos))
# now remove all the photos that don't belong
for photo_id, group_id in removephotos.items():
resp = flickr.myRemove(photo_id=photo_id, group_id=group_id)
# only do the deletes in the graduation thread if we're scanning the whole group
if maxpages == -1:
doDeletes = True
else:
doDeletes = False
graduatePost(flickr, groups, group=info, photos=graduates, doDeletes=doDeletes)
if scanlock['locked']:
unlockScan(scanlock)
prevmin = mincount
seenphotos.extend(seenthisgroup)
print('Seen photos: %6d total: %7d %44s' %
(len(seenthisgroup), len(seenphotos), '(Elapsed: %s)' % (datetime.now() - starttime)))
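# Hypothetical driver sketch (not part of this module): the config keys
# 'api_key', 'api_secret' and 'user_id' are assumed names for values kept in
# flickr.yaml, shown only to illustrate how the pieces above fit together.
#
#   cfg = loadConfig()
#   flickr = auth(cfg['api_key'], cfg['api_secret'])
#   groups = get_groups(flickr, user_id=cfg['user_id'])
#   scanGroups(flickr, groups, 'views', testrun=True, maxpages=2)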
def allowInvites(ownerid):
"Returns false if the owner ID is in the no-invites list"
# reload config, if necessary
cfg = loadConfig()
return(ownerid not in cfg['no_invites'])
def getTopicID(flickr, group_id, subject):
"Return the topic ID of the topic with the given subject in the supplied group."
topic_id = 0
# search the topics for the given subject
pages = 1
i = 0
while i <= pages:
i=i+1
topics = flickr.myGetTopics(group_id=group_id, page=i)
pages = topics['topics']['pages']
if int(topics['topics']['total']) > 0:
for topic in topics['topics']['topic']:
if topic['subject'] == subject:
topic_id = topic['id']
return(topic_id)
return(topic_id)
def graduatePost(flickr, groups, group, photos, doDeletes=True):
"Update topic post about photos that could be moved to the next higher group."
return
subject = 'Proposed Graduation'
topic_id = getTopicID(flickr, group_id=group['nsid'], subject=subject)
# if we didn't find the topic, create it
if topic_id == 0:
resp = flickr.myAddTopic(group_id=group['nsid'], subject=subject,
message='This topic is an autogenerated message.\n\n' +
'The replies to this post contain all the photos in this group' +
' that qualify for a higher group. This message will be updated periodically.' +
' If these photos are yours, feel free to remove them from this group and' +
' add them to the appropriate higher group. If you are an admin, do please' +
' invite these photos to the next higher group.')
if resp['stat'] == 'ok':
topic_id = resp['topic']['id']
no_photos_message = "No photos ready for graduation."
per_page = 500
replies_to_delete = {}
extra_replies = []
pages = 1
i = 0
while i <= pages:
i=i+1
        replies = flickr.myGetReplies(group_id=group['nsid'], topic_id=topic_id, page=i, per_page=per_page)
pages = replies['replies']['topic']['pages']
#print("page %s/%s" % (i, pages))
if 'reply' in replies['replies']:
for reply in replies['replies']['reply']:
if reply['message']['_content'] == no_photos_message:
if len(photos) > 0:
# if the reply is "no photos" but we have photos, delete the reply
resp = flickr.myDeleteReply(group_id=group['nsid'], topic_id=topic_id, reply_id=reply['id'])
pass
else:
# if we have no photos and the reply is "no photos", do nothing
return
else:
# extract photo_id out of first url
m = re.search(r'/(?P<id>[0-9]+)[\'"]', reply['message']['_content'])
if m == None:
# if there's no match, remove the reply
extra_replies.append(reply['id'])
elif m.group('id') in replies_to_delete:
# we have a duplicate photo in replies, delete immediately
print("duplicate reply for %s" % m.group('id'))
#resp = flickr.myDeleteReply(group_id=group['nsid'], topic_id=topic_id, reply_id=reply['id'])
extra_replies.append(reply['id'])
else:
# we'll mark them all for deletion
# we'll remove from this list as we go through the photos
replies_to_delete[m.group('id')] = reply['id']
postedOwners = {}
maxPostsPerOwner = 5
for photo_id, photo in sorted(photos.items()):
if photo_id in replies_to_delete:
# if photo already posted in replies, remove from delete list
replies_to_delete.pop(photo_id)
else:
# else, post reply with photo
if 'favs' in photo:
# if we have favs, let's talk about favorites groups
nextgroup = bestGroup(groups, favs=int(photo['favs']))
else:
# else talk about views groups
nextgroup = bestGroup(groups, views=int(photo['views']))
# set the count to zero, if it's not set
if not photo['owner'] in postedOwners:
postedOwners[photo['owner']] = 0
if postedOwners[photo['owner']] <= maxPostsPerOwner:
"only post a few per owner per run, to cut down on spamming"
if allowInvites(photo['owner']):
# invite the photo to the next group
resp = flickr.myInvite(group_id=nextgroup['nsid'], photo_id=photo_id)
if resp is not None:
if resp['stat'] == 'ok':
postedOwners[photo['owner']] += 1
print('Posting reply for %s' % photo['url'])
resp = flickr.myAddReply(group_id=group['nsid'], topic_id=topic_id,
message=('<a href="https://www.flickr.com/photos/%s/%s"><img src="%s"></a> '
'Promote to <a href="https://www.flickr.com/groups/%s">%s</a>\n') %
(photo['owner'], photo_id, photo['url_n'], nextgroup['nsid'], nextgroup['name']))
if doDeletes:
for reply_id in sorted(extra_replies):
print('Deleting extra reply')
resp = flickr.myDeleteReply(group_id=group['nsid'], topic_id=topic_id, reply_id=reply_id)
for photo_id in sorted(replies_to_delete):
print('Deleting reply for photo_id %s reply_id %s' % (photo_id, replies_to_delete[photo_id]))
resp = flickr.myDeleteReply(group_id=group['nsid'], topic_id=topic_id, reply_id=replies_to_delete[photo_id])
replies = flickr.myGetReplies(group_id=group['nsid'], topic_id=topic_id, page=1, per_page=1)
if replies['replies']['topic']['total'] == '0':
resp = flickr.myAddReply(group_id=group['nsid'], topic_id=topic_id,
message=no_photos_message)
return
def bestGroup(groups, views=-1, favs=-1):
"Given a number of views or favorites, will return the name of the best group"
prevgroup = {}
if views >= 0:
for mincount, info in sorted(groups['views'].items()):
if views < mincount:
prevgroup['nextgroup'] = mincount
return(prevgroup)
prevgroup = info
return(info)
prevgroup = {}
if favs >= 0:
for mincount, info in sorted(groups['favs'].items()):
if favs < mincount:
prevgroup['nextgroup'] = mincount
return(prevgroup)
prevgroup = info
return(info)
    # need to specify either views or favs as a non-negative parameter
return(prevgroup)
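# Illustrative sketch with made-up group data: with view groups at mincounts
# 100 and 500, a photo with 350 views resolves to the 100+ group, and the
# returned info dict additionally carries a 'nextgroup' hint of 500.
#
#   groups = {'views': {100: {'name': 'Views: 100', 'nsid': 'g100'},
#                       500: {'name': 'Views: 500', 'nsid': 'g500'}},
#             'favs': {}}
#   bestGroup(groups, views=350)  # -> the mincount-100 dict, plus 'nextgroup': 500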
def lockScan(locktext):
"If this lock is in place, we're currently scanning the groups"
lock = {}
lock['lockfile'] = os.path.normpath(tempfile.gettempdir() + '/' + 'flickr-moderate-' + locktext)
lock['fp'] = open(lock['lockfile'], 'w')
lock['fp'].flush()
try:
fcntl.lockf(lock['fp'], fcntl.LOCK_EX | fcntl.LOCK_NB)
except IOError:
lock['locked'] = False
lock['fp'].close()
else:
lock['locked'] = True
return(lock)
def unlockScan(lock):
if not lock['locked']:
return
fcntl.lockf(lock['fp'], fcntl.LOCK_UN)
if os.path.isfile(lock['lockfile']):
os.unlink(lock['lockfile'])
return
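# Typical usage of the lock helpers above (illustrative only):
#
#   lock = lockScan('views500')
#   if lock['locked']:
#       try:
#           pass  # ... perform the scan ...
#       finally:
#           unlockScan(lock)
#   else:
#       print('another scan already holds the lock')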
#################
def redisAuth(cfg):
"initialize redis db object"
return(redis.StrictRedis(host=cfg['redis_host'], port=cfg['redis_port'], db=cfg['redis_db']))
def photoInHigherGroup(photo_id, vieworfav, mincount):
"check if the photo is in a higher group in redis db"
return(False)
def getFavsFromDB(flickr, db, photo_id):
"returns the photos favorites count from the db or from flickr"
favs = db.hget(photo_id, 'favs')
if favs:
return(int(favs))
favs = int(getFavsFromFlickr(flickr, photo_id))
saveFavs(db, photo_id, favs)
return(favs)
def saveFavs(db, photo_id, favs):
"saves photo_id and favs to redis db"
db.hset(photo_id, 'favs', favs)
db.hset(photo_id, 'ts', time())
return
| mpl-2.0 | -4,186,878,343,005,086,000 | 39.61874 | 154 | 0.534861 | false |
JhonyVilla/blog | pelican-plugins/assets/assets.py | 1 | 2672 | # -*- coding: utf-8 -*-
"""
Asset management plugin for Pelican
===================================
This plugin allows you to use the `webassets`_ module to manage assets such as
CSS and JS files.
The ASSET_URL is set to a relative url to honor Pelican's RELATIVE_URLS
setting. This requires the use of SITEURL in the templates::
<link rel="stylesheet" href="{{ SITEURL }}/{{ ASSET_URL }}">
.. _webassets: https://webassets.readthedocs.org/
"""
from __future__ import unicode_literals
import os
import logging
from pelican import signals
logger = logging.getLogger(__name__)
try:
import webassets
from webassets import Environment
from webassets.ext.jinja2 import AssetsExtension
except ImportError:
webassets = None
def add_jinja2_ext(pelican):
"""Add Webassets to Jinja2 extensions in Pelican settings."""
if 'JINJA_ENVIRONMENT' in pelican.settings: # pelican 3.7+
pelican.settings['JINJA_ENVIRONMENT']['extensions'].append(AssetsExtension)
else:
pelican.settings['JINJA_EXTENSIONS'].append(AssetsExtension)
def create_assets_env(generator):
"""Define the assets environment and pass it to the generator."""
theme_static_dir = generator.settings['THEME_STATIC_DIR']
assets_destination = os.path.join(generator.output_path, theme_static_dir)
generator.env.assets_environment = Environment(
assets_destination, theme_static_dir)
if 'ASSET_CONFIG' in generator.settings:
for item in generator.settings['ASSET_CONFIG']:
generator.env.assets_environment.config[item[0]] = item[1]
if 'ASSET_BUNDLES' in generator.settings:
for name, args, kwargs in generator.settings['ASSET_BUNDLES']:
generator.env.assets_environment.register(name, *args, **kwargs)
if 'ASSET_DEBUG' in generator.settings:
generator.env.assets_environment.debug = generator.settings['ASSET_DEBUG']
elif logging.getLevelName(logger.getEffectiveLevel()) == "DEBUG":
generator.env.assets_environment.debug = True
for path in (generator.settings['THEME_STATIC_PATHS'] +
generator.settings.get('ASSET_SOURCE_PATHS', [])):
full_path = os.path.join(generator.theme, path)
generator.env.assets_environment.append_path(full_path)
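# Illustrative pelicanconf.py settings (bundle and file names are invented;
# only the setting keys ASSET_CONFIG, ASSET_BUNDLES, ASSET_DEBUG and
# ASSET_SOURCE_PATHS are actually read by the code above):
#
#   ASSET_CONFIG = (('cache', False), ('manifest', False))
#   ASSET_BUNDLES = (
#       ('main_css', ['css/style.css'],
#        {'filters': 'cssmin', 'output': 'css/main.min.css'}),
#   )
#   ASSET_DEBUG = False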
def register():
"""Plugin registration."""
if webassets:
signals.initialized.connect(add_jinja2_ext)
signals.generator_init.connect(create_assets_env)
else:
logger.warning('`assets` failed to load dependency `webassets`.'
'`assets` plugin not loaded.')
| gpl-3.0 | -8,804,839,349,497,951,000 | 33.626667 | 83 | 0.666542 | false |
samuelmaudo/yepes | yepes/contrib/datamigrations/importation_plans/base.py | 1 | 5591 | # -*- coding:utf-8 -*-
from __future__ import unicode_literals
import collections
import operator
from django.db import transaction
from django.db.models import F, Q
from django.utils.six.moves import reduce
from django.utils.text import camel_case_to_spaces, capfirst
from yepes.contrib.datamigrations.exceptions import (
UnableToCreateError,
UnableToImportError,
UnableToUpdateError,
)
from yepes.utils.iterators import isplit
from yepes.utils.properties import class_property
class ImportationPlan(object):
"""
Base class for data-importation plan implementations.
Subclasses must at least overwrite ``import_batch()``.
"""
inserts_data = True
updates_data = True
@class_property
def name(cls):
name = camel_case_to_spaces(cls.__name__)
if name.endswith('plan'):
name = name[:-5]
if name.endswith('importation'):
name = name[:-12]
return '_'.join(name.split())
@class_property
def verbose_name(cls):
return capfirst(cls.name.replace('_', ' ').strip())
def __init__(self, migration):
self.migration = migration
def check_conditions(self):
if not self.migration.can_import:
raise UnableToImportError
if self.inserts_data and not self.migration.can_create:
raise UnableToCreateError
if self.updates_data and not self.migration.can_update:
raise UnableToUpdateError
def finalize_importation(self):
pass
def import_batch(self, batch):
raise NotImplementedError('Subclasses of ImportationPlan must override import_batch() method')
def prepare_batch(self, batch):
return batch
def prepare_importation(self):
pass
def run(self, data, batch_size=100):
self.check_conditions()
with transaction.atomic():
self.prepare_importation()
for batch in isplit(data, batch_size):
self.import_batch(self.prepare_batch(batch))
self.finalize_importation()
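# Minimal illustrative subclass (not part of this module), assuming the
# migration object exposes `model` the same way ModelImportationPlan below
# relies on; a real plan would also handle updates and key lookups.
#
#   class BulkInsertionPlan(ImportationPlan):
#
#       updates_data = False
#
#       def import_batch(self, batch):
#           model = self.migration.model
#           model._base_manager.bulk_create([model(**row) for row in batch])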
class ModelImportationPlan(ImportationPlan):
"""
    Base class for importation plans that operate on a Django model.
Subclasses must at least overwrite ``import_batch()``.
"""
def get_existing_keys(self, batch):
key = self.migration.primary_key
if not batch or key is None:
return set()
qs = self.get_existing_queryset(batch)
if not isinstance(key, collections.Iterable):
return set(qs.values_list(key.attname, flat=True).iterator())
else:
key_attrs = [k.attname for k in key]
return set(qs.values_list(*key_attrs).iterator())
def get_existing_objects(self, batch):
key = self.migration.primary_key
if not batch or key is None:
return {}
qs = self.get_existing_queryset(batch)
if not isinstance(key, collections.Iterable):
key_attr = key.attname
return {
getattr(obj, key_attr): obj
for obj
in qs.iterator()
}
else:
key_attrs = [k.attname for k in key]
return {
tuple(getattr(obj, attr) for attr in key_attrs): obj
for obj
in qs.iterator()
}
def get_existing_queryset(self, batch):
key = self.migration.primary_key
model = self.migration.model
manager = model._base_manager
if not batch or key is None:
return manager.none()
if not isinstance(key, collections.Iterable):
key_attr = key.attname
return manager.filter(**{
'{0}__in'.format(key_attr): (
row[key_attr]
for row
in batch
)
})
else:
key_attrs = [k.attname for k in key]
return manager.filter(reduce(operator.or_, (
Q(**{
attr: row[attr]
for attr
in key_attrs
})
for row
in batch
)))
def prepare_batch(self, batch):
m = self.migration
if m.natural_foreign_keys is not None:
for fld in m.natural_foreign_keys:
attr = fld.attname
path = fld.path
rel_field = m.model_fields[fld][-1]
rel_manager = rel_field.model._base_manager
keys = dict(
rel_manager.filter(**{
'{0}__in'.format(rel_field.name): {
row[path]
for row
in batch
}
}).values_list(
rel_field.name,
'pk',
).iterator()
)
if not m.ignore_missing_foreign_keys:
for row in batch:
row[attr] = keys[row.pop(path)]
else:
erroneous_rows = []
for i, row in enumerate(batch):
try:
value = keys[row.pop(path)]
except KeyError:
erroneous_rows.append(i)
else:
row[attr] = value
for i in reversed(erroneous_rows):
del batch[i]
return batch
| bsd-3-clause | 1,023,181,490,690,836,600 | 29.551913 | 102 | 0.519943 | false |
j-towns/fastar | fastar/test_util.py | 1 | 3377 | from itertools import chain
from random import shuffle
import numpy as np
from jax import numpy as jnp, test_util as jtu
from jax.util import safe_map, safe_zip
from jax.tree_util import tree_multimap, tree_flatten, tree_map
from fastar import lazy_eval, lazy_eval_fixed_point, LazyArray
map = safe_map
zip = safe_zip
def check_shape_and_dtype(expected, actual):
assert expected.shape == actual.shape
assert expected.dtype == actual.dtype
def naive_fixed_point(fun, arg):
arg, arg_prev = fun(arg), arg
while not jnp.all(arg == arg_prev):
arg, arg_prev = fun(arg), arg
return arg
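# Tiny illustrative example of naive_fixed_point (assumed inputs): iterating
# x -> min(x + 1, 5) from zeros converges to an array of fives.
#
#   naive_fixed_point(lambda x: jnp.minimum(x + 1, 5), jnp.zeros(3))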
def check_child_counts(arrs):
visited = set()
def _check_child_counts(arrs):
for arr in arrs:
if isinstance(arr, LazyArray) and arr not in visited:
assert type(arr.child_counts) is np.ndarray
assert arr.child_counts.dtype == np.int64
assert np.all(arr.child_counts == 0)
visited.add(arr)
_check_child_counts(arr.eqn.invars)
_check_child_counts(arrs)
def check_state(arrs):
# Make sure none of the elements are in the temporary REQUESTED state
visited = set()
def _check_state(arrs):
for arr in arrs:
if isinstance(arr, LazyArray) and arr not in visited:
assert np.all((arr.state == 0) | (arr.state == 1))
visited.add(arr)
_check_state(arr.eqn.invars)
_check_state(arrs)
def _identity(x):
return x + np.zeros((), x.dtype)
def check_lazy_fun(fun_, *args, atol=None, rtol=None):
def fun(*args):
args = tree_map(_identity, args)
return fun_(*args)
out_expected_flat, out_expected_tree = tree_flatten(fun(*args))
out_flat, out_tree = tree_flatten(lazy_eval(fun, *args))
assert out_expected_tree == out_tree
tree_multimap(check_shape_and_dtype, out_expected_flat, out_flat)
jtu.check_close(out_expected_flat,
[o[:] if o.shape else o[()] for o in out_flat], atol, rtol)
check_child_counts(out_flat)
check_state(out_flat)
out_flat, _ = tree_flatten(lazy_eval(fun, *args))
indices = []
for n, o in enumerate(out_flat):
indices.append([(n, i) for i in np.ndindex(*o.shape)])
indices = list(chain(*indices))
shuffle(indices)
indices = indices[:5]
for n, i in indices:
jtu.check_close(out_flat[n][i], out_expected_flat[n][i], atol, rtol)
assert np.dtype(out_flat[n][i]) == np.dtype(out_expected_flat[n][i])
check_child_counts(out_flat)
check_state(out_flat)
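# Illustrative use in a test (function and shapes are made up; any function
# FastAR can evaluate lazily would do):
#
#   def test_add():
#       check_lazy_fun(lambda a, b: a + b,
#                      jnp.ones((3, 4)), jnp.arange(12.).reshape(3, 4))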
def check_lazy_fixed_point(fun, mock_arg, atol=None, rtol=None):
out_expected_flat, out_expected_tree = tree_flatten(
naive_fixed_point(fun, mock_arg))
out_flat, out_tree = tree_flatten(lazy_eval_fixed_point(fun, mock_arg))
assert out_expected_tree == out_tree
tree_multimap(check_shape_and_dtype, out_expected_flat, out_flat)
jtu.check_close(out_expected_flat, [o[:] for o in out_flat], atol, rtol)
check_child_counts(out_flat)
check_state(out_flat)
out_flat, out_tree = tree_flatten(lazy_eval_fixed_point(fun, mock_arg))
indices = []
for n, o in enumerate(out_flat):
indices.append([(n, i) for i in np.ndindex(*o.shape)])
indices = list(chain(*indices))
shuffle(indices)
indices = indices[:5]
for n, i in indices:
jtu.check_close(out_flat[n][i], out_expected_flat[n][i], atol, rtol)
assert np.dtype(out_flat[n][i]) == np.dtype(out_expected_flat[n][i])
check_child_counts(out_flat)
check_state(out_flat)
| mit | 8,212,361,584,758,945,000 | 34.177083 | 77 | 0.674267 | false |
walafc0/soclib | soclib/iss/iss_profiler/bin/iss_profiler2profile.py | 1 | 2939 | #!/usr/bin/env python
from dsx.util.objdumper import *
import sys
__id__ = "$Id: iss_profiler2profile.py 917 2009-03-12 10:10:06Z nipo $"
__version__ = "$Revision: 917 $"
class SymLooker:
def __init__(self, arch, obj):
self.__syms = {}
dumper = ObjDumper(arch, obj)
for section in dumper:
for sym in section:
self.__syms[sym.addr] = sym.name
self.__addrs = self.__syms.keys()
self.__addrs.sort()
self.__addr2sym = {}
def is_entry(self, addr):
return addr in self.__syms
def lookup_sym(self, addr):
last_addr = None
for sym_addr in self.__addrs:
if sym_addr > addr:
break
last_addr = sym_addr
if last_addr is None:
print hex(addr), "not found in", self.__addrs
return self.__syms[last_addr]
def find_sym(self, addr):
try:
return self.__addr2sym[addr]
except KeyError:
sym = self.lookup_sym(addr)
self.__addr2sym[addr] = sym
return sym
def per_sym(self, ctor):
ret = {}
for k in self.syms():
ret[k] = ctor(k)
return ret
def syms(self):
return self.__syms.values()
arch = sys.argv[1]
obj = sys.argv[2]
sl = SymLooker(arch, obj)
class Counter:
def __init__(self, sym):
self.sym = sym
self.total = 0
self.frozen = 0
self.running = 0
self.runs = 0
def inc(self, running, entering):
if entering:
self.runs += 1
if running:
self.running += 1
else:
self.frozen += 1
self.total += 1
def cmp_total(self, other):
return cmp(self.total, other.total)
def cmp_running(self, other):
return cmp(self.running, other.running)
def missing(self):
if self.total:
return float(self.frozen)/float(self.total)
else:
return 0
def cmp_missing(self, other):
return cmp(self.missing(), other.missing())
def cmp_runs(self, other):
return cmp(self.runs, other.runs)
def cpr(self):
if self.runs:
return float(self.total)/float(self.runs)
else:
return 0
def cmp_cpr(self, other):
return cmp(self.cpr(), other.cpr())
def __repr__(self):
return "%s runs %04d total %06d, cpr: %06d, running time %06d, frz %06d, miss %f"%(
self.sym.ljust(30), self.runs, self.total, self.cpr(), self.running, self.frozen, self.missing())
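# Input (read from stdin below) is one trace record per line with three
# space-separated fields, e.g. "R + 0xbfc00000": a run flag ('R' = running,
# anything else counted as frozen), a '+'/'-' marker (only '+' lines bump the
# global total), and the instruction address in hex.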
if sys.argv[3:]:
for xaddr in sys.argv[3:]:
addr = int(xaddr, 16)
print hex(addr), sl.find_sym(addr)
else:
count = sl.per_sym(Counter)
total = 0
last_func = ''
for line in sys.stdin.readlines():
line = line.strip()
running, asked, xaddr = line.split(' ')
if asked == '+':
total += 1
running = running == 'R'
addr = int(xaddr, 16)
sym = sl.find_sym(addr)
entry = sl.is_entry(addr)
count[sym].inc(running, last_func != sym and entry)
last_func = sym
v = count.values()
v = filter(lambda x:x.runs > 15, v)
v.sort(Counter.cmp_runs)
v.reverse()
print "Most runs"
for i in v:
print i
v.sort(Counter.cmp_running)
v.reverse()
print "Most on CPU"
for i in v:
print i
v.sort(Counter.cmp_missing)
v.reverse()
print "Most missing"
for i in v:
print i
| lgpl-2.1 | -5,641,330,111,067,963,000 | 21.960938 | 100 | 0.638653 | false |
ayepezv/GAD_ERP | openerp/addons/test_impex/models.py | 2 | 4918 | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, fields, models
def selection_fn(model):
return list(enumerate(["Corge", "Grault", "Wheee", "Moog"]))
def compute_fn(records):
for record in records:
record.value = 3
def inverse_fn(records):
pass
MODELS = [
('boolean', fields.Boolean()),
('integer', fields.Integer()),
('float', fields.Float()),
('decimal', fields.Float(digits=(16, 3))),
('string.bounded', fields.Char(size=16)),
('string.required', fields.Char(size=None, required=True)),
('string', fields.Char(size=None)),
('date', fields.Date()),
('datetime', fields.Datetime()),
('text', fields.Text()),
('selection', fields.Selection([(1, "Foo"), (2, "Bar"), (3, "Qux"), (4, '')])),
# here use size=-1 to store the values as integers instead of strings
('selection.function', fields.Selection(selection_fn, size=-1)),
# just relate to an integer
('many2one', fields.Many2one('export.integer')),
('one2many', fields.One2many('export.one2many.child', 'parent_id')),
('many2many', fields.Many2many('export.many2many.other')),
('function', fields.Integer(compute=compute_fn, inverse=inverse_fn)),
# related: specialization of fields.function, should work the same way
# TODO: reference
]
for name, field in MODELS:
class NewModel(models.Model):
_name = 'export.%s' % name
const = fields.Integer(default=4)
value = field
@api.multi
def name_get(self):
return [(record.id, "%s:%s" % (self._name, record.value)) for record in self]
@api.model
def name_search(self, name='', args=None, operator='ilike', limit=100):
if isinstance(name, basestring) and name.split(':')[0] == self._name:
records = self.search([('value', operator, int(name.split(':')[1]))])
return records.name_get()
else:
return []
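# The loop above stamps out one small model per MODELS entry, e.g.
# 'export.boolean' with a Boolean `value`, 'export.string.required' with a
# required Char `value`, and so on; 'export.integer' doubles as the comodel
# of the many2one field declared in MODELS.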
class One2ManyChild(models.Model):
_name = 'export.one2many.child'
# FIXME: orm.py:1161, fix to name_get on m2o field
_rec_name = 'value'
parent_id = fields.Many2one('export.one2many')
str = fields.Char()
value = fields.Integer()
@api.multi
def name_get(self):
return [(record.id, "%s:%s" % (self._name, record.value)) for record in self]
@api.model
def name_search(self, name='', args=None, operator='ilike', limit=100):
if isinstance(name, basestring) and name.split(':')[0] == self._name:
records = self.search([('value', operator, int(name.split(':')[1]))])
return records.name_get()
else:
return []
class One2ManyMultiple(models.Model):
_name = 'export.one2many.multiple'
parent_id = fields.Many2one('export.one2many.recursive')
const = fields.Integer(default=36)
child1 = fields.One2many('export.one2many.child.1', 'parent_id')
child2 = fields.One2many('export.one2many.child.2', 'parent_id')
class One2ManyChildMultiple(models.Model):
_name = 'export.one2many.multiple.child'
# FIXME: orm.py:1161, fix to name_get on m2o field
_rec_name = 'value'
parent_id = fields.Many2one('export.one2many.multiple')
str = fields.Char()
value = fields.Integer()
@api.multi
def name_get(self):
return [(record.id, "%s:%s" % (self._name, record.value)) for record in self]
class One2ManyChild1(models.Model):
_name = 'export.one2many.child.1'
_inherit = 'export.one2many.multiple.child'
class One2ManyChild2(models.Model):
_name = 'export.one2many.child.2'
_inherit = 'export.one2many.multiple.child'
class Many2ManyChild(models.Model):
_name = 'export.many2many.other'
# FIXME: orm.py:1161, fix to name_get on m2o field
_rec_name = 'value'
str = fields.Char()
value = fields.Integer()
@api.multi
def name_get(self):
return [(record.id, "%s:%s" % (self._name, record.value)) for record in self]
@api.model
def name_search(self, name='', args=None, operator='ilike', limit=100):
if isinstance(name, basestring) and name.split(':')[0] == self._name:
records = self.search([('value', operator, int(name.split(':')[1]))])
return records.name_get()
else:
return []
class SelectionWithDefault(models.Model):
_name = 'export.selection.withdefault'
const = fields.Integer(default=4)
value = fields.Selection([(1, "Foo"), (2, "Bar")], default=2)
class RecO2M(models.Model):
_name = 'export.one2many.recursive'
value = fields.Integer()
child = fields.One2many('export.one2many.multiple', 'parent_id')
class OnlyOne(models.Model):
_name = 'export.unique'
value = fields.Integer()
_sql_constraints = [
('value_unique', 'unique (value)', "The value must be unique"),
]
| gpl-3.0 | 4,212,233,521,099,419,000 | 30.729032 | 89 | 0.618544 | false |
xuru/pyvisdk | pyvisdk/do/virtual_machine_file_layout.py | 1 | 1433 |
import logging
from pyvisdk.exceptions import InvalidArgumentError
########################################
# Automatically generated, do not edit.
########################################
log = logging.getLogger(__name__)
def VirtualMachineFileLayout(vim, *args, **kwargs):
'''Describes the set of files that makes up a virtual machine on disk. The file
layout is broken into 4 major sections:* Configuration: Files stored in the
configuration directory * Log: Files stored in the log directory * Disk: Files
stored relative to a disk configuration file * Snapshot: Stored in the snapshot
directoryOften the same directory is used for configuration, log, disk and
snapshots.'''
obj = vim.client.factory.create('ns0:VirtualMachineFileLayout')
# do some validation checking...
if (len(args) + len(kwargs)) < 0:
raise IndexError('Expected at least 1 arguments got: %d' % len(args))
required = [ ]
optional = [ 'configFile', 'disk', 'logFile', 'snapshot', 'swapFile', 'dynamicProperty',
'dynamicType' ]
for name, arg in zip(required+optional, args):
setattr(obj, name, arg)
for name, value in kwargs.items():
if name in required + optional:
setattr(obj, name, value)
else:
raise InvalidArgumentError("Invalid argument: %s. Expected one of %s" % (name, ", ".join(required + optional)))
return obj
| mit | -7,937,865,886,592,713,000 | 35.769231 | 124 | 0.635729 | false |
steven-martins/Marking | back/marks/migrations/0001_initial.py | 1 | 3773 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
import datetime
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Mark',
fields=[
('id', models.AutoField(serialize=False, verbose_name='ID', primary_key=True, auto_created=True)),
('result', models.IntegerField()),
],
options={
},
bases=(models.Model,),
),
migrations.CreateModel(
name='Picture',
fields=[
('id', models.AutoField(serialize=False, verbose_name='ID', primary_key=True, auto_created=True)),
('file', models.ImageField(max_length=150, upload_to='picture/%Y/%m/%d')),
('title', models.CharField(max_length=50)),
],
options={
},
bases=(models.Model,),
),
migrations.CreateModel(
name='Project',
fields=[
('id', models.AutoField(serialize=False, verbose_name='ID', primary_key=True, auto_created=True)),
('name', models.CharField(max_length=100)),
('description', models.TextField()),
('marks', models.ManyToManyField(to=settings.AUTH_USER_MODEL, through='marks.Mark', related_name='project_marks_student')),
('members', models.ManyToManyField(to=settings.AUTH_USER_MODEL)),
('pictures', models.ManyToManyField(to='marks.Picture', blank=True)),
],
options={
},
bases=(models.Model,),
),
migrations.CreateModel(
name='Question',
fields=[
('id', models.AutoField(serialize=False, verbose_name='ID', primary_key=True, auto_created=True)),
('title', models.CharField(max_length=200)),
('detail', models.CharField(max_length=250, blank=True)),
],
options={
},
bases=(models.Model,),
),
migrations.CreateModel(
name='Student',
fields=[
('login', models.CharField(serialize=False, max_length=10, primary_key=True)),
('last_connection', models.DateTimeField(blank=True, default=datetime.datetime.now)),
],
options={
},
bases=(models.Model,),
),
migrations.CreateModel(
name='Timeslot',
fields=[
('id', models.AutoField(serialize=False, verbose_name='ID', primary_key=True, auto_created=True)),
('title', models.CharField(max_length=100)),
],
options={
},
bases=(models.Model,),
),
migrations.AddField(
model_name='project',
name='timeslot',
field=models.ForeignKey(to='marks.Timeslot', blank=True),
preserve_default=True,
),
migrations.AddField(
model_name='mark',
name='project',
field=models.ForeignKey(to='marks.Project'),
preserve_default=True,
),
migrations.AddField(
model_name='mark',
name='question',
field=models.ForeignKey(to='marks.Question'),
preserve_default=True,
),
migrations.AddField(
model_name='mark',
name='student',
field=models.ForeignKey(to=settings.AUTH_USER_MODEL),
preserve_default=True,
),
]
| mit | 3,561,885,093,581,885,000 | 34.59434 | 139 | 0.51789 | false |
linebp/pandas | pandas/tests/series/test_indexing.py | 1 | 88099 | # coding=utf-8
# pylint: disable-msg=E1101,W0612
import pytest
from datetime import datetime, timedelta
from numpy import nan
import numpy as np
import pandas as pd
import pandas._libs.index as _index
from pandas.core.dtypes.common import is_integer, is_scalar
from pandas import (Index, Series, DataFrame, isnull,
date_range, NaT, MultiIndex,
Timestamp, DatetimeIndex, Timedelta)
from pandas.core.indexing import IndexingError
from pandas.tseries.offsets import BDay
from pandas._libs import tslib, lib
from pandas.compat import lrange, range
from pandas import compat
from pandas.util.testing import (slow,
assert_series_equal,
assert_almost_equal,
assert_frame_equal)
import pandas.util.testing as tm
from pandas.tests.series.common import TestData
JOIN_TYPES = ['inner', 'outer', 'left', 'right']
class TestSeriesIndexing(TestData):
def test_get(self):
# GH 6383
s = Series(np.array([43, 48, 60, 48, 50, 51, 50, 45, 57, 48, 56, 45,
51, 39, 55, 43, 54, 52, 51, 54]))
result = s.get(25, 0)
expected = 0
assert result == expected
s = Series(np.array([43, 48, 60, 48, 50, 51, 50, 45, 57, 48, 56,
45, 51, 39, 55, 43, 54, 52, 51, 54]),
index=pd.Float64Index(
[25.0, 36.0, 49.0, 64.0, 81.0, 100.0,
121.0, 144.0, 169.0, 196.0, 1225.0,
1296.0, 1369.0, 1444.0, 1521.0, 1600.0,
1681.0, 1764.0, 1849.0, 1936.0],
dtype='object'))
result = s.get(25, 0)
expected = 43
assert result == expected
# GH 7407
# with a boolean accessor
df = pd.DataFrame({'i': [0] * 3, 'b': [False] * 3})
vc = df.i.value_counts()
result = vc.get(99, default='Missing')
assert result == 'Missing'
vc = df.b.value_counts()
result = vc.get(False, default='Missing')
assert result == 3
result = vc.get(True, default='Missing')
assert result == 'Missing'
def test_get_nan(self):
# GH 8569
s = pd.Float64Index(range(10)).to_series()
assert s.get(np.nan) is None
assert s.get(np.nan, default='Missing') == 'Missing'
# ensure that fixing the above hasn't broken get
# with multiple elements
idx = [20, 30]
assert_series_equal(s.get(idx),
Series([np.nan] * 2, index=idx))
idx = [np.nan, np.nan]
assert_series_equal(s.get(idx),
Series([np.nan] * 2, index=idx))
def test_delitem(self):
# GH 5542
# should delete the item inplace
s = Series(lrange(5))
del s[0]
expected = Series(lrange(1, 5), index=lrange(1, 5))
assert_series_equal(s, expected)
del s[1]
expected = Series(lrange(2, 5), index=lrange(2, 5))
assert_series_equal(s, expected)
# empty
s = Series()
def f():
del s[0]
pytest.raises(KeyError, f)
# only 1 left, del, add, del
s = Series(1)
del s[0]
assert_series_equal(s, Series(dtype='int64', index=Index(
[], dtype='int64')))
s[0] = 1
assert_series_equal(s, Series(1))
del s[0]
assert_series_equal(s, Series(dtype='int64', index=Index(
[], dtype='int64')))
# Index(dtype=object)
s = Series(1, index=['a'])
del s['a']
assert_series_equal(s, Series(dtype='int64', index=Index(
[], dtype='object')))
s['a'] = 1
assert_series_equal(s, Series(1, index=['a']))
del s['a']
assert_series_equal(s, Series(dtype='int64', index=Index(
[], dtype='object')))
def test_getitem_setitem_ellipsis(self):
s = Series(np.random.randn(10))
np.fix(s)
result = s[...]
assert_series_equal(result, s)
s[...] = 5
assert (result == 5).all()
def test_getitem_negative_out_of_bounds(self):
s = Series(tm.rands_array(5, 10), index=tm.rands_array(10, 10))
pytest.raises(IndexError, s.__getitem__, -11)
pytest.raises(IndexError, s.__setitem__, -11, 'foo')
def test_pop(self):
# GH 6600
df = DataFrame({'A': 0, 'B': np.arange(5, dtype='int64'), 'C': 0, })
k = df.iloc[4]
result = k.pop('B')
assert result == 4
expected = Series([0, 0], index=['A', 'C'], name=4)
assert_series_equal(k, expected)
def test_getitem_get(self):
idx1 = self.series.index[5]
idx2 = self.objSeries.index[5]
assert self.series[idx1] == self.series.get(idx1)
assert self.objSeries[idx2] == self.objSeries.get(idx2)
assert self.series[idx1] == self.series[5]
assert self.objSeries[idx2] == self.objSeries[5]
assert self.series.get(-1) == self.series.get(self.series.index[-1])
assert self.series[5] == self.series.get(self.series.index[5])
# missing
d = self.ts.index[0] - BDay()
pytest.raises(KeyError, self.ts.__getitem__, d)
# None
# GH 5652
for s in [Series(), Series(index=list('abc'))]:
result = s.get(None)
assert result is None
def test_iloc(self):
s = Series(np.random.randn(10), index=lrange(0, 20, 2))
for i in range(len(s)):
result = s.iloc[i]
exp = s[s.index[i]]
assert_almost_equal(result, exp)
# pass a slice
result = s.iloc[slice(1, 3)]
expected = s.loc[2:4]
assert_series_equal(result, expected)
# test slice is a view
result[:] = 0
assert (s[1:3] == 0).all()
# list of integers
result = s.iloc[[0, 2, 3, 4, 5]]
expected = s.reindex(s.index[[0, 2, 3, 4, 5]])
assert_series_equal(result, expected)
def test_iloc_nonunique(self):
s = Series([0, 1, 2], index=[0, 1, 0])
assert s.iloc[2] == 2
def test_getitem_regression(self):
s = Series(lrange(5), index=lrange(5))
result = s[lrange(5)]
assert_series_equal(result, s)
def test_getitem_setitem_slice_bug(self):
s = Series(lrange(10), lrange(10))
result = s[-12:]
assert_series_equal(result, s)
result = s[-7:]
assert_series_equal(result, s[3:])
result = s[:-12]
assert_series_equal(result, s[:0])
s = Series(lrange(10), lrange(10))
s[-12:] = 0
assert (s == 0).all()
s[:-12] = 5
assert (s == 0).all()
def test_getitem_int64(self):
idx = np.int64(5)
assert self.ts[idx] == self.ts[5]
def test_getitem_fancy(self):
slice1 = self.series[[1, 2, 3]]
slice2 = self.objSeries[[1, 2, 3]]
assert self.series.index[2] == slice1.index[1]
assert self.objSeries.index[2] == slice2.index[1]
assert self.series[2] == slice1[1]
assert self.objSeries[2] == slice2[1]
def test_getitem_boolean(self):
s = self.series
mask = s > s.median()
# passing list is OK
result = s[list(mask)]
expected = s[mask]
assert_series_equal(result, expected)
tm.assert_index_equal(result.index, s.index[mask])
def test_getitem_boolean_empty(self):
s = Series([], dtype=np.int64)
s.index.name = 'index_name'
s = s[s.isnull()]
assert s.index.name == 'index_name'
assert s.dtype == np.int64
# GH5877
# indexing with empty series
s = Series(['A', 'B'])
expected = Series(np.nan, index=['C'], dtype=object)
result = s[Series(['C'], dtype=object)]
assert_series_equal(result, expected)
s = Series(['A', 'B'])
expected = Series(dtype=object, index=Index([], dtype='int64'))
result = s[Series([], dtype=object)]
assert_series_equal(result, expected)
# invalid because of the boolean indexer
# that's empty or not-aligned
def f():
s[Series([], dtype=bool)]
pytest.raises(IndexingError, f)
def f():
s[Series([True], dtype=bool)]
pytest.raises(IndexingError, f)
def test_getitem_generator(self):
gen = (x > 0 for x in self.series)
result = self.series[gen]
result2 = self.series[iter(self.series > 0)]
expected = self.series[self.series > 0]
assert_series_equal(result, expected)
assert_series_equal(result2, expected)
def test_type_promotion(self):
# GH12599
s = pd.Series()
s["a"] = pd.Timestamp("2016-01-01")
s["b"] = 3.0
s["c"] = "foo"
expected = Series([pd.Timestamp("2016-01-01"), 3.0, "foo"],
index=["a", "b", "c"])
assert_series_equal(s, expected)
def test_getitem_boolean_object(self):
# using column from DataFrame
s = self.series
mask = s > s.median()
omask = mask.astype(object)
# getitem
result = s[omask]
expected = s[mask]
assert_series_equal(result, expected)
# setitem
s2 = s.copy()
cop = s.copy()
cop[omask] = 5
s2[mask] = 5
assert_series_equal(cop, s2)
# nans raise exception
omask[5:10] = np.nan
pytest.raises(Exception, s.__getitem__, omask)
pytest.raises(Exception, s.__setitem__, omask, 5)
def test_getitem_setitem_boolean_corner(self):
ts = self.ts
mask_shifted = ts.shift(1, freq=BDay()) > ts.median()
# these used to raise...??
pytest.raises(Exception, ts.__getitem__, mask_shifted)
pytest.raises(Exception, ts.__setitem__, mask_shifted, 1)
# ts[mask_shifted]
# ts[mask_shifted] = 1
pytest.raises(Exception, ts.loc.__getitem__, mask_shifted)
pytest.raises(Exception, ts.loc.__setitem__, mask_shifted, 1)
# ts.loc[mask_shifted]
# ts.loc[mask_shifted] = 2
def test_getitem_setitem_slice_integers(self):
s = Series(np.random.randn(8), index=[2, 4, 6, 8, 10, 12, 14, 16])
result = s[:4]
expected = s.reindex([2, 4, 6, 8])
assert_series_equal(result, expected)
s[:4] = 0
assert (s[:4] == 0).all()
assert not (s[4:] == 0).any()
def test_getitem_setitem_datetime_tz_pytz(self):
from pytz import timezone as tz
from pandas import date_range
N = 50
# testing with timezone, GH #2785
rng = date_range('1/1/1990', periods=N, freq='H', tz='US/Eastern')
ts = Series(np.random.randn(N), index=rng)
# also test Timestamp tz handling, GH #2789
result = ts.copy()
result["1990-01-01 09:00:00+00:00"] = 0
result["1990-01-01 09:00:00+00:00"] = ts[4]
assert_series_equal(result, ts)
result = ts.copy()
result["1990-01-01 03:00:00-06:00"] = 0
result["1990-01-01 03:00:00-06:00"] = ts[4]
assert_series_equal(result, ts)
# repeat with datetimes
result = ts.copy()
result[datetime(1990, 1, 1, 9, tzinfo=tz('UTC'))] = 0
result[datetime(1990, 1, 1, 9, tzinfo=tz('UTC'))] = ts[4]
assert_series_equal(result, ts)
result = ts.copy()
# comparison dates with datetime MUST be localized!
date = tz('US/Central').localize(datetime(1990, 1, 1, 3))
result[date] = 0
result[date] = ts[4]
assert_series_equal(result, ts)
def test_getitem_setitem_datetime_tz_dateutil(self):
from dateutil.tz import tzutc
from pandas._libs.tslib import _dateutil_gettz as gettz
tz = lambda x: tzutc() if x == 'UTC' else gettz(
x) # handle special case for utc in dateutil
from pandas import date_range
N = 50
# testing with timezone, GH #2785
rng = date_range('1/1/1990', periods=N, freq='H',
tz='America/New_York')
ts = Series(np.random.randn(N), index=rng)
# also test Timestamp tz handling, GH #2789
result = ts.copy()
result["1990-01-01 09:00:00+00:00"] = 0
result["1990-01-01 09:00:00+00:00"] = ts[4]
assert_series_equal(result, ts)
result = ts.copy()
result["1990-01-01 03:00:00-06:00"] = 0
result["1990-01-01 03:00:00-06:00"] = ts[4]
assert_series_equal(result, ts)
# repeat with datetimes
result = ts.copy()
result[datetime(1990, 1, 1, 9, tzinfo=tz('UTC'))] = 0
result[datetime(1990, 1, 1, 9, tzinfo=tz('UTC'))] = ts[4]
assert_series_equal(result, ts)
result = ts.copy()
result[datetime(1990, 1, 1, 3, tzinfo=tz('America/Chicago'))] = 0
result[datetime(1990, 1, 1, 3, tzinfo=tz('America/Chicago'))] = ts[4]
assert_series_equal(result, ts)
def test_getitem_setitem_datetimeindex(self):
N = 50
# testing with timezone, GH #2785
rng = date_range('1/1/1990', periods=N, freq='H', tz='US/Eastern')
ts = Series(np.random.randn(N), index=rng)
result = ts["1990-01-01 04:00:00"]
expected = ts[4]
assert result == expected
result = ts.copy()
result["1990-01-01 04:00:00"] = 0
result["1990-01-01 04:00:00"] = ts[4]
assert_series_equal(result, ts)
result = ts["1990-01-01 04:00:00":"1990-01-01 07:00:00"]
expected = ts[4:8]
assert_series_equal(result, expected)
result = ts.copy()
result["1990-01-01 04:00:00":"1990-01-01 07:00:00"] = 0
result["1990-01-01 04:00:00":"1990-01-01 07:00:00"] = ts[4:8]
assert_series_equal(result, ts)
lb = "1990-01-01 04:00:00"
rb = "1990-01-01 07:00:00"
result = ts[(ts.index >= lb) & (ts.index <= rb)]
expected = ts[4:8]
assert_series_equal(result, expected)
# repeat all the above with naive datetimes
result = ts[datetime(1990, 1, 1, 4)]
expected = ts[4]
assert result == expected
result = ts.copy()
result[datetime(1990, 1, 1, 4)] = 0
result[datetime(1990, 1, 1, 4)] = ts[4]
assert_series_equal(result, ts)
result = ts[datetime(1990, 1, 1, 4):datetime(1990, 1, 1, 7)]
expected = ts[4:8]
assert_series_equal(result, expected)
result = ts.copy()
result[datetime(1990, 1, 1, 4):datetime(1990, 1, 1, 7)] = 0
result[datetime(1990, 1, 1, 4):datetime(1990, 1, 1, 7)] = ts[4:8]
assert_series_equal(result, ts)
lb = datetime(1990, 1, 1, 4)
rb = datetime(1990, 1, 1, 7)
result = ts[(ts.index >= lb) & (ts.index <= rb)]
expected = ts[4:8]
assert_series_equal(result, expected)
result = ts[ts.index[4]]
expected = ts[4]
assert result == expected
result = ts[ts.index[4:8]]
expected = ts[4:8]
assert_series_equal(result, expected)
result = ts.copy()
result[ts.index[4:8]] = 0
result[4:8] = ts[4:8]
assert_series_equal(result, ts)
# also test partial date slicing
result = ts["1990-01-02"]
expected = ts[24:48]
assert_series_equal(result, expected)
result = ts.copy()
result["1990-01-02"] = 0
result["1990-01-02"] = ts[24:48]
assert_series_equal(result, ts)
def test_getitem_setitem_periodindex(self):
from pandas import period_range
N = 50
rng = period_range('1/1/1990', periods=N, freq='H')
ts = Series(np.random.randn(N), index=rng)
result = ts["1990-01-01 04"]
expected = ts[4]
assert result == expected
result = ts.copy()
result["1990-01-01 04"] = 0
result["1990-01-01 04"] = ts[4]
assert_series_equal(result, ts)
result = ts["1990-01-01 04":"1990-01-01 07"]
expected = ts[4:8]
assert_series_equal(result, expected)
result = ts.copy()
result["1990-01-01 04":"1990-01-01 07"] = 0
result["1990-01-01 04":"1990-01-01 07"] = ts[4:8]
assert_series_equal(result, ts)
lb = "1990-01-01 04"
rb = "1990-01-01 07"
result = ts[(ts.index >= lb) & (ts.index <= rb)]
expected = ts[4:8]
assert_series_equal(result, expected)
# GH 2782
result = ts[ts.index[4]]
expected = ts[4]
assert result == expected
result = ts[ts.index[4:8]]
expected = ts[4:8]
assert_series_equal(result, expected)
result = ts.copy()
result[ts.index[4:8]] = 0
result[4:8] = ts[4:8]
assert_series_equal(result, ts)
def test_getitem_median_slice_bug(self):
index = date_range('20090415', '20090519', freq='2B')
s = Series(np.random.randn(13), index=index)
indexer = [slice(6, 7, None)]
result = s[indexer]
expected = s[indexer[0]]
assert_series_equal(result, expected)
def test_getitem_out_of_bounds(self):
# don't segfault, GH #495
pytest.raises(IndexError, self.ts.__getitem__, len(self.ts))
# GH #917
s = Series([])
pytest.raises(IndexError, s.__getitem__, -1)
def test_getitem_setitem_integers(self):
# caused bug without test
s = Series([1, 2, 3], ['a', 'b', 'c'])
assert s.iloc[0] == s['a']
s.iloc[0] = 5
tm.assert_almost_equal(s['a'], 5)
def test_getitem_box_float64(self):
value = self.ts[5]
assert isinstance(value, np.float64)
def test_getitem_ambiguous_keyerror(self):
s = Series(lrange(10), index=lrange(0, 20, 2))
pytest.raises(KeyError, s.__getitem__, 1)
pytest.raises(KeyError, s.loc.__getitem__, 1)
def test_getitem_unordered_dup(self):
obj = Series(lrange(5), index=['c', 'a', 'a', 'b', 'b'])
assert is_scalar(obj['c'])
assert obj['c'] == 0
def test_getitem_dups_with_missing(self):
# breaks reindex, so need to use .loc internally
# GH 4246
s = Series([1, 2, 3, 4], ['foo', 'bar', 'foo', 'bah'])
expected = s.loc[['foo', 'bar', 'bah', 'bam']]
result = s[['foo', 'bar', 'bah', 'bam']]
assert_series_equal(result, expected)
def test_getitem_dups(self):
s = Series(range(5), index=['A', 'A', 'B', 'C', 'C'], dtype=np.int64)
expected = Series([3, 4], index=['C', 'C'], dtype=np.int64)
result = s['C']
assert_series_equal(result, expected)
def test_getitem_dataframe(self):
rng = list(range(10))
s = pd.Series(10, index=rng)
df = pd.DataFrame(rng, index=rng)
pytest.raises(TypeError, s.__getitem__, df > 5)
def test_getitem_callable(self):
# GH 12533
s = pd.Series(4, index=list('ABCD'))
result = s[lambda x: 'A']
assert result == s.loc['A']
result = s[lambda x: ['A', 'B']]
tm.assert_series_equal(result, s.loc[['A', 'B']])
result = s[lambda x: [True, False, True, True]]
tm.assert_series_equal(result, s.iloc[[0, 2, 3]])
def test_setitem_ambiguous_keyerror(self):
s = Series(lrange(10), index=lrange(0, 20, 2))
# equivalent of an append
s2 = s.copy()
s2[1] = 5
expected = s.append(Series([5], index=[1]))
assert_series_equal(s2, expected)
s2 = s.copy()
s2.loc[1] = 5
expected = s.append(Series([5], index=[1]))
assert_series_equal(s2, expected)
def test_setitem_float_labels(self):
# note labels are floats
s = Series(['a', 'b', 'c'], index=[0, 0.5, 1])
tmp = s.copy()
s.loc[1] = 'zoo'
tmp.iloc[2] = 'zoo'
assert_series_equal(s, tmp)
def test_setitem_callable(self):
# GH 12533
s = pd.Series([1, 2, 3, 4], index=list('ABCD'))
s[lambda x: 'A'] = -1
tm.assert_series_equal(s, pd.Series([-1, 2, 3, 4], index=list('ABCD')))
def test_setitem_other_callable(self):
# GH 13299
inc = lambda x: x + 1
s = pd.Series([1, 2, -1, 4])
s[s < 0] = inc
expected = pd.Series([1, 2, inc, 4])
tm.assert_series_equal(s, expected)
def test_slice(self):
numSlice = self.series[10:20]
numSliceEnd = self.series[-10:]
objSlice = self.objSeries[10:20]
assert self.series.index[9] not in numSlice.index
assert self.objSeries.index[9] not in objSlice.index
assert len(numSlice) == len(numSlice.index)
assert self.series[numSlice.index[0]] == numSlice[numSlice.index[0]]
assert numSlice.index[1] == self.series.index[11]
assert tm.equalContents(numSliceEnd, np.array(self.series)[-10:])
# Test return view.
sl = self.series[10:20]
sl[:] = 0
assert (self.series[10:20] == 0).all()
def test_slice_can_reorder_not_uniquely_indexed(self):
s = Series(1, index=['a', 'a', 'b', 'b', 'c'])
s[::-1] # it works!
def test_slice_float_get_set(self):
pytest.raises(TypeError, lambda: self.ts[4.0:10.0])
def f():
self.ts[4.0:10.0] = 0
pytest.raises(TypeError, f)
pytest.raises(TypeError, self.ts.__getitem__, slice(4.5, 10.0))
pytest.raises(TypeError, self.ts.__setitem__, slice(4.5, 10.0), 0)
def test_slice_floats2(self):
s = Series(np.random.rand(10), index=np.arange(10, 20, dtype=float))
assert len(s.loc[12.0:]) == 8
assert len(s.loc[12.5:]) == 7
i = np.arange(10, 20, dtype=float)
i[2] = 12.2
s.index = i
assert len(s.loc[12.0:]) == 8
assert len(s.loc[12.5:]) == 7
def test_slice_float64(self):
values = np.arange(10., 50., 2)
index = Index(values)
start, end = values[[5, 15]]
s = Series(np.random.randn(20), index=index)
result = s[start:end]
expected = s.iloc[5:16]
assert_series_equal(result, expected)
result = s.loc[start:end]
assert_series_equal(result, expected)
df = DataFrame(np.random.randn(20, 3), index=index)
result = df[start:end]
expected = df.iloc[5:16]
tm.assert_frame_equal(result, expected)
result = df.loc[start:end]
tm.assert_frame_equal(result, expected)
def test_setitem(self):
self.ts[self.ts.index[5]] = np.NaN
self.ts[[1, 2, 17]] = np.NaN
self.ts[6] = np.NaN
assert np.isnan(self.ts[6])
assert np.isnan(self.ts[2])
self.ts[np.isnan(self.ts)] = 5
assert not np.isnan(self.ts[2])
# caught this bug when writing tests
series = Series(tm.makeIntIndex(20).astype(float),
index=tm.makeIntIndex(20))
series[::2] = 0
assert (series[::2] == 0).all()
# set item that's not contained
s = self.series.copy()
s['foobar'] = 1
app = Series([1], index=['foobar'], name='series')
expected = self.series.append(app)
assert_series_equal(s, expected)
# Test for issue #10193
key = pd.Timestamp('2012-01-01')
series = pd.Series()
series[key] = 47
expected = pd.Series(47, [key])
assert_series_equal(series, expected)
series = pd.Series([], pd.DatetimeIndex([], freq='D'))
series[key] = 47
expected = pd.Series(47, pd.DatetimeIndex([key], freq='D'))
assert_series_equal(series, expected)
def test_setitem_dtypes(self):
# change dtypes
# GH 4463
expected = Series([np.nan, 2, 3])
s = Series([1, 2, 3])
s.iloc[0] = np.nan
assert_series_equal(s, expected)
s = Series([1, 2, 3])
s.loc[0] = np.nan
assert_series_equal(s, expected)
s = Series([1, 2, 3])
s[0] = np.nan
assert_series_equal(s, expected)
s = Series([False])
s.loc[0] = np.nan
assert_series_equal(s, Series([np.nan]))
s = Series([False, True])
s.loc[0] = np.nan
assert_series_equal(s, Series([np.nan, 1.0]))
def test_set_value(self):
idx = self.ts.index[10]
res = self.ts.set_value(idx, 0)
assert res is self.ts
assert self.ts[idx] == 0
# equiv
s = self.series.copy()
res = s.set_value('foobar', 0)
assert res is s
assert res.index[-1] == 'foobar'
assert res['foobar'] == 0
s = self.series.copy()
s.loc['foobar'] = 0
assert s.index[-1] == 'foobar'
assert s['foobar'] == 0
def test_setslice(self):
sl = self.ts[5:20]
assert len(sl) == len(sl.index)
assert sl.index.is_unique
def test_basic_getitem_setitem_corner(self):
# invalid tuples, e.g. self.ts[:, None] vs. self.ts[:, 2]
with tm.assert_raises_regex(ValueError, 'tuple-index'):
self.ts[:, 2]
with tm.assert_raises_regex(ValueError, 'tuple-index'):
self.ts[:, 2] = 2
# weird lists. [slice(0, 5)] will work but not two slices
result = self.ts[[slice(None, 5)]]
expected = self.ts[:5]
assert_series_equal(result, expected)
# OK
pytest.raises(Exception, self.ts.__getitem__,
[5, slice(None, None)])
pytest.raises(Exception, self.ts.__setitem__,
[5, slice(None, None)], 2)
def test_basic_getitem_with_labels(self):
indices = self.ts.index[[5, 10, 15]]
result = self.ts[indices]
expected = self.ts.reindex(indices)
assert_series_equal(result, expected)
result = self.ts[indices[0]:indices[2]]
expected = self.ts.loc[indices[0]:indices[2]]
assert_series_equal(result, expected)
# integer indexes, be careful
s = Series(np.random.randn(10), index=lrange(0, 20, 2))
inds = [0, 2, 5, 7, 8]
arr_inds = np.array([0, 2, 5, 7, 8])
result = s[inds]
expected = s.reindex(inds)
assert_series_equal(result, expected)
result = s[arr_inds]
expected = s.reindex(arr_inds)
assert_series_equal(result, expected)
# GH12089
# with tz for values
s = Series(pd.date_range("2011-01-01", periods=3, tz="US/Eastern"),
index=['a', 'b', 'c'])
expected = Timestamp('2011-01-01', tz='US/Eastern')
result = s.loc['a']
assert result == expected
result = s.iloc[0]
assert result == expected
result = s['a']
assert result == expected
def test_basic_setitem_with_labels(self):
indices = self.ts.index[[5, 10, 15]]
cp = self.ts.copy()
exp = self.ts.copy()
cp[indices] = 0
exp.loc[indices] = 0
assert_series_equal(cp, exp)
cp = self.ts.copy()
exp = self.ts.copy()
cp[indices[0]:indices[2]] = 0
exp.loc[indices[0]:indices[2]] = 0
assert_series_equal(cp, exp)
# integer indexes, be careful
s = Series(np.random.randn(10), index=lrange(0, 20, 2))
inds = [0, 4, 6]
arr_inds = np.array([0, 4, 6])
cp = s.copy()
exp = s.copy()
        cp[inds] = 0
        exp.loc[inds] = 0
assert_series_equal(cp, exp)
cp = s.copy()
exp = s.copy()
        cp[arr_inds] = 0
        exp.loc[arr_inds] = 0
assert_series_equal(cp, exp)
inds_notfound = [0, 4, 5, 6]
arr_inds_notfound = np.array([0, 4, 5, 6])
pytest.raises(Exception, s.__setitem__, inds_notfound, 0)
pytest.raises(Exception, s.__setitem__, arr_inds_notfound, 0)
# GH12089
# with tz for values
s = Series(pd.date_range("2011-01-01", periods=3, tz="US/Eastern"),
index=['a', 'b', 'c'])
s2 = s.copy()
expected = Timestamp('2011-01-03', tz='US/Eastern')
s2.loc['a'] = expected
result = s2.loc['a']
assert result == expected
s2 = s.copy()
s2.iloc[0] = expected
result = s2.iloc[0]
assert result == expected
s2 = s.copy()
s2['a'] = expected
result = s2['a']
assert result == expected
def test_loc_getitem(self):
inds = self.series.index[[3, 4, 7]]
assert_series_equal(self.series.loc[inds], self.series.reindex(inds))
assert_series_equal(self.series.iloc[5::2], self.series[5::2])
# slice with indices
d1, d2 = self.ts.index[[5, 15]]
result = self.ts.loc[d1:d2]
expected = self.ts.truncate(d1, d2)
assert_series_equal(result, expected)
# boolean
mask = self.series > self.series.median()
assert_series_equal(self.series.loc[mask], self.series[mask])
# ask for index value
assert self.ts.loc[d1] == self.ts[d1]
assert self.ts.loc[d2] == self.ts[d2]
def test_loc_getitem_not_monotonic(self):
d1, d2 = self.ts.index[[5, 15]]
ts2 = self.ts[::2][[1, 2, 0]]
pytest.raises(KeyError, ts2.loc.__getitem__, slice(d1, d2))
pytest.raises(KeyError, ts2.loc.__setitem__, slice(d1, d2), 0)
def test_loc_getitem_setitem_integer_slice_keyerrors(self):
s = Series(np.random.randn(10), index=lrange(0, 20, 2))
# this is OK
cp = s.copy()
cp.iloc[4:10] = 0
assert (cp.iloc[4:10] == 0).all()
# so is this
cp = s.copy()
cp.iloc[3:11] = 0
assert (cp.iloc[3:11] == 0).values.all()
result = s.iloc[2:6]
result2 = s.loc[3:11]
expected = s.reindex([4, 6, 8, 10])
assert_series_equal(result, expected)
assert_series_equal(result2, expected)
# non-monotonic, raise KeyError
s2 = s.iloc[lrange(5) + lrange(5, 10)[::-1]]
pytest.raises(KeyError, s2.loc.__getitem__, slice(3, 11))
pytest.raises(KeyError, s2.loc.__setitem__, slice(3, 11), 0)
def test_loc_getitem_iterator(self):
idx = iter(self.series.index[:10])
result = self.series.loc[idx]
assert_series_equal(result, self.series[:10])
def test_setitem_with_tz(self):
for tz in ['US/Eastern', 'UTC', 'Asia/Tokyo']:
orig = pd.Series(pd.date_range('2016-01-01', freq='H', periods=3,
tz=tz))
assert orig.dtype == 'datetime64[ns, {0}]'.format(tz)
# scalar
s = orig.copy()
s[1] = pd.Timestamp('2011-01-01', tz=tz)
exp = pd.Series([pd.Timestamp('2016-01-01 00:00', tz=tz),
pd.Timestamp('2011-01-01 00:00', tz=tz),
pd.Timestamp('2016-01-01 02:00', tz=tz)])
tm.assert_series_equal(s, exp)
s = orig.copy()
s.loc[1] = pd.Timestamp('2011-01-01', tz=tz)
tm.assert_series_equal(s, exp)
s = orig.copy()
s.iloc[1] = pd.Timestamp('2011-01-01', tz=tz)
tm.assert_series_equal(s, exp)
# vector
vals = pd.Series([pd.Timestamp('2011-01-01', tz=tz),
pd.Timestamp('2012-01-01', tz=tz)], index=[1, 2])
assert vals.dtype == 'datetime64[ns, {0}]'.format(tz)
s[[1, 2]] = vals
exp = pd.Series([pd.Timestamp('2016-01-01 00:00', tz=tz),
pd.Timestamp('2011-01-01 00:00', tz=tz),
pd.Timestamp('2012-01-01 00:00', tz=tz)])
tm.assert_series_equal(s, exp)
s = orig.copy()
s.loc[[1, 2]] = vals
tm.assert_series_equal(s, exp)
s = orig.copy()
s.iloc[[1, 2]] = vals
tm.assert_series_equal(s, exp)
def test_setitem_with_tz_dst(self):
# GH XXX
tz = 'US/Eastern'
orig = pd.Series(pd.date_range('2016-11-06', freq='H', periods=3,
tz=tz))
assert orig.dtype == 'datetime64[ns, {0}]'.format(tz)
# scalar
s = orig.copy()
s[1] = pd.Timestamp('2011-01-01', tz=tz)
exp = pd.Series([pd.Timestamp('2016-11-06 00:00-04:00', tz=tz),
pd.Timestamp('2011-01-01 00:00-05:00', tz=tz),
pd.Timestamp('2016-11-06 01:00-05:00', tz=tz)])
tm.assert_series_equal(s, exp)
s = orig.copy()
s.loc[1] = pd.Timestamp('2011-01-01', tz=tz)
tm.assert_series_equal(s, exp)
s = orig.copy()
s.iloc[1] = pd.Timestamp('2011-01-01', tz=tz)
tm.assert_series_equal(s, exp)
# vector
vals = pd.Series([pd.Timestamp('2011-01-01', tz=tz),
pd.Timestamp('2012-01-01', tz=tz)], index=[1, 2])
assert vals.dtype == 'datetime64[ns, {0}]'.format(tz)
s[[1, 2]] = vals
exp = pd.Series([pd.Timestamp('2016-11-06 00:00', tz=tz),
pd.Timestamp('2011-01-01 00:00', tz=tz),
pd.Timestamp('2012-01-01 00:00', tz=tz)])
tm.assert_series_equal(s, exp)
s = orig.copy()
s.loc[[1, 2]] = vals
tm.assert_series_equal(s, exp)
s = orig.copy()
s.iloc[[1, 2]] = vals
tm.assert_series_equal(s, exp)
def test_where(self):
s = Series(np.random.randn(5))
cond = s > 0
rs = s.where(cond).dropna()
rs2 = s[cond]
assert_series_equal(rs, rs2)
rs = s.where(cond, -s)
assert_series_equal(rs, s.abs())
rs = s.where(cond)
assert (s.shape == rs.shape)
assert (rs is not s)
# test alignment
cond = Series([True, False, False, True, False], index=s.index)
s2 = -(s.abs())
expected = s2[cond].reindex(s2.index[:3]).reindex(s2.index)
rs = s2.where(cond[:3])
assert_series_equal(rs, expected)
expected = s2.abs()
expected.iloc[0] = s2[0]
rs = s2.where(cond[:3], -s2)
assert_series_equal(rs, expected)
pytest.raises(ValueError, s.where, 1)
pytest.raises(ValueError, s.where, cond[:3].values, -s)
# GH 2745
s = Series([1, 2])
s[[True, False]] = [0, 1]
expected = Series([0, 2])
assert_series_equal(s, expected)
# failures
pytest.raises(ValueError, s.__setitem__, tuple([[[True, False]]]),
[0, 2, 3])
pytest.raises(ValueError, s.__setitem__, tuple([[[True, False]]]),
[])
# unsafe dtype changes
for dtype in [np.int8, np.int16, np.int32, np.int64, np.float16,
np.float32, np.float64]:
s = Series(np.arange(10), dtype=dtype)
mask = s < 5
s[mask] = lrange(2, 7)
expected = Series(lrange(2, 7) + lrange(5, 10), dtype=dtype)
assert_series_equal(s, expected)
assert s.dtype == expected.dtype
# these are allowed operations, but are upcasted
for dtype in [np.int64, np.float64]:
s = Series(np.arange(10), dtype=dtype)
mask = s < 5
values = [2.5, 3.5, 4.5, 5.5, 6.5]
s[mask] = values
expected = Series(values + lrange(5, 10), dtype='float64')
assert_series_equal(s, expected)
assert s.dtype == expected.dtype
# GH 9731
s = Series(np.arange(10), dtype='int64')
mask = s > 5
values = [2.5, 3.5, 4.5, 5.5]
s[mask] = values
expected = Series(lrange(6) + values, dtype='float64')
assert_series_equal(s, expected)
# can't do these as we are forced to change the itemsize of the input
# to something we cannot
for dtype in [np.int8, np.int16, np.int32, np.float16, np.float32]:
s = Series(np.arange(10), dtype=dtype)
mask = s < 5
values = [2.5, 3.5, 4.5, 5.5, 6.5]
pytest.raises(Exception, s.__setitem__, tuple(mask), values)
# GH3235
s = Series(np.arange(10), dtype='int64')
mask = s < 5
s[mask] = lrange(2, 7)
expected = Series(lrange(2, 7) + lrange(5, 10), dtype='int64')
assert_series_equal(s, expected)
assert s.dtype == expected.dtype
s = Series(np.arange(10), dtype='int64')
mask = s > 5
s[mask] = [0] * 4
expected = Series([0, 1, 2, 3, 4, 5] + [0] * 4, dtype='int64')
assert_series_equal(s, expected)
s = Series(np.arange(10))
mask = s > 5
def f():
s[mask] = [5, 4, 3, 2, 1]
pytest.raises(ValueError, f)
def f():
s[mask] = [0] * 5
pytest.raises(ValueError, f)
# dtype changes
s = Series([1, 2, 3, 4])
result = s.where(s > 2, np.nan)
expected = Series([np.nan, np.nan, 3, 4])
assert_series_equal(result, expected)
# GH 4667
# setting with None changes dtype
s = Series(range(10)).astype(float)
s[8] = None
result = s[8]
assert isnull(result)
s = Series(range(10)).astype(float)
s[s > 8] = None
result = s[isnull(s)]
expected = Series(np.nan, index=[9])
assert_series_equal(result, expected)
def test_where_array_like(self):
# see gh-15414
s = Series([1, 2, 3])
cond = [False, True, True]
expected = Series([np.nan, 2, 3])
klasses = [list, tuple, np.array, Series]
for klass in klasses:
result = s.where(klass(cond))
assert_series_equal(result, expected)
def test_where_invalid_input(self):
# see gh-15414: only boolean arrays accepted
s = Series([1, 2, 3])
msg = "Boolean array expected for the condition"
conds = [
[1, 0, 1],
Series([2, 5, 7]),
["True", "False", "True"],
[Timestamp("2017-01-01"),
pd.NaT, Timestamp("2017-01-02")]
]
for cond in conds:
with tm.assert_raises_regex(ValueError, msg):
s.where(cond)
msg = "Array conditional must be same shape as self"
with tm.assert_raises_regex(ValueError, msg):
s.where([True])
def test_where_ndframe_align(self):
msg = "Array conditional must be same shape as self"
s = Series([1, 2, 3])
cond = [True]
with tm.assert_raises_regex(ValueError, msg):
s.where(cond)
expected = Series([1, np.nan, np.nan])
out = s.where(Series(cond))
tm.assert_series_equal(out, expected)
cond = np.array([False, True, False, True])
with tm.assert_raises_regex(ValueError, msg):
s.where(cond)
expected = Series([np.nan, 2, np.nan])
out = s.where(Series(cond))
tm.assert_series_equal(out, expected)
def test_where_setitem_invalid(self):
# GH 2702
# make sure correct exceptions are raised on invalid list assignment
# slice
s = Series(list('abc'))
def f():
s[0:3] = list(range(27))
pytest.raises(ValueError, f)
s[0:3] = list(range(3))
expected = Series([0, 1, 2])
assert_series_equal(s.astype(np.int64), expected, )
# slice with step
s = Series(list('abcdef'))
def f():
s[0:4:2] = list(range(27))
pytest.raises(ValueError, f)
s = Series(list('abcdef'))
s[0:4:2] = list(range(2))
expected = Series([0, 'b', 1, 'd', 'e', 'f'])
assert_series_equal(s, expected)
# neg slices
s = Series(list('abcdef'))
def f():
s[:-1] = list(range(27))
pytest.raises(ValueError, f)
s[-3:-1] = list(range(2))
expected = Series(['a', 'b', 'c', 0, 1, 'f'])
assert_series_equal(s, expected)
# list
s = Series(list('abc'))
def f():
s[[0, 1, 2]] = list(range(27))
pytest.raises(ValueError, f)
s = Series(list('abc'))
def f():
s[[0, 1, 2]] = list(range(2))
pytest.raises(ValueError, f)
# scalar
s = Series(list('abc'))
s[0] = list(range(10))
expected = Series([list(range(10)), 'b', 'c'])
assert_series_equal(s, expected)
def test_where_broadcast(self):
# Test a variety of differently sized series
for size in range(2, 6):
# Test a variety of boolean indices
for selection in [
# First element should be set
np.resize([True, False, False, False, False], size),
                    # Set alternating elements
np.resize([True, False], size),
# No element should be set
np.resize([False], size)]:
# Test a variety of different numbers as content
for item in [2.0, np.nan, np.finfo(np.float).max,
np.finfo(np.float).min]:
# Test numpy arrays, lists and tuples as the input to be
# broadcast
for arr in [np.array([item]), [item], (item, )]:
data = np.arange(size, dtype=float)
s = Series(data)
s[selection] = arr
# Construct the expected series by taking the source
# data or item based on the selection
expected = Series([item if use_item else data[
i] for i, use_item in enumerate(selection)])
assert_series_equal(s, expected)
s = Series(data)
result = s.where(~selection, arr)
assert_series_equal(result, expected)
def test_where_inplace(self):
s = Series(np.random.randn(5))
cond = s > 0
rs = s.copy()
rs.where(cond, inplace=True)
assert_series_equal(rs.dropna(), s[cond])
assert_series_equal(rs, s.where(cond))
rs = s.copy()
rs.where(cond, -s, inplace=True)
assert_series_equal(rs, s.where(cond, -s))
def test_where_dups(self):
# GH 4550
# where crashes with dups in index
s1 = Series(list(range(3)))
s2 = Series(list(range(3)))
comb = pd.concat([s1, s2])
result = comb.where(comb < 2)
expected = Series([0, 1, np.nan, 0, 1, np.nan],
index=[0, 1, 2, 0, 1, 2])
assert_series_equal(result, expected)
# GH 4548
# inplace updating not working with dups
comb[comb < 1] = 5
expected = Series([5, 1, 2, 5, 1, 2], index=[0, 1, 2, 0, 1, 2])
assert_series_equal(comb, expected)
comb[comb < 2] += 10
expected = Series([5, 11, 2, 5, 11, 2], index=[0, 1, 2, 0, 1, 2])
assert_series_equal(comb, expected)
def test_where_datetime(self):
s = Series(date_range('20130102', periods=2))
expected = Series([10, 10], dtype='datetime64[ns]')
mask = np.array([False, False])
rs = s.where(mask, [10, 10])
assert_series_equal(rs, expected)
rs = s.where(mask, 10)
assert_series_equal(rs, expected)
rs = s.where(mask, 10.0)
assert_series_equal(rs, expected)
rs = s.where(mask, [10.0, 10.0])
assert_series_equal(rs, expected)
rs = s.where(mask, [10.0, np.nan])
expected = Series([10, None], dtype='datetime64[ns]')
assert_series_equal(rs, expected)
# GH 15701
timestamps = ['2016-12-31 12:00:04+00:00',
'2016-12-31 12:00:04.010000+00:00']
s = Series([pd.Timestamp(t) for t in timestamps])
rs = s.where(Series([False, True]))
expected = Series([pd.NaT, s[1]])
assert_series_equal(rs, expected)
def test_where_timedelta(self):
s = Series([1, 2], dtype='timedelta64[ns]')
expected = Series([10, 10], dtype='timedelta64[ns]')
mask = np.array([False, False])
rs = s.where(mask, [10, 10])
assert_series_equal(rs, expected)
rs = s.where(mask, 10)
assert_series_equal(rs, expected)
rs = s.where(mask, 10.0)
assert_series_equal(rs, expected)
rs = s.where(mask, [10.0, 10.0])
assert_series_equal(rs, expected)
rs = s.where(mask, [10.0, np.nan])
expected = Series([10, None], dtype='timedelta64[ns]')
assert_series_equal(rs, expected)
def test_mask(self):
# compare with tested results in test_where
s = Series(np.random.randn(5))
cond = s > 0
rs = s.where(~cond, np.nan)
assert_series_equal(rs, s.mask(cond))
rs = s.where(~cond)
rs2 = s.mask(cond)
assert_series_equal(rs, rs2)
rs = s.where(~cond, -s)
rs2 = s.mask(cond, -s)
assert_series_equal(rs, rs2)
cond = Series([True, False, False, True, False], index=s.index)
s2 = -(s.abs())
rs = s2.where(~cond[:3])
rs2 = s2.mask(cond[:3])
assert_series_equal(rs, rs2)
rs = s2.where(~cond[:3], -s2)
rs2 = s2.mask(cond[:3], -s2)
assert_series_equal(rs, rs2)
pytest.raises(ValueError, s.mask, 1)
pytest.raises(ValueError, s.mask, cond[:3].values, -s)
# dtype changes
s = Series([1, 2, 3, 4])
result = s.mask(s > 2, np.nan)
expected = Series([1, 2, np.nan, np.nan])
assert_series_equal(result, expected)
def test_mask_broadcast(self):
# GH 8801
# copied from test_where_broadcast
for size in range(2, 6):
for selection in [
# First element should be set
np.resize([True, False, False, False, False], size),
                    # Set alternating elements
np.resize([True, False], size),
# No element should be set
np.resize([False], size)]:
for item in [2.0, np.nan, np.finfo(np.float).max,
np.finfo(np.float).min]:
for arr in [np.array([item]), [item], (item, )]:
data = np.arange(size, dtype=float)
s = Series(data)
result = s.mask(selection, arr)
expected = Series([item if use_item else data[
i] for i, use_item in enumerate(selection)])
assert_series_equal(result, expected)
def test_mask_inplace(self):
s = Series(np.random.randn(5))
cond = s > 0
rs = s.copy()
rs.mask(cond, inplace=True)
assert_series_equal(rs.dropna(), s[~cond])
assert_series_equal(rs, s.mask(cond))
rs = s.copy()
rs.mask(cond, -s, inplace=True)
assert_series_equal(rs, s.mask(cond, -s))
def test_ix_setitem(self):
inds = self.series.index[[3, 4, 7]]
result = self.series.copy()
result.loc[inds] = 5
expected = self.series.copy()
expected[[3, 4, 7]] = 5
assert_series_equal(result, expected)
result.iloc[5:10] = 10
expected[5:10] = 10
assert_series_equal(result, expected)
# set slice with indices
d1, d2 = self.series.index[[5, 15]]
result.loc[d1:d2] = 6
expected[5:16] = 6 # because it's inclusive
assert_series_equal(result, expected)
# set index value
self.series.loc[d1] = 4
self.series.loc[d2] = 6
assert self.series[d1] == 4
assert self.series[d2] == 6
def test_where_numeric_with_string(self):
# GH 9280
s = pd.Series([1, 2, 3])
w = s.where(s > 1, 'X')
assert not is_integer(w[0])
assert is_integer(w[1])
assert is_integer(w[2])
assert isinstance(w[0], str)
assert w.dtype == 'object'
w = s.where(s > 1, ['X', 'Y', 'Z'])
assert not is_integer(w[0])
assert is_integer(w[1])
assert is_integer(w[2])
assert isinstance(w[0], str)
assert w.dtype == 'object'
w = s.where(s > 1, np.array(['X', 'Y', 'Z']))
assert not is_integer(w[0])
assert is_integer(w[1])
assert is_integer(w[2])
assert isinstance(w[0], str)
assert w.dtype == 'object'
def test_setitem_boolean(self):
mask = self.series > self.series.median()
        # similar indexed series
result = self.series.copy()
result[mask] = self.series * 2
expected = self.series * 2
assert_series_equal(result[mask], expected[mask])
# needs alignment
result = self.series.copy()
result[mask] = (self.series * 2)[0:5]
expected = (self.series * 2)[0:5].reindex_like(self.series)
expected[-mask] = self.series[mask]
assert_series_equal(result[mask], expected[mask])
def test_ix_setitem_boolean(self):
mask = self.series > self.series.median()
result = self.series.copy()
result.loc[mask] = 0
expected = self.series
expected[mask] = 0
assert_series_equal(result, expected)
def test_ix_setitem_corner(self):
inds = list(self.series.index[[5, 8, 12]])
self.series.loc[inds] = 5
pytest.raises(Exception, self.series.loc.__setitem__,
inds + ['foo'], 5)
def test_get_set_boolean_different_order(self):
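        # boolean masks built from a re-ordered copy must align on the index,
        # not on position, for both setting and getting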
ordered = self.series.sort_values()
# setting
copy = self.series.copy()
copy[ordered > 0] = 0
expected = self.series.copy()
expected[expected > 0] = 0
assert_series_equal(copy, expected)
# getting
sel = self.series[ordered > 0]
exp = self.series[self.series > 0]
assert_series_equal(sel, exp)
def test_setitem_na(self):
# these induce dtype changes
expected = Series([np.nan, 3, np.nan, 5, np.nan, 7, np.nan, 9, np.nan])
s = Series([2, 3, 4, 5, 6, 7, 8, 9, 10])
s[::2] = np.nan
assert_series_equal(s, expected)
        # gets coerced to float, right?
expected = Series([np.nan, 1, np.nan, 0])
s = Series([True, True, False, False])
s[::2] = np.nan
assert_series_equal(s, expected)
expected = Series([np.nan, np.nan, np.nan, np.nan, np.nan, 5, 6, 7, 8,
9])
s = Series(np.arange(10))
s[:5] = np.nan
assert_series_equal(s, expected)
def test_basic_indexing(self):
s = Series(np.random.randn(5), index=['a', 'b', 'a', 'a', 'b'])
pytest.raises(IndexError, s.__getitem__, 5)
pytest.raises(IndexError, s.__setitem__, 5, 0)
pytest.raises(KeyError, s.__getitem__, 'c')
s = s.sort_index()
pytest.raises(IndexError, s.__getitem__, 5)
pytest.raises(IndexError, s.__setitem__, 5, 0)
def test_int_indexing(self):
s = Series(np.random.randn(6), index=[0, 0, 1, 1, 2, 2])
pytest.raises(KeyError, s.__getitem__, 5)
pytest.raises(KeyError, s.__getitem__, 'c')
# not monotonic
s = Series(np.random.randn(6), index=[2, 2, 0, 0, 1, 1])
pytest.raises(KeyError, s.__getitem__, 5)
pytest.raises(KeyError, s.__getitem__, 'c')
def test_datetime_indexing(self):
from pandas import date_range
index = date_range('1/1/2000', '1/7/2000')
index = index.repeat(3)
s = Series(len(index), index=index)
stamp = Timestamp('1/8/2000')
pytest.raises(KeyError, s.__getitem__, stamp)
s[stamp] = 0
assert s[stamp] == 0
# not monotonic
s = Series(len(index), index=index)
s = s[::-1]
pytest.raises(KeyError, s.__getitem__, stamp)
s[stamp] = 0
assert s[stamp] == 0
def test_timedelta_assignment(self):
# GH 8209
s = Series([])
s.loc['B'] = timedelta(1)
tm.assert_series_equal(s, Series(Timedelta('1 days'), index=['B']))
s = s.reindex(s.index.insert(0, 'A'))
tm.assert_series_equal(s, Series(
[np.nan, Timedelta('1 days')], index=['A', 'B']))
result = s.fillna(timedelta(1))
expected = Series(Timedelta('1 days'), index=['A', 'B'])
tm.assert_series_equal(result, expected)
s.loc['A'] = timedelta(1)
tm.assert_series_equal(s, expected)
# GH 14155
s = Series(10 * [np.timedelta64(10, 'm')])
s.loc[[1, 2, 3]] = np.timedelta64(20, 'm')
expected = pd.Series(10 * [np.timedelta64(10, 'm')])
expected.loc[[1, 2, 3]] = pd.Timedelta(np.timedelta64(20, 'm'))
tm.assert_series_equal(s, expected)
def test_underlying_data_conversion(self):
# GH 4080
df = DataFrame(dict((c, [1, 2, 3]) for c in ['a', 'b', 'c']))
df.set_index(['a', 'b', 'c'], inplace=True)
s = Series([1], index=[(2, 2, 2)])
df['val'] = 0
df
df['val'].update(s)
expected = DataFrame(
dict(a=[1, 2, 3], b=[1, 2, 3], c=[1, 2, 3], val=[0, 1, 0]))
expected.set_index(['a', 'b', 'c'], inplace=True)
tm.assert_frame_equal(df, expected)
# GH 3970
# these are chained assignments as well
pd.set_option('chained_assignment', None)
df = DataFrame({"aa": range(5), "bb": [2.2] * 5})
df["cc"] = 0.0
ck = [True] * len(df)
df["bb"].iloc[0] = .13
# TODO: unused
df_tmp = df.iloc[ck] # noqa
df["bb"].iloc[0] = .15
assert df['bb'].iloc[0] == 0.15
pd.set_option('chained_assignment', 'raise')
# GH 3217
df = DataFrame(dict(a=[1, 3], b=[np.nan, 2]))
df['c'] = np.nan
df['c'].update(pd.Series(['foo'], index=[0]))
expected = DataFrame(dict(a=[1, 3], b=[np.nan, 2], c=['foo', np.nan]))
tm.assert_frame_equal(df, expected)
def test_preserveRefs(self):
seq = self.ts[[5, 10, 15]]
seq[1] = np.NaN
assert not np.isnan(self.ts[10])
def test_drop(self):
# unique
s = Series([1, 2], index=['one', 'two'])
expected = Series([1], index=['one'])
result = s.drop(['two'])
assert_series_equal(result, expected)
result = s.drop('two', axis='rows')
assert_series_equal(result, expected)
# non-unique
# GH 5248
s = Series([1, 1, 2], index=['one', 'two', 'one'])
expected = Series([1, 2], index=['one', 'one'])
result = s.drop(['two'], axis=0)
assert_series_equal(result, expected)
result = s.drop('two')
assert_series_equal(result, expected)
expected = Series([1], index=['two'])
result = s.drop(['one'])
assert_series_equal(result, expected)
result = s.drop('one')
assert_series_equal(result, expected)
# single string/tuple-like
s = Series(range(3), index=list('abc'))
pytest.raises(ValueError, s.drop, 'bc')
pytest.raises(ValueError, s.drop, ('a', ))
# errors='ignore'
s = Series(range(3), index=list('abc'))
result = s.drop('bc', errors='ignore')
assert_series_equal(result, s)
result = s.drop(['a', 'd'], errors='ignore')
expected = s.iloc[1:]
assert_series_equal(result, expected)
# bad axis
pytest.raises(ValueError, s.drop, 'one', axis='columns')
# GH 8522
s = Series([2, 3], index=[True, False])
assert s.index.is_object()
result = s.drop(True)
expected = Series([3], index=[False])
assert_series_equal(result, expected)
def test_align(self):
def _check_align(a, b, how='left', fill=None):
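            # align a and b, then compare against a manual reindex onto the
            # joined index (optionally filled)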
aa, ab = a.align(b, join=how, fill_value=fill)
join_index = a.index.join(b.index, how=how)
if fill is not None:
diff_a = aa.index.difference(join_index)
diff_b = ab.index.difference(join_index)
if len(diff_a) > 0:
assert (aa.reindex(diff_a) == fill).all()
if len(diff_b) > 0:
assert (ab.reindex(diff_b) == fill).all()
ea = a.reindex(join_index)
eb = b.reindex(join_index)
if fill is not None:
ea = ea.fillna(fill)
eb = eb.fillna(fill)
assert_series_equal(aa, ea)
assert_series_equal(ab, eb)
assert aa.name == 'ts'
assert ea.name == 'ts'
assert ab.name == 'ts'
assert eb.name == 'ts'
for kind in JOIN_TYPES:
_check_align(self.ts[2:], self.ts[:-5], how=kind)
_check_align(self.ts[2:], self.ts[:-5], how=kind, fill=-1)
# empty left
_check_align(self.ts[:0], self.ts[:-5], how=kind)
_check_align(self.ts[:0], self.ts[:-5], how=kind, fill=-1)
# empty right
_check_align(self.ts[:-5], self.ts[:0], how=kind)
_check_align(self.ts[:-5], self.ts[:0], how=kind, fill=-1)
# both empty
_check_align(self.ts[:0], self.ts[:0], how=kind)
_check_align(self.ts[:0], self.ts[:0], how=kind, fill=-1)
def test_align_fill_method(self):
def _check_align(a, b, how='left', method='pad', limit=None):
aa, ab = a.align(b, join=how, method=method, limit=limit)
join_index = a.index.join(b.index, how=how)
ea = a.reindex(join_index)
eb = b.reindex(join_index)
ea = ea.fillna(method=method, limit=limit)
eb = eb.fillna(method=method, limit=limit)
assert_series_equal(aa, ea)
assert_series_equal(ab, eb)
for kind in JOIN_TYPES:
for meth in ['pad', 'bfill']:
_check_align(self.ts[2:], self.ts[:-5], how=kind, method=meth)
_check_align(self.ts[2:], self.ts[:-5], how=kind, method=meth,
limit=1)
# empty left
_check_align(self.ts[:0], self.ts[:-5], how=kind, method=meth)
_check_align(self.ts[:0], self.ts[:-5], how=kind, method=meth,
limit=1)
# empty right
_check_align(self.ts[:-5], self.ts[:0], how=kind, method=meth)
_check_align(self.ts[:-5], self.ts[:0], how=kind, method=meth,
limit=1)
# both empty
_check_align(self.ts[:0], self.ts[:0], how=kind, method=meth)
_check_align(self.ts[:0], self.ts[:0], how=kind, method=meth,
limit=1)
def test_align_nocopy(self):
b = self.ts[:5].copy()
# do copy
a = self.ts.copy()
ra, _ = a.align(b, join='left')
ra[:5] = 5
assert not (a[:5] == 5).any()
# do not copy
a = self.ts.copy()
ra, _ = a.align(b, join='left', copy=False)
ra[:5] = 5
assert (a[:5] == 5).all()
# do copy
a = self.ts.copy()
b = self.ts[:5].copy()
_, rb = a.align(b, join='right')
rb[:3] = 5
assert not (b[:3] == 5).any()
# do not copy
a = self.ts.copy()
b = self.ts[:5].copy()
_, rb = a.align(b, join='right', copy=False)
rb[:2] = 5
assert (b[:2] == 5).all()
def test_align_same_index(self):
a, b = self.ts.align(self.ts, copy=False)
assert a.index is self.ts.index
assert b.index is self.ts.index
a, b = self.ts.align(self.ts, copy=True)
assert a.index is not self.ts.index
assert b.index is not self.ts.index
def test_align_multiindex(self):
# GH 10665
midx = pd.MultiIndex.from_product([range(2), range(3), range(2)],
names=('a', 'b', 'c'))
idx = pd.Index(range(2), name='b')
s1 = pd.Series(np.arange(12, dtype='int64'), index=midx)
s2 = pd.Series(np.arange(2, dtype='int64'), index=idx)
# these must be the same results (but flipped)
res1l, res1r = s1.align(s2, join='left')
res2l, res2r = s2.align(s1, join='right')
expl = s1
tm.assert_series_equal(expl, res1l)
tm.assert_series_equal(expl, res2r)
expr = pd.Series([0, 0, 1, 1, np.nan, np.nan] * 2, index=midx)
tm.assert_series_equal(expr, res1r)
tm.assert_series_equal(expr, res2l)
res1l, res1r = s1.align(s2, join='right')
res2l, res2r = s2.align(s1, join='left')
exp_idx = pd.MultiIndex.from_product([range(2), range(2), range(2)],
names=('a', 'b', 'c'))
expl = pd.Series([0, 1, 2, 3, 6, 7, 8, 9], index=exp_idx)
tm.assert_series_equal(expl, res1l)
tm.assert_series_equal(expl, res2r)
expr = pd.Series([0, 0, 1, 1] * 2, index=exp_idx)
tm.assert_series_equal(expr, res1r)
tm.assert_series_equal(expr, res2l)
def test_reindex(self):
identity = self.series.reindex(self.series.index)
# __array_interface__ is not defined for older numpies
# and on some pythons
try:
assert np.may_share_memory(self.series.index, identity.index)
except AttributeError:
pass
assert identity.index.is_(self.series.index)
assert identity.index.identical(self.series.index)
subIndex = self.series.index[10:20]
subSeries = self.series.reindex(subIndex)
for idx, val in compat.iteritems(subSeries):
assert val == self.series[idx]
subIndex2 = self.ts.index[10:20]
subTS = self.ts.reindex(subIndex2)
for idx, val in compat.iteritems(subTS):
assert val == self.ts[idx]
stuffSeries = self.ts.reindex(subIndex)
assert np.isnan(stuffSeries).all()
# This is extremely important for the Cython code to not screw up
nonContigIndex = self.ts.index[::2]
subNonContig = self.ts.reindex(nonContigIndex)
for idx, val in compat.iteritems(subNonContig):
assert val == self.ts[idx]
# return a copy the same index here
result = self.ts.reindex()
assert not (result is self.ts)
def test_reindex_nan(self):
ts = Series([2, 3, 5, 7], index=[1, 4, nan, 8])
i, j = [nan, 1, nan, 8, 4, nan], [2, 0, 2, 3, 1, 2]
assert_series_equal(ts.reindex(i), ts.iloc[j])
ts.index = ts.index.astype('object')
# reindex coerces index.dtype to float, loc/iloc doesn't
assert_series_equal(ts.reindex(i), ts.iloc[j], check_index_type=False)
def test_reindex_series_add_nat(self):
rng = date_range('1/1/2000 00:00:00', periods=10, freq='10s')
series = Series(rng)
result = series.reindex(lrange(15))
assert np.issubdtype(result.dtype, np.dtype('M8[ns]'))
mask = result.isnull()
assert mask[-5:].all()
assert not mask[:-5].any()
def test_reindex_with_datetimes(self):
rng = date_range('1/1/2000', periods=20)
ts = Series(np.random.randn(20), index=rng)
result = ts.reindex(list(ts.index[5:10]))
expected = ts[5:10]
tm.assert_series_equal(result, expected)
result = ts[list(ts.index[5:10])]
tm.assert_series_equal(result, expected)
def test_reindex_corner(self):
# (don't forget to fix this) I think it's fixed
self.empty.reindex(self.ts.index, method='pad') # it works
# corner case: pad empty series
reindexed = self.empty.reindex(self.ts.index, method='pad')
# pass non-Index
reindexed = self.ts.reindex(list(self.ts.index))
assert_series_equal(self.ts, reindexed)
# bad fill method
ts = self.ts[::2]
pytest.raises(Exception, ts.reindex, self.ts.index, method='foo')
def test_reindex_pad(self):
s = Series(np.arange(10), dtype='int64')
s2 = s[::2]
reindexed = s2.reindex(s.index, method='pad')
reindexed2 = s2.reindex(s.index, method='ffill')
assert_series_equal(reindexed, reindexed2)
expected = Series([0, 0, 2, 2, 4, 4, 6, 6, 8, 8], index=np.arange(10))
assert_series_equal(reindexed, expected)
# GH4604
s = Series([1, 2, 3, 4, 5], index=['a', 'b', 'c', 'd', 'e'])
new_index = ['a', 'g', 'c', 'f']
expected = Series([1, 1, 3, 3], index=new_index)
# this changes dtype because the ffill happens after
result = s.reindex(new_index).ffill()
assert_series_equal(result, expected.astype('float64'))
result = s.reindex(new_index).ffill(downcast='infer')
assert_series_equal(result, expected)
expected = Series([1, 5, 3, 5], index=new_index)
result = s.reindex(new_index, method='ffill')
assert_series_equal(result, expected)
        # inference of new dtype
s = Series([True, False, False, True], index=list('abcd'))
new_index = 'agc'
result = s.reindex(list(new_index)).ffill()
expected = Series([True, True, False], index=list(new_index))
assert_series_equal(result, expected)
# GH4618 shifted series downcasting
s = Series(False, index=lrange(0, 5))
result = s.shift(1).fillna(method='bfill')
expected = Series(False, index=lrange(0, 5))
assert_series_equal(result, expected)
def test_reindex_nearest(self):
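        # method='nearest' snaps each target to the closest existing label,
        # optionally bounded by a tolerance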
s = Series(np.arange(10, dtype='int64'))
target = [0.1, 0.9, 1.5, 2.0]
actual = s.reindex(target, method='nearest')
expected = Series(np.around(target).astype('int64'), target)
assert_series_equal(expected, actual)
actual = s.reindex_like(actual, method='nearest')
assert_series_equal(expected, actual)
actual = s.reindex_like(actual, method='nearest', tolerance=1)
assert_series_equal(expected, actual)
actual = s.reindex(target, method='nearest', tolerance=0.2)
expected = Series([0, 1, np.nan, 2], target)
assert_series_equal(expected, actual)
def test_reindex_backfill(self):
pass
def test_reindex_int(self):
ts = self.ts[::2]
int_ts = Series(np.zeros(len(ts), dtype=int), index=ts.index)
# this should work fine
reindexed_int = int_ts.reindex(self.ts.index)
# if NaNs introduced
assert reindexed_int.dtype == np.float_
# NO NaNs introduced
reindexed_int = int_ts.reindex(int_ts.index[::2])
assert reindexed_int.dtype == np.int_
def test_reindex_bool(self):
# A series other than float, int, string, or object
ts = self.ts[::2]
bool_ts = Series(np.zeros(len(ts), dtype=bool), index=ts.index)
# this should work fine
reindexed_bool = bool_ts.reindex(self.ts.index)
# if NaNs introduced
assert reindexed_bool.dtype == np.object_
# NO NaNs introduced
reindexed_bool = bool_ts.reindex(bool_ts.index[::2])
assert reindexed_bool.dtype == np.bool_
def test_reindex_bool_pad(self):
# fail
ts = self.ts[5:]
bool_ts = Series(np.zeros(len(ts), dtype=bool), index=ts.index)
filled_bool = bool_ts.reindex(self.ts.index, method='pad')
assert isnull(filled_bool[:5]).all()
def test_reindex_like(self):
other = self.ts[::2]
assert_series_equal(self.ts.reindex(other.index),
self.ts.reindex_like(other))
# GH 7179
day1 = datetime(2013, 3, 5)
day2 = datetime(2013, 5, 5)
day3 = datetime(2014, 3, 5)
series1 = Series([5, None, None], [day1, day2, day3])
series2 = Series([None, None], [day1, day3])
result = series1.reindex_like(series2, method='pad')
expected = Series([5, np.nan], index=[day1, day3])
assert_series_equal(result, expected)
def test_reindex_fill_value(self):
# -----------------------------------------------------------
# floats
floats = Series([1., 2., 3.])
result = floats.reindex([1, 2, 3])
expected = Series([2., 3., np.nan], index=[1, 2, 3])
assert_series_equal(result, expected)
result = floats.reindex([1, 2, 3], fill_value=0)
expected = Series([2., 3., 0], index=[1, 2, 3])
assert_series_equal(result, expected)
# -----------------------------------------------------------
# ints
ints = Series([1, 2, 3])
result = ints.reindex([1, 2, 3])
expected = Series([2., 3., np.nan], index=[1, 2, 3])
assert_series_equal(result, expected)
# don't upcast
result = ints.reindex([1, 2, 3], fill_value=0)
expected = Series([2, 3, 0], index=[1, 2, 3])
assert issubclass(result.dtype.type, np.integer)
assert_series_equal(result, expected)
# -----------------------------------------------------------
# objects
objects = Series([1, 2, 3], dtype=object)
result = objects.reindex([1, 2, 3])
expected = Series([2, 3, np.nan], index=[1, 2, 3], dtype=object)
assert_series_equal(result, expected)
result = objects.reindex([1, 2, 3], fill_value='foo')
expected = Series([2, 3, 'foo'], index=[1, 2, 3], dtype=object)
assert_series_equal(result, expected)
# ------------------------------------------------------------
# bools
bools = Series([True, False, True])
result = bools.reindex([1, 2, 3])
expected = Series([False, True, np.nan], index=[1, 2, 3], dtype=object)
assert_series_equal(result, expected)
result = bools.reindex([1, 2, 3], fill_value=False)
expected = Series([False, True, False], index=[1, 2, 3])
assert_series_equal(result, expected)
def test_select(self):
n = len(self.ts)
result = self.ts.select(lambda x: x >= self.ts.index[n // 2])
expected = self.ts.reindex(self.ts.index[n // 2:])
assert_series_equal(result, expected)
result = self.ts.select(lambda x: x.weekday() == 2)
expected = self.ts[self.ts.index.weekday == 2]
assert_series_equal(result, expected)
def test_cast_on_putmask(self):
# GH 2746
# need to upcast
s = Series([1, 2], index=[1, 2], dtype='int64')
s[[True, False]] = Series([0], index=[1], dtype='int64')
expected = Series([0, 2], index=[1, 2], dtype='int64')
assert_series_equal(s, expected)
def test_type_promote_putmask(self):
# GH8387: test that changing types does not break alignment
ts = Series(np.random.randn(100), index=np.arange(100, 0, -1)).round(5)
left, mask = ts.copy(), ts > 0
right = ts[mask].copy().map(str)
left[mask] = right
assert_series_equal(left, ts.map(lambda t: str(t) if t > 0 else t))
s = Series([0, 1, 2, 0])
mask = s > 0
s2 = s[mask].map(str)
s[mask] = s2
assert_series_equal(s, Series([0, '1', '2', 0]))
s = Series([0, 'foo', 'bar', 0])
mask = Series([False, True, True, False])
s2 = s[mask]
s[mask] = s2
assert_series_equal(s, Series([0, 'foo', 'bar', 0]))
def test_head_tail(self):
assert_series_equal(self.series.head(), self.series[:5])
assert_series_equal(self.series.head(0), self.series[0:0])
assert_series_equal(self.series.tail(), self.series[-5:])
assert_series_equal(self.series.tail(0), self.series[0:0])
def test_multilevel_preserve_name(self):
index = MultiIndex(levels=[['foo', 'bar', 'baz', 'qux'], ['one', 'two',
'three']],
labels=[[0, 0, 0, 1, 1, 2, 2, 3, 3, 3],
[0, 1, 2, 0, 1, 1, 2, 0, 1, 2]],
names=['first', 'second'])
s = Series(np.random.randn(len(index)), index=index, name='sth')
result = s['foo']
result2 = s.loc['foo']
assert result.name == s.name
assert result2.name == s.name
def test_setitem_scalar_into_readonly_backing_data(self):
# GH14359: test that you cannot mutate a read only buffer
array = np.zeros(5)
array.flags.writeable = False # make the array immutable
series = Series(array)
for n in range(len(series)):
with pytest.raises(ValueError):
series[n] = 1
assert array[n] == 0
def test_setitem_slice_into_readonly_backing_data(self):
# GH14359: test that you cannot mutate a read only buffer
array = np.zeros(5)
array.flags.writeable = False # make the array immutable
series = Series(array)
with pytest.raises(ValueError):
series[1:3] = 1
assert not array.any()
class TestTimeSeriesDuplicates(object):
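    # indexing behaviour for a Series whose DatetimeIndex contains duplicate
    # timestamps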
def setup_method(self, method):
dates = [datetime(2000, 1, 2), datetime(2000, 1, 2),
datetime(2000, 1, 2), datetime(2000, 1, 3),
datetime(2000, 1, 3), datetime(2000, 1, 3),
datetime(2000, 1, 4), datetime(2000, 1, 4),
datetime(2000, 1, 4), datetime(2000, 1, 5)]
self.dups = Series(np.random.randn(len(dates)), index=dates)
def test_constructor(self):
assert isinstance(self.dups, Series)
assert isinstance(self.dups.index, DatetimeIndex)
def test_is_unique_monotonic(self):
assert not self.dups.index.is_unique
def test_index_unique(self):
uniques = self.dups.index.unique()
expected = DatetimeIndex([datetime(2000, 1, 2), datetime(2000, 1, 3),
datetime(2000, 1, 4), datetime(2000, 1, 5)])
assert uniques.dtype == 'M8[ns]' # sanity
tm.assert_index_equal(uniques, expected)
assert self.dups.index.nunique() == 4
# #2563
assert isinstance(uniques, DatetimeIndex)
dups_local = self.dups.index.tz_localize('US/Eastern')
dups_local.name = 'foo'
result = dups_local.unique()
expected = DatetimeIndex(expected, name='foo')
expected = expected.tz_localize('US/Eastern')
assert result.tz is not None
assert result.name == 'foo'
tm.assert_index_equal(result, expected)
# NaT, note this is excluded
arr = [1370745748 + t for t in range(20)] + [tslib.iNaT]
idx = DatetimeIndex(arr * 3)
tm.assert_index_equal(idx.unique(), DatetimeIndex(arr))
assert idx.nunique() == 20
assert idx.nunique(dropna=False) == 21
arr = [Timestamp('2013-06-09 02:42:28') + timedelta(seconds=t)
for t in range(20)] + [NaT]
idx = DatetimeIndex(arr * 3)
tm.assert_index_equal(idx.unique(), DatetimeIndex(arr))
assert idx.nunique() == 20
assert idx.nunique(dropna=False) == 21
def test_index_dupes_contains(self):
d = datetime(2011, 12, 5, 20, 30)
ix = DatetimeIndex([d, d])
assert d in ix
def test_duplicate_dates_indexing(self):
ts = self.dups
uniques = ts.index.unique()
for date in uniques:
result = ts[date]
mask = ts.index == date
total = (ts.index == date).sum()
expected = ts[mask]
if total > 1:
assert_series_equal(result, expected)
else:
assert_almost_equal(result, expected[0])
cp = ts.copy()
cp[date] = 0
expected = Series(np.where(mask, 0, ts), index=ts.index)
assert_series_equal(cp, expected)
pytest.raises(KeyError, ts.__getitem__, datetime(2000, 1, 6))
# new index
ts[datetime(2000, 1, 6)] = 0
assert ts[datetime(2000, 1, 6)] == 0
def test_range_slice(self):
idx = DatetimeIndex(['1/1/2000', '1/2/2000', '1/2/2000', '1/3/2000',
'1/4/2000'])
ts = Series(np.random.randn(len(idx)), index=idx)
result = ts['1/2/2000':]
expected = ts[1:]
assert_series_equal(result, expected)
result = ts['1/2/2000':'1/3/2000']
expected = ts[1:4]
assert_series_equal(result, expected)
def test_groupby_average_dup_values(self):
result = self.dups.groupby(level=0).mean()
expected = self.dups.groupby(self.dups.index).mean()
assert_series_equal(result, expected)
def test_indexing_over_size_cutoff(self):
import datetime
# #1821
old_cutoff = _index._SIZE_CUTOFF
try:
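            # shrink the indexing engine's size cutoff so this ~4400-row index
            # counts as 'over the cutoff' and exercises that lookup path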
_index._SIZE_CUTOFF = 1000
# create large list of non periodic datetime
dates = []
sec = datetime.timedelta(seconds=1)
half_sec = datetime.timedelta(microseconds=500000)
d = datetime.datetime(2011, 12, 5, 20, 30)
n = 1100
for i in range(n):
dates.append(d)
dates.append(d + sec)
dates.append(d + sec + half_sec)
dates.append(d + sec + sec + half_sec)
d += 3 * sec
# duplicate some values in the list
duplicate_positions = np.random.randint(0, len(dates) - 1, 20)
for p in duplicate_positions:
dates[p + 1] = dates[p]
df = DataFrame(np.random.randn(len(dates), 4),
index=dates,
columns=list('ABCD'))
pos = n * 3
timestamp = df.index[pos]
assert timestamp in df.index
# it works!
df.loc[timestamp]
assert len(df.loc[[timestamp]]) > 0
finally:
_index._SIZE_CUTOFF = old_cutoff
def test_indexing_unordered(self):
# GH 2437
rng = date_range(start='2011-01-01', end='2011-01-15')
ts = Series(np.random.rand(len(rng)), index=rng)
ts2 = pd.concat([ts[0:4], ts[-4:], ts[4:-4]])
for t in ts.index:
# TODO: unused?
s = str(t) # noqa
expected = ts[t]
result = ts2[t]
assert expected == result
# GH 3448 (ranges)
def compare(slobj):
result = ts2[slobj].copy()
result = result.sort_index()
expected = ts[slobj]
assert_series_equal(result, expected)
compare(slice('2011-01-01', '2011-01-15'))
compare(slice('2010-12-30', '2011-01-15'))
compare(slice('2011-01-01', '2011-01-16'))
# partial ranges
compare(slice('2011-01-01', '2011-01-6'))
compare(slice('2011-01-06', '2011-01-8'))
compare(slice('2011-01-06', '2011-01-12'))
# single values
result = ts2['2011'].sort_index()
expected = ts['2011']
assert_series_equal(result, expected)
# diff freq
rng = date_range(datetime(2005, 1, 1), periods=20, freq='M')
ts = Series(np.arange(len(rng)), index=rng)
ts = ts.take(np.random.permutation(20))
result = ts['2005']
for t in result.index:
assert t.year == 2005
def test_indexing(self):
idx = date_range("2001-1-1", periods=20, freq='M')
ts = Series(np.random.rand(len(idx)), index=idx)
# getting
# GH 3070, make sure semantics work on Series/Frame
expected = ts['2001']
expected.name = 'A'
df = DataFrame(dict(A=ts))
result = df['2001']['A']
assert_series_equal(expected, result)
# setting
ts['2001'] = 1
expected = ts['2001']
expected.name = 'A'
df.loc['2001', 'A'] = 1
result = df['2001']['A']
assert_series_equal(expected, result)
# GH3546 (not including times on the last day)
idx = date_range(start='2013-05-31 00:00', end='2013-05-31 23:00',
freq='H')
ts = Series(lrange(len(idx)), index=idx)
expected = ts['2013-05']
assert_series_equal(expected, ts)
idx = date_range(start='2013-05-31 00:00', end='2013-05-31 23:59',
freq='S')
ts = Series(lrange(len(idx)), index=idx)
expected = ts['2013-05']
assert_series_equal(expected, ts)
idx = [Timestamp('2013-05-31 00:00'),
Timestamp(datetime(2013, 5, 31, 23, 59, 59, 999999))]
ts = Series(lrange(len(idx)), index=idx)
expected = ts['2013']
assert_series_equal(expected, ts)
# GH14826, indexing with a seconds resolution string / datetime object
df = DataFrame(np.random.rand(5, 5),
columns=['open', 'high', 'low', 'close', 'volume'],
index=date_range('2012-01-02 18:01:00',
periods=5, tz='US/Central', freq='s'))
expected = df.loc[[df.index[2]]]
# this is a single date, so will raise
        pytest.raises(KeyError, df.__getitem__, '2012-01-02 18:01:02')
        pytest.raises(KeyError, df.__getitem__, df.index[2])
class TestDatetimeIndexing(object):
"""
Also test support for datetime64[ns] in Series / DataFrame
"""
def setup_method(self, method):
dti = DatetimeIndex(start=datetime(2005, 1, 1),
end=datetime(2005, 1, 10), freq='Min')
self.series = Series(np.random.rand(len(dti)), dti)
def test_fancy_getitem(self):
dti = DatetimeIndex(freq='WOM-1FRI', start=datetime(2005, 1, 1),
end=datetime(2010, 1, 1))
s = Series(np.arange(len(dti)), index=dti)
assert s[48] == 48
assert s['1/2/2009'] == 48
assert s['2009-1-2'] == 48
assert s[datetime(2009, 1, 2)] == 48
assert s[lib.Timestamp(datetime(2009, 1, 2))] == 48
pytest.raises(KeyError, s.__getitem__, '2009-1-3')
assert_series_equal(s['3/6/2009':'2009-06-05'],
s[datetime(2009, 3, 6):datetime(2009, 6, 5)])
def test_fancy_setitem(self):
dti = DatetimeIndex(freq='WOM-1FRI', start=datetime(2005, 1, 1),
end=datetime(2010, 1, 1))
s = Series(np.arange(len(dti)), index=dti)
s[48] = -1
assert s[48] == -1
s['1/2/2009'] = -2
assert s[48] == -2
s['1/2/2009':'2009-06-05'] = -3
assert (s[48:54] == -3).all()
def test_dti_snap(self):
dti = DatetimeIndex(['1/1/2002', '1/2/2002', '1/3/2002', '1/4/2002',
'1/5/2002', '1/6/2002', '1/7/2002'], freq='D')
res = dti.snap(freq='W-MON')
exp = date_range('12/31/2001', '1/7/2002', freq='w-mon')
exp = exp.repeat([3, 4])
assert (res == exp).all()
res = dti.snap(freq='B')
exp = date_range('1/1/2002', '1/7/2002', freq='b')
exp = exp.repeat([1, 1, 1, 2, 2])
assert (res == exp).all()
def test_dti_reset_index_round_trip(self):
dti = DatetimeIndex(start='1/1/2001', end='6/1/2001', freq='D')
d1 = DataFrame({'v': np.random.rand(len(dti))}, index=dti)
d2 = d1.reset_index()
assert d2.dtypes[0] == np.dtype('M8[ns]')
d3 = d2.set_index('index')
assert_frame_equal(d1, d3, check_names=False)
# #2329
stamp = datetime(2012, 11, 22)
df = DataFrame([[stamp, 12.1]], columns=['Date', 'Value'])
df = df.set_index('Date')
assert df.index[0] == stamp
assert df.reset_index()['Date'][0] == stamp
def test_series_set_value(self):
# #1561
dates = [datetime(2001, 1, 1), datetime(2001, 1, 2)]
index = DatetimeIndex(dates)
s = Series().set_value(dates[0], 1.)
s2 = s.set_value(dates[1], np.nan)
exp = Series([1., np.nan], index=index)
assert_series_equal(s2, exp)
# s = Series(index[:1], index[:1])
# s2 = s.set_value(dates[1], index[1])
# assert s2.values.dtype == 'M8[ns]'
@slow
def test_slice_locs_indexerror(self):
times = [datetime(2000, 1, 1) + timedelta(minutes=i * 10)
for i in range(100000)]
s = Series(lrange(100000), times)
s.loc[datetime(1900, 1, 1):datetime(2100, 1, 1)]
def test_slicing_datetimes(self):
# GH 7523
# unique
df = DataFrame(np.arange(4., dtype='float64'),
index=[datetime(2001, 1, i, 10, 00)
for i in [1, 2, 3, 4]])
result = df.loc[datetime(2001, 1, 1, 10):]
assert_frame_equal(result, df)
result = df.loc[:datetime(2001, 1, 4, 10)]
assert_frame_equal(result, df)
result = df.loc[datetime(2001, 1, 1, 10):datetime(2001, 1, 4, 10)]
assert_frame_equal(result, df)
result = df.loc[datetime(2001, 1, 1, 11):]
expected = df.iloc[1:]
assert_frame_equal(result, expected)
result = df.loc['20010101 11':]
assert_frame_equal(result, expected)
# duplicates
df = pd.DataFrame(np.arange(5., dtype='float64'),
index=[datetime(2001, 1, i, 10, 00)
for i in [1, 2, 2, 3, 4]])
result = df.loc[datetime(2001, 1, 1, 10):]
assert_frame_equal(result, df)
result = df.loc[:datetime(2001, 1, 4, 10)]
assert_frame_equal(result, df)
result = df.loc[datetime(2001, 1, 1, 10):datetime(2001, 1, 4, 10)]
assert_frame_equal(result, df)
result = df.loc[datetime(2001, 1, 1, 11):]
expected = df.iloc[1:]
assert_frame_equal(result, expected)
result = df.loc['20010101 11':]
assert_frame_equal(result, expected)
def test_frame_datetime64_duplicated(self):
dates = date_range('2010-07-01', end='2010-08-05')
tst = DataFrame({'symbol': 'AAA', 'date': dates})
result = tst.duplicated(['date', 'symbol'])
assert (-result).all()
tst = DataFrame({'date': dates})
result = tst.duplicated()
assert (-result).all()
class TestNatIndexing(object):
def setup_method(self, method):
self.series = Series(date_range('1/1/2000', periods=10))
# ---------------------------------------------------------------------
# NaT support
def test_set_none_nan(self):
self.series[3] = None
assert self.series[3] is NaT
self.series[3:5] = None
assert self.series[4] is NaT
self.series[5] = np.nan
assert self.series[5] is NaT
self.series[5:7] = np.nan
assert self.series[6] is NaT
def test_nat_operations(self):
# GH 8617
s = Series([0, pd.NaT], dtype='m8[ns]')
exp = s[0]
assert s.median() == exp
assert s.min() == exp
assert s.max() == exp
def test_round_nat(self):
# GH14940
s = Series([pd.NaT])
expected = Series(pd.NaT)
for method in ["round", "floor", "ceil"]:
round_method = getattr(s.dt, method)
for freq in ["s", "5s", "min", "5min", "h", "5h"]:
assert_series_equal(round_method(freq), expected)
| bsd-3-clause | -3,876,241,848,561,414,700 | 31.762737 | 79 | 0.525852 | false |
googlefonts/noto-emoji | materialize_emoji_images.py | 1 | 4076 | #!/usr/bin/env python3
#
# Copyright 2016 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Create a copy of the emoji images that instantiates aliases, etc. as
symlinks."""
from __future__ import print_function
import argparse
import glob
import os
from os import path
import re
import shutil
import sys
from nototools import tool_utils
# copied from third_party/color_emoji/add_glyphs.py
EXTRA_SEQUENCES = {
'u1F46A': '1F468_200D_1F469_200D_1F466', # MWB
'u1F491': '1F469_200D_2764_FE0F_200D_1F468', # WHM
'u1F48F': '1F469_200D_2764_FE0F_200D_1F48B_200D_1F468', # WHKM
}
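# '200D' is the ZERO WIDTH JOINER and 'FE0F' the VARIATION SELECTOR-16 codepoint;
# _alias_people() below creates each ZWJ-sequence filename (the value) as a symlink
# to the image of the single legacy codepoint (the key).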
# Flag aliases - from: to
FLAG_ALIASES = {
'BV': 'NO',
'CP': 'FR',
'HM': 'AU',
'SJ': 'NO',
'UM': 'US',
}
OMITTED_FLAGS = set(
'BL BQ DG EA EH FK GF GP GS MF MQ NC PM RE TF WF XK YT'.split())
def _flag_str(ris_pair):
return '_'.join('%04x' % (ord(cp) - ord('A') + 0x1f1e6)
for cp in ris_pair)
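# Illustrative example: for 'NO', the regional indicators are U+1F1F3 and U+1F1F4,
# so _flag_str('NO') returns '1f1f3_1f1f4' and the corresponding flag image file is
# 'emoji_u1f1f3_1f1f4.png'.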
def _copy_files(src, dst):
"""Copies files named 'emoji_u*.png' from dst to src, and return a set of
the names with 'emoji_u' and the extension stripped."""
code_strings = set()
tool_utils.check_dir_exists(src)
dst = tool_utils.ensure_dir_exists(dst, clean=True)
for f in glob.glob(path.join(src, 'emoji_u*.png')):
shutil.copy(f, dst)
code_strings.add(path.splitext(path.basename(f))[0][7:])
return code_strings
def _alias_people(code_strings, dst):
"""Create aliases for people in dst, based on code_strings."""
for src, ali in sorted(EXTRA_SEQUENCES.items()):
if src[1:].lower() in code_strings:
src_name = 'emoji_%s.png' % src.lower()
ali_name = 'emoji_u%s.png' % ali.lower()
print('creating symlink %s -> %s' % (ali_name, src_name))
os.symlink(path.join(dst, src_name), path.join(dst, ali_name))
else:
      print('people image %s not found' % src, file=sys.stderr)
def _alias_flags(code_strings, dst):
for ali, src in sorted(FLAG_ALIASES.items()):
src_str = _flag_str(src)
if src_str in code_strings:
src_name = 'emoji_u%s.png' % src_str
ali_name = 'emoji_u%s.png' % _flag_str(ali)
print('creating symlink %s (%s) -> %s (%s)' % (ali_name, ali, src_name, src))
os.symlink(path.join(dst, src_name), path.join(dst, ali_name))
else:
      print('flag image %s (%s) not found' % (src_str, src), file=sys.stderr)
def _alias_omitted_flags(code_strings, dst):
UNKNOWN_FLAG = 'fe82b'
if UNKNOWN_FLAG not in code_strings:
    print('unknown flag missing', file=sys.stderr)
return
dst_name = 'emoji_u%s.png' % UNKNOWN_FLAG
dst_path = path.join(dst, dst_name)
for ali in sorted(OMITTED_FLAGS):
ali_str = _flag_str(ali)
if ali_str in code_strings:
      print('omitted flag %s has image %s' % (ali, ali_str), file=sys.stderr)
continue
ali_name = 'emoji_u%s.png' % ali_str
print('creating symlink %s (%s) -> unknown_flag (%s)' % (
ali_str, ali, dst_name))
os.symlink(dst_path, path.join(dst, ali_name))
def materialize_images(src, dst):
code_strings = _copy_files(src, dst)
_alias_people(code_strings, dst)
_alias_flags(code_strings, dst)
_alias_omitted_flags(code_strings, dst)
def main():
parser = argparse.ArgumentParser()
parser.add_argument(
'-s', '--srcdir', help='path to input sources', metavar='dir',
default = 'build/compressed_pngs')
parser.add_argument(
'-d', '--dstdir', help='destination for output images', metavar='dir')
args = parser.parse_args()
materialize_images(args.srcdir, args.dstdir)
if __name__ == '__main__':
main()
| apache-2.0 | 7,824,723,636,050,095,000 | 31.349206 | 83 | 0.652601 | false |
esacosta/u-mooc | common/schema_fields.py | 1 | 5958 | # Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Mapping from schema to backend properties."""
__author__ = 'Abhinav Khandelwal ([email protected])'
import collections
import json
from models.property import Property
from models.property import Registry
class SchemaField(Property):
"""SchemaField defines a solo field in REST API."""
def get_json_schema(self):
"""Get the JSCON schema for this field."""
prop = {}
prop['type'] = self._property_type
if self._optional:
prop['optional'] = self._optional
if self._description:
prop['description'] = self._description
return prop
def get_schema_dict_entry(self):
"""Get Schema annotation dictionary for this field."""
if self._extra_schema_dict_values:
schema = self._extra_schema_dict_values
else:
schema = {}
schema['label'] = self._label
schema['_type'] = self._property_type
        if 'date' == self._property_type:
schema['dateFormat'] = 'Y/m/d'
schema['valueFormat'] = 'Y/m/d'
        elif 'select' == self._property_type:
choices = []
for value, label in self._select_data:
choices.append({'value': value, 'label': label})
schema['choices'] = choices
if self._description:
schema['description'] = self._description
return schema
class FieldRegistry(Registry):
"""FieldRegistry is a collection of SchemaField's for an API."""
def add_sub_registry(
self, name, title=None, description=None, registry=None):
"""Add a sub registry to for this Registry."""
if not registry:
registry = FieldRegistry(title, description)
self._sub_registories[name] = registry
return registry
def get_json_schema_dict(self):
schema_dict = dict(self._registry)
schema_dict['properties'] = collections.OrderedDict()
for schema_field in self._properties:
schema_dict['properties'][schema_field.name] = (
schema_field.get_json_schema())
for key in self._sub_registories.keys():
schema_dict['properties'][key] = (
self._sub_registories[key].get_json_schema_dict())
return schema_dict
def get_json_schema(self):
"""Get the json schema for this API."""
return json.dumps(self.get_json_schema_dict())
def _get_schema_dict(self, prefix_key):
"""Get schema dict for this API."""
title_key = list(prefix_key)
title_key.append('title')
schema_dict = [(title_key, self._title)]
base_key = list(prefix_key)
base_key.append('properties')
for schema_field in self._properties:
field_key = list(base_key)
field_key.append(schema_field.name)
field_key.append('_inputex')
filed_tuple = field_key, schema_field.get_schema_dict_entry()
schema_dict.append(filed_tuple)
for key in self._sub_registories.keys():
sub_registry_key_prefix = list(base_key)
sub_registry_key_prefix.append(key)
sub_registry = self._sub_registories[key]
# pylint: disable-msg=protected-access
for entry in sub_registry._get_schema_dict(sub_registry_key_prefix):
schema_dict.append(entry)
# pylint: enable-msg=protected-access
return schema_dict
def get_schema_dict(self):
"""Get schema dict for this API."""
return self._get_schema_dict(list())
def _add_entry(self, key_part_list, value, entity):
if len(key_part_list) == 1:
entity[key_part_list[0]] = value
return
key = key_part_list.pop()
if not entity.has_key(key):
entity[key] = {}
else:
assert type(entity[key]) == type(dict())
self._add_entry(key_part_list, value, entity[key])
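    # Illustrative example: a flattened key such as 'a:b:c' with value v is unpacked
    # by convert_json_to_entity/_add_entry into entity['a']['b']['c'] = v.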
def convert_json_to_entity(self, json_entry, entity):
assert type(json_entry) == type(dict())
for key in json_entry.keys():
if type(json_entry[key]) == type(dict()):
self.convert_json_to_entity(json_entry[key], entity)
else:
key_parts = key.split(':')
key_parts.reverse()
self._add_entry(key_parts, json_entry[key], entity)
def _get_field_value(self, key_part_list, entity):
if len(key_part_list) == 1:
if entity.has_key(key_part_list[0]):
return entity[key_part_list[0]]
return None
key = key_part_list.pop()
if entity.has_key(key):
return self._get_field_value(key_part_list, entity[key])
return None
def convert_entity_to_json_entity(self, entity, json_entry):
for schema_field in self._properties:
field_name = schema_field.name
field_name_parts = field_name.split(':')
field_name_parts.reverse()
value = self._get_field_value(field_name_parts, entity)
if type(value) != type(None):
json_entry[field_name] = value
for key in self._sub_registories.keys():
json_entry[key] = {}
self._sub_registories[key].convert_entity_to_json_entity(
entity, json_entry[key])
| apache-2.0 | -4,220,608,419,550,851,600 | 36.2375 | 80 | 0.596173 | false |
fedora-infra/the-new-hotness | hotness/exceptions/http_exception.py | 1 | 1465 | # -*- coding: utf-8 -*-
#
# Copyright (C) 2021 Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
class HTTPException(Exception):
"""
    Class representing an HTTP exception. This exception should be raised by any wrapper that
    receives an HTTP response with a status code other than 200.
Attributes:
error_code: Error code of the response
message: Error message.
"""
def __init__(self, error_code: int, message: str):
"""
Class constructor.
"""
self.error_code = error_code
self.message = message
super(HTTPException, self).__init__(self.message)
def __str__(self):
"""
String representation of error.
"""
return f"Error code: {self.error_code}\n" f"Error message: {self.message}"
| lgpl-2.1 | -8,487,426,483,306,750,000 | 35.625 | 97 | 0.679863 | false |
LCBRU/reporter | reporter/uhl_reports/fast/data_quality/screening_clinic_redcap_dq.py | 1 | 3307 | #!/usr/bin/env python3
from reporter.core import Schedule
from reporter.connections import RedcapInstance
from reporter.emailing import (
RECIPIENT_FAST_MANAGER,
RECIPIENT_FAST_ADMIN,
)
from reporter.application_abstract_reports.redcap.data_quality import (
RedcapMissingDataWhen,
RedcapMissingAllWhen,
RedcapInvalidNhsNumber,
RedcapImpliesCheck,
RedcapInvalidEmailAddress,
RedcapInvalidDate,
RedcapInvalidHeightInCm,
RedcapInvalidHeightInFeetAndInches,
RedcapInvalidWeightInKg,
RedcapInvalidWeightInStonesAndPounds,
RedcapInvalidPostCode,
)
REDCAP_SCREENING_PROJECT_ID = 48
REDCAP_INSTANCE = RedcapInstance.internal
class FastRedcapInvalidEmailAddress(
RedcapInvalidEmailAddress):
def __init__(self):
super().__init__(
redcap_instance=REDCAP_INSTANCE,
project_id=REDCAP_SCREENING_PROJECT_ID,
fields=['email_add'],
recipients=[RECIPIENT_FAST_ADMIN],
schedule=Schedule.never,
)
class FastScreeningRedcapInvalidDate(
RedcapInvalidDate):
def __init__(self):
super().__init__(
redcap_instance=REDCAP_INSTANCE,
project_id=REDCAP_SCREENING_PROJECT_ID,
recipients=[RECIPIENT_FAST_ADMIN],
schedule=Schedule.never,
)
class FastScreeningRedcapInvalidNhsNumber(
RedcapInvalidNhsNumber):
def __init__(self):
super().__init__(
redcap_instance=REDCAP_INSTANCE,
project_id=REDCAP_SCREENING_PROJECT_ID,
fields=['nhs_no'],
recipients=[RECIPIENT_FAST_ADMIN],
schedule=Schedule.never,
)
class FastRedcapInvalidPostCode(
RedcapInvalidPostCode):
def __init__(self):
super().__init__(
redcap_instance=REDCAP_INSTANCE,
project_id=REDCAP_SCREENING_PROJECT_ID,
fields=['postcode'],
recipients=[RECIPIENT_FAST_ADMIN],
schedule=Schedule.never,
)
class FastRedcapMissingDataWhenRecruited(RedcapMissingDataWhen):
def __init__(self):
super().__init__(
redcap_instance=REDCAP_INSTANCE,
project_id=REDCAP_SCREENING_PROJECT_ID,
fields=[
'first_name',
'last_name',
'postcode',
'gp_practice',
'clinic_date',
'invitation_group',
'patient_attend',
'patient_agree_scan',
],
indicator_field='patient_recruited',
indicator_value='1',
recipients=[RECIPIENT_FAST_MANAGER, RECIPIENT_FAST_ADMIN],
schedule=Schedule.never,
)
class FastRedcapMissingAddressWhenRecruited(RedcapMissingAllWhen):
def __init__(self):
super().__init__(
redcap_instance=REDCAP_INSTANCE,
project_id=REDCAP_SCREENING_PROJECT_ID,
fields=['add_1', 'add_2', 'add_3', 'add_4'],
indicator_field='patient_recruited',
indicator_value='1',
recipients=[RECIPIENT_FAST_MANAGER, RECIPIENT_FAST_ADMIN],
schedule=Schedule.never,
)
| mit | 8,472,242,262,794,943,000 | 29.198113 | 71 | 0.589054 | false |
dholbach/snapcraft | snapcraft/_options.py | 1 | 4962 | # -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
#
# Copyright (C) 2016 Canonical Ltd
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import logging
import multiprocessing
import os
import platform
logger = logging.getLogger(__name__)
_ARCH_TRANSLATIONS = {
'armv7l': {
'kernel': 'arm',
'deb': 'armhf',
'cross-compiler-prefix': 'arm-linux-gnueabihf-',
'cross-build-packages': ['gcc-arm-linux-gnueabihf'],
'triplet': 'arm-linux-gnueabihf',
},
'aarch64': {
'kernel': 'arm64',
'deb': 'arm64',
'cross-compiler-prefix': 'aarch64-linux-gnu-',
'cross-build-packages': ['gcc-aarch64-linux-gnu'],
'triplet': 'aarch64-linux-gnu',
},
'i686': {
'kernel': 'x86',
'deb': 'i386',
'triplet': 'i386-linux-gnu',
},
'ppc64le': {
'kernel': 'powerpc',
'deb': 'ppc64el',
'cross-compiler-prefix': 'powerpc64le-linux-gnu-',
'cross-build-packages': ['gcc-powerpc64le-linux-gnu'],
'triplet': 'powerpc64le-linux-gnu',
},
'x86_64': {
'kernel': 'x86',
'deb': 'amd64',
'triplet': 'x86_64-linux-gnu',
},
's390x': {
'kernel': 's390x',
'deb': 's390x',
'cross-compiler-prefix': 's390x-linux-gnu-',
'cross-build-packages': ['gcc-s390x-linux-gnu'],
'triplet': 's390x-linux-gnu',
}
}
class ProjectOptions:
@property
def use_geoip(self):
return self.__use_geoip
@property
def parallel_builds(self):
return self.__parallel_builds
@property
def parallel_build_count(self):
build_count = 1
if self.__parallel_builds:
try:
build_count = multiprocessing.cpu_count()
except NotImplementedError:
logger.warning(
'Unable to determine CPU count; disabling parallel builds')
return build_count
@property
def is_cross_compiling(self):
return self.__target_machine != self.__host_machine
@property
def cross_compiler_prefix(self):
try:
return self.__machine_info['cross-compiler-prefix']
except KeyError:
raise EnvironmentError(
                'Cross compilation not supported for target arch {!r}'.format(
                    self.__target_machine))
@property
def additional_build_packages(self):
packages = []
if self.is_cross_compiling:
packages.extend(self.__machine_info.get(
'cross-build-packages', []))
return packages
@property
def arch_triplet(self):
return self.__machine_info['triplet']
@property
def deb_arch(self):
return self.__machine_info['deb']
@property
def kernel_arch(self):
return self.__machine_info['kernel']
@property
def local_plugins_dir(self):
return os.path.join(self.parts_dir, 'plugins')
@property
def parts_dir(self):
return os.path.join(self.__project_dir, 'parts')
@property
def stage_dir(self):
return os.path.join(self.__project_dir, 'stage')
@property
def snap_dir(self):
return os.path.join(self.__project_dir, 'prime')
@property
def debug(self):
return self.__debug
def __init__(self, use_geoip=False, parallel_builds=True,
target_deb_arch=None, debug=False):
# TODO: allow setting a different project dir and check for
# snapcraft.yaml
self.__project_dir = os.getcwd()
self.__use_geoip = use_geoip
self.__parallel_builds = parallel_builds
self._set_machine(target_deb_arch)
self.__debug = debug
def _set_machine(self, target_deb_arch):
self.__host_machine = platform.machine()
if not target_deb_arch:
self.__target_machine = self.__host_machine
else:
self.__target_machine = _find_machine(target_deb_arch)
logger.info('Setting target machine to {!r}'.format(
target_deb_arch))
self.__machine_info = _ARCH_TRANSLATIONS[self.__target_machine]
def _find_machine(deb_arch):
for machine in _ARCH_TRANSLATIONS:
if _ARCH_TRANSLATIONS[machine].get('deb', '') == deb_arch:
return machine
raise EnvironmentError(
'Cannot set machine from deb_arch {!r}'.format(deb_arch))
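# Illustrative examples based on _ARCH_TRANSLATIONS above: _find_machine('armhf')
# returns 'armv7l', and _find_machine('amd64') returns 'x86_64'.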
| gpl-3.0 | 740,297,699,160,747,900 | 28.188235 | 79 | 0.592301 | false |
ekaakurniawan/Bioinformatics-Tools | DnC_LocalAlignment/DnC_LocalAlignment.py | 1 | 6963 | # Copyright (C) 2012 by Eka A. Kurniawan
# eka.a.kurniawan(ta)gmail(tod)com
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the
# Free Software Foundation, Inc.,
# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
# Local Alignment in Linear Space - Divide and Conquer
# References:
# - Neil C. Jones, Pavel A. Pevzner. An Introduction to Bioinformatics Algorithms. Cambridge: The MIT Press, 2004.
import copy
# Seq1 = "CACCC"
# Seq2 = "CATC"
# Seq1 = "CAC"
# Seq2 = "CATC"
# Seq1 = "CTTGAT"
# Seq2 = "GCAT"
# Seq1 = "TCAAATCAACCAAGATGGAAGCAAAACTGTTTGTAC"
# Seq2 = "ATGAAGGCAATACTATTAGTCTTGCTATATACATTC"
# Seq1 = "MEAKLFVLFCTFTVLKADTICVGYHANNSTDTVDTVLEKNVTVTHSVNLLEDSHNGKLCSLNGIAPLQLGKCNVAGWLLGNPECDLLLTANSWSYIIETSNSENGTCYPGEFIDYEELREQLSSVSSFEKFEIFPKANSWPNHETTKGVTAACSYSGASSFYRNLLWITKKGTSYPKLSKSYTNNKGKEVLVLWGVHHPPTTSEQQSLYQNTDAYVSVGSSKYNRRFTPEIAARPKVRGQAGRMNYYWTLLDQGDTITFEATGNLIAPWYAFALNKGSDSGIITSDAPVHNCDTRCQTPHGALNSSLPFQNVHPITIGECPKYVKSTKLRMATGLRNVPSIQSRGLFGAIAGFIEGGWTGMIDGWYGYHHQNEQGSGYAADQKSTQNAIDGITNKVNSVIEKMNTQFTAVGKEFNNLERRIENLNKKVDDGFLDVWTYNAELLVLLENERTLDFHDSNVRNLYEKVRSQLRNNAKELGNGCFEFYHKCDDECMESVKNGTYDYPKYSEESKLNREEIDGVKLESMGVYQILAIYSTVASSLVLLVSLGAISFWMCSNGSLQCRICI"
# Seq2 = "MKAILVVLLYTFATANADTLCIGYHANNSTDTVDTVLEKNVTVTHSVNLLEDKHNGKLCKLRGVAPLHLGKCNIAGWILGNPECESLSTASSWSYIVETSSSDNGTCYPGDFIDYEELREQLSSVSSFERFEIFPKTSSWPNHDSNKGVTAACPHAGAKSFYKNLIWLVKKGNSYPKLSKSYINDKGKEVLVLWGIHHPSTSADQQSLYQNADAYVFVGTSRYSKKFKPEIAIRPKVRDQEGRMNYYWTLVEPGDKITFEATGNLVVPRYAFAMERNAGSGIIISDTPVHDCNTTCQTPKGAINTSLPFQNIHPITIGKCPKYVKSTKLRLATGLRNVPSIQSRGLFGAIAGFIEGGWTGMVDGWYGYHHQNEQGSGYAADLKSTQNAIDEITNKVNSVIEKMNTQFTAVGKEFNHLEKRIENLNKKVDDGFLDIWTYNAELLVLLENERTLDYHDSNVKNLYEKVRSQLKNNAKEIGNGCFEFYHKCDNTCMESVKNGTYDYPKYSEEAKLNREEIDGVKLESTRIYQILAIYSTVASSLVLVVSLGAISFWMCSNGSLQCRICI"
# Seq1 = "TTAAG"
# Seq2 = "AAGT"
Seq1 = "TCAAATCAAAAGCA"
Seq2 = "ATGAAGGCAATACCCTA"
mu = 1
sigma = 2
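# Scoring scheme used by the alignment functions below: +1 for a match, -mu for a
# mismatch, -sigma per gap, with local-alignment scores floored at 0
# (Smith-Waterman style).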
# ------------------------------------ Local Alignment - Dynamic Programming ---
def getScoreLocalAlignment(i, j):
if V[i] == W[j]:
m = S[i-1][j-1] + 1
else:
m = S[i-1][j-1] - mu
return max([0,
S[i-1][j] - sigma,
S[i][j-1] - sigma,
m])
def getMaxValue(M):
maxVal = float("-inf")
maxIndx = [None, None]
for i, r in enumerate(M):
curMax = max(r)
if maxVal < curMax:
maxVal = curMax
maxIndx = [i, r.index(maxVal)]
return maxVal, maxIndx
V = "0" + Seq1
W = "0" + Seq2
lenV = len(V)
lenW = len(W)
S = [[0 for j in xrange(lenW)] for i in xrange(lenV)]
for i in xrange(1, lenV):
for j in range(1, lenW):
S[i][j] = getScoreLocalAlignment(i, j)
val, endPoint = getMaxValue(S)
endPoint = [endPoint[0] - 1, endPoint[1] - 1]
V = "0" + Seq1[::-1]
W = "0" + Seq2[::-1]
for i in xrange(1, lenV):
for j in range(1, lenW):
S[i][j] = getScoreLocalAlignment(i, j)
val, startPoint = getMaxValue(S)
startPoint = [lenV - startPoint[0] - 1, lenW - startPoint[1] - 1]
# -------------------- Global Alignment in Linear Space - Divide and Conquer ---
oriSeq1 = Seq1
oriSeq2 = Seq2
Seq1 = Seq1[startPoint[0]:endPoint[0]+1]
Seq2 = Seq2[startPoint[1]:endPoint[1]+1]
def getScoreGlobalAlignment(i, j):
if V[i] == W[j]:
m = S[i-1][j-1] + 1
else:
m = S[i-1][j-1] - mu
scores = [S[i-1][j] - sigma,
S[i][j-1] - sigma,
m]
return max(scores)
def calculatePrefix(source, sink, i):
global V, W, S
V = "0" + Seq1[source[0]:i + 1]
W = "0" + Seq2[source[1]:sink[1]]
lenV = len(V)
lenW = len(W)
S = [[0 for j in xrange(lenW)] for i in xrange(lenV)]
for a in range(lenV): S[a][0] = a * -sigma
for b in range(lenW): S[0][b] = b * -sigma
for a in xrange(1, lenV):
for b in range(1, lenW):
S[a][b] = getScoreGlobalAlignment(a, b)
return S[lenV - 1][1:lenW + 1]
def calculateSuffix(source, sink, i):
global V, W, S
V = "0" + Seq1[i:sink[0]][::-1]
W = "0" + Seq2[source[1]:sink[1]][::-1]
lenV = len(V)
lenW = len(W)
S = [[0 for j in xrange(lenW)] for i in xrange(lenV)]
for a in range(lenV): S[a][0] = a * -sigma
for b in range(lenW): S[0][b] = b * -sigma
for a in xrange(1, lenV):
for b in range(1, lenW):
S[a][b] = getScoreGlobalAlignment(a, b)
return S[lenV - 1][1:lenW + 1][::-1]
def getPath(source, sink):
end = False
if (sink[0] - source[0]) <= 2:
if D[source[0]] == None:
mid_i = source[0]
elif D[source[0] + 1] == None:
mid_i = source[0] + 1
else:
return
end = True
else:
mid_i = source[0] + ((sink[0] - source[0]) / 2)
prefix = calculatePrefix(source, sink, mid_i)
suffix = calculateSuffix(source, sink, mid_i)
sumScore = [prefix[b] + suffix[b] for b in xrange(sink[1] - source[1])]
maxScore = max(sumScore)
mid_k = source[1] + sumScore.index(maxScore)
D[mid_i] = maxScore
K[mid_i] = mid_k
if end:
return
getPath(source, [mid_i + 1, mid_k + 1])
getPath([mid_i, mid_k], sink)
def generateSequence():
indx = 0
k_indx = 0
for i in xrange(0, n):
if i in K[k_indx:]:
total = sum([1 for j in K[k_indx:] if j == i])
if total > 1:
R[0] += [indx + j + 1 for j in xrange(total)]
startIndx = k_indx + K[k_indx:].index(i)
maxVal = max(D[startIndx:startIndx+total])
R[1] += [indx + D[startIndx:startIndx+total].index(maxVal) + 1]
indx += total
k_indx += total
else:
R[0] += [indx + 1]
R[1] += [indx + 1]
indx += 1
k_indx += 1
else:
R[1] += [indx + 1]
indx += 1
def displaySequence():
V = "0" + Seq1
W = "0" + Seq2
Vseq = ""
Wseq = ""
for indx in xrange(max(R[0] + R[1])):
indx += 1
if indx in R[0]:
Vseq += V[R[0].index(indx)]
else :
Vseq += "-"
if indx in R[1]:
Wseq += W[R[1].index(indx)]
else :
Wseq += "-"
print Vseq
print Wseq
print ""
m = len(Seq1)
n = len(Seq2)
S = []
V = ""
W = ""
D = [None for i in xrange(m)]
K = copy.deepcopy(D)
R = [[0], [0]]
getPath([0,0], [m,n])
generateSequence()
print R
displaySequence()
| gpl-2.0 | -8,100,298,384,511,656,000 | 28.629787 | 577 | 0.60922 | false |
charman2/rsas | examples/unsteady.py | 1 | 5254 | # -*- coding: utf-8 -*-
"""Storage selection (SAS) functions: example with multiple fluxes out at steady state
Runs the rSAS model for a synthetic dataset with one flux in and
multiple fluxes out and steady state flow
Theory is presented in:
Harman, C. J. (2014), Time-variable transit time distributions and transport:
Theory and application to storage-dependent transport of chloride in a watershed,
Water Resour. Res., 51, doi:10.1002/2014WR015707.
"""
from __future__ import division
import rsas
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
# Initializes the random number generator so we always get the same result
np.random.seed(0)
# =====================================
# Load the input data
# =====================================
data = pd.read_csv('Q1.csv', index_col=0, parse_dates=[1])
# length of the dataset
N = len(data)
# The individual timeseries can be pulled out of the dataframe
S = data['S'].values
J = data['J'].values
Q = data['Q1'].values
C_J = data['C_J'].values-2
C_Q1 = data['C_Q1'].values
ST_min = data['ST_min'].values
ST_max = data['ST_max'].values
# =========================
# Parameters needed by rsas
# =========================
# The concentration of water older than the start of observations
C_old = ((J*C_J)[J>0]).sum()/((J)[J>0]).sum()
# =========================
# Create the rsas functions
# =========================
S_dead = 10.
#lam = 0.
# Uniform
# Parameters for the rSAS function
Q_rSAS_fun_type = 'uniform'
ST_min = np.zeros(N)
ST_max = S + S_dead
Q_rSAS_fun_parameters = np.c_[ST_min, ST_max]
rSAS_fun_Q1 = rsas.create_function(Q_rSAS_fun_type, Q_rSAS_fun_parameters)
rSAS_fun = [rSAS_fun_Q1]
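# With the uniform rSAS function over [ST_min, ST_max] = [0, S + S_dead], outflow Q1
# samples the age-ranked storage with no age preference, i.e. approximately
# well-mixed sampling over the active storage plus the dead storage S_dead.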
# Kumaraswami
## Parameters for the rSAS function
#Q_rSAS_fun_type = 'kumaraswami'
#ST_min = np.ones(N) * 0.
#ST_max = S + S_dead
#a = np.maximum(0.01, 2. + lam * (S - S.mean())/S.std())
#b = np.ones(N) * 5.
#Q_rSAS_fun_parameters = np.c_[a, b, ST_min, ST_max]
#rSAS_fun_Q1 = rsas.create_function(Q_rSAS_fun_type, Q_rSAS_fun_parameters)
#rSAS_fun = [rSAS_fun_Q1]
# =================
# Initial condition
# =================
# Unknown initial age distribution, so just set this to zeros
ST_init = np.zeros(N + 1)
# =============
# Run the model
# =============
# Run it
outputs = rsas.solve(J, Q, rSAS_fun, ST_init=ST_init,
mode='RK4', dt = 1., n_substeps=3, C_J=C_J, C_old=[C_old], verbose=False, debug=False)
# Let's pull these out to make the outputs from rsas crystal clear
# State variables: age-ranked storage of water and solutes
# ROWS of ST, MS are T - ages
# COLUMNS of ST, MS are t - times
# LAYERS of MS are s - solutes
ST = outputs['ST']
MS = outputs['MS'][:,:,0]
# Timestep-averaged backwards TTD
# ROWS of PQ are T - ages
# COLUMNS of PQ are t - times
# LAYERS of PQ are q - fluxes
PQ1m = outputs['PQ'][:,:,0]
# Timestep-averaged outflow concentration
# ROWS of C_Q are t - times
# COLUMNS of PQ are q - fluxes
C_Q1m1 = outputs['C_Q'][:,0,0]
# Timestep averaged solute load out
# ROWS of MQ are T - ages
# COLUMNS of MQ are t - times
# LAYERS of MQ are q - fluxes
# Last dimension of MS are s - solutes
MQ1m = outputs['MQ'][:,:,0,0]
#%%
# ==================================
# Plot the rSAS function
# ==================================
STx = np.linspace(0,S.max()+S_dead,100)
Omega = np.r_[[rSAS_fun_Q1.cdf_i(STx,i) for i in range(N)]].T
import matplotlib.cm as cm
fig = plt.figure(0)
plt.clf()
for i in range(N):
plt.plot(STx, Omega[:,i], lw=1, color=cm.jet((S[i]-S.min())/S.ptp()))
plt.ylim((0,1))
plt.ylabel('$\Omega_Q(T)$')
plt.xlabel('age-ranked storage $S_T$')
plt.title('Cumulative rSAS function')
#%%
# ==================================
# Plot the transit time distribution
# ==================================
fig = plt.figure(1)
plt.clf()
plt.plot(PQ1m, lw=1)
plt.ylim((0,1))
plt.ylabel('$P_Q(T)$')
plt.xlabel('age $T$')
plt.title('Cumulative transit time distribution')
#%%
# =====================================================================
# Outflow concentration estimated using several different TTD
# =====================================================================
# Lets get the instantaneous value of the TTD at the end of each timestep
PQ1i = np.zeros((N+1, N+1))
PQ1i[:,0] = rSAS_fun_Q1.cdf_i(ST[:,0],0)
PQ1i[:,1:] = np.r_[[rSAS_fun_Q1.cdf_i(ST[:,i+1],i) for i in range(N)]].T
# Use the transit time distribution and input timeseries to estimate
# the output timeseries for the instantaneous and timestep-averaged cases
C_Q1i, C_Q1i_raw, Q1i_observed_fraction = rsas.transport(PQ1i, C_J, C_old)
C_Q1m2, C_Q1m2_raw, Q1m2_observed_fraction = rsas.transport(PQ1m, C_J, C_old)
# Plot the results
fig = plt.figure(2)
plt.clf()
plt.step(data['datetime'], C_Q1m1, 'g', ls='--', label='mean rsas internal', lw=2, where='post')
plt.step(data['datetime'], C_Q1m2, 'b', ls=':', label='mean rsas.transport', lw=2, where='post')
plt.step(data['datetime'], C_Q1m2_raw, '0.5', ls=':', label='mean rsas.transport (obs part)', lw=2, where='post')
plt.plot(data['datetime'], C_Q1i, 'b:o', label='inst. rsas.transport', lw=1)
#plt.plot(data['datetime'], data['C_Q1'], 'r.', label='observed', lw=2)
plt.ylim((-2, 0))
plt.legend(loc=0)
plt.ylabel('Concentration [-]')
plt.xlabel('time')
plt.title('Outflow concentration')
plt.show()
| mit | 1,715,018,500,754,261,800 | 35.234483 | 113 | 0.60906 | false |
Galarzaa90/NabBot | cogs/tracking.py | 1 | 83628 | # Copyright 2019 Allan Galarza
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import asyncio
import datetime as dt
import logging
import pickle
import re
import time
from collections import defaultdict
from typing import List, NamedTuple, Union, Optional, Dict
import asyncpg
import discord
import tibiapy
from discord.ext import commands
from tibiapy import Death, Guild, OnlineCharacter, OtherCharacter, World
from nabbot import NabBot
from .utils import CogUtils, EMBED_LIMIT, FIELD_VALUE_LIMIT, checks, config, get_user_avatar, is_numeric, join_list, \
online_characters, safe_delete_message, split_params
from .utils.context import NabCtx
from .utils.database import DbChar, DbDeath, DbLevelUp, get_affected_count, get_server_property, PoolConn
from .utils.errors import CannotPaginate, NetworkError
from .utils.messages import death_messages_monster, death_messages_player, format_message, level_messages, \
split_message, weighed_choice, DeathMessageCondition, LevelCondition, SIMPLE_LEVEL, SIMPLE_DEATH, SIMPLE_PVP_DEATH
from .utils.pages import Pages, VocationPages
from .utils.tibia import HIGHSCORE_CATEGORIES, NabChar, get_character, get_current_server_save_time, get_guild, \
get_highscores, get_share_range, get_voc_abb, get_voc_emoji, get_world, tibia_worlds, normalize_vocation
log = logging.getLogger("nabbot")
# Storage used to keep a cache of guilds for watchlists
GUILD_CACHE = defaultdict(dict) # type: defaultdict[str, Dict[str, Guild]]
WATCHLIST_SEPARATOR = "·"
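# Separator used when appending the online count to a watchlist channel's name,
# e.g. a channel named "hunted" with 5 watched characters online becomes "hunted·5".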
class CharactersResult(NamedTuple):
skipped: List[OtherCharacter]
no_user: List[DbChar]
same_owner: List[DbChar]
different_user: List[DbChar]
new: List[NabChar]
all_skipped: bool
# region Database Helper classes
class Watchlist:
"""Represents a Watchlist from the database"""
def __init__(self, **kwargs):
self.server_id: int = kwargs.get("server_id")
self.channel_id: int = kwargs.get("channel_id")
self.message_id: int = kwargs.get("message_id")
self.user_id: int = kwargs.get("user_id")
self.show_count: bool = kwargs.get("show_count", True)
self.created: dt.datetime = kwargs.get("created")
# Not columns
self.entries: List['WatchlistEntry'] = []
self.world = None
self.content = ""
self.online_characters: List[OnlineCharacter] = []
self.online_guilds: List[Guild] = []
self.disbanded_guilds: List[str] = []
self.description = ""
@property
def online_count(self) -> int:
"""Total number of online characters across entries."""
return len(self.online_characters) + sum(g.online_count for g in self.online_guilds)
def __repr__(self):
return "<{0.__class__.__name__} server_id={0.server_id} channel_id={0.channel_id} message_id={0.message_id}>"\
.format(self)
async def add_entry(self, conn: PoolConn, name: str, is_guild: bool, user_id: int, reason: Optional[str]) ->\
Optional['WatchlistEntry']:
""" Adds an entry to the watchlist.
:param conn: Connection to the database.
:param name: Name of the character or guild.
:param is_guild: Whether the entry is a guild or not.
:param user_id: The user that created the entry.
:param reason: The reason for the entry.
        :return: The newly created entry or None if it already exists.
"""
try:
return await WatchlistEntry.insert(conn, self.channel_id, name, is_guild, user_id, reason)
except asyncpg.UniqueViolationError:
return None
async def get_entries(self, conn: PoolConn) -> List['WatchlistEntry']:
"""Gets all entries in this watchlist.
:param conn: Connection to the database.
:return: List of entries if any.
"""
return await WatchlistEntry.get_entries_by_channel(conn, self.channel_id)
async def update_message_id(self, conn: PoolConn, message_id: int):
"""Update's the message id.
:param conn: Connection to the database.
:param message_id: The new message id.
"""
await conn.execute("UPDATE watchlist SET message_id = $1 WHERE channel_id = $2", message_id, self.channel_id)
self.message_id = message_id
async def update_show_count(self, conn: PoolConn, show_count: bool):
"""Update's the show_count property.
If the property is True, the number of online entries will be shown in the channel's name.
:param conn: Connection to the database.
:param show_count: The property's new value.
"""
await conn.execute("UPDATE watchlist SET show_count = $1 WHERE channel_id = $2", show_count, self.channel_id)
self.show_count = show_count
@classmethod
async def insert(cls, conn: PoolConn, server_id: int, channel_id: int, user_id: int) -> 'Watchlist':
"""Adds a new watchlist to the database.
:param conn: Connection to the database.
:param server_id: The discord guild's id.
:param channel_id: The channel's id.
:param user_id: The user that created the watchlist.
:return: The created watchlist.
"""
row = await conn.fetchrow("INSERT INTO watchlist(server_id, channel_id, user_id) VALUES($1,$2,$3) RETURNING *",
server_id, channel_id, user_id)
return cls(**row)
@classmethod
async def get_by_channel_id(cls, conn: PoolConn, channel_id: int) -> Optional['Watchlist']:
"""Gets a watchlist corresponding to the channel id.
:param conn: Connection to the database.
:param channel_id: The id of the channel.
:return: The found watchlist, if any."""
row = await conn.fetchrow("SELECT * FROM watchlist WHERE channel_id = $1", channel_id)
if row is None:
return None
return cls(**row)
@classmethod
async def get_by_world(cls, conn: PoolConn, world: str) -> List['Watchlist']:
"""
        Gets all watchlists from a Tibia world.
:param conn: Connection to the database.
:param world: The name of the world.
:return: A list of watchlists from the world.
"""
query = """SELECT t0.* FROM watchlist t0
LEFT JOIN server_property t1 ON t1.server_id = t0.server_id AND key = 'world'
WHERE value ? $1"""
rows = await conn.fetch(query, world)
return [cls(**row) for row in rows]
@classmethod
def sort_by_voc_and_level(cls):
"""Sorting function to order by vocation and then by level."""
return lambda char: (normalize_vocation(char.vocation), -char.level)
class WatchlistEntry:
"""Represents a watchlist entry."""
def __init__(self, **kwargs):
self.channel_id: int = kwargs.get("channel_id")
self.name: str = kwargs.get("name")
self.is_guild: bool = kwargs.get("is_guild", False)
self.reason: Optional[str] = kwargs.get("reason")
self.user_id: int = kwargs.get("user_id")
self.created: dt.datetime = kwargs.get("created")
async def remove(self, conn: PoolConn):
"""Removes a watchlist entry from the database.
:param conn: Connection to the database.
"""
await self.delete(conn, self.channel_id, self.name, self.is_guild)
@classmethod
async def delete(cls, conn: PoolConn, channel_id: int, name: str, is_guild: bool):
"""
:param conn: Connection to the databse.
:param channel_id: The id of the watchlist's channel.
:param name: The name of the entry.
:param is_guild: Whether the entry is a guild or a character.
"""
await conn.execute("DELETE FROM watchlist_entry WHERE channel_id = $1 AND lower(name) = $2 AND is_guild = $3",
channel_id, name.lower().strip(), is_guild)
@classmethod
async def get_by_name(cls, conn: PoolConn, channel_id: int, name: str, is_guild: bool) -> \
Optional['WatchlistEntry']:
"""Gets an entry by its name.
:param conn: Connection to the database.
:param channel_id: The id of the channel.
:param name: Name of the entry.
:param is_guild: Whether the entry is a guild or a character.
:return: The entry if found.
"""
row = await conn.fetchrow("SELECT * FROM watchlist_entry "
"WHERE channel_id = $1 AND lower(name) = $2 AND is_guild = $3",
channel_id, name.lower().strip(), is_guild)
if row is None:
return None
return cls(**row)
@classmethod
async def get_entries_by_channel(cls, conn, channel_id) -> List['WatchlistEntry']:
"""Gets entries related to a watchlist channel.
:param conn: Connection to the database.
:param channel_id: Id of the channel.
:return: A list of entries corresponding to the channel.
"""
rows = await conn.fetch("SELECT * FROM watchlist_entry WHERE channel_id = $1", channel_id)
return [cls(**row) for row in rows]
@classmethod
async def insert(cls, conn: PoolConn, channel_id: int, name: str, is_guild: bool, user_id: int, reason=None)\
-> Optional['WatchlistEntry']:
"""Inserts a watchlist entry into the database.
:param conn: Connection to the database.
:param channel_id: The id of the watchlist's channel.
:param name: Name of the entry.
:param is_guild: Whether the entry is a guild or a character.
:param user_id: The id of the user that added the entry.
:param reason: The reason for the entry.
:return: The inserted entry.
"""
row = await conn.fetchrow("INSERT INTO watchlist_entry(channel_id, name, is_guild, reason, user_id) "
"VALUES($1, $2, $3, $4, $5) RETURNING *", channel_id, name, is_guild, reason, user_id)
if row is None:
return None
return cls(**row)
# endregion
class Tracking(commands.Cog, CogUtils):
"""Commands related to NabBot's tracking system."""
def __init__(self, bot: NabBot):
self.bot = bot
self.scan_online_chars_task = bot.loop.create_task(self.scan_online_chars())
self.scan_highscores_task = bot.loop.create_task(self.scan_highscores())
self.world_tasks = {}
self.world_times = {}
# region Tasks
async def scan_deaths(self, world):
"""Iterates through online characters, checking if they have new deaths.
This task is created for every tracked world.
On every iteration, the last element is checked and reinserted at the beginning."""
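        # The online list is treated as a rotating queue: the character popped from the
        # end is re-inserted at index 0 and skipped if it was already checked less than
        # 45 seconds ago (see last_check below).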
#################################################
# Nezune's cave #
# Do not touch anything, enter at your own risk #
#################################################
tag = f"{self.tag}[{world}][scan_deaths]"
await self.bot.wait_until_ready()
log.info(f"{tag} Started")
while not self.bot.is_closed():
try:
await asyncio.sleep(config.death_scan_interval)
if len(online_characters[world]) == 0:
await asyncio.sleep(0.5)
continue
skip = False
# Pop last char in queue, reinsert it at the beginning
current_char = online_characters[world].pop()
if hasattr(current_char, "last_check") and time.time() - current_char.last_check < 45:
skip = True
current_char.last_check = time.time()
online_characters[world].insert(0, current_char)
if not skip:
# Check for new death
char = await get_character(self.bot, current_char.name)
await self.compare_deaths(char)
else:
await asyncio.sleep(0.5)
except NetworkError:
await asyncio.sleep(0.3)
continue
except asyncio.CancelledError:
# Task was cancelled, so this is fine
break
except KeyError:
continue
except Exception as e:
log.exception(f"{tag} Exception: {e}")
continue
async def scan_highscores(self):
"""Scans the highscores, storing the results in the database.
The task checks if the last stored data is from the current server save or not."""
#################################################
# Nezune's cave #
# Do not touch anything, enter at your own risk #
#################################################
tag = f"{self.tag}[scan_highscores]"
await self.bot.wait_until_ready()
log.info(f"{tag} Started")
while not self.bot.is_closed():
if len(self.bot.tracked_worlds_list) == 0:
# If no worlds are tracked, just sleep, worlds might get registered later
await asyncio.sleep(10*60)
continue
for world in self.bot.tracked_worlds_list:
tag = f"{self.tag}[{world}](scan_highscores)"
world_count = 0
if world not in tibia_worlds:
log.warning(f"{tag} Tracked world is no longer a valid world.")
await asyncio.sleep(0.1)
try:
for key, values in HIGHSCORE_CATEGORIES.items():
# Check the last scan time, highscores are updated every server save
last_scan = await self.bot.pool.fetchval(
"SELECT last_scan FROM highscores WHERE world = $1 AND category = $2", world, key)
if last_scan:
last_scan_ss = get_current_server_save_time(last_scan)
current_ss = get_current_server_save_time()
# If the saved results are from the current server save, saving is skipped
if last_scan_ss >= current_ss:
log.debug(f"{tag} {values[0].name} | {values[1].name} | Already saved")
await asyncio.sleep(0.1)
continue
try:
highscores = await get_highscores(world, *values)
except NetworkError:
continue
await self.save_highscores(world, key, highscores)
except asyncio.CancelledError:
# Task was cancelled, so this is fine
break
except Exception:
log.exception(f"{tag}")
continue
if world_count:
log.info(f"{tag} {world_count:,} entries saved.")
await asyncio.sleep(5)
await asyncio.sleep(60*30)
async def scan_online_chars(self):
"""Scans tibia.com's character lists to store them locally.
        An online list per world is created, with the online registered characters.
When a character enters the online list, their deaths are checked.
On every cycle, their levels are compared.
When a character leaves the online list, their levels and deaths are compared."""
#################################################
# Nezune's cave #
# Do not touch anything, enter at your own risk #
#################################################
await self.bot.wait_until_ready()
tag = f"{self.tag}[scan_online_chars]"
log.info(f"{tag} Task started")
try:
with open("data/online_list.dat", "rb") as f:
saved_list, timestamp = pickle.load(f)
if (time.time() - timestamp) < config.online_list_expiration:
online_characters.clear()
online_characters.update(saved_list)
count = len([c for v in online_characters.values() for c in v])
log.info(f"{tag} Loaded cached online list | {count:,} players")
else:
log.info(f"{tag} Cached online list is too old, discarding")
except FileNotFoundError:
pass
except (ValueError, pickle.PickleError):
log.info(f"{tag} Couldn't read cached online list.")
while not self.bot.is_closed():
try:
# Pop last server in queue, reinsert it at the beginning
current_world = tibia_worlds.pop()
tibia_worlds.insert(0, current_world)
if current_world.capitalize() not in self.bot.tracked_worlds_list:
await asyncio.sleep(0.1)
continue
if time.time() - self.world_times.get(current_world.capitalize(), 0) < config.online_scan_interval:
await asyncio.sleep(0.2)
continue
tag = f"{self.tag}[{current_world}][scan_online_chars]"
log.debug(f"{tag} Checking online list")
# Get online list for this server
try:
world = await get_world(current_world)
if world is None:
await asyncio.sleep(0.1)
continue
log.debug(f"{tag} {world.online_count} players online")
except NetworkError:
await asyncio.sleep(0.1)
continue
current_world_online = world.online_players
if len(current_world_online) == 0:
await asyncio.sleep(0.1)
continue
self.world_times[world.name] = time.time()
self.bot.dispatch("world_scanned", world)
# Save the online list in file
with open("data/online_list.dat", "wb") as f:
pickle.dump((online_characters, time.time()), f, protocol=pickle.HIGHEST_PROTOCOL)
if current_world not in online_characters:
online_characters[current_world] = []
# List of characters that are now offline
offline_list = [c for c in online_characters[current_world] if c not in current_world_online]
for offline_char in offline_list:
# Check if characters got level ups when they went offline
log.debug(f"{tag} Character no longer online | {offline_char.name}")
online_characters[current_world].remove(offline_char)
try:
_char = await get_character(self.bot, offline_char.name)
await self.compare_levels(_char)
await self.compare_deaths(_char)
except NetworkError:
continue
# Add new online chars and announce level differences
for server_char in current_world_online:
db_char = await DbChar.get_by_name(self.bot.pool, server_char.name)
if db_char:
try:
if server_char not in online_characters[current_world]:
# If the character wasn't in the online list we add them
# (We insert them at the beginning of the list to avoid messing with the checks order)
server_char.last_check = time.time()
log.debug(f"{tag} Character added to online list | {server_char.name}")
online_characters[current_world].insert(0, server_char)
_char = await get_character(self.bot, server_char.name)
await self.compare_deaths(_char)
# Only update level up, but don't count it as a level up
await self.compare_levels(_char, True)
else:
await self.compare_levels(server_char)
# Update character in the list
_char_index = online_characters[current_world].index(server_char)
online_characters[current_world][_char_index].level = server_char.level
except NetworkError:
continue
except (ValueError, IndexError):
continue
except asyncio.CancelledError:
# Task was cancelled, so this is fine
break
except Exception:
log.exception("scan_online_chars")
continue
# endregion
# region Custom Events
@commands.Cog.listener()
async def on_world_scanned(self, scanned_world: World):
"""Event called each time a world is checked.
Updates the watchlists
:param scanned_world: The scanned world's information.
"""
# Schedule Scan Deaths task for this world
if scanned_world.name not in self.world_tasks:
self.world_tasks[scanned_world.name] = self.bot.loop.create_task(self.scan_deaths(scanned_world.name))
GUILD_CACHE[scanned_world.name].clear()
await self._run_watchlist(scanned_world)
async def _run_watchlist(self, scanned_world: World):
watchlists = await Watchlist.get_by_world(self.bot.pool, scanned_world.name)
for watchlist in watchlists:
watchlist.world = scanned_world.name
log.debug(f"{self.tag}[{scanned_world.name}] Checking entries for watchlist | "
f"Guild ID: {watchlist.server_id} | Channel ID: {watchlist.channel_id} "
f"| World: {scanned_world.name}")
guild: discord.Guild = self.bot.get_guild(watchlist.server_id)
if guild is None:
await asyncio.sleep(0.01)
continue
discord_channel: discord.TextChannel = guild.get_channel(watchlist.channel_id)
if discord_channel is None:
await asyncio.sleep(0.1)
continue
watchlist.entries = await watchlist.get_entries(self.bot.pool)
if not watchlist.entries:
await asyncio.sleep(0.1)
continue
await self._watchlist_scan_entries(watchlist, scanned_world)
await self._watchlist_build_content(watchlist)
await self._watchlist_update_content(watchlist, discord_channel)
async def _watchlist_scan_entries(self, watchlist: Watchlist, scanned_world: World):
for entry in watchlist.entries:
if entry.is_guild:
await self._watchlist_check_guild(watchlist, entry)
# If it is a character, check if he's in the online list
else:
self._watchlist_add_characters(watchlist, entry, scanned_world)
watchlist.online_characters.sort(key=Watchlist.sort_by_voc_and_level())
@classmethod
async def _watchlist_check_guild(cls, watchlist, watched_guild: WatchlistEntry):
try:
tibia_guild = await cls.cached_get_guild(watched_guild.name, watchlist.world)
except NetworkError:
return
# Save disbanded guilds separately
if tibia_guild is None:
watchlist.disbanded_guilds.append(watched_guild.name)
return
# If there's at least one member online, add guild to list
if tibia_guild.online_count:
watchlist.online_guilds.append(tibia_guild)
@staticmethod
def _watchlist_add_characters(watchlist, watched_char: WatchlistEntry, scanned_world: World):
for online_char in scanned_world.online_players:
if online_char.name == watched_char.name:
# Add to online list
watchlist.online_characters.append(online_char)
return
@staticmethod
def _watchlist_get_msg_entries(characters):
return [f"\t{char.name} - Level {char.level} {get_voc_emoji(char.vocation)}" for char in characters]
async def _watchlist_build_content(self, watchlist):
if watchlist.online_count > 0:
msg_entries = self._watchlist_get_msg_entries(watchlist.online_characters)
watchlist.content = "\n".join(msg_entries)
self._watchlist_build_guild_content(watchlist)
else:
watchlist.description = "There are no watched characters online."
def _watchlist_build_guild_content(self, watchlist):
for guild_name in watchlist.disbanded_guilds:
watchlist.content += f"\n__Guild: **{guild_name}**__\n"
watchlist.content += "\t*Guild was disbanded.*"
for tibia_guild in watchlist.online_guilds:
watchlist.content += f"\n__Guild: **{tibia_guild.name}**__\n"
online_members = tibia_guild.online_members[:]
online_members.sort(key=Watchlist.sort_by_voc_and_level())
watchlist.content += "\n".join(self._watchlist_get_msg_entries(online_members))
async def _watchlist_update_content(self, watchlist: Watchlist, channel: discord.TextChannel):
# Send new watched message or edit last one
embed = discord.Embed(description=watchlist.description, timestamp=dt.datetime.utcnow())
embed.set_footer(text="Last updated")
if watchlist.content:
if len(watchlist.content) >= EMBED_LIMIT - 50:
watchlist.content = split_message(watchlist.content, EMBED_LIMIT - 50)[0]
watchlist.content += "\n*And more...*"
fields = split_message(watchlist.content, FIELD_VALUE_LIMIT)
for s, split_field in enumerate(fields):
name = "Watchlist" if s == 0 else "\u200F"
embed.add_field(name=name, value=split_field, inline=False)
try:
await self._watchlist_update_message(self.bot.pool, watchlist, channel, embed)
await self._watchlist_update_name(watchlist, channel)
except discord.HTTPException:
# log.exception(f"{self.tag}[_watchlist_update_content] {watchlist}")
pass
@staticmethod
async def _watchlist_update_name(watchlist: Watchlist, channel: discord.TextChannel):
try:
original_name = channel.name.split(WATCHLIST_SEPARATOR, 1)[0]
if original_name != channel.name and not watchlist.show_count:
await channel.edit(name=original_name, reason="Removing online count")
elif watchlist.show_count:
new_name = f"{original_name}{WATCHLIST_SEPARATOR}{watchlist.online_count}"
# Reduce unnecessary API calls and Audit log spam
if new_name != channel.name:
await channel.edit(name=new_name, reason="Online count changed")
except discord.Forbidden:
pass
@staticmethod
async def _watchlist_update_message(conn, watchlist, channel, embed):
# We try to get the watched message, if the bot can't find it, we just create a new one
# This may be because the old message was deleted or this is the first time the list is checked
try:
message = await channel.fetch_message(watchlist.message_id)
except discord.HTTPException:
message = None
if message is None:
new_message = await channel.send(embed=embed)
await watchlist.update_message_id(conn, new_message.id)
else:
await message.edit(embed=embed)
# endregion
# region Discord Events
@commands.Cog.listener()
async def on_guild_channel_delete(self, channel: discord.abc.GuildChannel):
"""Called when a guild channel is deleted.
Deletes associated watchlist and entries."""
if not isinstance(channel, discord.TextChannel):
return
result = await self.bot.pool.execute("DELETE FROM watchlist_entry WHERE channel_id = $1", channel.id)
deleted_entries = get_affected_count(result)
result = await self.bot.pool.execute("DELETE FROM watchlist WHERE channel_id = $1", channel.id)
deleted = get_affected_count(result)
if deleted:
# Dispatch event so ServerLog cog can handle it.
log.info(f"{self.tag} Watchlist channel deleted | Channel {channel.id} | Guild {channel.guild.id}")
self.bot.dispatch("watchlist_deleted", channel, deleted_entries)
# endregion
# region Commands
@checks.server_mod_only()
@checks.tracking_world_only()
@commands.command(name="addchar", aliases=["registerchar"], usage="<user>,<character>")
async def add_char(self, ctx: NabCtx, *, params):
"""Register a character and optionally all other visible characters to a discord user.
This command can only be used by server moderators.
If a character is hidden, only that character will be added. Characters in other worlds are skipped."""
params = params.split(",")
if len(params) != 2:
raise commands.BadArgument()
target_name, char_name = params
target_name = target_name.strip()
target = self.bot.get_member(target_name, ctx.guild)
if target is None:
return await ctx.error(f"I couldn't find any users named `{target_name}`")
if target.bot:
return await ctx.error("You can't register characters to discord bots!")
msg = await ctx.send(f"{config.loading_emoji} Fetching characters...")
try:
char = await get_character(ctx.bot, char_name)
if char is None:
return await msg.edit(content="That character doesn't exist.")
except NetworkError:
return await msg.edit(content="I couldn't fetch the character, please try again.")
check_other = False
if len(char.other_characters) > 1:
message = await ctx.send("Do you want to attempt to add the other visible characters in this account?")
check_other = await ctx.react_confirm(message, timeout=60, delete_after=True)
if check_other is None:
await safe_delete_message(msg)
return await ctx.error("You ran out of time, try again."
"Remember you have to react or click on the reactions.")
if check_other:
await safe_delete_message(msg)
msg = await ctx.send(f"{config.loading_emoji} Fetching characters...")
try:
results = await self.check_char_availability(ctx, ctx.author.id, char, [ctx.world], check_other)
except NetworkError:
return await msg.edit("I'm having network issues, please try again.")
if results.all_skipped:
await safe_delete_message(msg)
await ctx.error(f"Sorry, I couldn't find any characters in **{ctx.world}**.")
return
reply = await self.process_character_assignment(ctx, results, target, ctx.author)
await safe_delete_message(msg)
await ctx.send(reply)
@commands.command()
@checks.tracking_world_somewhere()
async def claim(self, ctx: NabCtx, *, char_name: str = None):
"""Claims a character registered as yours.
Claims a character as yours, even if it is already registered to someone else.
In order for this to work, you have to put a special code in the character's comment.
You can see this code by using the command with no parameters. The code looks like this: `/NB-23FC13AC7400000/`
        Once you have set the code, you can use the command with that character; if the code matches,
        it will be reassigned to you.
Note that it may take some time for the code to be visible to NabBot because of caching.
This code is unique for your discord user, so the code will only work for your discord account and no one else.
No one can claim a character of yours unless you put **their** code on your character's comment.
"""
user = ctx.author
claim_pattern = re.compile(r"/NB-([^/]+)/")
user_code = hex(user.id)[2:].upper()
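        # e.g. a hypothetical user whose id equals 0x23FC13AC7400000 gets the code
        # "23FC13AC7400000", which must appear in the comment as /NB-23FC13AC7400000/.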
# List of Tibia worlds tracked in the servers the user is
if ctx.is_private:
user_tibia_worlds = [ctx.world]
else:
user_tibia_worlds = ctx.bot.get_user_worlds(user.id)
if not ctx.is_private and self.bot.tracked_worlds.get(ctx.guild.id) is None:
return await ctx.send("This server is not tracking any tibia worlds.")
if len(user_tibia_worlds) == 0:
return
if char_name is None:
await ctx.send(f"To use this command, add `/NB-{user_code}/` to the comment of the character you want to"
f"claim, and then use `/claim character_name`.")
return
msg = await ctx.send(f"{config.loading_emoji} Fetching character...")
try:
char = await get_character(ctx.bot, char_name)
if char is None:
return await msg.edit(content=f"{ctx.tick(False)} That character doesn't exist.")
except NetworkError:
return await msg.edit(content=f"{ctx.tick(False)} I couldn't fetch the character, please try again.")
match = claim_pattern.search(char.comment if char.comment is not None else "")
if not match:
await ctx.error(f"Couldn't find verification code on character's comment.\n"
f"Add `/NB-{user_code}/` to the comment to authenticate.")
return
code = match.group(1)
if code != user_code:
await ctx.error(f"The verification code on the character's comment doesn't match yours.\n"
f"Use `/NB-{user_code}/` to authenticate.")
return
check_other = False
if len(char.other_characters) > 1:
message = await ctx.send("Do you want to attempt to add the other visible characters in this account?")
check_other = await ctx.react_confirm(message, timeout=60, delete_after=True)
if check_other is None:
await safe_delete_message(msg)
                return await ctx.send("You ran out of time, try again. "
                                      "Remember you have to react or click on the reactions.")
if check_other:
await safe_delete_message(msg)
msg = await ctx.send(f"{config.loading_emoji} Fetching characters...")
try:
results = await self.check_char_availability(ctx, ctx.author.id, char, user_tibia_worlds, check_other)
except NetworkError:
            return await msg.edit(content="I'm having network issues, please try again.")
if results.all_skipped:
reply = "Sorry, I couldn't find any characters from the worlds in the context ({0})."
return await msg.edit(content=reply.format(join_list(user_tibia_worlds)))
reply = await self.process_character_assignment(ctx, results, ctx.author, claim=True)
await safe_delete_message(msg)
await ctx.send(reply)
@checks.tracking_world_somewhere()
@commands.command(aliases=["i'm", "iam"])
async def im(self, ctx: NabCtx, *, char_name: str):
"""Lets you add your tibia character(s) for the bot to track.
If there are other visible characters, the bot will ask for confirmation to add them too.
        Characters in worlds other than the currently tracked world are skipped.
If it finds a character owned by another user, the whole process will be stopped.
If a character is already registered to someone else, `claim` can be used."""
# List of Tibia worlds tracked in the servers the user is
if ctx.is_private:
user_tibia_worlds = [ctx.world]
else:
user_tibia_worlds = ctx.bot.get_user_worlds(ctx.author.id)
msg = await ctx.send(f"{config.loading_emoji} Fetching character...")
try:
char = await get_character(ctx.bot, char_name)
if char is None:
return await msg.edit(content=f"{ctx.tick(False)} That character doesn't exist.")
except NetworkError:
return await msg.edit(content=f"{ctx.tick(False)} I couldn't fetch the character, please try again.")
check_other = False
if len(char.other_characters) > 1:
await msg.edit(content="Do you want to attempt to add the other visible characters in this account?")
check_other = await ctx.react_confirm(msg, timeout=60, delete_after=True)
if check_other is None:
await safe_delete_message(msg)
                return await ctx.send("You didn't reply in time, try again. "
                                      "Remember that you have to react or click on the icons.")
if check_other:
await safe_delete_message(msg)
msg = await ctx.send(f"{config.loading_emoji} Fetching characters...")
try:
results = await self.check_char_availability(ctx, ctx.author.id, char, user_tibia_worlds, check_other)
except NetworkError:
            return await msg.edit(content="I'm having network issues, please try again.")
if results.all_skipped:
reply = "Sorry, I couldn't find any characters from the worlds in the context ({0})."
return await msg.edit(content=reply.format(join_list(user_tibia_worlds)))
reply = await self.process_character_assignment(ctx, results, ctx.author)
await safe_delete_message(msg)
await ctx.send(reply)
@checks.tracking_world_somewhere()
@commands.command(aliases=["i'mnot"])
async def imnot(self, ctx: NabCtx, *, name):
"""Removes a character assigned to you.
All registered level ups and deaths will be lost forever."""
db_char = await DbChar.get_by_name(ctx.pool, name)
if db_char is None or db_char.user_id == 0:
return await ctx.error("There's no character registered with that name.")
if db_char.user_id != ctx.author.id:
return await ctx.error(f"The character **{db_char.name}** is not registered to you.")
message = await ctx.send(f"Are you sure you want to unregister "
f"**{db_char.name}** ({abs(db_char.level)} {db_char.vocation})?")
confirm = await ctx.react_confirm(message, timeout=50)
if confirm is None:
return await ctx.send("I guess you changed your mind.")
if not confirm:
return await ctx.send("No then? Ok.")
await db_char.update_user(ctx.pool, 0)
await ctx.success(f"**{db_char.name}** is no longer registered to you.")
self.bot.dispatch("character_change", ctx.author.id)
self.bot.dispatch("character_unregistered", ctx.author, db_char)
@checks.can_embed()
@checks.tracking_world_only()
@commands.command()
async def online(self, ctx: NabCtx):
"""Tells you which users are online on Tibia.
This list gets updated based on Tibia.com online list, so it takes a couple minutes to be updated."""
world = ctx.world
per_page = 20 if await ctx.is_long() else 5
now = dt.datetime.utcnow()
uptime = (now - self.bot.start_time).total_seconds()
count = 0
entries = []
vocations = []
for char in online_characters.get(world, []):
name = char.name
db_char = await DbChar.get_by_name(ctx.pool, name)
if not db_char:
continue
# Skip characters of members not in the server
owner = ctx.guild.get_member(db_char.user_id)
if owner is None:
continue
owner = owner.display_name
emoji = get_voc_emoji(char.vocation)
vocations.append(char.vocation.value)
vocation = get_voc_abb(char.vocation)
entries.append(f"{char.name} (Lvl {char.level} {vocation}{emoji}, **@{owner}**)")
count += 1
if count == 0:
if uptime < 90:
await ctx.send("I just started, give me some time to check online lists...⌛")
else:
await ctx.send("There is no one online from Discord.")
return
pages = VocationPages(ctx, entries=entries, vocations=vocations, per_page=per_page)
pages.embed.title = "Users online"
try:
await pages.paginate()
except CannotPaginate as e:
await ctx.send(e)
@commands.command(name="searchteam", aliases=["whereteam", "findteam"], usage="<params>")
@checks.tracking_world_only()
@checks.can_embed()
async def search_team(self, ctx: NabCtx, *, params=None):
"""Searches for a registered character that meets the criteria
There are 3 ways to use this command:
- Show characters in share range with a specific character. (`searchteam <name>`)
- Show characters in share range with a specific level. (`searchteam <level>`)
- Show characters in a level range. (`searchteam <min>,<max>`)
        Online characters are shown first on the list, and they also have an icon."""
permissions = ctx.bot_permissions
if not permissions.embed_links:
await ctx.send("Sorry, I need `Embed Links` permission for this command.")
return
invalid_arguments = "Invalid arguments used, examples:\n" \
"```/searchteam charname\n" \
"/searchteam level\n" \
"/searchteam minlevel,maxlevel```"
if ctx.world is None:
await ctx.send("This server is not tracking any tibia worlds.")
return
if params is None:
await ctx.send(invalid_arguments)
return
entries = []
vocations = []
online_entries = []
online_vocations = []
per_page = 20 if await ctx.is_long() else 5
char = None
params = split_params(params)
if len(params) < 1 or len(params) > 2:
await ctx.send(invalid_arguments)
return
# params[0] could be a character's name, a character's level or one of the level ranges
# If it's not a number, it should be a player's name
if not is_numeric(params[0]):
# We shouldn't have another parameter if a character name was specified
if len(params) == 2:
await ctx.send(invalid_arguments)
return
char = await get_character(ctx.bot, params[0])
if char is None:
await ctx.send("I couldn't find a character with that name.")
return
low, high = get_share_range(char.level)
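            # get_share_range presumably returns Tibia's shared-experience bounds,
            # roughly 2/3 and 3/2 of the given level (e.g. level 100 -> 67-150).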
            title = f"Characters in share range with {char.name} ({low}-{high}):"
            empty = f"I didn't find anyone in share range with **{char.name}** ({low}-{high})"
else:
# Check if we have another parameter, meaning this is a level range
if len(params) == 2:
try:
level1 = int(params[0])
level2 = int(params[1])
except ValueError:
await ctx.send(invalid_arguments)
return
if level1 <= 0 or level2 <= 0:
await ctx.send("You entered an invalid level.")
return
low = min(level1, level2)
high = max(level1, level2)
title = f"Characters between level {low} and {high}"
empty = f"I didn't find anyone between levels **{low}** and **{high}**"
# We only got a level, so we get the share range for it
else:
if int(params[0]) <= 0:
await ctx.send("You entered an invalid level.")
return
low, high = get_share_range(int(params[0]))
title = f"Characters in share range with level {params[0]} ({low}-{high})"
empty = f"I didn't find anyone in share range with level **{params[0]}** ({low}-{high})"
async with ctx.pool.acquire() as conn:
count = 0
online_list = [x.name for v in online_characters.values() for x in v]
async for db_char in DbChar.get_chars_in_range(conn, low, high, ctx.world):
if char is not None and char.name == db_char.name:
continue
owner = ctx.guild.get_member(db_char.user_id)
if owner is None:
continue
count += 1
owner = owner.display_name
emoji = get_voc_emoji(db_char.vocation)
voc_abb = get_voc_abb(db_char.vocation)
entry = f"**{db_char.name}** - Level {abs(db_char.level)} {voc_abb}{emoji} - @**{owner}**"
if db_char.name in online_list:
entry = f"{config.online_emoji}{entry}"
online_entries.append(entry)
online_vocations.append(db_char.vocation)
else:
entries.append(entry)
vocations.append(db_char.vocation)
if count < 1:
await ctx.send(empty)
return
pages = VocationPages(ctx, entries=online_entries + entries, per_page=per_page,
vocations=online_vocations + vocations)
pages.embed.title = title
try:
await pages.paginate()
except CannotPaginate as e:
await ctx.send(e)
@checks.server_mod_only()
@checks.tracking_world_only()
@commands.command(name="removechar", aliases=["deletechar", "unregisterchar"])
async def remove_char(self, ctx: NabCtx, *, name):
"""Removes a registered character from someone.
This can only be used by server moderators.
Note that you can only remove chars if they are from users exclusively in your server.
You can't remove any characters that would alter other servers NabBot is in."""
# This could be used to remove deleted chars so we don't need to check anything
# Except if the char exists in the database...
db_char = await DbChar.get_by_name(ctx.pool, name.strip())
if db_char is None or db_char.user_id == 0:
return await ctx.error("There's no character with that name registered.")
if db_char.world != ctx.world:
return await ctx.error(f"The character **{db_char.name}** is in a different world.")
user = self.bot.get_user(db_char.user_id)
if user is not None:
user_guilds = self.bot.get_user_guilds(user.id)
# Iterating every world where the user is, to check if it wouldn't affect other admins.
for guild in user_guilds:
if guild == ctx.guild:
continue
if self.bot.tracked_worlds.get(guild.id, None) != ctx.world:
continue
author: discord.Member = guild.get_member(ctx.author.id)
if author is None or not author.guild_permissions.manage_guild:
                    await ctx.error(f"The owner of this character is also in another server tracking "
                                    f"**{ctx.world}**, where you are not an admin. You can't alter other servers.")
return
username = "unknown" if user is None else user.display_name
await db_char.update_user(ctx.pool, 0)
await ctx.send("**{0}** was removed successfully from **@{1}**.".format(db_char.name, username))
self.bot.dispatch("character_unregistered", user, db_char, ctx.author)
@checks.server_mod_only()
@checks.tracking_world_only()
@commands.group(invoke_without_command=True, case_insensitive=True, aliases=["huntedlist"])
async def watchlist(self, ctx: NabCtx):
"""Create or manage watchlists.
        Watchlists are channels where the online status of selected characters is shown.
        You can create multiple watchlists and add characters and guilds to each one separately.
Try the subcommands."""
await ctx.send("To manage watchlists, use one of the subcommands.\n"
f"Try `{ctx.clean_prefix}help {ctx.invoked_with}`.")
@checks.tracking_world_only()
@checks.channel_mod_somewhere()
@watchlist.command(name="add", aliases=["addplayer", "addchar"], usage="<channel> <name>[,reason]")
async def watchlist_add(self, ctx: NabCtx, channel: discord.TextChannel, *, params):
"""Adds a character to a watchlist.
A reason can be specified by adding it after the character's name, separated by a comma."""
watchlist = await Watchlist.get_by_channel_id(ctx.pool, channel.id)
if not watchlist:
return await ctx.error(f"{channel.mention} is not a watchlist channel.")
if not channel.permissions_for(ctx.author).manage_channels:
return await ctx.error(f"You need `Manage Channel` permissions in {channel.mention} to add entries.")
params = params.split(",", 1)
name = params[0]
reason = None
if len(params) > 1:
reason = params[1]
char = await get_character(ctx.bot, name)
if char is None:
await ctx.error("A character with that name doesn't exist.")
return
world = ctx.world
if char.world != world:
await ctx.error(f"This character is not in **{world}**.")
return
        message = await ctx.send(f"Do you want to add **{char.name}** (Level {char.level} {char.vocation}) "
                                 f"to the watchlist {channel.mention}?")
confirm = await ctx.react_confirm(message, delete_after=True)
if confirm is None:
await ctx.send("You took too long!")
return
if not confirm:
await ctx.send("Ok then, guess you changed your mind.")
return
entry = await watchlist.add_entry(ctx.pool, char.name, False, ctx.author.id, reason)
if entry:
await ctx.success(f"Character **{char.name}** added to the watchlist {channel.mention}.")
else:
await ctx.error(f"**{char.name}** is already registered in {channel.mention}")
@checks.tracking_world_only()
@checks.channel_mod_somewhere()
@watchlist.command(name="addguild", usage="<channel> <name>[,reason]")
async def watchlist_addguild(self, ctx: NabCtx, channel: discord.TextChannel, *, params):
"""Adds an entire guild to a watchlist.
Guilds are displayed in the watchlist as a group."""
watchlist = await Watchlist.get_by_channel_id(ctx.pool, channel.id)
if not watchlist:
return await ctx.error(f"{channel.mention} is not a watchlist channel.")
if not channel.permissions_for(ctx.author).manage_channels:
return await ctx.error(f"You need `Manage Channel` permissions in {channel.mention} to add entries.")
params = params.split(",", 1)
name = params[0]
reason = None
if len(params) > 1:
reason = params[1]
guild = await get_guild(name)
if guild is None:
await ctx.error("There's no guild with that name.")
return
if guild.world != ctx.world:
await ctx.error(f"This guild is not in **{ctx.world}**.")
return
message = await ctx.send(f"Do you want to add the guild **{guild.name}** to the watchlist {channel.mention}?")
confirm = await ctx.react_confirm(message, delete_after=True)
if confirm is None:
await ctx.send("You took too long!")
return
if not confirm:
await ctx.send("Ok then, guess you changed your mind.")
return
entry = await watchlist.add_entry(ctx.pool, guild.name, True, ctx.author.id, reason)
if entry:
await ctx.success(f"Guild **{guild.name}** added to the watchlist {channel.mention}.")
else:
await ctx.error(f"**{guild.name}** is already registered in {channel.mention}")
@checks.tracking_world_only()
@checks.channel_mod_somewhere()
@watchlist.command(name="adduser", usage="<channel> <user>[,reason]")
async def watchlist_adduser(self, ctx: NabCtx, channel: discord.TextChannel, *, params):
"""Adds the currently registered characters of a user to the watchlist.
        A reason can be specified by adding it after the user's name, separated by a comma."""
watchlist = await Watchlist.get_by_channel_id(ctx.pool, channel.id)
if not watchlist:
return await ctx.error(f"{channel.mention} is not a watchlist channel.")
if not channel.permissions_for(ctx.author).manage_channels:
return await ctx.error(
f"You need `Manage Channel` permissions in {channel.mention} to add entries.")
params = params.split(",", 1)
name = params[0]
reason = None
if len(params) > 1:
reason = params[1]
user = ctx.bot.get_member(name, ctx.guild)
if user is None:
return await ctx.error("I don't see any users with that name or id.")
characters = await DbChar.get_chars_by_user(ctx.pool, user.id, worlds=ctx.world)
if not characters:
await ctx.error(f"This user doesn't have any registered characters in {ctx.world}.")
return
char_list = "\n".join(f"• {c.name}" for c in characters)
message = await ctx.send(f"Do you want to add currently registered characters of `{user}` to this watchlist?\n"
f"{char_list}")
confirm = await ctx.react_confirm(message)
if confirm is None:
await ctx.send("You took too long!")
return
if not confirm:
await ctx.send("Ok then, guess you changed your mind.")
return
results = ""
for char in characters:
entry = await watchlist.add_entry(ctx.pool, char.name, False, ctx.author.id, reason)
if entry:
results += f"\n• {char.name}"
if results:
            await ctx.success(f"I added the following characters to the list {channel.mention}, "
                              f"duplicates were skipped:{results}")
else:
            await ctx.error("No characters were added, as they were all duplicates.")
@checks.server_mod_only()
@checks.tracking_world_only()
@watchlist.command(name="create")
async def watchlist_create(self, ctx: NabCtx, *, name):
"""Creates a watchlist channel.
Creates a new text channel for the watchlist to be posted.
The watch list shows which characters from it are online. Entire guilds can be added too.
        The channel can be renamed at any time. If the channel is deleted, all its entries are deleted too.
"""
if WATCHLIST_SEPARATOR in name:
await ctx.error(f"Channel name cannot contain the special character **{WATCHLIST_SEPARATOR}**")
return
if not ctx.bot_permissions.manage_channels:
return await ctx.error(f"I need `Manage Channels` permission in the server to use this command.")
message = await ctx.send(f"Do you want to create a new watchlist named `{name}`?")
confirm = await ctx.react_confirm(message, delete_after=True)
if not confirm:
return
try:
overwrites = {
ctx.guild.default_role: discord.PermissionOverwrite(send_messages=False, read_messages=True),
ctx.guild.me: discord.PermissionOverwrite(send_messages=True, read_messages=True, manage_channels=True)
}
channel = await ctx.guild.create_text_channel(name, overwrites=overwrites, category=ctx.channel.category)
except discord.Forbidden:
await ctx.error(f"Sorry, I don't have permissions to create channels.")
except discord.HTTPException:
await ctx.error(f"Something went wrong, the channel name you chose is probably invalid.")
else:
log.info(f"Watchlist created (Channel ID: {channel.id}, Guild ID: {channel.guild.id})")
await ctx.success(f"Channel created successfully: {channel.mention}\n")
await channel.send("This is where I will post a list of online watched characters.\n"
"Edit this channel's permissions to allow the roles you want.\n"
"This channel can be renamed freely.\n"
"Anyone with `Manage Channel` permission here can add entries.\n"
f"Example: {ctx.clean_prefix}{ctx.command.full_parent_name} add {channel.mention} "
f"Galarzaa Fidera\n"
"If this channel is deleted, all related entries will be lost.\n"
"**It is important to not allow anyone to write in here**\n"
"*This message can be deleted now.*")
watchlist = await Watchlist.insert(ctx.pool, ctx.guild.id, channel.id, ctx.author.id)
log.debug(f"{self.tag} Watchlist created | {watchlist}")
@checks.channel_mod_somewhere()
@checks.tracking_world_only()
@watchlist.command(name="info", aliases=["details", "reason"])
async def watchlist_info(self, ctx: NabCtx, channel: discord.TextChannel, *, name: str):
"""Shows information about a watchlist entry.
This shows who added the player, when, and if there's a reason why they were added."""
if not await Watchlist.get_by_channel_id(ctx.pool, channel.id):
return await ctx.error(f"{channel.mention} is not a watchlist.")
entry = await WatchlistEntry.get_by_name(ctx.pool, channel.id, name, False)
if not entry:
return await ctx.error(f"There's no character with that name registered to {channel.mention}.")
embed = discord.Embed(title=entry.name, url=tibiapy.Character.get_url(entry.name), timestamp=entry.created,
description=f"**Reason:** {entry.reason}" if entry.reason else "No reason provided.")
embed.set_author(name=f"In #{channel}")
author = ctx.guild.get_member(entry.user_id)
if author:
embed.set_footer(text=f"Added by {author.name}#{author.discriminator}",
icon_url=get_user_avatar(author))
await ctx.send(embed=embed)
@checks.channel_mod_somewhere()
@checks.tracking_world_only()
@watchlist.command(name="infoguild", aliases=["detailsguild", "reasonguild"])
async def watchlist_infoguild(self, ctx: NabCtx, channel: discord.TextChannel, *, name: str):
""""Shows details about a guild entry in a watchlist.
This shows who added the player, when, and if there's a reason why they were added."""
if not await Watchlist.get_by_channel_id(ctx.pool, channel.id):
return await ctx.error(f"{channel.mention} is not a watchlist.")
entry = await WatchlistEntry.get_by_name(ctx.pool, channel.id, name, True)
if not entry:
return await ctx.error(f"There's no guild with that name registered to {channel.mention}.")
embed = discord.Embed(title=entry.name, timestamp=entry.created, url=tibiapy.Guild.get_url(entry.name),
description=f"**Reason:** {entry.reason}" if entry.reason else "No reason provided.")
embed.set_author(name=f"In #{channel}")
author = ctx.guild.get_member(entry.user_id)
if author:
embed.set_footer(text=f"Added by {author.name}#{author.discriminator}",
icon_url=get_user_avatar(author))
await ctx.send(embed=embed)
@checks.tracking_world_only()
@watchlist.command(name="list")
async def watchlist_list(self, ctx: NabCtx, channel: discord.TextChannel):
"""Shows characters belonging to that watchlist.
Note that this lists all characters, not just online characters."""
if not await Watchlist.get_by_channel_id(ctx.pool, channel.id):
return await ctx.error(f"{channel.mention} is not a watchlist.")
if not channel.permissions_for(ctx.author).read_messages:
            return await ctx.error("You can't view the entries of a watchlist channel you can't see.")
entries = await WatchlistEntry.get_entries_by_channel(ctx.pool, channel.id)
entries = [entry for entry in entries if not entry.is_guild]
if not entries:
return await ctx.error(f"This watchlist has no registered characters.")
pages = Pages(ctx, entries=[f"[{r.name}]({NabChar.get_url(r.name)})" for r in entries])
pages.embed.title = f"Watched Characters in #{channel.name}"
try:
await pages.paginate()
except CannotPaginate as e:
await ctx.error(e)
@checks.tracking_world_only()
@watchlist.command(name="listguilds", aliases=["guilds", "guildlist"])
async def watchlist_list_guild(self, ctx: NabCtx, channel: discord.TextChannel):
"""Shows a list of guilds in the watchlist."""
if not await Watchlist.get_by_channel_id(ctx.pool, channel.id):
return await ctx.error(f"{channel.mention} is not a watchlist.")
entries = await WatchlistEntry.get_entries_by_channel(ctx.pool, channel.id)
entries = [entry for entry in entries if entry.is_guild]
if not channel.permissions_for(ctx.author).read_messages:
            return await ctx.error("You can't view the entries of a watchlist channel you can't see.")
if not entries:
            return await ctx.error("This watchlist has no registered guilds.")
pages = Pages(ctx, entries=[f"[{r.name}]({Guild.get_url(r.name)})" for r in entries])
pages.embed.title = f"Watched Guilds in #{channel.name}"
try:
await pages.paginate()
except CannotPaginate as e:
await ctx.error(e)
@checks.channel_mod_somewhere()
@checks.tracking_world_only()
@watchlist.command(name="remove", aliases=["removeplayer", "removechar"])
async def watchlist_remove(self, ctx: NabCtx, channel: discord.TextChannel, *, name):
"""Removes a character from a watchlist."""
if not await Watchlist.get_by_channel_id(ctx.pool, channel.id):
return await ctx.error(f"{channel.mention} is not a watchlist.")
entry = await WatchlistEntry.get_by_name(ctx.pool, channel.id, name, False)
if entry is None:
return await ctx.error(f"There's no character with that name registered in {channel.mention}.")
message = await ctx.send(f"Do you want to remove **{name}** from this watchlist?")
confirm = await ctx.react_confirm(message)
if confirm is None:
await ctx.send("You took too long!")
return
if not confirm:
await ctx.send("Ok then, guess you changed your mind.")
return
await entry.remove(ctx.pool)
await ctx.success("Character removed from the watchlist.")
@checks.channel_mod_somewhere()
@checks.tracking_world_only()
@watchlist.command(name="removeguild")
async def watchlist_removeguild(self, ctx: NabCtx, channel: discord.TextChannel, *, name):
"""Removes a guild from the watchlist."""
if not await Watchlist.get_by_channel_id(ctx.pool, channel.id):
return await ctx.error(f"{channel.mention} is not a watchlist.")
entry = await WatchlistEntry.get_by_name(ctx.pool, channel.id, name, True)
if entry is None:
return await ctx.error(f"There's no guild with that name registered in {channel.mention}.")
message = await ctx.send(f"Do you want to remove **{name}** from this watchlist?")
confirm = await ctx.react_confirm(message)
if confirm is None:
await ctx.send("You took too long!")
return
if not confirm:
await ctx.send("Ok then, guess you changed your mind.")
return
await entry.remove(ctx.pool)
await ctx.success("Guild removed from the watchlist.")
@checks.channel_mod_somewhere()
@checks.tracking_world_only()
@watchlist.command(name="showcount", usage="<channel> <yes|no>")
async def watchlist_showcount(self, ctx: NabCtx, channel: discord.TextChannel, yes_no):
"""Changes whether the online count will be displayed in the watchlist's channel's name or not."""
watchlist = await Watchlist.get_by_channel_id(ctx.pool, channel.id)
if not watchlist:
return await ctx.error(f"{channel.mention} is not a watchlist.")
if yes_no.lower().strip() in ["yes", "true"]:
await watchlist.update_show_count(ctx.pool, True)
await ctx.success("Showing online count is now enabled. The name will be updated on the next cycle.")
elif yes_no.lower().strip() in ["no", "false"]:
await watchlist.update_show_count(ctx.pool, False)
await ctx.success("Showing online count is now disabled. The name will be updated on the next cycle.")
else:
await ctx.error("That's not a valid option, try `yes` or `no`.")
# endregion
# region Methods
async def announce_death(self, char: NabChar, death: Death, levels_lost=0):
"""Announces a level up on the corresponding servers."""
log_msg = f"{self.tag}[{char.world}] announce_death: {char.name} | {death.level} | {death.killer.name}"
# Find killer article (a/an)
killer_article = ""
if not death.by_player:
killer_article = death.killer.name.split(" ", 1)
if killer_article[0] in ["a", "an"] and len(killer_article) > 1:
death.killer.name = killer_article[1]
killer_article = killer_article[0] + " "
else:
killer_article = ""
if death.killer.name.lower() in ["death", "energy", "earth", "fire", "pit battler", "pit berserker",
"pit blackling",
"pit brawler", "pit condemned", "pit demon", "pit destroyer", "pit fiend",
"pit groveller", "pit grunt", "pit lord", "pit maimer", "pit overlord",
"pit reaver",
"pit scourge"] and levels_lost == 0:
# Skip element damage deaths unless player lost a level to avoid spam from arena deaths
# This will cause a small amount of deaths to not be announced but it's probably worth the tradeoff
log.debug(f"{log_msg} | Skipping arena death")
return
guilds = [s for s, w in self.bot.tracked_worlds.items() if w == char.world]
for guild_id in guilds:
guild = self.bot.get_guild(guild_id)
if guild is None:
continue
min_level = await get_server_property(self.bot.pool, guild_id, "announce_level", config.announce_threshold)
if death.level < min_level:
log.debug(f"{log_msg} | Guild skipped {guild_id} | Level under limit")
continue
if guild.get_member(char.owner_id) is None:
log.debug(f"{log_msg} | Guild skipped {guild_id} | Owner not in server")
continue
simple_messages = await get_server_property(self.bot.pool, guild_id, "simple_messages", False)
condition = DeathMessageCondition(char=char, death=death, levels_lost=levels_lost, min_level=min_level)
# Select a message
            if death.by_player:
                message = weighed_choice(death_messages_player, condition) if not simple_messages else SIMPLE_PVP_DEATH
            else:
                message = weighed_choice(death_messages_monster, condition) if not simple_messages else SIMPLE_DEATH
# Format message with death information
message = message.format(**{'name': char.name, 'level': death.level, 'killer': death.killer.name,
'killer_article': killer_article, 'he_she': char.he_she.lower(),
'his_her': char.his_her.lower(), 'him_her': char.him_her.lower()})
# Format extra stylization
message = f"{config.pvpdeath_emoji if death.by_player else config.death_emoji} {format_message(message)}"
channel_id = await get_server_property(self.bot.pool, guild.id, "levels_channel")
channel = self.bot.get_channel_or_top(guild, channel_id)
try:
await channel.send(message[:1].upper() + message[1:])
log.debug(f"{log_msg} | Announced in {guild_id}")
except discord.Forbidden:
log.warning(f"{log_msg} | Forbidden error | Channel {channel.id} | Server {guild.id}")
except discord.HTTPException:
log.exception(f"{log_msg}")
async def announce_level(self, char: NabChar, level: int):
"""Announces a level up on corresponding servers."""
        log_msg = f"{self.tag}[{char.world}] announce_level: {char.name} | {level}"
guilds = [s for s, w in self.bot.tracked_worlds.items() if w == char.world]
for guild_id in guilds:
guild: discord.Guild = self.bot.get_guild(guild_id)
if guild is None:
continue
min_level = await get_server_property(self.bot.pool, guild_id, "announce_level", config.announce_threshold)
if char.level < min_level:
log.debug(f"{log_msg} | Guild skipped {guild_id} | Level under limit")
continue
if guild.get_member(char.owner_id) is None:
log.debug(f"{log_msg} | Guild skipped {guild_id} | Owner not in server")
continue
channel_id = await get_server_property(self.bot.pool, guild.id, "levels_channel")
simple_messages = await get_server_property(self.bot.pool, guild_id, "simple_messages", False)
channel = self.bot.get_channel_or_top(guild, channel_id)
try:
# Select a message
if not simple_messages:
message = weighed_choice(level_messages, LevelCondition(char=char, level=level,
min_level=min_level))
else:
message = SIMPLE_LEVEL
# Format message with level information
message = message.format(**{'name': char.name, 'level': level, 'he_she': char.he_she.lower(),
'his_her': char.his_her.lower(), 'him_her': char.him_her.lower()})
# Format extra stylization
message = f"{config.levelup_emoji} {format_message(message)}"
await channel.send(message)
log.debug(f"{log_msg} | Announced in {guild_id}")
except discord.Forbidden:
log.warning(f"{log_msg} | Forbidden error | Channel {channel.id} | Server {guild.id}")
except discord.HTTPException:
log.exception(f"{log_msg}")
@staticmethod
async def cached_get_guild(guild_name: str, world: str) -> Optional[Guild]:
"""
Used to cache guild info, to avoid fetching the same guild multiple times if they are in multiple lists
"""
if guild_name in GUILD_CACHE[world]:
return GUILD_CACHE[world][guild_name]
guild = await get_guild(guild_name)
GUILD_CACHE[world][guild_name] = guild
return guild
@classmethod
async def check_char_availability(cls, ctx: NabCtx, user_id: int, char: NabChar, worlds: List[str],
check_other=False):
"""Checks the availability of a character and other visible characters optionally.
:param ctx: The command context where this is called.
:param user_id: The id of the user against which the characters will be checked for.
:param char: The character to be checked.
:param worlds: The worlds to filter characters from.
        :param check_other: Whether other characters in the same account should be processed too or not.
:return: A named tuple containing the different categories of characters found.
"""
skipped = [] # type: List[OtherCharacter]
"""Characters that were skipped due to being in another world or scheduled for deletion."""
no_user = [] # type: List[DbChar]
"""Characters that belong to users no longer visible to NabBot, most of the time abandoned temporal users."""
same_owner = [] # type: List[DbChar]
"""Characters that already belong to the user."""
different_user = [] # type: List[DbChar]
"""Characters belonging to a different user."""
unregistered = [] # type: List[NabChar]
"""Characters that have never been registered."""
if check_other and not char.hidden:
chars: List[Union[OtherCharacter, NabChar]] = char.other_characters
_char = next((x for x in chars if x.name == char.name))
chars[chars.index(_char)] = char
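            # Swap the OtherCharacter stub for the already fetched character so the
            # loop below doesn't have to fetch it from the website a second time.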
else:
chars = [char]
for char in chars:
if char.world not in worlds or char.deleted:
skipped.append(char)
continue
db_char = await DbChar.get_by_name(ctx.pool, char.name)
if db_char:
owner = ctx.bot.get_user(db_char.user_id)
if owner is None:
no_user.append(db_char)
continue
elif db_char.user_id == user_id:
same_owner.append(db_char)
continue
different_user.append(db_char)
continue
if isinstance(char, OtherCharacter):
char = await get_character(ctx.bot, char.name)
unregistered.append(char)
return CharactersResult._make((skipped, no_user, same_owner, different_user, unregistered,
len(skipped) == len(chars)))
async def compare_deaths(self, char: NabChar):
"""Checks if the player has new deaths.
New deaths are announced if they are not older than 30 minutes."""
if char is None:
return
async with self.bot.pool.acquire() as conn:
db_char = await DbChar.get_by_name(conn, char.name)
if db_char is None:
return
pending_deaths = []
for death in char.deaths:
# Check if we have a death that matches the time
exists = await DbDeath.exists(conn, db_char.id, death.level, death.time)
if exists:
# We already have this death, we're assuming we already have older deaths
break
pending_deaths.append(death)
# Announce and save deaths from older to new
for death in reversed(pending_deaths):
db_death = DbDeath.from_tibiapy(death)
db_death.character_id = db_char.id
await db_death.save(conn)
log_msg = f"{self.tag}[{char.world}] Death detected: {char.name} | {death.level} |" \
f" {death.killer.name}"
                if (dt.datetime.now(dt.timezone.utc) - death.time) >= dt.timedelta(minutes=30):
log.info(f"{log_msg} | Too old to announce.")
# Only try to announce if character has an owner
elif char.owner_id:
log.info(log_msg)
await self.announce_death(char, death, max(death.level - char.level, 0))
async def compare_levels(self, char: Union[NabChar, OnlineCharacter], update_only=False):
"""Compares the character's level with the stored level in database.
This should only be used on online characters or characters that just became offline."""
if char is None:
return
async with self.bot.pool.acquire() as conn:
db_char = await DbChar.get_by_name(conn, char.name)
if not db_char:
return
# OnlineCharacter has no sex attribute, so we get it from database and convert to NabChar
if isinstance(char, OnlineCharacter):
char = NabChar.from_online(char, db_char.sex, db_char.user_id)
level_before = db_char.level
if level_before != char.level:
await db_char.update_level(conn, char.level)
log.debug(f"{self.tag}[{char.world}][compare_level] {char.name}'s level updated:"
f" {level_before} -> {char.level}")
if not (char.level > level_before > 0) or update_only:
return
# Saving level up date in database
await DbLevelUp.insert(conn, db_char.id, char.level)
# Announce the level up
log.info(f"{self.tag}[{char.world}] Level up detected: {char.name} | {char.level}")
# Only try to announce level if char has an owner.
if char.owner_id:
await self.announce_level(char, char.level)
else:
log.debug(f"{self.tag}[{char.world}] Character has no owner, skipping")
@classmethod
async def process_character_assignment(cls, ctx: NabCtx, results: CharactersResult, user: discord.User,
author: discord.User = None, claim=False):
"""Processes the results of a character check and applies the changes
:param ctx: The command context
:param results: The character results
:param user: The user that will get the characters assigned.
:param author: The user that did the action, None if it was the same user.
:param claim: Whether the operation is a claim.
:return: A summary of the applied actions.
"""
recipient = f"**@{user.display_name}**" if author else "you"
author_log = f"| By {author}" if author else ""
reply = ""
if results.different_user and not claim:
first = results.different_user[0].name
reply = f"{ctx.tick(False)} Sorry, a character in that account ({first}) is already registered to " \
f"someone else.\n" \
f"If the character really belongs to {recipient}, `{ctx.clean_prefix}claim {first}` should be used."
return reply
if results.same_owner:
existent_names = [e.name for e in results.same_owner]
reply += f"\n⚫ The following characters were already registered to {recipient}: {join_list(existent_names)}"
if results.new:
added_names = [a.name for a in results.new]
reply += f"\n🔵 The following characters were added to {recipient}: {join_list(added_names)}"
if results.no_user:
updated_names = [r.name for r in results.no_user]
reply += f"\n⚪ The following characters were reassigned to {recipient}: {join_list(updated_names)}"
if results.different_user:
reclaimed_chars = [c.name for c in results.different_user]
reply += f"\n🔴 The following characters were reclaimed by you: {join_list(reclaimed_chars)}"
async with ctx.pool.acquire() as conn:
for char in results.different_user:
await char.update_user(conn, user.id)
log.info(f"{cls.get_tag()} Character Claimed | {char.name} | {user} ({user.id}){author_log}")
for char in results.no_user:
await char.update_user(conn, user.id)
log.info(f"{cls.get_tag()} Character Reassigned | {char.name} | {user} ({user.id}){author_log}")
for char in results.new:
db_char = await DbChar.insert(conn, char.name, char.level, char.vocation.value, user.id, char.world,
char.guild_name)
char.id = db_char.id
log.info(f"{cls.get_tag()} Character Registered | {char.name} | {user} ({user.id}){author_log}")
# If we are claiming, different user characters are also passed
if claim:
results.no_user.extend(results.different_user)
ctx.bot.dispatch("characters_registered", user, results.new, results.no_user, author)
ctx.bot.dispatch("character_change", user.id)
return reply
async def save_highscores(self, world: str, key: str, highscores: tibiapy.Highscores) -> int:
"""Saves the highscores of a world and category to the database."""
if highscores is None:
return 0
rows = [(e.rank, key, world, e.name, e.vocation.value, e.value) for e in highscores.entries]
async with self.bot.pool.acquire() as conn: # type: asyncpg.Connection
async with conn.transaction():
# Delete old records
await conn.execute("DELETE FROM highscores_entry WHERE category = $1 AND world = $2", key, world)
# Add current entries
await conn.copy_records_to_table("highscores_entry", records=rows,
columns=["rank", "category", "world", "name", "vocation", "value"])
log.debug(f"{self.tag}[{world}][save_highscores] {key} | {len(rows)} entries saved")
# Update scan times
await conn.execute("""INSERT INTO highscores(world, category, last_scan)
VALUES($1, $2, $3)
ON CONFLICT (world,category)
DO UPDATE SET last_scan = EXCLUDED.last_scan""",
world, key, dt.datetime.now(dt.timezone.utc))
return len(rows)
# endregion
def cog_unload(self):
log.info(f"{self.tag} Unloading cog")
self.scan_highscores_task.cancel()
self.scan_online_chars_task.cancel()
for k, v in self.world_tasks.items():
v.cancel()
def setup(bot):
bot.add_cog(Tracking(bot))
| apache-2.0 | 2,001,326,355,620,940,500 | 47.838201 | 120 | 0.590449 | false |
Fiona/AreWeAlone | __main__.py | 1 | 4157 | ##########
# LD 22
# The theme is alone
# it's a dumb theme
# fiona wrote this
##########
# System and Python lib imports
import sys
sys.path += ['.']
# Game engine imports
from myrmidon.myrmidon import MyrmidonGame, MyrmidonProcess
from myrmidon.consts import *
from pygame.locals import *
# Game imports
from consts import *
from media import Media
from gui import GUI
from galaxy import Galaxy
from game_galaxy import Galaxy_background, Solar_system_star, Player_ship, Galaxy_player_ship
class Game(MyrmidonProcess):
# Current state
game_state = 0
# Player state
money = 2000000000
fuel = 0
crew = 0
current_system = "Sol"
current_object = "Earth"
fuel_cost = 1000000000
crew_cost = 500000000
actions_done = {}
home_planet_result = []
first_time = True
# Self explanitory object pointers and lists
fps_text = None
gui = None
media = None
solar_system_objects = []
player_ship = None
background = None
galaxy = None
def execute(self):
# Pre launch set-up
MyrmidonGame.current_fps = 60
self.priority = PRIORITY_MAIN_GAME
# Load all media
self.media = Media()
self.media.load_fonts()
self.media.load_graphics()
self.media.load_audio()
# Debug display
if DEBUG_SHOW_FPS:
self.fps_text = MyrmidonGame.write_text(0.0, 0.0, font = self.media.fonts['basic'], text = 0)
self.fps_text.colour = (1, 1, 1, 1)
self.fps_text.z = -2000
# Set up starting game objects
self.galaxy = Galaxy(self)
self.gui = GUI(self)
self.switch_game_state_to(GAME_STATE_SOLAR_SYSTEM)
self.media.audio['ambient'].play(loops = -1)
while True:
# update debug display
if DEBUG_SHOW_FPS:
self.fps_text.text = "fps: " + str(MyrmidonGame.fps)
yield
def quit_game(self):
sys.exit()
def switch_game_state_to(self, state, gui_state = None):
"""
Pass in a state and this will switch to it.
        It will also clean up everything necessary to go out of the
previous game state.
"""
# Undo and destroy everything in the current state
self.gui.destroy_current_gui_state()
col = (1.0, 1.0, 1.0)
if self.game_state == GAME_STATE_SOLAR_SYSTEM:
for x in self.solar_system_objects:
x.signal(S_KILL)
self.solar_system_objects = []
self.player_ship.signal(S_KILL)
self.background.signal(S_KILL)
elif self.game_state == GAME_STATE_GALAXY:
self.player_ship.signal(S_KILL)
self.background.signal(S_KILL)
# Switch to new state
self.game_state = state
# Create everything we require
if state == GAME_STATE_GALAXY:
self.background = Galaxy_background(self)
self.gui.fade_toggle()
self.gui.switch_gui_state_to(GUI_STATE_GALAXY if gui_state is None else gui_state)
self.player_ship = Galaxy_player_ship(self)
elif state == GAME_STATE_SOLAR_SYSTEM:
self.background = Galaxy_background(self)
self.solar_system_objects = []
self.solar_system_objects.append(Solar_system_star(self, self.galaxy.solar_systems[self.current_system]))
self.gui.fade_toggle()
self.gui.switch_gui_state_to(GUI_STATE_SOLAR_SYSTEM if gui_state is None else gui_state)
self.player_ship = Player_ship(self)
def do_home_planet_results(self):
if len(self.home_planet_result) > 0:
result = self.home_planet_result.pop()
result[0](self, *result[1])
if __name__ == '__main__':
MyrmidonGame.screen_resolution = (1024, 768)
MyrmidonGame.lowest_resolution = (1024, 768)
MyrmidonGame.full_screen = False
Game()
| mit | 1,547,291,332,813,934,600 | 27.06993 | 117 | 0.570604 | false |
BatedUrGonnaDie/salty_bot | modules/helpers/yt_video_link.py | 1 | 1246 | #! /usr/bin/env python3.7
import re
import isodate
import modules.extensions.regexes as regexes
import modules.commands.helpers.time_formatter as time_formatter
ON_ACTION = "PRIVMSG"
def call(salty_inst, c_msg, balancer, **kwargs):
video_ids = re.findall(regexes.YOUTUBE_URL, c_msg["message"])
if not video_ids:
return False, "No video ids"
seen_ids = set()
seen_add = seen_ids.add
video_ids = [x for x in video_ids if not (x in seen_ids or seen_add(x))]
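    # Order-preserving de-duplication: seen_add() returns None (falsy), so each id is
    # kept only the first time it is seen while the original ordering is preserved.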
parts = ["snippet", "statistics", "contentDetails"]
final_list = []
success, response = salty_inst.youtube_api.get_videos(video_ids, parts, **kwargs)
if not success:
return False, \
"Error retrieving info from youtube API ({0})".format(response.status_code)
if len(response["items"]) == 0:
return False, "No valid ID's found."
for i in response["items"]:
final_list.append("[{0}] {1} uploaded by {2}. Views: {3}".format(
time_formatter.format_time(isodate.parse_duration(i["contentDetails"]["duration"]).seconds),
i["snippet"]["title"],
i["snippet"]["channelTitle"],
i["statistics"]["viewCount"]
))
return True, " | ".join(final_list)
| mit | -6,777,776,183,059,641,000 | 32.675676 | 104 | 0.623596 | false |
google/tf_mesh_renderer | mesh_renderer/rasterize_triangles_test.py | 1 | 7681 | # Copyright 2017 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import numpy as np
import tensorflow as tf
import test_utils
import camera_utils
import rasterize_triangles
class RenderTest(tf.test.TestCase):
def setUp(self):
self.test_data_directory = 'mesh_renderer/test_data/'
tf.reset_default_graph()
self.cube_vertex_positions = tf.constant(
[[-1, -1, 1], [-1, -1, -1], [-1, 1, -1], [-1, 1, 1], [1, -1, 1],
[1, -1, -1], [1, 1, -1], [1, 1, 1]],
dtype=tf.float32)
self.cube_triangles = tf.constant(
[[0, 1, 2], [2, 3, 0], [3, 2, 6], [6, 7, 3], [7, 6, 5], [5, 4, 7],
[4, 5, 1], [1, 0, 4], [5, 6, 2], [2, 1, 5], [7, 4, 0], [0, 3, 7]],
dtype=tf.int32)
self.tf_float = lambda x: tf.constant(x, dtype=tf.float32)
self.image_width = 640
self.image_height = 480
self.perspective = camera_utils.perspective(
self.image_width / self.image_height,
self.tf_float([40.0]), self.tf_float([0.01]),
self.tf_float([10.0]))
def runTriangleTest(self, w_vector, target_image_name):
"""Directly renders a rasterized triangle's barycentric coordinates.
Tests only the kernel (rasterize_triangles_module).
Args:
w_vector: 3 element vector of w components to scale triangle vertices.
target_image_name: image file name to compare result against.
"""
clip_init = np.array(
[[-0.5, -0.5, 0.8, 1.0], [0.0, 0.5, 0.3, 1.0], [0.5, -0.5, 0.3, 1.0]],
dtype=np.float32)
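    # Note: scaling a vertex's homogeneous (x, y, z, w) by a constant leaves its
    # projected position unchanged, but mixing different w values across the vertices
    # changes how barycentrics are interpolated, which is what the perspective test exercises.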
clip_init = clip_init * np.reshape(
np.array(w_vector, dtype=np.float32), [3, 1])
clip_coordinates = tf.constant(clip_init)
triangles = tf.constant([[0, 1, 2]], dtype=tf.int32)
rendered_coordinates, _, _ = (
rasterize_triangles.rasterize_triangles_module.rasterize_triangles(
clip_coordinates, triangles, self.image_width, self.image_height))
rendered_coordinates = tf.concat(
[rendered_coordinates,
tf.ones([self.image_height, self.image_width, 1])], axis=2)
with self.test_session() as sess:
image = rendered_coordinates.eval()
baseline_image_path = os.path.join(self.test_data_directory,
target_image_name)
test_utils.expect_image_file_and_render_are_near(
self, sess, baseline_image_path, image)
def testRendersSimpleTriangle(self):
self.runTriangleTest((1.0, 1.0, 1.0), 'Simple_Triangle.png')
def testRendersPerspectiveCorrectTriangle(self):
self.runTriangleTest((0.2, 0.5, 2.0), 'Perspective_Corrected_Triangle.png')
def testRendersTwoCubesInBatch(self):
"""Renders a simple cube in two viewpoints to test the python wrapper."""
vertex_rgb = (self.cube_vertex_positions * 0.5 + 0.5)
vertex_rgba = tf.concat([vertex_rgb, tf.ones([8, 1])], axis=1)
center = self.tf_float([[0.0, 0.0, 0.0]])
world_up = self.tf_float([[0.0, 1.0, 0.0]])
look_at_1 = camera_utils.look_at(self.tf_float([[2.0, 3.0, 6.0]]),
center, world_up)
look_at_2 = camera_utils.look_at(self.tf_float([[-3.0, 1.0, 6.0]]),
center, world_up)
projection_1 = tf.matmul(self.perspective, look_at_1)
projection_2 = tf.matmul(self.perspective, look_at_2)
projection = tf.concat([projection_1, projection_2], axis=0)
background_value = [0.0, 0.0, 0.0, 0.0]
rendered = rasterize_triangles.rasterize(
tf.stack([self.cube_vertex_positions, self.cube_vertex_positions]),
tf.stack([vertex_rgba, vertex_rgba]), self.cube_triangles, projection,
self.image_width, self.image_height, background_value)
with self.test_session() as sess:
images = sess.run(rendered, feed_dict={})
for i in (0, 1):
image = images[i, :, :, :]
baseline_image_name = 'Unlit_Cube_{}.png'.format(i)
baseline_image_path = os.path.join(self.test_data_directory,
baseline_image_name)
test_utils.expect_image_file_and_render_are_near(
self, sess, baseline_image_path, image)
def testSimpleTriangleGradientComputation(self):
"""Verifies the Jacobian matrix for a single pixel.
The pixel is in the center of a triangle facing the camera. This makes it
easy to check which entries of the Jacobian might not make sense without
worrying about corner cases.
"""
test_pixel_x = 325
test_pixel_y = 245
clip_coordinates = tf.placeholder(tf.float32, shape=[3, 4])
triangles = tf.constant([[0, 1, 2]], dtype=tf.int32)
barycentric_coordinates, _, _ = (
rasterize_triangles.rasterize_triangles_module.rasterize_triangles(
clip_coordinates, triangles, self.image_width, self.image_height))
pixels_to_compare = barycentric_coordinates[
test_pixel_y:test_pixel_y + 1, test_pixel_x:test_pixel_x + 1, :]
with self.test_session():
ndc_init = np.array(
[[-0.5, -0.5, 0.8, 1.0], [0.0, 0.5, 0.3, 1.0], [0.5, -0.5, 0.3, 1.0]],
dtype=np.float32)
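      # tf.test.compute_gradient returns (theoretical, numerical) Jacobians of the
      # pixel's barycentric coordinates w.r.t. the clip-space vertices; the test then
      # asserts that the two agree within tolerance.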
theoretical, numerical = tf.test.compute_gradient(
clip_coordinates, (3, 4),
pixels_to_compare, (1, 1, 3),
x_init_value=ndc_init,
delta=4e-2)
jacobians_match, message = (
test_utils.check_jacobians_are_nearly_equal(
theoretical, numerical, 0.01, 0.0, True))
self.assertTrue(jacobians_match, message)
def testInternalRenderGradientComputation(self):
"""Isolates and verifies the Jacobian matrix for the custom kernel."""
image_height = 21
image_width = 28
clip_coordinates = tf.placeholder(tf.float32, shape=[8, 4])
barycentric_coordinates, _, _ = (
rasterize_triangles.rasterize_triangles_module.rasterize_triangles(
clip_coordinates, self.cube_triangles, image_width, image_height))
with self.test_session():
# Precomputed transformation of the simple cube to normalized device
# coordinates, in order to isolate the rasterization gradient.
# pyformat: disable
ndc_init = np.array(
[[-0.43889722, -0.53184521, 0.85293502, 1.0],
[-0.37635487, 0.22206162, 0.90555805, 1.0],
[-0.22849123, 0.76811147, 0.80993629, 1.0],
[-0.2805393, -0.14092168, 0.71602166, 1.0],
[0.18631913, -0.62634289, 0.88603103, 1.0],
[0.16183566, 0.08129397, 0.93020856, 1.0],
[0.44147962, 0.53497446, 0.85076219, 1.0],
[0.53008741, -0.31276882, 0.77620775, 1.0]],
dtype=np.float32)
# pyformat: enable
theoretical, numerical = tf.test.compute_gradient(
clip_coordinates, (8, 4),
barycentric_coordinates, (image_height, image_width, 3),
x_init_value=ndc_init,
delta=4e-2)
jacobians_match, message = (
test_utils.check_jacobians_are_nearly_equal(
theoretical, numerical, 0.01, 0.01))
self.assertTrue(jacobians_match, message)
if __name__ == '__main__':
tf.test.main()
| apache-2.0 | -5,375,903,968,414,628,000 | 38.188776 | 80 | 0.622836 | false |
crask/redisproxy | test/memcache/memcache.py | 1 | 15420 | # Copyright 2012 Mixpanel, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'''
a minimal, pure python client for memcached, kestrel, etc.
Usage example::
import memcache
mc = memcache.Client("127.0.0.1", 11211, timeout=1, connect_timeout=5)
mc.set("some_key", "Some value")
value = mc.get("some_key")
mc.delete("another_key")
'''
import errno
import re
import socket
class ClientException(Exception):
'''
Raised when the server does something we don't expect
| This does not include `socket errors <http://docs.python.org/library/socket.html#socket.error>`_
| Note that ``ValidationException`` subclasses this so, technically, this is raised on any error
'''
def __init__(self, msg, item=None):
if item is not None:
msg = '%s: %r' % (msg, item) # use repr() to better see special chars
super(ClientException, self).__init__(msg)
class ValidationException(ClientException):
'''
Raised when an invalid parameter is passed to a ``Client`` function
'''
def __init__(self, msg, item):
super(ValidationException, self).__init__(msg, item)
class Client(object):
def __init__(self, host, port, timeout=None, connect_timeout=None):
'''
If ``connect_timeout`` is None, ``timeout`` will be used instead
(for connect and everything else)
'''
self._addr = (host, port)
self._timeout = timeout
self._connect_timeout = connect_timeout
self._socket = None
def __del__(self):
self.close()
def _get_addr(self):
return self._addr
address = property(_get_addr)
''' A read-only (str, int) tuple representing the host operations are performed on '''
def _get_timeout(self):
return self._timeout
def _set_timeout(self, timeout):
# presumably this should fail rarely
# set locally before on socket
# b/c if socket fails, it will probably be closed/reopened
# and will want to use last intended value
self._timeout = timeout
if self._socket:
self._socket.settimeout(timeout)
timeout = property(_get_timeout, _set_timeout)
'''
A float representing the timeout in seconds for reads and sends on the underlying socket
(``connect_timeout`` cannot be changed once init)
Setting a timeout can raise a ``TypeError`` (non-float) or a ``ValueError`` (negative)
'''
def _connect(self):
# buffer needed since we always ask for 4096 bytes at a time
# thus, might read more than the current expected response
# cleared on every reconnect since old bytes are part of old session and can't be reused
self._buffer = ''
self._socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
connect_timeout = self._connect_timeout if self._connect_timeout is not None else self._timeout
self._socket.settimeout(connect_timeout) # passing None means blocking
try:
self._socket.connect(self._addr)
self._socket.settimeout(self._timeout)
except (socket.error, socket.timeout):
self._socket = None # don't want to hang on to bad socket
raise
def _read(self, length=None):
'''
Return the next length bytes from server
Or, when length is None,
Read a response delimited by \r\n and return it (including \r\n)
(Use latter only when \r\n is unambiguous -- aka for control responses, not data)
'''
result = None
while result is None:
if length: # length = 0 is ambiguous, so don't use
if len(self._buffer) >= length:
result = self._buffer[:length]
self._buffer = self._buffer[length:]
else:
delim_index = self._buffer.find('\r\n')
if delim_index != -1:
result = self._buffer[:delim_index+2]
self._buffer = self._buffer[delim_index+2:]
if result is None:
try:
tmp = self._socket.recv(4096)
except (socket.error, socket.timeout) as e:
self.close()
raise e
if not tmp:
# we handle common close/retry cases in _send_command
# however, this can happen if server suddenly goes away
# (e.g. restarting memcache under sufficient load)
                    raise socket.error('unexpected socket close on recv')
else:
self._buffer += tmp
return result
def _send_command(self, command):
'''
Send command to server and return initial response line
Will reopen socket if it got closed (either locally or by server)
'''
if self._socket: # try to find out if the socket is still open
try:
self._socket.settimeout(0)
self._socket.recv(0)
# if recv didn't raise, then the socket was closed or there is junk
# in the read buffer, either way, close
self.close()
except socket.error as e:
if e.errno == errno.EAGAIN: # this is expected if the socket is still open
self._socket.settimeout(self._timeout)
else:
self.close()
if not self._socket:
self._connect()
self._socket.sendall(command)
return self._read()
# key supports ascii sans space and control chars
    # \x21 is !, right after space, and \x7e is ~, right before DEL
# also 1 <= len <= 250 as per the spec
_valid_key_re = re.compile('^[\x21-\x7e]{1,250}$')
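    # e.g. "user:123" or "session-abc" pass validation, while keys containing spaces,
    # control characters, or more than 250 characters are rejected by _validate_key below.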
def _validate_key(self, key):
if not isinstance(key, str): # avoid bugs subtle and otherwise
raise ValidationException('key must be str', key)
m = self._valid_key_re.match(key)
if m:
# in python re, $ matches either end of line or right before
# \n at end of line. We can't allow latter case, so
# making sure length matches is simplest way to detect
if len(m.group(0)) != len(key):
raise ValidationException('trailing newline', key)
else:
raise ValidationException('invalid key', key)
return key
def close(self):
'''
Closes the socket if its open
| Sockets are automatically closed when the ``Client`` object is garbage collected
| Sockets are opened the first time a command is run (such as ``get`` or ``set``)
| Raises socket errors
'''
if self._socket:
self._socket.close()
self._socket = None
def delete(self, key):
'''
Deletes a key/value pair from the server
Raises ``ClientException`` and socket errors
'''
# req - delete <key> [noreply]\r\n
# resp - DELETED\r\n
# or
# NOT_FOUND\r\n
key = self._validate_key(key)
command = 'delete %s\r\n' % key
resp = self._send_command(command)
if resp != 'DELETED\r\n' and resp != 'NOT_FOUND\r\n':
raise ClientException('delete failed', resp)
def get(self, key):
'''
Gets a single value from the server; returns None if there is no value
Raises ``ValidationException``, ``ClientException``, and socket errors
'''
return self.multi_get([key])[0]
def multi_get(self, keys):
'''
Takes a list of keys and returns a list of values
Raises ``ValidationException``, ``ClientException``, and socket errors
'''
if len(keys) == 0:
return []
# req - get <key> [<key> ...]\r\n
# resp - VALUE <key> <flags> <bytes> [<cas unique>]\r\n
# <data block>\r\n (if exists)
# [...]
# END\r\n
keys = [self._validate_key(key) for key in keys]
if len(set(keys)) != len(keys):
raise ClientException('duplicate keys passed to multi_get')
command = 'get %s\r\n' % ' '.join(keys)
received = {}
resp = self._send_command(command)
error = None
while resp != 'END\r\n':
terms = resp.split()
if len(terms) == 4 and terms[0] == 'VALUE': # exists
key = terms[1]
flags = int(terms[2])
length = int(terms[3])
if flags != 0:
error = ClientException('received non zero flags')
val = self._read(length+2)[:-2]
if key in received:
error = ClientException('duplicate results from server')
received[key] = val
else:
raise ClientException('get failed', resp)
resp = self._read()
if error is not None:
# this can happen if a memcached instance contains items set by a previous client
# leads to subtle bugs, so fail fast
raise error
if len(received) > len(keys):
raise ClientException('received too many responses')
# memcache client is used by other servers besides memcached.
        # In the case of kestrel, responses coming back do not necessarily
# match the requests going out. Thus we just ignore the key name
# if there is only one key and return what we received.
if len(keys) == 1 and len(received) == 1:
response = received.values()
else:
response = [received.get(key) for key in keys]
return response
def getex(self, key):
'''
Gets a single value from the server; returns None if there is no value
Raises ``ValidationException``, ``ClientException``, and socket errors
'''
return self.multi_getex([key])[0]
def multi_getex(self, keys):
'''
Takes a list of keys and returns a list of values
Raises ``ValidationException``, ``ClientException``, and socket errors
'''
if len(keys) == 0:
return []
# req - getex <key> [<key> ...]\r\n
# resp - VALUE <key> <flags> <bytes> <cas unique> <expire time>\r\n
# <data block>\r\n (if exists)
# [...]
# END\r\n
keys = [self._validate_key(key) for key in keys]
if len(set(keys)) != len(keys):
            raise ClientException('duplicate keys passed to multi_getex')
command = 'getex %s\r\n' % ' '.join(keys)
received = {}
resp = self._send_command(command)
error = None
while resp != 'END\r\n':
terms = resp.split()
if len(terms) == 6 and terms[0] == 'VALUE': # exists
key = terms[1]
flags = int(terms[2])
length = int(terms[3])
if flags != 0:
error = ClientException('received non zero flags')
val = self._read(length+2)[:-2]
if key in received:
error = ClientException('duplicate results from server')
received[key] = val
else:
                raise ClientException('getex failed', resp)
resp = self._read()
if error is not None:
# this can happen if a memcached instance contains items set by a previous client
# leads to subtle bugs, so fail fast
raise error
if len(received) > len(keys):
raise ClientException('received too many responses')
# memcache client is used by other servers besides memcached.
        # In the case of kestrel, responses coming back do not necessarily
# match the requests going out. Thus we just ignore the key name
# if there is only one key and return what we received.
if len(keys) == 1 and len(received) == 1:
response = received.values()
else:
response = [received.get(key) for key in keys]
return response
def set(self, key, val, exptime=0):
'''
Sets a key to a value on the server with an optional exptime (0 means don't auto-expire)
Raises ``ValidationException``, ``ClientException``, and socket errors
'''
# req - set <key> <flags> <exptime> <bytes> [noreply]\r\n
# <data block>\r\n
# resp - STORED\r\n (or others)
key = self._validate_key(key)
        # the problem with supporting types is it often leads to uneven and confused usage
# some code sites use the type support, others do manual casting to/from str
# worse yet, some sites don't even know what value they are putting in and mis-cast on get
# by uniformly requiring str, the end-use code is much more uniform and legible
if not isinstance(val, str):
raise ValidationException('value must be str', val)
# typically, if val is > 1024**2 bytes server returns:
# SERVER_ERROR object too large for cache\r\n
# however custom-compiled memcached can have different limit
# so, we'll let the server decide what's too much
if not isinstance(exptime, int):
raise ValidationException('exptime not int', exptime)
elif exptime < 0:
raise ValidationException('exptime negative', exptime)
command = 'set %s 0 %d %d\r\n%s\r\n' % (key, exptime, len(val), val)
resp = self._send_command(command)
if resp != 'STORED\r\n':
raise ClientException('set failed', resp)
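    # Round-trip sketch for the commands above (assumes ``client`` is an
    # already-constructed instance of this Client; the names are illustrative):
    #   client.set('answer', '42', exptime=300)
    #   client.get('answer')      # -> '42'
    #   client.delete('answer')
    #   client.get('answer')      # -> None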
def stats(self, additional_args=None):
'''
Runs a stats command on the server.
``additional_args`` are passed verbatim to the server.
See `the memcached wiki <http://code.google.com/p/memcached/wiki/NewCommands#Statistics>`_ for details
or `the spec <https://github.com/memcached/memcached/blob/master/doc/protocol.txt>`_ for even more details
Raises ``ClientException`` and socket errors
'''
# req - stats [additional args]\r\n
# resp - STAT <name> <value>\r\n (one per result)
# END\r\n
if additional_args is not None:
command = 'stats %s\r\n' % additional_args
else:
command = 'stats\r\n'
resp = self._send_command(command)
result = {}
while resp != 'END\r\n':
terms = resp.split()
if len(terms) == 2 and terms[0] == 'STAT':
result[terms[1]] = None
elif len(terms) == 3 and terms[0] == 'STAT':
result[terms[1]] = terms[2]
else:
raise ClientException('stats failed', resp)
resp = self._read()
return result
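
# ---------------------------------------------------------------------------
# Minimal usage sketch (not part of the original client).  The constructor is
# not shown in this excerpt, so the (host, port) signature below is an
# assumption -- adjust it to whatever Client.__init__ actually accepts.
if __name__ == '__main__':
    client = Client('127.0.0.1', 11211)       # assumed constructor signature
    client.set('greeting', 'hello', exptime=60)
    assert client.get('greeting') == 'hello'
    print client.stats().get('version')       # Python 2, matching this module
    client.delete('greeting')
    client.close()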
| apache-2.0 | -2,465,647,685,932,409,000 | 37.074074 | 114 | 0.569455 | false |
sterlingbaldwin/acme_workbench | workbench-backend/index/tests.py | 1 | 3781 | import json
from django.test import TestCase
from django.test import Client
class IndexViewTests(TestCase):
    """
    Tests for the Index app views
    """
    fixtures = ['seed.json']
def test_get_index(self):
"""
Test that the index page returns success
"""
client = Client()
response = client.get('/')
self.assertEqual(response.status_code, 200)
def test_get_workbench_no_login(self):
"""
Test that the workbench redirects when not logged in
"""
client = Client()
response = client.get('/workbench')
self.assertEqual(response.status_code, 302)
def test_get_workbench_with_login(self):
"""
Test that the workbench renders when logged in
"""
client = Client()
self.assertTrue(
client.login(
username='test_user',
password='qwertyuiop'))
res = client.get('/workbench')
self.assertEqual(res.status_code, 200)
def test_valid_user_registration(self):
"""
test ability to register new users
"""
client = Client()
res = client.get('/register')
self.assertEqual(res.status_code, 200)
post_data = {
'username': 'test_user1',
'password1': 'test_pass',
'password2': 'test_pass',
'firstname': 'test',
'lastname': 'test',
'email': '[email protected]'
}
res = client.post('/register', post_data)
self.assertEqual(res.status_code, 200)
def test_invalid_user_registration(self):
"""
test ability to register new users
"""
client = Client()
res = client.get('/register')
self.assertEqual(res.status_code, 200)
post_data = {
'username': 'test_user1',
'password1': 'test_pass',
'password2': 'THIS IS NOT VALID',
'firstname': 'test',
'lastname': 'test',
'email': '[email protected]'
}
res = client.post('/register', post_data)
self.assertNotEqual(res.status_code, 200)
def test_valid_user_login(self):
"""
test users ability to login with valid credentials
"""
client = Client()
post_data = {
'password': 'qwertyuiop',
'username': 'test_user'
}
res = client.post('/login', post_data)
self.assertEqual(res.status_code, 200)
def test_invalid_user_login(self):
"""
Test rejection of invalid credentials
"""
client = Client()
post_data = {
'username': 'test_user',
'password': 'IM A LITTLE TEA POT'
}
res = client.post('/login', post_data)
self.assertEqual(res.status_code, 401)
def test_valid_user_logout(self):
"""
test users ability to logout
"""
client = Client()
post_data = {
'password': 'qwertyuiop',
'username': 'test_user'
}
res = client.post('/login', post_data)
self.assertEqual(res.status_code, 200)
res = client.get('/logout')
self.assertEqual(res.status_code, 200)
self.assertFalse(res.context['request'].user.is_authenticated())
def test_get_user_list(self):
"""
test of the get user list view
"""
client = Client()
url = '/get_user_list/'
expected_result = ['test_user', 'baldwin32']
res = client.get(url)
self.assertEqual(res.status_code, 200)
data = json.loads(res.content)
for user in data:
self.assertTrue(user in expected_result)
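
# How this suite is typically run (a sketch -- the project layout may differ):
#   python manage.py test index
# It relies on the ``seed.json`` fixture above providing the ``test_user`` and
# ``baldwin32`` accounts that the assertions expect.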
| bsd-2-clause | 3,293,972,023,252,596,000 | 28.310078 | 72 | 0.530283 | false |
lixiangning888/whole_project | lib/cuckoo/common/demux.py | 1 | 7192 | # Copyright (C) 2015 Accuvant, Inc. ([email protected])
# This file is part of Cuckoo Sandbox - http://www.cuckoosandbox.org
# See the file 'docs/LICENSE' for copying permission.
import os
import tempfile
from zipfile import ZipFile
try:
from rarfile import RarFile
HAS_RARFILE = True
except ImportError:
HAS_RARFILE = False
from lib.cuckoo.common.config import Config
from lib.cuckoo.common.objects import File
from lib.cuckoo.common.email_utils import find_attachments_in_email
from lib.cuckoo.common.office.msgextract import Message
def demux_zip(filename, options):
retlist = []
try:
# don't try to extract from office docs
magic = File(filename).get_type()
if "Microsoft" in magic or "Java Jar" in magic:
return retlist
extracted = []
password="infected"
fields = options.split(",")
for field in fields:
try:
key, value = field.split("=", 1)
if key == "password":
password = value
break
except:
pass
with ZipFile(filename, "r") as archive:
infolist = archive.infolist()
for info in infolist:
# avoid obvious bombs
if info.file_size > 100 * 1024 * 1024 or not info.file_size:
continue
# ignore directories
if info.filename.endswith("/"):
continue
base, ext = os.path.splitext(info.filename)
basename = os.path.basename(info.filename)
ext = ext.lower()
if ext == "" and len(basename) and basename[0] == ".":
continue
extensions = ["", ".exe", ".dll", ".pdf", ".doc", ".ppt", ".pptx", ".docx", ".xls", ".msi", ".bin", ".scr"]
for theext in extensions:
if ext == theext:
extracted.append(info.filename)
break
options = Config()
tmp_path = options.cuckoo.get("tmppath", "/tmp")
target_path = os.path.join(tmp_path, "cuckoo-zip-tmp")
if not os.path.exists(target_path):
os.mkdir(target_path)
tmp_dir = tempfile.mkdtemp(prefix='cuckoozip_',dir=target_path)
for extfile in extracted:
try:
retlist.append(archive.extract(extfile, path=tmp_dir, pwd=password))
except:
retlist.append(archive.extract(extfile, path=tmp_dir))
except:
pass
return retlist
def demux_rar(filename, options):
retlist = []
if not HAS_RARFILE:
return retlist
try:
# don't try to auto-extract RAR SFXes
magic = File(filename).get_type()
if "PE32" in magic or "MS-DOS executable" in magic:
return retlist
extracted = []
password="infected"
fields = options.split(",")
for field in fields:
try:
key, value = field.split("=", 1)
if key == "password":
password = value
break
except:
pass
with RarFile(filename, "r") as archive:
infolist = archive.infolist()
for info in infolist:
# avoid obvious bombs
if info.file_size > 100 * 1024 * 1024 or not info.file_size:
continue
# ignore directories
if info.filename.endswith("\\"):
continue
# add some more sanity checking since RarFile invokes an external handler
if "..\\" in info.filename:
continue
base, ext = os.path.splitext(info.filename)
basename = os.path.basename(info.filename)
ext = ext.lower()
if ext == "" and len(basename) and basename[0] == ".":
continue
extensions = ["", ".exe", ".dll", ".pdf", ".doc", ".ppt", ".pptx", ".docx", ".xls", ".msi", ".bin", ".scr"]
for theext in extensions:
if ext == theext:
extracted.append(info.filename)
break
options = Config()
tmp_path = options.cuckoo.get("tmppath", "/tmp")
target_path = os.path.join(tmp_path, "cuckoo-rar-tmp")
if not os.path.exists(target_path):
os.mkdir(target_path)
tmp_dir = tempfile.mkdtemp(prefix='cuckoorar_',dir=target_path)
for extfile in extracted:
# RarFile differs from ZipFile in that extract() doesn't return the path of the extracted file
# so we have to make it up ourselves
try:
archive.extract(extfile, path=tmp_dir, pwd=password)
retlist.append(os.path.join(tmp_dir, extfile.replace("\\", "/")))
except:
archive.extract(extfile, path=tmp_dir)
retlist.append(os.path.join(tmp_dir, extfile.replace("\\", "/")))
except:
pass
return retlist
def demux_email(filename, options):
retlist = []
try:
with open(filename, "rb") as openfile:
buf = openfile.read()
atts = find_attachments_in_email(buf, True)
if atts and len(atts):
for att in atts:
retlist.append(att[0])
except:
pass
return retlist
def demux_msg(filename, options):
retlist = []
try:
retlist = Message(filename).get_extracted_attachments()
except:
pass
return retlist
def demux_sample(filename, package, options):
"""
If file is a ZIP, extract its included files and return their file paths
If file is an email, extracts its attachments and return their file paths (later we'll also extract URLs)
"""
# if a package was specified, then don't do anything special
# this will allow for the ZIP package to be used to analyze binaries with included DLL dependencies
if package:
return [ filename ]
retlist = demux_zip(filename, options)
if not retlist:
retlist = demux_rar(filename, options)
if not retlist:
retlist = demux_email(filename, options)
if not retlist:
retlist = demux_msg(filename, options)
# handle ZIPs/RARs inside extracted files
if retlist:
newretlist = []
for item in retlist:
zipext = demux_zip(item, options)
if zipext:
newretlist.extend(zipext)
else:
rarext = demux_rar(item, options)
if rarext:
newretlist.extend(rarext)
else:
newretlist.append(item)
retlist = newretlist
# if it wasn't a ZIP or an email or we weren't able to obtain anything interesting from either, then just submit the
# original file
if not retlist:
retlist.append(filename)
return retlist
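
# ---------------------------------------------------------------------------
# Minimal command-line sketch (not part of Cuckoo): print the paths that
# demux_sample() would hand to the analyzer for a given file.  Assumes it is
# run from a Cuckoo checkout so the ``lib.cuckoo`` imports above resolve.
if __name__ == "__main__":
    import sys
    for target in demux_sample(sys.argv[1], None, ""):
        print target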
| lgpl-3.0 | -6,092,322,796,231,694,000 | 33.411483 | 123 | 0.535595 | false |
jeromecc/doctoctocbot | src/customer/forms.py | 1 | 3367 | from django import forms
from django.utils.translation import ugettext_lazy as _
from django_countries.fields import CountryField
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Submit
from customer.models import Customer
from bootstrap_modal_forms.forms import BSModalForm, BSModalModelForm
class CustomerReadOnlyForm(forms.Form):
def __init__(self, *args, **kwargs):
self.user = kwargs.pop('user', None)
super(CustomerReadOnlyForm, self).__init__(*args, **kwargs)
try:
customer = Customer.objects.get(user=self.user)
except Customer.DoesNotExist:
return
self.helper = FormHelper()
self.helper.form_id = 'customer-form'
self.helper.form_class = 'form-horizontal'
self.helper.form_method = 'post'
self.helper.form_action = '/customer/'
self.helper.form_group_wrapper_class = 'row'
self.helper.label_class = 'offset-md-1 col-md-1'
self.helper.field_class = 'col-md-8'
self.fields['country'].label = _('Country')
self.helper.add_input(Submit('submit', 'Submit', css_class='btn-primary'))
self.fields['id'].initial=customer.id
self.fields['first_name'].initial=customer.first_name
self.fields['last_name'].initial=customer.last_name
self.fields['company'].initial=customer.company
self.fields['address_1'].initial=customer.address_1
self.fields['address_2'].initial=customer.address_2
self.fields['country'].initial=customer.country
self.fields['email'].initial=customer.email
self.fields['city'].initial=customer.city
#self.fields['state'].initial=customer.state
self.fields['zip_code'].initial=customer.zip_code
id = forms.CharField(
disabled=True,
widget=forms.HiddenInput(),
)
first_name = forms.CharField(
label=_('First name'),
max_length=128,
disabled=True,
)
last_name = forms.CharField(
label=_('Last name'),
max_length=128,
disabled=True,
)
company = forms.CharField(
label=_('Company'),
max_length=128,
disabled=True,
)
address_1 = forms.CharField(
label=_('Address'),
max_length=128,
disabled=True,
)
address_2 = forms.CharField(
label=_('Address'),
max_length=128,
required=False,
disabled=True,
)
country = CountryField(
blank_label=_('(select country)')
).formfield(disabled=True,)
phone = forms.CharField(
label=_('Telephone'),
max_length=32,
required=False,
disabled=True,
)
email = forms.CharField(
label=_('Email'),
max_length=254,
disabled=True,
)
city = forms.CharField(
label=_('City'),
max_length=128,
disabled=True,
)
"""
state = forms.CharField(
label=_('State'),
max_length=128,
required=False,
disabled=True,
)
"""
zip_code = forms.CharField(
label=_('ZIP code'),
max_length=32,
disabled=True,
)
class CustomerModelForm(BSModalModelForm):
class Meta:
model = Customer
exclude = [
'silver_id',
'user',
'state',
]
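
    # Usage sketch (assumptions: called from a Django view, ``request.user``
    # has a matching Customer row, and ``customer`` is that instance):
    #   details = CustomerReadOnlyForm(user=request.user)   # read-only summary
    #   modal = CustomerModelForm(instance=customer)        # editable modal form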
| mpl-2.0 | 2,314,816,419,920,023,000 | 28.535088 | 82 | 0.591922 | false |
rain87/pc-health | create_graph.py | 1 | 6855 | #!/usr/bin/python
# coding=utf8
import rrd_config as C
import os
import subprocess
from collections import namedtuple
import gzip
import sys
import itertools
from smart_attributes import names as smart_names
DataSource = namedtuple('DataSource', 'db_fname field legend is_area color stack')
DataSource.__new__.__defaults__ = (False, None, False)
Graph = namedtuple('Graph', 'fname title vlabel ds')
graph_colors = [ '#396AB1', '#DA7C30', '#3E9651', '#CC2529', '#535154', '#6B4C9A', '#922428', '#948B3D', '#00adb5', '#f08a5d' ]
def hdd_ds(field):
return [ DataSource('hdd_' + d + '.rrd', field, d, False) for d in C.drives ]
def traffic_ds(units, direction):
color = itertools.cycle(graph_colors[:3])
field = '_{units}_{direction}'.format(units=units, direction=direction)
return [
DataSource(db_fname='traffic_{dev}.rrd'.format(dev=dev), field=proto + field,
legend='{}-{}'.format(dev, proto.upper()), is_area=True, color=color.next())
for dev, proto in itertools.product(C.network_devices[:-1], ['tcp', 'udp', 'all'])
] + [
DataSource('traffic_eth0.rrd', 'tcp' + field, '', False, ''),
DataSource('traffic_eth0.rrd', 'udp' + field, '', False, '', True),
DataSource('traffic_eth0.rrd', 'all' + field, 'eth0', False, '#000000', True)
]
def connections_ds(direction):
color = itertools.cycle(graph_colors[:2])
return [
DataSource(db_fname='traffic_{dev}.rrd'.format(dev=dev),
field='{proto}_new_{direction}'.format(proto=proto, direction=direction),
legend='{}-{}'.format(dev, proto),
is_area=True, color=color.next())
for dev, proto in itertools.product(C.network_devices, ['tcp', 'udp'])
]
def smart_graph(attr, field, label=None):
sattr = str(attr).zfill(3)
return Graph('smart_' + sattr, '{} ({}-{})'.format(smart_names[attr], sattr, field), label,
[ DataSource('smart_' + hdd + '.rrd', 'a{}_{}'.format(sattr, field), hdd, False) for hdd in C.drives ])
graphs = [
Graph('hdd_rrqm_s', 'Read requests merged per second that were queued to the device', 'rrqm/s', hdd_ds('rrqm_s')),
Graph('hdd_wrqm_s', 'Write requests merged per second that were queued to the device', 'wrqm/s ', hdd_ds('wrqm_s')),
Graph('hdd_r_s', 'Read requests that were issued to the device per second', 'r/s', hdd_ds('r_s')),
Graph('hdd_w_s', 'Write requests that were issued to the device per second', 'w/s', hdd_ds('w_s')),
Graph('hdd_rkB_s', 'Kilobytes read from the device per second', 'rkB/s ', hdd_ds('rkB_s')),
Graph('hdd_wkB_s', 'Kilobytes written to the device per second', 'wkB/s ', hdd_ds('wkB_s')),
Graph('hdd_avgrq_sz', 'Avg size of the requests that were issued to the device', 'sectors', hdd_ds('avgrq_sz')),
Graph('hdd_avgqu_sz', 'Avg queue length of the requests that were issued to the device', 'requests', hdd_ds('avgqu_sz')),
Graph('hdd_await', 'Avg time for I/O requests issued to the device to be served', 'milliseconds', hdd_ds('await')),
Graph('hdd_r_await', 'Avg time for READ requests issued to the device to be served', 'milliseconds', hdd_ds('r_await')),
Graph('hdd_w_await', 'Avg time for WRITE requests issued to the device to be served', 'milliseconds', hdd_ds('w_await')),
Graph('hdd_svctm', '(OBSOLETE) Avg service time for I/O requests that were issued to the device', 'milliseconds', hdd_ds('svctm')),
Graph('hdd_util', 'Percentage of CPU time during which I/O requests were issued to the device', '%', hdd_ds('util')),
Graph('cpu_load', 'CPU loads', '%', [ DataSource('cpu.rrd', field, field, True) for field in C.CpuStat._fields if field != 'idle']),
Graph('cpu_la', 'CPU load averages', None, [ DataSource('cpu_la.rrd', field, field, False) for field in C.CpuLa._fields]),
Graph('traffic_in_bytes', 'Incoming bytes', 'bytes/s', traffic_ds('bytes', 'in')),
Graph('traffic_out_bytes', 'Outgoing bytes', 'bytes/s', traffic_ds('bytes', 'out')),
Graph('traffic_in_pckts', 'Incoming packets', 'packets/s', traffic_ds('pckts', 'in')),
Graph('traffic_out_pckts', 'Outgoing packets', 'packets/s', traffic_ds('pckts', 'out')),
Graph('incoming_connections', 'Incoming connections', 'count', connections_ds('in')),
Graph('outgoing_connections', 'Outgoing connections', 'count', connections_ds('out')),
Graph('sockets', 'Sockets', 'sockets',
[ DataSource('sockets.rrd', field, field, True) for field in 'estab closed orphaned synrecv tw tw2'.split(' ') ] +\
[ DataSource('sockets.rrd', field, field, False) for field in 'total tcp ports'.split(' ') ]),
Graph('ups_v', 'Voltages', 'volts', [ DataSource('ups.rrd', 'LINEV', 'AC line', False), DataSource('ups.rrd', 'BATTV', 'UPS battery', False)]),
Graph('ups_load', 'Load and charge', '%', [ DataSource('ups.rrd', 'LOADPCT', 'UPS load', False), DataSource('ups.rrd', 'BCHARGE', 'Battery charge', False) ]),
Graph('ups_misc', 'Misc UPS stats', None, [ DataSource('ups.rrd', 'TIMELEFT', 'Time on battery left', False),
DataSource('ups.rrd', 'NUMXFERS', 'Number of transfers', False), DataSource('ups.rrd', 'TONBATT', 'Time on battery', False),
DataSource('ups.rrd', 'CUMONBATT', 'CUMONBATT', False) ]),
smart_graph(194, 'raw', '°C'),
smart_graph(1, 'cur'),
smart_graph(3, 'raw', 'msec'),
smart_graph(4, 'raw'),
smart_graph(7, 'cur'),
smart_graph(9, 'raw'),
smart_graph(11, 'raw'),
smart_graph(12, 'raw'),
smart_graph(195, 'cur'),
]
graph_intervals = {
'hourly': 'now-1h',
'optimal': 'now-400m',
'daily': 'now-1d',
'weekly': 'now-1w',
'monthly': 'now-30d',
'yearly': 'now-1y'
}
def plot(graph, interval):
assert interval in graph_intervals
cmd = ['rrdtool', 'graph', '-' , '--start', graph_intervals[interval], '--title', graph.title, '--imgformat', 'SVG',
'--lower-limit', '0' ]
if graph.vlabel:
cmd += ['--vertical-label', graph.vlabel]
ds_list = graph.ds if isinstance(graph.ds, list) else [graph.ds]
color = itertools.cycle(graph_colors)
for i in range(0, len(ds_list)):
ds = ds_list[i]
cmd.append('DEF:v{i}={db}:{field}:AVERAGE'.format(i=i, db=os.path.join(C.rrd_path, ds.db_fname), field=ds.field))
cmd.append('{type}:v{i}{color}:{legend}{stack}'.format(
type='AREA' if ds.is_area else 'LINE1', i=i, color=color.next() if ds.color is None else ds.color,
legend=ds.legend, stack=':STACK' if ds.is_area or ds.stack else ''))
#print(' '.join(cmd))
rrd = subprocess.Popen(cmd, stdout=subprocess.PIPE)
gz = gzip.open(os.path.join(C.graph_path, graph.fname + '_' + interval + '.svgz'), 'wb')
while rrd.poll() is None:
gz.write(rrd.stdout.read())
gz.close()
assert rrd.poll() == 0
for graph in graphs:
plot(graph, sys.argv[1])
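
# Typical invocation, e.g. from cron (one interval per run); the argument must
# be one of the keys of ``graph_intervals`` above:
#   ./create_graph.py optimal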
| mit | -2,728,556,869,338,496,500 | 54.723577 | 162 | 0.626204 | false |
dendory/chartjs | sample3.py | 1 | 5887 | # This script parses the CSV files gathered from the Canadian Weather Service and makes charts
import chartjs
import csv
# We will cover these years
startyear = 1981
endyear = 2012
# We will make charts for 3 major Canadian cities
cities = [
{'name': "Montreal", 'fillColor': "rgba(100,50,200,0.25)", 'strokeColor': "rgba(100,50,200,0.75)", 'pointColor': "rgba(100,50,200,0.75)"},
{'name': "Toronto", 'fillColor': "rgba(200,100,100,0.25)", 'strokeColor': "rgba(200,100,100,0.75)", 'pointColor': "rgba(200,100,100,0.75)"},
{'name': "Vancouver", 'fillColor': "rgba(100,200,100,0.25)", 'strokeColor': "rgba(100,200,100,0.75)", 'pointColor': "rgba(100,200,100,0.75)"},
]
# 3 of the charts will cover all 12 months
months = ["Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", "Oct", "Nov", "Dec"]
# The first chart will show median temperatures over the years
global_chart = chartjs.chart("Temperature medians for 1981 - 2012 in Celsius<br><font color='#6432C8'>Montreal</font>, <font color='#B1846B'>Toronto</font>, <font color='#6CCB6C'>Vancouver</font>", "Line", 1200, 600)
global_chart.set_params(JSinline = False)
# Each city will have a chart showing each month's median temperature
montreal_chart = chartjs.chart("Montreal temperatures for 2012 in Celsius", "Line", 390, 200)
montreal_chart.canvas = "montreal"
montreal_chart.set_labels(months)
toronto_chart = chartjs.chart("Toronto temperatures for 2012 in Celsius", "Line", 390, 200)
toronto_chart.canvas = "toronto"
toronto_chart.set_labels(months)
vancouver_chart = chartjs.chart("Vancouver temperatures for 2012 in Celsius", "Line", 390, 200)
vancouver_chart.canvas = "vancouver"
vancouver_chart.set_labels(months)
_startyear = startyear
# Loop one city at a time
for city in cities:
city_data = []
years = []
medians = []
# Loop one year at a time
while startyear < endyear+1:
# Open CSV file for the city and year
f = open("data/" + city['name'] + "/" + str(startyear) + ".csv", 'r', newline='')
next(f)
csvreader = csv.reader(f, delimiter=',')
totalvalues = 0
values = 0
monthly_values = 0
monthly_totalvalues = 0
current_month = '01'
# Parse the CSV line by line
for line in csvreader:
try:
# For each line, we add the value and the number of values
values += float(line[9])
totalvalues += 1
except:
pass
try:
# For year 2012, we also record monthly medians for the city charts
if startyear == 2012:
# If the month column changed, that means we must compute the median for last month
if str(line[2]) != str(current_month):
# All the added values, divided by the number of values
median = "{0:.2f}".format(float(monthly_values / monthly_totalvalues))
# Append the median to the current city's list
city_data.append(median)
# Set the current month to the new value
current_month = str(line[2])
# Reset variables to 0
monthly_values = 0
monthly_totalvalues = 0
# For each line in this month, add the value and add the number of values
monthly_values += float(line[9])
monthly_totalvalues += 1
except:
pass
# For the last month, we need to calculate the median one last time
if monthly_totalvalues > 0:
median = "{0:.2f}".format(float(monthly_values / monthly_totalvalues))
city_data.append(median)
# After reading all the lines in the file, calculate the median for the year
if totalvalues > 0:
median = "{0:.2f}".format(float(values / totalvalues))
medians.append(median)
else:
medians.append(0)
# Append the current year to the labels
years.append(startyear)
# Create all of the city charts
if startyear == 2012:
if city['name'] == "Montreal":
montreal_chart.set_params(fillColor = city['fillColor'], strokeColor = city['strokeColor'], pointColor = city['pointColor'])
montreal_chart.add_dataset(city_data)
if city['name'] == "Toronto":
toronto_chart.set_params(fillColor = city['fillColor'], strokeColor = city['strokeColor'], pointColor = city['pointColor'])
toronto_chart.add_dataset(city_data)
if city['name'] == "Vancouver":
vancouver_chart.set_params(fillColor = city['fillColor'], strokeColor = city['strokeColor'], pointColor = city['pointColor'])
vancouver_chart.add_dataset(city_data)
startyear += 1
# Create the global chart
global_chart.set_labels(years)
global_chart.set_params(fillColor = city['fillColor'], strokeColor = city['strokeColor'], pointColor = city['pointColor'])
global_chart.add_dataset(medians)
startyear = _startyear
f.close()
# Create the HTML page and the 4 charts individually
f = open("sample3.html", 'w')
output = """<!doctype html>
<html>
<head>
<title>Temperature charts</title>
{1}
</head>
<body>
<div style="width: {2}px; height: {3}px; max-width: 99%" class="chartjs">
<center><h2>{0}</h2></center>
""".format(global_chart.title, global_chart.js, str(global_chart.width), str(global_chart.height))
output += global_chart.make_chart_canvas()
output += " <table width='99%'><tr><td><center><h4>" + montreal_chart.title + "</h4></center>"
output += montreal_chart.make_chart_canvas()
output += " </td><td><center><h4>" + toronto_chart.title + "</h4></center>"
output += toronto_chart.make_chart_canvas()
output += " </td><td><center><h4>" + vancouver_chart.title + "</h4></center>"
output += vancouver_chart.make_chart_canvas()
output += """ </td></tr></table>
<script>
window.onload = function()
{"""
output += global_chart.make_chart_onload()
output += montreal_chart.make_chart_onload()
output += toronto_chart.make_chart_onload()
output += vancouver_chart.make_chart_onload()
output += """ }
</script>
</div>
</body>
</html>
"""
f.write(output)
f.close()
| mit | 831,402,048,602,100,100 | 39.167832 | 216 | 0.660608 | false |
MSHallOpenSoft/plotter | GUI_final.py | 1 | 59129 | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'GUI_final.ui'
#
# Created: Thu Mar 19 22:03:17 2015
# by: PyQt4 UI code generator 4.11.3
#
# WARNING! All changes made in this file will be lost!
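#
# To regenerate this module after editing the .ui file, re-run the UI
# compiler, e.g.:
#   pyuic4 GUI_final.ui -o GUI_final.py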
from PyQt4 import QtCore, QtGui
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
def _fromUtf8(s):
return s
try:
_encoding = QtGui.QApplication.UnicodeUTF8
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig)
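
# Typical hand-written entry point that consumes the generated class below
# (a sketch only -- pyuic4 does not emit this, and the real application may
# wire the UI up differently):
#
#   import sys
#   app = QtGui.QApplication(sys.argv)
#   window = QtGui.QMainWindow()
#   ui = Ui_MainWindow()
#   ui.setupUi(window)
#   window.show()
#   sys.exit(app.exec_())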
class Ui_MainWindow(object):
def setupUi(self, MainWindow):
MainWindow.setObjectName(_fromUtf8("MainWindow"))
MainWindow.resize(1396, 727)
MainWindow.setStyleSheet(_fromUtf8("QFrame{\n"
"border:none;\n"
"}\n"
"QStatusBar{ \n"
"background:qlineargradient(spread:pad, x1:0, y1:1, x2:0, y2:0.33, stop:0 rgba(255, 255, 255, 255), stop:0.125 rgba(155, 174, 198, 255), stop:0.318182 rgba(104, 117, 133, 255), stop:0.534091 rgba(65, 73, 83, 255), stop:0.875 rgba(42, 47, 54, 255)); }\n"
" QMainWindow{\n"
" background-image: url(:/img/Icons/rsz_back1.jpg); border:none;\n"
" background-color:qlineargradient(spread:pad, x1:1, y1:1, x2:0.483136, y2:0.466, stop:0 rgba(219, 219, 219, 255), stop:1 rgba(255, 255, 255, 255));\n"
" text-align: center; }\n"
" QGroupBox{ \n"
"background-color: qlineargradient(spread:pad, x1:1, y1:1, x2:0.483136, y2:0.466, stop:0 rgba(219, 219, 219, 255), stop:1 rgba(255, 255, 255, 255)); }\n"
" QTabWidget{\n"
" background-color: qlineargradient(spread:pad, x1:1, y1:1, x2:0.483136, y2:0.466, stop:0 rgba(219, 219, 219, 255), stop:1 rgba(255, 255, 255, 255)); }\n"
" QDockWidget{\n"
" background-color:#737373;\n"
" border:none;\n"
" padding:0px; \n"
"}\n"
" QSlider::groove:horizontal {\n"
" background:red;\n"
" height: 15px;\n"
" position: absolute; \n"
"left: 4px; \n"
"right: 4px; }\n"
" QSlider::handle:horizontal {\n"
" height:20px;\n"
" width: 10px; \n"
"background: qlineargradient(spread:pad, x1:0, y1:0.477, x2:0, y2:0, stop:0.125 rgba(42, 47, 54, 255), stop:0.465909 rgba(65, 73, 83, 255), stop:0.681818 rgba(104, 117, 133, 255), stop:0.875 rgba(155, 174, 198, 255), stop:1 rgba(255, 255, 255, 255));\n"
" margin: -4px; }\n"
" QSlider::handle:hover:horizontal { \n"
"height:20px;\n"
" width: 10px;\n"
" background:qlineargradient(spread:pad, x1:0, y1:0.477, x2:0, y2:0, stop:0.125 rgba(91, 95, 100, 255), stop:0.465909 rgba(122, 132, 146, 255), stop:0.681818 rgba(141, 153, 167, 255), stop:0.875 rgba(181, 195, 212, 255), stop:1 rgba(255, 255, 255, 255));\n"
" margin: -4px;\n"
" }\n"
" QSlider::add-page:horizontal { background:qlineargradient(spread:pad, x1:0, y1:1, x2:0, y2:0.0802727, stop:0 rgba(255, 255, 255, 255), stop:0.0397727 rgba(222, 255, 196, 255), stop:0.176136 rgba(168, 255, 99, 255), stop:0.642045 rgba(127, 200, 70, 255));\n"
" }\n"
" QSlider::sub-page:horizontal { \n"
"background: qlineargradient(spread:pad, x1:0, y1:0.664, x2:0, y2:0, stop:0.357955 rgba(89, 189, 9, 255), stop:0.801136 rgba(120, 255, 13, 255), stop:0.9375 rgba(175, 255, 111, 255), stop:1 rgba(255, 255, 255, 255)) ;\n"
" }\n"
" QToolButton{ \n"
"position: relative;\n"
" border: none; \n"
"outline:none;\n"
" color: black;\n"
" padding: 0px;\n"
" border-radius: 2px;\n"
" font-size: 22px;\n"
" }\n"
" QToolButton:hover:!pressed{ \n"
"position: relative;\n"
" border: none; \n"
"outline:none; \n"
"background-color:qlineargradient(spread:pad, x1:0, y1:1, x2:0, y2:0.0802727, stop:0 rgba(255, 255, 255, 255), stop:0.0397727 rgba(222, 255, 196, 255), stop:0.176136 rgba(168, 255, 99, 255), stop:0.642045 rgba(127, 200, 70, 255));\n"
" color: white;\n"
" padding: 0px;\n"
" border-radius: 2px;\n"
" font-size: 22px; \n"
"}\n"
" QPushButton{ \n"
"position: relative;\n"
" border:none;\n"
" outline:none; \n"
"background-color: qlineargradient(spread:pad, x1:0, y1:0.664, x2:0, y2:0, stop:0.357955 rgba(89, 189, 9, 255), stop:0.801136 rgba(120, 255, 13, 255), stop:0.9375 rgba(175, 255, 111, 255), stop:1 rgba(255, 255, 255, 255));\n"
" color: white;\n"
" padding: 6px 20px; \n"
"border-radius: 2px;\n"
" font-size: 20px;\n"
" }\n"
" QPushButton:hover:!pressed{ \n"
"position: relative;\n"
" border: none; \n"
"outline:none;\n"
" background:qlineargradient(spread:pad, x1:0, y1:1, x2:0, y2:0.0802727, stop:0 rgba(255, 255, 255, 255), stop:0.0397727 rgba(222, 255, 196, 255), stop:0.176136 rgba(168, 255, 99, 255), stop:0.642045 rgba(127, 200, 70, 255));\n"
" color: white; \n"
"padding: 6px 20px; \n"
"border-radius: 2px;\n"
" font-size:20px; \n"
"} \n"
"QComboBox { \n"
"border: none; \n"
"padding: 1px 18px 1px 3px; \n"
"min-width: 6em;\n"
" }\n"
" QComboBox, QComboBox:drop-down \n"
"{\n"
" background:qlineargradient(spread:pad, x1:0, y1:0.664, x2:0, y2:0, stop:0.357955 rgba(89, 189, 9, 255), stop:0.801136 rgba(120, 255, 13, 255), stop:0.9375 rgba(175, 255, 111, 255), stop:1 rgba(255, 255, 255, 255));\n"
" }\n"
" QComboBox:on, QComboBox:drop-down:on { background:qlineargradient(spread:pad, x1:0, y1:1, x2:0, y2:0.0802727, stop:0 rgba(255, 255, 255, 255), stop:0.0397727 rgba(222, 255, 196, 255), stop:0.176136 rgba(168, 255, 99, 255), stop:0.642045 rgba(127, 200, 70, 255)); \n"
"}\n"
" QComboBox:on {\n"
" padding-top: 3px;\n"
" padding-left: 4px; \n"
"} \n"
"QComboBox::drop-down{\n"
" subcontrol-origin: padding; \n"
"subcontrol-position: top right;\n"
" width: 15px; \n"
"border-left-width: 1px; \n"
"border-left-color: darkgray; \n"
"border-left-style: solid;\n"
" }\n"
" QComboBox::down-arrow { \n"
"image:url(:/arrow/Icons/arrow-new.png);\n"
" } \n"
"QComboBox::down-arrow:on {\n"
" top: 1px;\n"
" left: 1px;\n"
" }\n"
" QMenu {\n"
" background-color: qlineargradient(spread:pad, x1:1, y1:1, x2:0.483136, y2:0.466, stop:0 rgba(219, 219, 219, 255), stop:1 rgba(255, 255, 255, 255)); \n"
"border: none; \n"
"}\n"
" QMenu::item {\n"
" background-color: transparent;\n"
" }\n"
" QMenu::item:selected {\n"
" background-color:rgb(120, 255, 13);\n"
" }\n"
" QMenuBar { \n"
"background-color:qlineargradient(spread:pad, x1:0, y1:0, x2:1, y2:1, stop:0 #DBDBDB, stop:1 rgba(255, 255, 255, 255)) } QMenuBar::item {\n"
" spacing: 3px;\n"
" padding: 1px 4px; \n"
"background: transparent; \n"
"border-radius: 2px;\n"
" }\n"
" QMenuBar::item:selected {\n"
" background:#737373;\n"
" }\n"
" QMenuBar::item:pressed \n"
"{ background: #414953; \n"
"} \n"
"QTableWidget{ \n"
"background:qlineargradient(spread:pad, x1:1, y1:1, x2:0, y2:0, stop:0 #DBDBDB, stop:1 rgba(255, 255, 255, 255));\n"
" border:1px solid rgb(171, 173, 179);\n"
" }\n"
" QTextEdit{ \n"
"background:qlineargradient(spread:pad, x1:1, y1:1, x2:0, y2:0, stop:0 #DBDBDB, stop:1 rgba(255, 255, 255, 255)); \n"
"}\n"
" QScrollBar:horizontal {\n"
" border: none; background: #DBDBDB; height: 15px; margin: 0px 20px 0 20px; \n"
"}\n"
" QScrollBar::handle:horizontal { background:qlineargradient(spread:pad, x1:0, y1:0.664, x2:0, y2:0, stop:0.357955 rgba(89, 189, 9, 255), stop:0.801136 rgba(120, 255, 13, 255), stop:0.9375 rgba(175, 255, 111, 255), stop:1 rgba(255, 255, 255, 255));\n"
" min-width: 20px;\n"
" }\n"
" QScrollBar::handle:horizontal:hover { background:qlineargradient(spread:pad, x1:0, y1:1, x2:0, y2:0.0802727, stop:0 rgba(255, 255, 255, 255), stop:0.0397727 rgba(222, 255, 196, 255), stop:0.176136 rgba(168, 255, 99, 255), stop:0.642045 rgba(127, 200, 70, 255));\n"
" min-width: 20px;\n"
" } \n"
"QScrollBar::add-line:horizontal {\n"
" border: none;\n"
" background:#DBDBDB; \n"
"width: 20px;\n"
" subcontrol-position: right;\n"
" subcontrol-origin: margin;\n"
" }\n"
" QScrollBar::sub-line:horizontal {\n"
" border:none; \n"
"background:#DBDBDB; \n"
"width: 20px;\n"
" subcontrol-position: left;\n"
" subcontrol-origin: margin;\n"
" }\n"
" QScrollBar::add-line:horizontal:hover:!pressed { \n"
"border: none;\n"
" background: qlineargradient(spread:pad, x1:0, y1:0.664, x2:0, y2:0, stop:0.357955 rgba(89, 189, 9, 255), stop:0.801136 rgba(120, 255, 13, 255), stop:0.9375 rgba(175, 255, 111, 255), stop:1 rgba(255, 255, 255, 255)); \n"
"width: 20px;\n"
" subcontrol-position: right; \n"
"subcontrol-origin: margin; \n"
"}\n"
" QScrollBar::sub-line:horizontal:hover:!pressed { \n"
"border:none;\n"
" background: qlineargradient(spread:pad, x1:0, y1:0.664, x2:0, y2:0, stop:0.357955 rgba(89, 189, 9, 255), stop:0.801136 rgba(120, 255, 13, 255), stop:0.9375 rgba(175, 255, 111, 255), stop:1 rgba(255, 255, 255, 255));\n"
" width: 20px; \n"
"subcontrol-position: left;\n"
" subcontrol-origin: margin; \n"
"}\n"
" QScrollBar::left-arrow:horizontal{\n"
" image: url(:/arrow/Icons/left-arrow.png);\n"
" }\n"
" QScrollBar::right-arrow:horizontal{\n"
" image: url(:/arrow/Icons/right-arrow.png);\n"
" }\n"
" QScrollBar:vertical {\n"
" border: none;\n"
" background: #DBDBDB;\n"
" width: 15px; \n"
"margin: 0px 20px 0 20px; \n"
"} \n"
"QScrollBar::handle:vertical { background:qlineargradient(spread:pad, x1:0, y1:0.664, x2:0, y2:0, stop:0.357955 rgba(89, 189, 9, 255), stop:0.801136 rgba(120, 255, 13, 255), stop:0.9375 rgba(175, 255, 111, 255), stop:1 rgba(255, 255, 255, 255));\n"
" min-height: 20px; }\n"
" QScrollBar::handle:vertical:hover { background:qlineargradient(spread:pad, x1:0, y1:1, x2:0, y2:0.0802727, stop:0 rgba(255, 255, 255, 255), stop:0.0397727 rgba(222, 255, 196, 255), stop:0.176136 rgba(168, 255, 99, 255), stop:0.642045 rgba(127, 200, 70, 255));\n"
" min-height: 15px;\n"
" }\n"
" QScrollBar::add-line:vertical {\n"
" border: none;\n"
" background:#DBDBDB; \n"
"height: 20px;\n"
" subcontrol-position: bottom; \n"
"subcontrol-origin: margin; \n"
"}\n"
" QScrollBar::sub-line:vertical {\n"
" border:none; \n"
"background:#DBDBDB; \n"
"height: 20px;\n"
" subcontrol-position: top;\n"
" subcontrol-origin: margin;\n"
" } \n"
"QScrollBar::add-line:vertical:hover:!pressed { \n"
"border: none; \n"
"background: qlineargradient(spread:pad, x1:0, y1:0.664, x2:0, y2:0, stop:0.357955 rgba(89, 189, 9, 255), stop:0.801136 rgba(120, 255, 13, 255), stop:0.9375 rgba(175, 255, 111, 255), stop:1 rgba(255, 255, 255, 255));\n"
" height: 20px;\n"
" subcontrol-position:bottom; \n"
"subcontrol-origin: margin;\n"
" }\n"
" QScrollBar::sub-line:vertical:hover:!pressed { b\n"
"order:none; \n"
"background: qlineargradient(spread:pad, x1:0, y1:0.664, x2:0, y2:0, stop:0.357955 rgba(89, 189, 9, 255), stop:0.801136 rgba(120, 255, 13, 255), stop:0.9375 rgba(175, 255, 111, 255), stop:1 rgba(255, 255, 255, 255));\n"
" height: 20px; \n"
"subcontrol-position:top;\n"
" subcontrol-origin: margin;\n"
" }\n"
" QScrollBar::up-arrow:vertical{ \n"
"image: url(:/arrow/Icons/up-arrow.png); \n"
"} \n"
"QScrollBar::down-arrow:vertical{\n"
" image: url(:/arrow/Icons/down-arrow.png);\n"
" }"))
self.centralwidget = QtGui.QWidget(MainWindow)
self.centralwidget.setObjectName(_fromUtf8("centralwidget"))
self.horizontalLayout_3 = QtGui.QHBoxLayout(self.centralwidget)
self.horizontalLayout_3.setObjectName(_fromUtf8("horizontalLayout_3"))
self.frame_2 = QtGui.QFrame(self.centralwidget)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.frame_2.sizePolicy().hasHeightForWidth())
self.frame_2.setSizePolicy(sizePolicy)
self.frame_2.setMinimumSize(QtCore.QSize(20, 0))
self.frame_2.setStyleSheet(_fromUtf8(""))
self.frame_2.setFrameShape(QtGui.QFrame.StyledPanel)
self.frame_2.setFrameShadow(QtGui.QFrame.Raised)
self.frame_2.setObjectName(_fromUtf8("frame_2"))
self.horizontalLayout_4 = QtGui.QHBoxLayout(self.frame_2)
self.horizontalLayout_4.setObjectName(_fromUtf8("horizontalLayout_4"))
self.verticalLayout_5 = QtGui.QVBoxLayout()
self.verticalLayout_5.setObjectName(_fromUtf8("verticalLayout_5"))
self.pushButton = QtGui.QPushButton(self.frame_2)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.pushButton.sizePolicy().hasHeightForWidth())
self.pushButton.setSizePolicy(sizePolicy)
self.pushButton.setMaximumSize(QtCore.QSize(20, 50))
self.pushButton.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
self.pushButton.setText(_fromUtf8(""))
icon = QtGui.QIcon()
icon.addPixmap(QtGui.QPixmap(_fromUtf8(":/arrow/Icons/double-right.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.pushButton.setIcon(icon)
self.pushButton.setObjectName(_fromUtf8("pushButton"))
self.verticalLayout_5.addWidget(self.pushButton)
self.horizontalLayout_4.addLayout(self.verticalLayout_5)
self.horizontalLayout_3.addWidget(self.frame_2)
self.frame = QtGui.QFrame(self.centralwidget)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Preferred, QtGui.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.frame.sizePolicy().hasHeightForWidth())
self.frame.setSizePolicy(sizePolicy)
self.frame.setMaximumSize(QtCore.QSize(320, 16777215))
self.frame.setFrameShape(QtGui.QFrame.StyledPanel)
self.frame.setFrameShadow(QtGui.QFrame.Raised)
self.frame.setObjectName(_fromUtf8("frame"))
self.verticalLayout_3 = QtGui.QVBoxLayout(self.frame)
self.verticalLayout_3.setObjectName(_fromUtf8("verticalLayout_3"))
self.horizontalLayout_5 = QtGui.QHBoxLayout()
self.horizontalLayout_5.setSpacing(6)
self.horizontalLayout_5.setObjectName(_fromUtf8("horizontalLayout_5"))
self.pushButton_3 = QtGui.QPushButton(self.frame)
self.pushButton_3.setEnabled(True)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.pushButton_3.sizePolicy().hasHeightForWidth())
self.pushButton_3.setSizePolicy(sizePolicy)
self.pushButton_3.setMinimumSize(QtCore.QSize(50, 0))
self.pushButton_3.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
self.pushButton_3.setStyleSheet(_fromUtf8(""))
self.pushButton_3.setObjectName(_fromUtf8("pushButton_3"))
self.horizontalLayout_5.addWidget(self.pushButton_3)
self.toolButton_7 = QtGui.QToolButton(self.frame)
self.toolButton_7.setMinimumSize(QtCore.QSize(10, 0))
self.toolButton_7.setMaximumSize(QtCore.QSize(35, 16777215))
self.toolButton_7.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
self.toolButton_7.setStyleSheet(_fromUtf8(""))
icon1 = QtGui.QIcon()
icon1.addPixmap(QtGui.QPixmap(_fromUtf8("Icons/Add-New-48.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.toolButton_7.setIcon(icon1)
self.toolButton_7.setIconSize(QtCore.QSize(40, 30))
self.toolButton_7.setObjectName(_fromUtf8("toolButton_7"))
self.horizontalLayout_5.addWidget(self.toolButton_7)
self.toolButton_9 = QtGui.QToolButton(self.frame)
self.toolButton_9.setMinimumSize(QtCore.QSize(10, 0))
self.toolButton_9.setMaximumSize(QtCore.QSize(35, 16777215))
self.toolButton_9.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
self.toolButton_9.setStyleSheet(_fromUtf8(""))
icon2 = QtGui.QIcon()
icon2.addPixmap(QtGui.QPixmap(_fromUtf8("Icons/Minus-48.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.toolButton_9.setIcon(icon2)
self.toolButton_9.setIconSize(QtCore.QSize(40, 30))
self.toolButton_9.setObjectName(_fromUtf8("toolButton_9"))
self.horizontalLayout_5.addWidget(self.toolButton_9)
self.toolButton_8 = QtGui.QToolButton(self.frame)
self.toolButton_8.setMinimumSize(QtCore.QSize(10, 0))
self.toolButton_8.setMaximumSize(QtCore.QSize(35, 16777215))
self.toolButton_8.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
self.toolButton_8.setStyleSheet(_fromUtf8(""))
icon3 = QtGui.QIcon()
icon3.addPixmap(QtGui.QPixmap(_fromUtf8("Icons/Folder-Open-48.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.toolButton_8.setIcon(icon3)
self.toolButton_8.setIconSize(QtCore.QSize(40, 30))
self.toolButton_8.setObjectName(_fromUtf8("toolButton_8"))
self.horizontalLayout_5.addWidget(self.toolButton_8)
self.toolButton_5 = QtGui.QToolButton(self.frame)
self.toolButton_5.setMinimumSize(QtCore.QSize(10, 0))
self.toolButton_5.setMaximumSize(QtCore.QSize(35, 16777215))
self.toolButton_5.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
self.toolButton_5.setStyleSheet(_fromUtf8(""))
icon4 = QtGui.QIcon()
icon4.addPixmap(QtGui.QPixmap(_fromUtf8("Icons/Save-48.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.toolButton_5.setIcon(icon4)
self.toolButton_5.setIconSize(QtCore.QSize(40, 30))
self.toolButton_5.setObjectName(_fromUtf8("toolButton_5"))
self.horizontalLayout_5.addWidget(self.toolButton_5)
spacerItem = QtGui.QSpacerItem(20, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
self.horizontalLayout_5.addItem(spacerItem)
self.verticalLayout_3.addLayout(self.horizontalLayout_5)
self.tableWidget = QtGui.QTableWidget(self.frame)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Expanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.tableWidget.sizePolicy().hasHeightForWidth())
self.tableWidget.setSizePolicy(sizePolicy)
self.tableWidget.setMinimumSize(QtCore.QSize(300, 0))
self.tableWidget.setStyleSheet(_fromUtf8(""))
self.tableWidget.setObjectName(_fromUtf8("tableWidget"))
self.tableWidget.setColumnCount(3)
self.tableWidget.setRowCount(0)
item = QtGui.QTableWidgetItem()
self.tableWidget.setHorizontalHeaderItem(0, item)
item = QtGui.QTableWidgetItem()
self.tableWidget.setHorizontalHeaderItem(1, item)
item = QtGui.QTableWidgetItem()
self.tableWidget.setHorizontalHeaderItem(2, item)
self.verticalLayout_3.addWidget(self.tableWidget)
self.pushButton_21 = QtGui.QPushButton(self.frame)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.pushButton_21.sizePolicy().hasHeightForWidth())
self.pushButton_21.setSizePolicy(sizePolicy)
self.pushButton_21.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
self.pushButton_21.setStyleSheet(_fromUtf8(""))
self.pushButton_21.setObjectName(_fromUtf8("pushButton_21"))
self.verticalLayout_3.addWidget(self.pushButton_21)
self.horizontalLayout_3.addWidget(self.frame)
self.verticalLayout_6 = QtGui.QVBoxLayout()
self.verticalLayout_6.setObjectName(_fromUtf8("verticalLayout_6"))
self.widget = QtGui.QWidget(self.centralwidget)
self.widget.setStyleSheet(_fromUtf8(""))
self.widget.setObjectName(_fromUtf8("widget"))
self.verticalLayout_6.addWidget(self.widget)
self.horizontalLayout_3.addLayout(self.verticalLayout_6)
MainWindow.setCentralWidget(self.centralwidget)
self.menubar = QtGui.QMenuBar(MainWindow)
self.menubar.setGeometry(QtCore.QRect(0, 0, 1396, 21))
self.menubar.setObjectName(_fromUtf8("menubar"))
self.menuFile = QtGui.QMenu(self.menubar)
self.menuFile.setObjectName(_fromUtf8("menuFile"))
self.menuEdit = QtGui.QMenu(self.menubar)
self.menuEdit.setObjectName(_fromUtf8("menuEdit"))
self.menuView = QtGui.QMenu(self.menubar)
self.menuView.setObjectName(_fromUtf8("menuView"))
self.menuAbout = QtGui.QMenu(self.menubar)
self.menuAbout.setObjectName(_fromUtf8("menuAbout"))
MainWindow.setMenuBar(self.menubar)
self.statusbar = QtGui.QStatusBar(MainWindow)
self.statusbar.setObjectName(_fromUtf8("statusbar"))
MainWindow.setStatusBar(self.statusbar)
self.dockWidget = QtGui.QDockWidget(MainWindow)
self.dockWidget.setMinimumSize(QtCore.QSize(320, 91))
self.dockWidget.setObjectName(_fromUtf8("dockWidget"))
self.dockWidgetContents = QtGui.QWidget()
self.dockWidgetContents.setObjectName(_fromUtf8("dockWidgetContents"))
self.gridLayout = QtGui.QGridLayout(self.dockWidgetContents)
self.gridLayout.setObjectName(_fromUtf8("gridLayout"))
self.comboBox_5 = QtGui.QComboBox(self.dockWidgetContents)
self.comboBox_5.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
self.comboBox_5.setObjectName(_fromUtf8("comboBox_5"))
self.comboBox_5.addItem(_fromUtf8(""))
self.comboBox_5.addItem(_fromUtf8(""))
self.comboBox_5.addItem(_fromUtf8(""))
self.gridLayout.addWidget(self.comboBox_5, 0, 0, 1, 1)
self.textEdit = QtGui.QTextEdit(self.dockWidgetContents)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.textEdit.sizePolicy().hasHeightForWidth())
self.textEdit.setSizePolicy(sizePolicy)
self.textEdit.setMinimumSize(QtCore.QSize(0, 20))
self.textEdit.setObjectName(_fromUtf8("textEdit"))
self.gridLayout.addWidget(self.textEdit, 0, 1, 1, 1)
self.comboBox_6 = QtGui.QComboBox(self.dockWidgetContents)
self.comboBox_6.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
self.comboBox_6.setObjectName(_fromUtf8("comboBox_6"))
self.comboBox_6.addItem(_fromUtf8(""))
self.comboBox_6.addItem(_fromUtf8(""))
self.comboBox_6.addItem(_fromUtf8(""))
self.gridLayout.addWidget(self.comboBox_6, 1, 0, 1, 1)
self.textEdit_2 = QtGui.QTextEdit(self.dockWidgetContents)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.textEdit_2.sizePolicy().hasHeightForWidth())
self.textEdit_2.setSizePolicy(sizePolicy)
self.textEdit_2.setMinimumSize(QtCore.QSize(0, 20))
self.textEdit_2.setObjectName(_fromUtf8("textEdit_2"))
self.gridLayout.addWidget(self.textEdit_2, 1, 1, 1, 1)
self.dockWidget.setWidget(self.dockWidgetContents)
MainWindow.addDockWidget(QtCore.Qt.DockWidgetArea(1), self.dockWidget)
self.dockWidget_2 = QtGui.QDockWidget(MainWindow)
self.dockWidget_2.setMinimumSize(QtCore.QSize(427, 324))
self.dockWidget_2.setStyleSheet(_fromUtf8(""))
self.dockWidget_2.setObjectName(_fromUtf8("dockWidget_2"))
self.dockWidgetContents_2 = QtGui.QWidget()
self.dockWidgetContents_2.setObjectName(_fromUtf8("dockWidgetContents_2"))
self.verticalLayout_4 = QtGui.QVBoxLayout(self.dockWidgetContents_2)
self.verticalLayout_4.setObjectName(_fromUtf8("verticalLayout_4"))
self.groupBox_2 = QtGui.QGroupBox(self.dockWidgetContents_2)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Preferred, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.groupBox_2.sizePolicy().hasHeightForWidth())
self.groupBox_2.setSizePolicy(sizePolicy)
self.groupBox_2.setMinimumSize(QtCore.QSize(0, 50))
self.groupBox_2.setObjectName(_fromUtf8("groupBox_2"))
self.horizontalLayout_12 = QtGui.QHBoxLayout(self.groupBox_2)
self.horizontalLayout_12.setObjectName(_fromUtf8("horizontalLayout_12"))
self.horizontalLayout_7 = QtGui.QHBoxLayout()
self.horizontalLayout_7.setObjectName(_fromUtf8("horizontalLayout_7"))
self.comboBox = QtGui.QComboBox(self.groupBox_2)
self.comboBox.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
self.comboBox.setStyleSheet(_fromUtf8(""))
self.comboBox.setObjectName(_fromUtf8("comboBox"))
self.comboBox.addItem(_fromUtf8(""))
self.comboBox.addItem(_fromUtf8(""))
self.horizontalLayout_7.addWidget(self.comboBox)
self.comboBox_3 = QtGui.QComboBox(self.groupBox_2)
self.comboBox_3.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
self.comboBox_3.setObjectName(_fromUtf8("comboBox_3"))
self.comboBox_3.addItem(_fromUtf8(""))
self.comboBox_3.addItem(_fromUtf8(""))
self.comboBox_3.addItem(_fromUtf8(""))
self.horizontalLayout_7.addWidget(self.comboBox_3)
self.comboBox_2 = QtGui.QComboBox(self.groupBox_2)
self.comboBox_2.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
self.comboBox_2.setLayoutDirection(QtCore.Qt.LeftToRight)
self.comboBox_2.setAutoFillBackground(False)
self.comboBox_2.setFrame(True)
self.comboBox_2.setObjectName(_fromUtf8("comboBox_2"))
self.comboBox_2.addItem(_fromUtf8(""))
self.comboBox_2.addItem(_fromUtf8(""))
self.comboBox_2.addItem(_fromUtf8(""))
self.horizontalLayout_7.addWidget(self.comboBox_2)
self.horizontalLayout_12.addLayout(self.horizontalLayout_7)
self.verticalLayout_4.addWidget(self.groupBox_2)
self.verticalLayout_2 = QtGui.QVBoxLayout()
self.verticalLayout_2.setObjectName(_fromUtf8("verticalLayout_2"))
self.tabWidget_2 = QtGui.QTabWidget(self.dockWidgetContents_2)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.tabWidget_2.sizePolicy().hasHeightForWidth())
self.tabWidget_2.setSizePolicy(sizePolicy)
self.tabWidget_2.setMinimumSize(QtCore.QSize(310, 0))
self.tabWidget_2.setCursor(QtGui.QCursor(QtCore.Qt.ArrowCursor))
self.tabWidget_2.setAutoFillBackground(False)
self.tabWidget_2.setStyleSheet(_fromUtf8(""))
self.tabWidget_2.setTabPosition(QtGui.QTabWidget.South)
self.tabWidget_2.setTabShape(QtGui.QTabWidget.Rounded)
self.tabWidget_2.setIconSize(QtCore.QSize(16, 25))
self.tabWidget_2.setElideMode(QtCore.Qt.ElideNone)
self.tabWidget_2.setTabsClosable(False)
self.tabWidget_2.setMovable(True)
self.tabWidget_2.setObjectName(_fromUtf8("tabWidget_2"))
self.tab_3 = QtGui.QWidget()
self.tab_3.setObjectName(_fromUtf8("tab_3"))
self.verticalLayout_8 = QtGui.QVBoxLayout(self.tab_3)
self.verticalLayout_8.setObjectName(_fromUtf8("verticalLayout_8"))
self.groupBox = QtGui.QGroupBox(self.tab_3)
self.groupBox.setObjectName(_fromUtf8("groupBox"))
self.verticalLayout_7 = QtGui.QVBoxLayout(self.groupBox)
self.verticalLayout_7.setObjectName(_fromUtf8("verticalLayout_7"))
self.horizontalLayout_8 = QtGui.QHBoxLayout()
self.horizontalLayout_8.setObjectName(_fromUtf8("horizontalLayout_8"))
self.label = QtGui.QLabel(self.groupBox)
self.label.setObjectName(_fromUtf8("label"))
        self.horizontalLayout_8.addWidget(self.label)
self.horizontalSlider = QtGui.QSlider(self.groupBox)
self.horizontalSlider.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
self.horizontalSlider.setStyleSheet(_fromUtf8(""))
self.horizontalSlider.setOrientation(QtCore.Qt.Horizontal)
self.horizontalSlider.setObjectName(_fromUtf8("horizontalSlider"))
self.horizontalLayout_8.addWidget(self.horizontalSlider)
self.verticalLayout_7.addLayout(self.horizontalLayout_8)
self.horizontalLayout_9 = QtGui.QHBoxLayout()
self.horizontalLayout_9.setSizeConstraint(QtGui.QLayout.SetNoConstraint)
self.horizontalLayout_9.setObjectName(_fromUtf8("horizontalLayout_9"))
self.label_2 = QtGui.QLabel(self.groupBox)
self.label_2.setObjectName(_fromUtf8("label_2"))
self.horizontalLayout_9.addWidget(self.label_2)
self.label_3 = QtGui.QLabel(self.groupBox)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_3.sizePolicy().hasHeightForWidth())
self.label_3.setSizePolicy(sizePolicy)
self.label_3.setMinimumSize(QtCore.QSize(20, 20))
self.label_3.setObjectName(_fromUtf8("label_3"))
self.horizontalLayout_9.addWidget(self.label_3)
self.label_4 = QtGui.QLabel(self.groupBox)
self.label_4.setObjectName(_fromUtf8("label_4"))
self.horizontalLayout_9.addWidget(self.label_4)
self.radioButton = QtGui.QRadioButton(self.groupBox)
self.radioButton.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
self.radioButton.setObjectName(_fromUtf8("radioButton"))
self.horizontalLayout_9.addWidget(self.radioButton)
self.radioButton_3 = QtGui.QRadioButton(self.groupBox)
self.radioButton_3.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
self.radioButton_3.setObjectName(_fromUtf8("radioButton_3"))
self.horizontalLayout_9.addWidget(self.radioButton_3)
self.radioButton_2 = QtGui.QRadioButton(self.groupBox)
self.radioButton_2.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
self.radioButton_2.setObjectName(_fromUtf8("radioButton_2"))
self.horizontalLayout_9.addWidget(self.radioButton_2)
self.verticalLayout_7.addLayout(self.horizontalLayout_9)
self.horizontalLayout_10 = QtGui.QHBoxLayout()
self.horizontalLayout_10.setObjectName(_fromUtf8("horizontalLayout_10"))
self.label_5 = QtGui.QLabel(self.groupBox)
self.label_5.setObjectName(_fromUtf8("label_5"))
self.horizontalLayout_10.addWidget(self.label_5)
self.comboBox_4 = QtGui.QComboBox(self.groupBox)
self.comboBox_4.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
self.comboBox_4.setObjectName(_fromUtf8("comboBox_4"))
self.horizontalLayout_10.addWidget(self.comboBox_4)
self.label_6 = QtGui.QLabel(self.groupBox)
self.label_6.setObjectName(_fromUtf8("label_6"))
self.horizontalLayout_10.addWidget(self.label_6)
self.horizontalSlider_2 = QtGui.QSlider(self.groupBox)
self.horizontalSlider_2.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
self.horizontalSlider_2.setOrientation(QtCore.Qt.Horizontal)
self.horizontalSlider_2.setObjectName(_fromUtf8("horizontalSlider_2"))
self.horizontalLayout_10.addWidget(self.horizontalSlider_2)
self.verticalLayout_7.addLayout(self.horizontalLayout_10)
self.horizontalLayout_11 = QtGui.QHBoxLayout()
self.horizontalLayout_11.setObjectName(_fromUtf8("horizontalLayout_11"))
self.label_7 = QtGui.QLabel(self.groupBox)
self.label_7.setObjectName(_fromUtf8("label_7"))
self.horizontalLayout_11.addWidget(self.label_7)
self.horizontalSlider_3 = QtGui.QSlider(self.groupBox)
self.horizontalSlider_3.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
self.horizontalSlider_3.setOrientation(QtCore.Qt.Horizontal)
self.horizontalSlider_3.setObjectName(_fromUtf8("horizontalSlider_3"))
self.horizontalLayout_11.addWidget(self.horizontalSlider_3)
self.verticalLayout_7.addLayout(self.horizontalLayout_11)
self.verticalLayout_8.addWidget(self.groupBox)
self.tabWidget_2.addTab(self.tab_3, _fromUtf8(""))
self.tab_4 = QtGui.QWidget()
self.tab_4.setObjectName(_fromUtf8("tab_4"))
self.horizontalLayout_13 = QtGui.QHBoxLayout(self.tab_4)
self.horizontalLayout_13.setObjectName(_fromUtf8("horizontalLayout_13"))
self.tabWidget_3 = QtGui.QTabWidget(self.tab_4)
self.tabWidget_3.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
self.tabWidget_3.setStyleSheet(_fromUtf8(""))
self.tabWidget_3.setUsesScrollButtons(False)
self.tabWidget_3.setMovable(True)
self.tabWidget_3.setObjectName(_fromUtf8("tabWidget_3"))
self.tab_5 = QtGui.QWidget()
self.tab_5.setObjectName(_fromUtf8("tab_5"))
self.tabWidget_3.addTab(self.tab_5, _fromUtf8(""))
self.tab_6 = QtGui.QWidget()
self.tab_6.setObjectName(_fromUtf8("tab_6"))
self.tabWidget_3.addTab(self.tab_6, _fromUtf8(""))
self.horizontalLayout_13.addWidget(self.tabWidget_3)
self.tabWidget_2.addTab(self.tab_4, _fromUtf8(""))
self.verticalLayout_2.addWidget(self.tabWidget_2)
self.verticalLayout_4.addLayout(self.verticalLayout_2)
self.dockWidget_2.setWidget(self.dockWidgetContents_2)
MainWindow.addDockWidget(QtCore.Qt.DockWidgetArea(1), self.dockWidget_2)
self.dockWidget_3 = QtGui.QDockWidget(MainWindow)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Preferred, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.dockWidget_3.sizePolicy().hasHeightForWidth())
self.dockWidget_3.setSizePolicy(sizePolicy)
self.dockWidget_3.setMinimumSize(QtCore.QSize(489, 70))
self.dockWidget_3.setMaximumSize(QtCore.QSize(524287, 524287))
self.dockWidget_3.setObjectName(_fromUtf8("dockWidget_3"))
self.dockWidgetContents_3 = QtGui.QWidget()
self.dockWidgetContents_3.setObjectName(_fromUtf8("dockWidgetContents_3"))
self.horizontalLayout = QtGui.QHBoxLayout(self.dockWidgetContents_3)
self.horizontalLayout.setObjectName(_fromUtf8("horizontalLayout"))
self.toolButton_17 = QtGui.QToolButton(self.dockWidgetContents_3)
self.toolButton_17.setMaximumSize(QtCore.QSize(16777215, 25))
self.toolButton_17.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
self.toolButton_17.setStyleSheet(_fromUtf8(""))
icon5 = QtGui.QIcon()
icon5.addPixmap(QtGui.QPixmap(_fromUtf8("Icons/Item-New-48.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.toolButton_17.setIcon(icon5)
self.toolButton_17.setIconSize(QtCore.QSize(30, 30))
self.toolButton_17.setObjectName(_fromUtf8("toolButton_17"))
self.horizontalLayout.addWidget(self.toolButton_17)
self.toolButton_10 = QtGui.QToolButton(self.dockWidgetContents_3)
self.toolButton_10.setMaximumSize(QtCore.QSize(16777215, 25))
self.toolButton_10.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
self.toolButton_10.setStyleSheet(_fromUtf8(""))
self.toolButton_10.setIcon(icon3)
self.toolButton_10.setIconSize(QtCore.QSize(30, 30))
self.toolButton_10.setObjectName(_fromUtf8("toolButton_10"))
self.horizontalLayout.addWidget(self.toolButton_10)
self.toolButton_20 = QtGui.QToolButton(self.dockWidgetContents_3)
self.toolButton_20.setMaximumSize(QtCore.QSize(16777215, 25))
self.toolButton_20.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
self.toolButton_20.setStyleSheet(_fromUtf8(""))
self.toolButton_20.setIcon(icon4)
self.toolButton_20.setIconSize(QtCore.QSize(30, 30))
self.toolButton_20.setObjectName(_fromUtf8("toolButton_20"))
self.horizontalLayout.addWidget(self.toolButton_20)
self.toolButton_18 = QtGui.QToolButton(self.dockWidgetContents_3)
self.toolButton_18.setMaximumSize(QtCore.QSize(16777215, 25))
self.toolButton_18.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
self.toolButton_18.setStyleSheet(_fromUtf8(""))
icon6 = QtGui.QIcon()
icon6.addPixmap(QtGui.QPixmap(_fromUtf8("Icons/Open-48.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.toolButton_18.setIcon(icon6)
self.toolButton_18.setIconSize(QtCore.QSize(30, 30))
self.toolButton_18.setObjectName(_fromUtf8("toolButton_18"))
self.horizontalLayout.addWidget(self.toolButton_18)
self.line_4 = QtGui.QFrame(self.dockWidgetContents_3)
self.line_4.setMaximumSize(QtCore.QSize(16777215, 20))
self.line_4.setFrameShape(QtGui.QFrame.VLine)
self.line_4.setFrameShadow(QtGui.QFrame.Sunken)
self.line_4.setObjectName(_fromUtf8("line_4"))
self.horizontalLayout.addWidget(self.line_4)
self.toolButton_4 = QtGui.QToolButton(self.dockWidgetContents_3)
self.toolButton_4.setMaximumSize(QtCore.QSize(16777215, 25))
self.toolButton_4.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
self.toolButton_4.setStyleSheet(_fromUtf8(""))
self.toolButton_4.setIcon(icon1)
self.toolButton_4.setIconSize(QtCore.QSize(30, 30))
self.toolButton_4.setObjectName(_fromUtf8("toolButton_4"))
self.horizontalLayout.addWidget(self.toolButton_4)
self.toolButton_3 = QtGui.QToolButton(self.dockWidgetContents_3)
self.toolButton_3.setMaximumSize(QtCore.QSize(16777215, 25))
self.toolButton_3.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
self.toolButton_3.setStyleSheet(_fromUtf8(""))
self.toolButton_3.setIcon(icon2)
self.toolButton_3.setIconSize(QtCore.QSize(30, 30))
self.toolButton_3.setObjectName(_fromUtf8("toolButton_3"))
self.horizontalLayout.addWidget(self.toolButton_3)
self.line_5 = QtGui.QFrame(self.dockWidgetContents_3)
self.line_5.setMaximumSize(QtCore.QSize(16777215, 20))
self.line_5.setFrameShape(QtGui.QFrame.VLine)
self.line_5.setFrameShadow(QtGui.QFrame.Sunken)
self.line_5.setObjectName(_fromUtf8("line_5"))
self.horizontalLayout.addWidget(self.line_5)
self.checkBox = QtGui.QCheckBox(self.dockWidgetContents_3)
self.checkBox.setMaximumSize(QtCore.QSize(20, 25))
self.checkBox.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
self.checkBox.setLayoutDirection(QtCore.Qt.LeftToRight)
self.checkBox.setText(_fromUtf8(""))
self.checkBox.setObjectName(_fromUtf8("checkBox"))
self.horizontalLayout.addWidget(self.checkBox)
self.Example = QtGui.QToolButton(self.dockWidgetContents_3)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.Example.sizePolicy().hasHeightForWidth())
self.Example.setSizePolicy(sizePolicy)
self.Example.setMaximumSize(QtCore.QSize(16777215, 25))
self.Example.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
self.Example.setStyleSheet(_fromUtf8("QToolButton{\n"
"font-size: 15px;\n"
"color:rgb(255, 255, 255);\n"
"}"))
self.Example.setIconSize(QtCore.QSize(24, 24))
self.Example.setObjectName(_fromUtf8("Example"))
self.horizontalLayout.addWidget(self.Example)
self.line_6 = QtGui.QFrame(self.dockWidgetContents_3)
self.line_6.setMaximumSize(QtCore.QSize(16777215, 20))
self.line_6.setFrameShape(QtGui.QFrame.VLine)
self.line_6.setFrameShadow(QtGui.QFrame.Sunken)
self.line_6.setObjectName(_fromUtf8("line_6"))
self.horizontalLayout.addWidget(self.line_6)
self.toolButton = QtGui.QToolButton(self.dockWidgetContents_3)
self.toolButton.setMaximumSize(QtCore.QSize(16777215, 25))
self.toolButton.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
self.toolButton.setStyleSheet(_fromUtf8(""))
icon7 = QtGui.QIcon()
icon7.addPixmap(QtGui.QPixmap(_fromUtf8("Icons/Board-Pin-48.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.toolButton.setIcon(icon7)
self.toolButton.setIconSize(QtCore.QSize(30, 30))
self.toolButton.setObjectName(_fromUtf8("toolButton"))
self.horizontalLayout.addWidget(self.toolButton)
self.toolButton_25 = QtGui.QToolButton(self.dockWidgetContents_3)
self.toolButton_25.setMaximumSize(QtCore.QSize(16777215, 25))
self.toolButton_25.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
self.toolButton_25.setStyleSheet(_fromUtf8(""))
icon8 = QtGui.QIcon()
icon8.addPixmap(QtGui.QPixmap(_fromUtf8("Icons/Table-48.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.toolButton_25.setIcon(icon8)
self.toolButton_25.setIconSize(QtCore.QSize(30, 30))
self.toolButton_25.setObjectName(_fromUtf8("toolButton_25"))
self.horizontalLayout.addWidget(self.toolButton_25)
self.line_8 = QtGui.QFrame(self.dockWidgetContents_3)
self.line_8.setMaximumSize(QtCore.QSize(16777215, 20))
self.line_8.setFrameShape(QtGui.QFrame.VLine)
self.line_8.setFrameShadow(QtGui.QFrame.Sunken)
self.line_8.setObjectName(_fromUtf8("line_8"))
self.horizontalLayout.addWidget(self.line_8)
self.dockWidget_3.setWidget(self.dockWidgetContents_3)
MainWindow.addDockWidget(QtCore.Qt.DockWidgetArea(4), self.dockWidget_3)
self.dockWidget_4 = QtGui.QDockWidget(MainWindow)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Preferred, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.dockWidget_4.sizePolicy().hasHeightForWidth())
self.dockWidget_4.setSizePolicy(sizePolicy)
self.dockWidget_4.setMinimumSize(QtCore.QSize(624, 70))
self.dockWidget_4.setMaximumSize(QtCore.QSize(524287, 70))
self.dockWidget_4.setObjectName(_fromUtf8("dockWidget_4"))
self.dockWidgetContents_4 = QtGui.QWidget()
self.dockWidgetContents_4.setObjectName(_fromUtf8("dockWidgetContents_4"))
self.horizontalLayout_2 = QtGui.QHBoxLayout(self.dockWidgetContents_4)
self.horizontalLayout_2.setObjectName(_fromUtf8("horizontalLayout_2"))
self.line_7 = QtGui.QFrame(self.dockWidgetContents_4)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.line_7.sizePolicy().hasHeightForWidth())
self.line_7.setSizePolicy(sizePolicy)
self.line_7.setMaximumSize(QtCore.QSize(16777215, 20))
self.line_7.setLineWidth(1)
self.line_7.setMidLineWidth(1)
self.line_7.setFrameShape(QtGui.QFrame.VLine)
self.line_7.setFrameShadow(QtGui.QFrame.Sunken)
self.line_7.setObjectName(_fromUtf8("line_7"))
self.horizontalLayout_2.addWidget(self.line_7)
self.toolButton_19 = QtGui.QToolButton(self.dockWidgetContents_4)
self.toolButton_19.setMaximumSize(QtCore.QSize(16777215, 25))
self.toolButton_19.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
self.toolButton_19.setStyleSheet(_fromUtf8(""))
icon9 = QtGui.QIcon()
icon9.addPixmap(QtGui.QPixmap(_fromUtf8("Icons/Keyboard-48.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.toolButton_19.setIcon(icon9)
self.toolButton_19.setIconSize(QtCore.QSize(35, 35))
self.toolButton_19.setObjectName(_fromUtf8("toolButton_19"))
self.horizontalLayout_2.addWidget(self.toolButton_19)
self.toolButton_23 = QtGui.QToolButton(self.dockWidgetContents_4)
self.toolButton_23.setMaximumSize(QtCore.QSize(16777215, 25))
self.toolButton_23.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
self.toolButton_23.setStyleSheet(_fromUtf8(""))
icon10 = QtGui.QIcon()
icon10.addPixmap(QtGui.QPixmap(_fromUtf8("Icons/Printer-48.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.toolButton_23.setIcon(icon10)
self.toolButton_23.setIconSize(QtCore.QSize(35, 35))
self.toolButton_23.setObjectName(_fromUtf8("toolButton_23"))
self.horizontalLayout_2.addWidget(self.toolButton_23)
self.toolButton_2 = QtGui.QToolButton(self.dockWidgetContents_4)
self.toolButton_2.setMaximumSize(QtCore.QSize(16777215, 25))
self.toolButton_2.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
self.toolButton_2.setIcon(icon4)
self.toolButton_2.setIconSize(QtCore.QSize(35, 35))
self.toolButton_2.setObjectName(_fromUtf8("toolButton_2"))
self.horizontalLayout_2.addWidget(self.toolButton_2)
self.toolButton_24 = QtGui.QToolButton(self.dockWidgetContents_4)
self.toolButton_24.setMaximumSize(QtCore.QSize(16777215, 25))
self.toolButton_24.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
self.toolButton_24.setStyleSheet(_fromUtf8(""))
icon11 = QtGui.QIcon()
icon11.addPixmap(QtGui.QPixmap(_fromUtf8("Icons/Camera-02-48.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.toolButton_24.setIcon(icon11)
self.toolButton_24.setIconSize(QtCore.QSize(35, 35))
self.toolButton_24.setObjectName(_fromUtf8("toolButton_24"))
self.horizontalLayout_2.addWidget(self.toolButton_24)
self.toolButton_22 = QtGui.QToolButton(self.dockWidgetContents_4)
self.toolButton_22.setMaximumSize(QtCore.QSize(16777215, 25))
self.toolButton_22.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
self.toolButton_22.setStyleSheet(_fromUtf8(""))
icon12 = QtGui.QIcon()
icon12.addPixmap(QtGui.QPixmap(_fromUtf8("Icons/Facebook-48.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.toolButton_22.setIcon(icon12)
self.toolButton_22.setIconSize(QtCore.QSize(35, 35))
self.toolButton_22.setObjectName(_fromUtf8("toolButton_22"))
self.horizontalLayout_2.addWidget(self.toolButton_22)
self.line_3 = QtGui.QFrame(self.dockWidgetContents_4)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.line_3.sizePolicy().hasHeightForWidth())
self.line_3.setSizePolicy(sizePolicy)
self.line_3.setMaximumSize(QtCore.QSize(16777215, 20))
self.line_3.setFrameShape(QtGui.QFrame.VLine)
self.line_3.setFrameShadow(QtGui.QFrame.Sunken)
self.line_3.setObjectName(_fromUtf8("line_3"))
self.horizontalLayout_2.addWidget(self.line_3)
self.toolButton_21 = QtGui.QToolButton(self.dockWidgetContents_4)
self.toolButton_21.setMaximumSize(QtCore.QSize(16777215, 25))
self.toolButton_21.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
self.toolButton_21.setStyleSheet(_fromUtf8(""))
icon13 = QtGui.QIcon()
icon13.addPixmap(QtGui.QPixmap(_fromUtf8("Icons/Media-Play-48.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.toolButton_21.setIcon(icon13)
self.toolButton_21.setIconSize(QtCore.QSize(35, 35))
self.toolButton_21.setObjectName(_fromUtf8("toolButton_21"))
self.horizontalLayout_2.addWidget(self.toolButton_21)
self.toolButton_16 = QtGui.QToolButton(self.dockWidgetContents_4)
self.toolButton_16.setMaximumSize(QtCore.QSize(16777215, 25))
self.toolButton_16.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
self.toolButton_16.setStyleSheet(_fromUtf8(""))
icon14 = QtGui.QIcon()
icon14.addPixmap(QtGui.QPixmap(_fromUtf8("Icons/Stop-48.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.toolButton_16.setIcon(icon14)
self.toolButton_16.setIconSize(QtCore.QSize(35, 35))
self.toolButton_16.setObjectName(_fromUtf8("toolButton_16"))
self.horizontalLayout_2.addWidget(self.toolButton_16)
self.line_2 = QtGui.QFrame(self.dockWidgetContents_4)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.line_2.sizePolicy().hasHeightForWidth())
self.line_2.setSizePolicy(sizePolicy)
self.line_2.setMaximumSize(QtCore.QSize(16777215, 20))
self.line_2.setFrameShape(QtGui.QFrame.VLine)
self.line_2.setFrameShadow(QtGui.QFrame.Sunken)
self.line_2.setObjectName(_fromUtf8("line_2"))
self.horizontalLayout_2.addWidget(self.line_2)
self.toolButton_15 = QtGui.QToolButton(self.dockWidgetContents_4)
self.toolButton_15.setMaximumSize(QtCore.QSize(16777215, 25))
self.toolButton_15.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
self.toolButton_15.setStyleSheet(_fromUtf8(""))
icon15 = QtGui.QIcon()
icon15.addPixmap(QtGui.QPixmap(_fromUtf8("Icons/Column-Selection-48.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.toolButton_15.setIcon(icon15)
self.toolButton_15.setIconSize(QtCore.QSize(35, 35))
self.toolButton_15.setObjectName(_fromUtf8("toolButton_15"))
self.horizontalLayout_2.addWidget(self.toolButton_15)
self.toolButton_14 = QtGui.QToolButton(self.dockWidgetContents_4)
self.toolButton_14.setMaximumSize(QtCore.QSize(16777215, 25))
self.toolButton_14.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
self.toolButton_14.setStyleSheet(_fromUtf8(""))
icon16 = QtGui.QIcon()
icon16.addPixmap(QtGui.QPixmap(_fromUtf8("Icons/Slash-48.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.toolButton_14.setIcon(icon16)
self.toolButton_14.setIconSize(QtCore.QSize(35, 35))
self.toolButton_14.setObjectName(_fromUtf8("toolButton_14"))
self.horizontalLayout_2.addWidget(self.toolButton_14)
self.line = QtGui.QFrame(self.dockWidgetContents_4)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.line.sizePolicy().hasHeightForWidth())
self.line.setSizePolicy(sizePolicy)
self.line.setMaximumSize(QtCore.QSize(16777215, 20))
self.line.setFrameShape(QtGui.QFrame.VLine)
self.line.setFrameShadow(QtGui.QFrame.Sunken)
self.line.setObjectName(_fromUtf8("line"))
self.horizontalLayout_2.addWidget(self.line)
self.toolButton_13 = QtGui.QToolButton(self.dockWidgetContents_4)
self.toolButton_13.setMaximumSize(QtCore.QSize(16777215, 25))
self.toolButton_13.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
self.toolButton_13.setStyleSheet(_fromUtf8(""))
icon17 = QtGui.QIcon()
icon17.addPixmap(QtGui.QPixmap(_fromUtf8("Icons/Magnifying-Glass-48.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.toolButton_13.setIcon(icon17)
self.toolButton_13.setIconSize(QtCore.QSize(35, 35))
self.toolButton_13.setObjectName(_fromUtf8("toolButton_13"))
self.horizontalLayout_2.addWidget(self.toolButton_13)
self.toolButton_12 = QtGui.QToolButton(self.dockWidgetContents_4)
self.toolButton_12.setMaximumSize(QtCore.QSize(16777215, 25))
self.toolButton_12.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
self.toolButton_12.setStyleSheet(_fromUtf8(""))
icon18 = QtGui.QIcon()
icon18.addPixmap(QtGui.QPixmap(_fromUtf8("Icons/Zoom-In-48.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.toolButton_12.setIcon(icon18)
self.toolButton_12.setIconSize(QtCore.QSize(35, 35))
self.toolButton_12.setObjectName(_fromUtf8("toolButton_12"))
self.horizontalLayout_2.addWidget(self.toolButton_12)
self.toolButton_11 = QtGui.QToolButton(self.dockWidgetContents_4)
self.toolButton_11.setMaximumSize(QtCore.QSize(16777215, 25))
self.toolButton_11.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
self.toolButton_11.setAutoFillBackground(False)
self.toolButton_11.setStyleSheet(_fromUtf8(""))
icon19 = QtGui.QIcon()
icon19.addPixmap(QtGui.QPixmap(_fromUtf8("Icons/Zoom-Out-48.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.toolButton_11.setIcon(icon19)
self.toolButton_11.setIconSize(QtCore.QSize(35, 35))
self.toolButton_11.setObjectName(_fromUtf8("toolButton_11"))
self.horizontalLayout_2.addWidget(self.toolButton_11)
self.dockWidget_4.setWidget(self.dockWidgetContents_4)
MainWindow.addDockWidget(QtCore.Qt.DockWidgetArea(4), self.dockWidget_4)
self.dockWidget_5 = QtGui.QDockWidget(MainWindow)
self.dockWidget_5.setObjectName(_fromUtf8("dockWidget_5"))
self.dockWidgetContents_5 = QtGui.QWidget()
self.dockWidgetContents_5.setObjectName(_fromUtf8("dockWidgetContents_5"))
self.verticalLayout = QtGui.QVBoxLayout(self.dockWidgetContents_5)
self.verticalLayout.setObjectName(_fromUtf8("verticalLayout"))
self.pushButton_2 = QtGui.QPushButton(self.dockWidgetContents_5)
self.pushButton_2.setMinimumSize(QtCore.QSize(0, 0))
self.pushButton_2.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
self.pushButton_2.setObjectName(_fromUtf8("pushButton_2"))
self.verticalLayout.addWidget(self.pushButton_2)
self.dockWidget_5.setWidget(self.dockWidgetContents_5)
MainWindow.addDockWidget(QtCore.Qt.DockWidgetArea(1), self.dockWidget_5)
self.retranslateUi(MainWindow)
self.tabWidget_2.setCurrentIndex(1)
self.tabWidget_3.setCurrentIndex(1)
QtCore.QMetaObject.connectSlotsByName(MainWindow)
def retranslateUi(self, MainWindow):
MainWindow.setWindowTitle(_translate("MainWindow", "MainWindow", None))
self.pushButton_3.setText(_translate("MainWindow", "Hide", None))
self.toolButton_7.setToolTip(_translate("MainWindow", "Add", None))
self.toolButton_7.setText(_translate("MainWindow", "...", None))
self.toolButton_9.setToolTip(_translate("MainWindow", "Remove", None))
self.toolButton_9.setText(_translate("MainWindow", "...", None))
self.toolButton_8.setToolTip(_translate("MainWindow", "Import Coordinates", None))
self.toolButton_8.setText(_translate("MainWindow", "...", None))
self.toolButton_5.setToolTip(_translate("MainWindow", "Export Coordinates", None))
self.toolButton_5.setText(_translate("MainWindow", "...", None))
item = self.tableWidget.horizontalHeaderItem(0)
item.setText(_translate("MainWindow", "x", None))
item = self.tableWidget.horizontalHeaderItem(1)
item.setText(_translate("MainWindow", "y", None))
item = self.tableWidget.horizontalHeaderItem(2)
item.setText(_translate("MainWindow", "z", None))
self.pushButton_21.setText(_translate("MainWindow", "Redraw", None))
self.toolButton_17.setToolTip(_translate("MainWindow", "Create New", None))
self.toolButton_17.setText(_translate("MainWindow", "...", None))
self.toolButton_10.setToolTip(_translate("MainWindow", "Open Existing", None))
self.toolButton_10.setText(_translate("MainWindow", "...", None))
self.toolButton_20.setToolTip(_translate("MainWindow", "Save to Drive", None))
self.toolButton_20.setText(_translate("MainWindow", "...", None))
self.toolButton_18.setToolTip(_translate("MainWindow", "Load New", None))
self.toolButton_18.setText(_translate("MainWindow", "...", None))
self.toolButton_4.setToolTip(_translate("MainWindow", "Add new Equation", None))
self.toolButton_4.setText(_translate("MainWindow", "...", None))
self.toolButton_3.setToolTip(_translate("MainWindow", "Remove this Equation", None))
self.toolButton_3.setText(_translate("MainWindow", "...", None))
self.checkBox.setToolTip(_translate("MainWindow", "Show on Graph", None))
self.Example.setToolTip(_translate("MainWindow", "Illustrate with an Example", None))
self.Example.setWhatsThis(_translate("MainWindow", "Example", None))
self.Example.setText(_translate("MainWindow", "Example", None))
self.toolButton.setToolTip(_translate("MainWindow", "Always on Top", None))
self.toolButton.setText(_translate("MainWindow", "...", None))
self.toolButton_25.setToolTip(_translate("MainWindow", "Show/Hide Table", None))
self.toolButton_25.setText(_translate("MainWindow", "...", None))
self.toolButton_19.setToolTip(_translate("MainWindow", "Keyboard", None))
self.toolButton_19.setText(_translate("MainWindow", "...", None))
self.toolButton_23.setToolTip(_translate("MainWindow", "Print graph", None))
self.toolButton_23.setText(_translate("MainWindow", "...", None))
self.toolButton_2.setToolTip(_translate("MainWindow", "Save Graph", None))
self.toolButton_2.setText(_translate("MainWindow", "...", None))
self.toolButton_24.setToolTip(_translate("MainWindow", "Take a screenshot", None))
self.toolButton_24.setText(_translate("MainWindow", "...", None))
self.toolButton_22.setToolTip(_translate("MainWindow", "Go to our FaceBook page", None))
self.toolButton_22.setText(_translate("MainWindow", "...", None))
self.toolButton_21.setToolTip(_translate("MainWindow", "Play", None))
self.toolButton_21.setText(_translate("MainWindow", "...", None))
self.toolButton_16.setToolTip(_translate("MainWindow", "Stop", None))
self.toolButton_16.setText(_translate("MainWindow", "...", None))
self.toolButton_15.setToolTip(_translate("MainWindow", "Disable Anti-Aliasing", None))
self.toolButton_15.setText(_translate("MainWindow", "...", None))
self.toolButton_14.setToolTip(_translate("MainWindow", "Enable Anti-Aliasing", None))
self.toolButton_14.setText(_translate("MainWindow", "...", None))
self.toolButton_13.setToolTip(_translate("MainWindow", "Zoom All", None))
self.toolButton_13.setText(_translate("MainWindow", "...", None))
self.toolButton_12.setToolTip(_translate("MainWindow", "Zoom in", None))
self.toolButton_12.setText(_translate("MainWindow", "...", None))
self.toolButton_11.setToolTip(_translate("MainWindow", "Zoom out", None))
self.toolButton_11.setText(_translate("MainWindow", "...", None))
self.pushButton_2.setText(_translate("MainWindow", "PushButton", None))
| gpl-2.0 | -4,134,462,761,231,112,000 | 56.969608 | 268 | 0.698557 | false |
ifp-uiuc/do-neural-networks-learn-faus-iccvw-2015 | ck_plus/cnn_ad/train.py | 1 | 3697 | import argparse
import os
import sys
sys.path.append('..')
import numpy
from anna import util
from anna.datasets import supervised_dataset
#from anna.datasets.supervised_data_loader import SupervisedDataLoaderCrossVal
import data_fold_loader
import data_paths
from model import SupervisedModel
parser = argparse.ArgumentParser(prog='train_cnn_with_dropout_\
data_augmentation',
description='Script to train convolutional \
network from random initialization with \
dropout and data augmentation.')
parser.add_argument("-s", "--split", default='0', help='Testing split of CK+ \
to use. (0-9)')
parser.add_argument("--checkpoint_dir", default='./', help='Location to save \
model checkpoint files.')
args = parser.parse_args()
print('Start')
test_split = int(args.split)
if test_split < 0 or test_split > 9:
raise Exception("Testing Split must be in range 0-9.")
print('Using CK+ testing split: {}'.format(test_split))
checkpoint_dir = os.path.join(args.checkpoint_dir, 'checkpoints_'+str(test_split))
print 'Checkpoint dir: ', checkpoint_dir
pid = os.getpid()
print('PID: {}'.format(pid))
f = open('pid_'+str(test_split), 'wb')
f.write(str(pid)+'\n')
f.close()
# Load model
model = SupervisedModel('experiment', './', learning_rate=1e-2)
monitor = util.Monitor(model,
checkpoint_directory=checkpoint_dir,
save_steps=1000)
# Add dropout to fully-connected layer
model.fc4.dropout = 0.5
model._compile()
# Loading CK+ dataset
print('Loading Data')
#supervised_data_loader = SupervisedDataLoaderCrossVal(
# data_paths.ck_plus_data_path)
#train_data_container = supervised_data_loader.load('train', train_split)
#test_data_container = supervised_data_loader.load('test', train_split)
train_folds, val_fold, _ = data_fold_loader.load_fold_assignment(test_fold=test_split)
X_train, y_train = data_fold_loader.load_folds(data_paths.ck_plus_data_path, train_folds)
X_val, y_val = data_fold_loader.load_folds(data_paths.ck_plus_data_path, [val_fold])
X_test, y_test = data_fold_loader.load_folds(data_paths.ck_plus_data_path, [test_split])
X_train = numpy.float32(X_train)
X_train /= 255.0
X_train *= 2.0
X_val = numpy.float32(X_val)
X_val /= 255.0
X_val *= 2.0
X_test = numpy.float32(X_test)
X_test /= 255.0
X_test *= 2.0
train_dataset = supervised_dataset.SupervisedDataset(X_train, y_train)
val_dataset = supervised_dataset.SupervisedDataset(X_val, y_val)
train_iterator = train_dataset.iterator(
mode='random_uniform', batch_size=64, num_batches=31000)
val_iterator = val_dataset.iterator(
mode='random_uniform', batch_size=64, num_batches=31000)
# Do data augmentation (crops, flips, rotations, scales, intensity)
data_augmenter = util.DataAugmenter2(crop_shape=(96, 96),
flip=True, gray_on=True)
normer = util.Normer3(filter_size=5, num_channels=1)
module_list_train = [data_augmenter, normer]
module_list_val = [normer]
preprocessor_train = util.Preprocessor(module_list_train)
preprocessor_val = util.Preprocessor(module_list_val)
print('Training Model')
for x_batch, y_batch in train_iterator:
x_batch = preprocessor_train.run(x_batch)
monitor.start()
log_prob, accuracy = model.train(x_batch, y_batch)
monitor.stop(1-accuracy)
if monitor.test:
monitor.start()
x_val_batch, y_val_batch = val_iterator.next()
x_val_batch = preprocessor_val.run(x_val_batch)
val_accuracy = model.eval(x_val_batch, y_val_batch)
monitor.stop_test(1-val_accuracy)
| bsd-3-clause | -8,914,378,663,641,386,000 | 33.551402 | 89 | 0.675683 | false |
all-of-us/raw-data-repository | tests/service_tests/consent_tests/test_consent_validation.py | 1 | 12558 | from datetime import datetime, timedelta
import json
import mock
from typing import List, Type
from rdr_service.model.consent_file import ConsentFile, ConsentSyncStatus, ConsentType
from rdr_service.model.hpo import HPO
from rdr_service.model.participant_summary import ParticipantSummary
from rdr_service.services.consent import files
from rdr_service.services.consent.validation import ConsentValidator
from tests.helpers.unittest_base import BaseTestCase
class ConsentValidationTesting(BaseTestCase):
def __init__(self, *args, **kwargs):
super(ConsentValidationTesting, self).__init__(*args, **kwargs)
self.uses_database = False
self.va_hpo = HPO(hpoId=4)
self.another_hpo = HPO(hpoId=8)
self._default_signature = 'Test'
default_consent_timestamp = datetime(2019, 8, 27, 17, 9)
self._default_signing_date = default_consent_timestamp.date()
self.participant_summary = ParticipantSummary(
consentForStudyEnrollmentFirstYesAuthored=default_consent_timestamp
)
self.consent_factory_mock = mock.MagicMock(spec=files.ConsentFileAbstractFactory)
self.validator = ConsentValidator(
consent_factory=self.consent_factory_mock,
participant_summary=self.participant_summary,
va_hpo_id=self.va_hpo.hpoId
)
def test_primary_file_ready_for_sync(self):
"""Test the defaults give a consent file ready for syncing"""
self.consent_factory_mock.get_primary_consents.return_value = [
self._mock_consent(consent_class=files.PrimaryConsentFile)
]
self.assertMatchesExpectedResults(
[
{
'participant_id': self.participant_summary.participantId,
'file_exists': True,
'type': ConsentType.PRIMARY,
'is_signature_valid': True,
'signature_str': self._default_signature,
'is_signing_date_valid': True,
'signing_date': self._default_signing_date,
'sync_status': ConsentSyncStatus.READY_FOR_SYNC
}
],
self.validator.get_primary_validation_results()
)
def test_primary_with_incorrect_date(self):
incorrect_date_on_file = self._default_signing_date - timedelta(days=300)
self.consent_factory_mock.get_primary_consents.return_value = [
self._mock_consent(
consent_class=files.PrimaryConsentFile,
get_signature_on_file='signed with wrong date',
get_date_signed=incorrect_date_on_file
),
self._mock_consent(
consent_class=files.PrimaryConsentFile,
get_signature_on_file='signed with no date',
get_date_signed=None
)
]
self.assertMatchesExpectedResults(
[
{
'type': ConsentType.PRIMARY,
'signature_str': 'signed with wrong date',
'is_signing_date_valid': False,
'signing_date': incorrect_date_on_file,
'sync_status': ConsentSyncStatus.NEEDS_CORRECTING
},
{
'type': ConsentType.PRIMARY,
'signature_str': 'signed with no date',
'is_signing_date_valid': False,
'signing_date': None,
'sync_status': ConsentSyncStatus.NEEDS_CORRECTING
}
],
self.validator.get_primary_validation_results()
)
def test_primary_with_slightly_off_date(self):
shifted_date_on_file = self._default_signing_date - timedelta(days=3)
self.consent_factory_mock.get_primary_consents.return_value = [
self._mock_consent(
consent_class=files.PrimaryConsentFile,
get_signature_on_file='signed with slightly off date',
get_date_signed=shifted_date_on_file
)
]
self.assertMatchesExpectedResults(
[
{
'type': ConsentType.PRIMARY,
'signature_str': 'signed with slightly off date',
'is_signing_date_valid': True,
'signing_date': shifted_date_on_file,
'sync_status': ConsentSyncStatus.READY_FOR_SYNC
}
],
self.validator.get_primary_validation_results()
)
def test_primary_with_signature_image(self):
self.consent_factory_mock.get_primary_consents.return_value = [
self._mock_consent(
consent_class=files.PrimaryConsentFile,
get_signature_on_file=True
)
]
self.assertMatchesExpectedResults(
[
{
'type': ConsentType.PRIMARY,
'is_signature_valid': True,
'signature_str': None,
'is_signature_image': True,
'sync_status': ConsentSyncStatus.READY_FOR_SYNC
}
],
self.validator.get_primary_validation_results()
)
def test_va_primary_for_non_veteran(self):
self.participant_summary.hpoId = self.another_hpo.hpoId
self.consent_factory_mock.get_primary_consents.return_value = [
self._mock_consent(
consent_class=files.PrimaryConsentFile,
get_is_va_consent=True
)
]
self.assertMatchesExpectedResults(
[
{
'type': ConsentType.PRIMARY,
'other_errors': 'veteran consent for non-veteran participant',
'sync_status': ConsentSyncStatus.NEEDS_CORRECTING
}
],
self.validator.get_primary_validation_results()
)
def test_non_va_primary_for_veteran(self):
self.participant_summary.hpoId = self.va_hpo.hpoId
self.consent_factory_mock.get_primary_consents.return_value = [
self._mock_consent(
consent_class=files.PrimaryConsentFile,
get_is_va_consent=False,
)
]
self.assertMatchesExpectedResults(
[
{
'type': ConsentType.PRIMARY,
'other_errors': 'non-veteran consent for veteran participant',
'sync_status': ConsentSyncStatus.NEEDS_CORRECTING
}
],
self.validator.get_primary_validation_results()
)
def test_ehr_file_ready_for_sync(self):
ehr_consent_timestamp = datetime(2020, 2, 5, 13, 9)
self.participant_summary.consentForElectronicHealthRecordsAuthored = ehr_consent_timestamp
self.consent_factory_mock.get_ehr_consents.return_value = [
self._mock_consent(
consent_class=files.EhrConsentFile,
get_date_signed=ehr_consent_timestamp.date()
)
]
self.assertMatchesExpectedResults(
[
{
'participant_id': self.participant_summary.participantId,
'type': ConsentType.EHR,
'is_signing_date_valid': True,
'signing_date': ehr_consent_timestamp.date(),
'sync_status': ConsentSyncStatus.READY_FOR_SYNC
}
],
self.validator.get_ehr_validation_results()
)
def test_cabor_file_ready_for_sync(self):
cabor_consent_timestamp = datetime(2020, 4, 21, 13, 9)
self.participant_summary.consentForCABoRAuthored = cabor_consent_timestamp
self.consent_factory_mock.get_cabor_consents.return_value = [
self._mock_consent(
consent_class=files.CaborConsentFile,
get_date_signed=cabor_consent_timestamp.date()
)
]
self.assertMatchesExpectedResults(
[
{
'participant_id': self.participant_summary.participantId,
'type': ConsentType.CABOR,
'is_signing_date_valid': True,
'signing_date': cabor_consent_timestamp.date(),
'sync_status': ConsentSyncStatus.READY_FOR_SYNC
}
],
self.validator.get_cabor_validation_results()
)
def test_gror_file_ready_for_sync(self):
gror_consent_timestamp = datetime(2020, 10, 21, 13, 9)
self.participant_summary.consentForGenomicsRORAuthored = gror_consent_timestamp
self.consent_factory_mock.get_gror_consents.return_value = [
self._mock_consent(
consent_class=files.GrorConsentFile,
get_date_signed=gror_consent_timestamp.date()
)
]
self.assertMatchesExpectedResults(
[
{
'participant_id': self.participant_summary.participantId,
'type': ConsentType.GROR,
'is_signing_date_valid': True,
'signing_date': gror_consent_timestamp.date(),
'sync_status': ConsentSyncStatus.READY_FOR_SYNC
}
],
self.validator.get_gror_validation_results()
)
def test_gror_without_checkmark(self):
self.participant_summary.consentForGenomicsRORAuthored = datetime.combine(
self._default_signing_date,
datetime.now().time()
)
self.consent_factory_mock.get_gror_consents.return_value = [
self._mock_consent(
consent_class=files.GrorConsentFile,
is_confirmation_selected=False
)
]
self.assertMatchesExpectedResults(
[
{
'participant_id': self.participant_summary.participantId,
'type': ConsentType.GROR,
'other_errors': 'missing consent check mark',
'sync_status': ConsentSyncStatus.NEEDS_CORRECTING
}
],
self.validator.get_gror_validation_results()
)
def test_gror_missing(self):
self.participant_summary.consentForGenomicsRORAuthored = datetime.combine(
self._default_signing_date,
datetime.now().time()
)
self.consent_factory_mock.get_gror_consents.return_value = []
self.assertMatchesExpectedResults(
[
{
'participant_id': self.participant_summary.participantId,
'type': ConsentType.GROR,
'file_exists': False,
'sync_status': ConsentSyncStatus.NEEDS_CORRECTING
}
],
self.validator.get_gror_validation_results()
)
def _mock_consent(self, consent_class: Type[files.ConsentFile], **kwargs):
consent_args = {
'get_signature_on_file': self._default_signature,
'get_date_signed': self._default_signing_date,
'get_is_va_consent': False
}
consent_args.update(kwargs)
consent_mock = mock.MagicMock(spec=consent_class)
consent_mock.upload_time = datetime.now()
consent_mock.file_path = '/test'
for method_name, return_value in consent_args.items():
if hasattr(consent_mock, method_name):
getattr(consent_mock, method_name).return_value = return_value
return consent_mock
def assertMatchesExpectedResults(self, expected_list, actual_list: List[ConsentFile]):
self.assertEqual(len(expected_list), len(actual_list))
def expected_data_found_in_results(expected_result):
for actual_result in actual_list:
if all([getattr(actual_result, attr_name) == value for attr_name, value in expected_result.items()]):
return True
return False
def json_print(data):
return json.dumps(data, default=str, indent=4)
for expected in expected_list:
if not expected_data_found_in_results(expected):
self.fail(
f'{json_print(expected)} not found in results: '
f'{json_print([actual.asdict() for actual in actual_list])}'
)
| bsd-3-clause | -6,291,842,177,331,229,000 | 39.121406 | 117 | 0.558847 | false |
radjkarl/imgProcessor | imgProcessor/imgSignal.py | 1 | 10847 | from __future__ import division
from __future__ import print_function
import numpy as np
import cv2
from imgProcessor.imgIO import imread
from imgProcessor.measure.FitHistogramPeaks import FitHistogramPeaks
from fancytools.math.findXAt import findXAt
# from scipy.optimize.minpack import curve_fit
MAX_SIZE = 700
def scaleSignalCut(img, ratio, nbins=100):
    '''
    Scale <img> after clipping the given <ratio> off the top and bottom
    of its intensity histogram.
    '''
start, stop = scaleSignalCutParams(img, ratio, nbins)
img = img - start
img /= (stop - start)
return img
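# Illustrative usage sketch (added note, not part of the original module): clip
# 1% of each histogram tail before display. The file name below is made up;
# imread and numpy are already imported at module level.
#
#     raw = np.asfarray(imread('el_module.png', 'gray'))
#     scaled = scaleSignalCut(raw, ratio=0.01)             # values roughly in [0, 1]
#     start, stop = scaleSignalCutParams(raw, ratio=0.01)  # or only the cut points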
def _toSize(img):
fac = MAX_SIZE / max(img.shape)
if fac < 1:
try:
return cv2.resize(img, (0, 0), fx=fac, fy=fac,
interpolation=cv2.INTER_AREA)
except cv2.error:
# cv2.error: ..\..\..\modules\imgproc\src\imgwarp.cpp:3235: error:
# (-215) dsize.area() > 0 in function cv::resize
return cv2.resize(img.T, (0, 0), fx=fac, fy=fac,
interpolation=cv2.INTER_AREA).T
return img
def _histogramAndCorrBinPos(img, nbins=100):
try:
h, bins = np.histogram(img, nbins)
except ValueError: # img contains NaN
h, bins = np.histogram(img[np.isfinite(img)], nbins)
b0 = bins[0]
bins = bins[1:]
bins += 0.5 * (bins[0] - b0)
return h, bins
def scaleSignalCutParams(img, ratio=0.01, nbins=100, return_img=False):
img = _toSize(img)
h, bins = _histogramAndCorrBinPos(img, nbins)
h = np.cumsum(h).astype(float)
h -= h.min()
h /= h[-1]
try:
start = findXAt(bins, h, ratio)
except IndexError:
start = bins[0]
try:
stop = findXAt(bins, h, 1 - ratio)
except IndexError:
stop = bins[-1]
if return_img:
return start, stop, img
return start, stop
def scaleSignal(img, fitParams=None,
backgroundToZero=False, reference=None):
'''
scale the image between...
backgroundToZero=True -> 0 (average background) and 1 (maximum signal)
backgroundToZero=False -> signal+-3std
reference -> reference image -- scale image to fit this one
returns:
scaled image
'''
img = imread(img)
if reference is not None:
# def fn(ii, m,n):
# return ii*m+n
# curve_fit(fn, img[::10,::10], ref[::10,::10])
low, high = signalRange(img, fitParams)
low2, high2 = signalRange(reference)
img = np.asfarray(img)
ampl = (high2 - low2) / (high - low)
img -= low
img *= ampl
img += low2
return img
else:
        offs, div = scaleParams(img, fitParams)  # scaleParams() takes no backgroundToZero argument
img = np.asfarray(img) - offs
img /= div
        print('offset: %s, divisor: %s' % (offs, div))
return img
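# Illustrative usage sketch (added note, not part of the original module):
# matching one EL image to a reference exposure; both file names are made up.
#
#     img = imread('module_new.png', 'gray')
#     ref = imread('module_reference.png', 'gray')
#     matched = scaleSignal(img, reference=ref)  # img mapped into ref's signal range
#     normed = scaleSignal(img)                  # or scaled to its own signal range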
def getBackgroundRange(fitParams):
'''
return minimum, average, maximum of the background peak
'''
smn, _, _ = getSignalParameters(fitParams)
bg = fitParams[0]
_, avg, std = bg
bgmn = max(0, avg - 3 * std)
    # use the widest margin that still stays below the signal peak
    if avg + 4 * std < smn:
        bgmx = avg + 4 * std
    elif avg + 3 * std < smn:
        bgmx = avg + 3 * std
    elif avg + 2 * std < smn:
        bgmx = avg + 2 * std
    else:
        bgmx = avg + std
return bgmn, avg, bgmx
def hasBackground(fitParams):
'''
    compare the heights of the putative background and signal peaks;
    if the ratio is too high, assume there is no background
'''
signal = getSignalPeak(fitParams)
bg = getBackgroundPeak(fitParams)
if signal == bg:
return False
r = signal[0] / bg[0]
if r < 1:
r = 1 / r
return r < 100
def backgroundPeakValue(img, bins=500):
f = FitHistogramPeaks(img, bins=bins, bins2=300)
bgp = getBackgroundPeak(f.fitParams)
ind = int(bgp[1])
if ind < 0:
ind = 0
# y = f.yvals[ind:]
# i = np.argmax(np.diff(y) > 0)
# bgmaxpos = ind # + i
# print(f.xvals[bgmaxpos], bgmaxpos)
# import pylab as plt
# plt.plot(f.xvals, f.yvals)
# plt.show()
return f.xvals[ind]
def signalMinimum2(img, bins=None):
'''
minimum position between signal and background peak
'''
f = FitHistogramPeaks(img, bins=bins)
i = signalPeakIndex(f.fitParams)
spos = f.fitParams[i][1]
# spos = getSignalPeak(f.fitParams)[1]
# bpos = getBackgroundPeak(f.fitParams)[1]
bpos = f.fitParams[i - 1][1]
ind = np.logical_and(f.xvals > bpos, f.xvals < spos)
try:
i = np.argmin(f.yvals[ind])
return f.xvals[ind][i]
except ValueError as e:
if bins is None:
return signalMinimum2(img, bins=400)
else:
raise e
def signalMinimum(img, fitParams=None, n_std=3):
'''
intersection between signal and background peak
'''
if fitParams is None:
fitParams = FitHistogramPeaks(img).fitParams
    assert len(fitParams) > 1, 'need 2 peaks to get minimum signal'
i = signalPeakIndex(fitParams)
signal = fitParams[i]
bg = getBackgroundPeak(fitParams)
smn = signal[1] - n_std * signal[2]
bmx = bg[1] + n_std * bg[2]
if smn > bmx:
return smn
# peaks are overlapping
# define signal min. as intersection between both Gaussians
def solve(p1, p2):
s1, m1, std1 = p1
s2, m2, std2 = p2
a = (1 / (2 * std1**2)) - (1 / (2 * std2**2))
b = (m2 / (std2**2)) - (m1 / (std1**2))
c = (m1**2 / (2 * std1**2)) - (m2**2 / (2 * std2**2)) - \
np.log(((std2 * s1) / (std1 * s2)))
return np.roots([a, b, c])
i = solve(bg, signal)
try:
return i[np.logical_and(i > bg[1], i < signal[1])][0]
except IndexError:
# this error shouldn't occur... well
return max(smn, bmx)
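# Note on the intersection computed above (added explanation, not original
# code): the fit parameters appear to describe scaled normal densities
# s / (std * sqrt(2*pi)) * exp(-(x - m)**2 / (2 * std**2)). Setting the
# background and signal densities equal and taking logarithms gives the
# quadratic a*x**2 + b*x + c = 0 with
#     a = 1/(2*std1**2) - 1/(2*std2**2)
#     b = m2/std2**2 - m1/std1**2
#     c = m1**2/(2*std1**2) - m2**2/(2*std2**2) - log((std2*s1)/(std1*s2))
# which is exactly what solve() passes to np.roots(); the root lying between
# the background and signal means is used as the cut point.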
def getSignalMinimum(fitParams, n_std=3):
    assert len(fitParams) > 0, 'need min. 1 peak to get minimum signal'
if len(fitParams) == 1:
signal = fitParams[0]
return signal[1] - n_std * signal[2]
i = signalPeakIndex(fitParams)
signal = fitParams[i]
bg = fitParams[i - 1]
#bg = getBackgroundPeak(fitParams)
smn = signal[1] - n_std * signal[2]
bmx = bg[1] + n_std * bg[2]
if smn > bmx:
return smn
# peaks are overlapping
# define signal min. as intersection between both Gaussians
def solve(p1, p2):
s1, m1, std1 = p1
s2, m2, std2 = p2
a = (1 / (2 * std1**2)) - (1 / (2 * std2**2))
b = (m2 / (std2**2)) - (m1 / (std1**2))
c = (m1**2 / (2 * std1**2)) - (m2**2 / (2 * std2**2)) - \
np.log(((std2 * s1) / (std1 * s2)))
return np.roots([a, b, c])
i = solve(bg, signal)
try:
return i[np.logical_and(i > bg[1], i < signal[1])][0]
except IndexError:
        # something didn't work out - fall back to the simple estimate
return smn
def getSignalParameters(fitParams, n_std=3):
'''
return minimum, average, maximum of the signal peak
'''
signal = getSignalPeak(fitParams)
mx = signal[1] + n_std * signal[2]
mn = signal[1] - n_std * signal[2]
if mn < fitParams[0][1]:
mn = fitParams[0][1] # set to bg
return mn, signal[1], mx
def signalStd(img):
fitParams = FitHistogramPeaks(img).fitParams
signal = getSignalPeak(fitParams)
return signal[2]
def backgroundMean(img, fitParams=None):
try:
if fitParams is None:
fitParams = FitHistogramPeaks(img).fitParams
bg = getBackgroundPeak(fitParams)
return bg[1]
except Exception as e:
print(e)
# in case peaks were not found:
return img.mean()
def signalRange(img, fitParams=None, nSigma=3):
try:
if fitParams is None:
fitParams = FitHistogramPeaks(img).fitParams
signPeak = getSignalPeak(fitParams)
return (signalMinimum(img, fitParams, nSigma),
signPeak[1] + nSigma * signPeak[2])
# return (signPeak[1] - nSigma*signPeak[2],signPeak[1] +
# nSigma*signPeak[2])
except Exception as e:
print(e)
# in case peaks were not found:
s = img.std()
m = img.mean()
return m - nSigma * s, m + nSigma * s
def scaleParamsFromReference(img, reference):
# saving startup time:
from scipy.optimize import curve_fit
def ff(arr):
arr = imread(arr, 'gray')
if arr.size > 300000:
arr = arr[::10, ::10]
m = np.nanmean(arr)
s = np.nanstd(arr)
r = m - 3 * s, m + 3 * s
b = (r[1] - r[0]) / 5
return arr, r, b
img, imgr, imgb = ff(img)
reference, refr, refb = ff(reference)
nbins = np.clip(15, max(imgb, refb), 50)
refh = np.histogram(reference, bins=nbins, range=refr)[
0].astype(np.float32)
imgh = np.histogram(img, bins=nbins, range=imgr)[0].astype(np.float32)
import pylab as plt
plt.figure(1)
plt.plot(refh)
plt.figure(2)
plt.plot(imgh)
plt.show()
def fn(x, offs, div):
return (x - offs) / div
params, fitCovariances = curve_fit(fn, refh, imgh, p0=(0, 1))
perr = np.sqrt(np.diag(fitCovariances))
print('error scaling to reference image: %s' % perr[0])
# if perr[0] < 0.1:
return params[0], params[1]
def scaleParams(img, fitParams=None):
low, high = signalRange(img, fitParams)
offs = low
div = high - low
return offs, div
def getBackgroundPeak(fitParams):
return fitParams[0]
def getSignalPeak(fitParams):
i = signalPeakIndex(fitParams)
return fitParams[i]
def signalPeakIndex(fitParams):
if len(fitParams) == 1:
i = 0
else:
# find categorical signal peak as max(peak height*standard deviation):
sizes = [pi[0] * pi[2] for pi in fitParams[1:]]
# signal peak has to have positive avg:
for n, p in enumerate(fitParams[1:]):
if p[1] < 0:
sizes[n] = 0
i = np.argmax(sizes) + 1
return i
if __name__ == '__main__':
import sys
import pylab as plt
from fancytools.os.PathStr import PathStr
import imgProcessor
img = imread(PathStr(imgProcessor.__file__).dirname().join(
'media', 'electroluminescence', 'EL_module_orig.PNG'), 'gray')
print('EL signal within range of %s' % str(signalRange(img)))
print('EL signal minimum = %s' % signalMinimum(img))
if 'no_window' not in sys.argv:
plt.imshow(img)
plt.colorbar()
plt.show()
| gpl-3.0 | 7,949,265,973,686,205,000 | 26.028424 | 78 | 0.553425 | false |
fumitoh/modelx | modelx/tests/core/reference/relative/test_refmode.py | 1 | 2034 | import modelx as mx
import pytest
@pytest.fixture
def refmode_model():
"""
A---B---C---foo <-+
| | |
| +---bar --+
D
"""
import modelx as mx
m = mx.new_model()
A = mx.new_space('A')
B = A.new_space('B')
C = B.new_space('C')
@mx.defcells
def foo(x):
return x
D = m.new_space('D')
D.add_bases(B)
return m
def test_refmode_change(refmode_model):
m = refmode_model
m.A.B.C.bar = m.A.B.C.foo
assert m.D.C.bar is m.D.C.foo
m.A.B.C.absref(bar=m.A.B.C.foo)
assert m.D.C.bar is m.A.B.C.foo
m.A.B.C.relref(bar=m.A.B.C.foo)
assert m.D.C.bar is m.D.C.foo
m.A.B.C.absref(bar=m.A.B.C.foo)
assert m.D.C.bar is m.A.B.C.foo
@pytest.mark.parametrize("mode", ["relative", "auto"])
def test_refer_sibling(mode):
"""
A---B-------foo <-+
| | |
| +---bar --+
D
"""
import modelx as mx
m = mx.new_model()
A = mx.new_space('A')
B = A.new_space('B')
@mx.defcells
def foo(x):
return x
B.set_ref("bar", foo, mode)
D = m.new_space('D', bases=B)
assert D.bar is D.foo
@pytest.mark.parametrize("mode", ["absolute", "auto"])
def test_refer_parent(mode):
"""
A---B-------foo
| |
| +---bar --> A
D
"""
import modelx as mx
m = mx.new_model()
A = mx.new_space('A')
B = A.new_space('B')
@mx.defcells
def foo(x):
return x
B.set_ref("bar", A, mode)
D = m.new_space('D', bases=B)
assert D.bar is A
def test_refer_parent_error():
"""
A---B-------foo
| |
| +---bar --> A
D
"""
import modelx as mx
m = mx.new_model()
A = mx.new_space('A')
B = A.new_space('B')
@mx.defcells
def foo(x):
return x
B.set_ref("bar", A, "relative")
with pytest.raises(ValueError):
D = m.new_space('D', bases=B)
| gpl-3.0 | -1,664,453,725,808,355,300 | 17 | 54 | 0.457227 | false |
Osmose/normandy | recipe-server/normandy/recipes/migrations/0021_migrate_to_single_actions.py | 1 | 1111 | # -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2016-03-16 19:55
# flake8: noqa
from __future__ import unicode_literals
from django.db import migrations
def multiple_to_single(apps, schema_editor):
"""
Take the first action in a recipe and set it as the single action.
"""
Recipe = apps.get_model('recipes', 'Recipe')
for recipe in Recipe.objects.all():
if recipe.recipeaction_set.count() < 1:
raise ValueError('Cannot migrate recipe pk={0} as it has no actions. Delete it manually'
' or add an action and re-run this migration.'.format(recipe.pk))
recipe_action = recipe.recipeaction_set.order_by('order')[0]
recipe.action = recipe_action.action
recipe.arguments_json = recipe_action.arguments_json
recipe.save()
def noop(apps, schema_editor):
pass # Not too concerned about going backwards here.
class Migration(migrations.Migration):
dependencies = [
('recipes', '0020_auto_20160316_1947'),
]
operations = [
migrations.RunPython(multiple_to_single, noop)
]
| mpl-2.0 | -8,590,091,028,667,492,000 | 30.742857 | 100 | 0.647165 | false |
HoussemCharf/FunUtils | pythonMergeSort.py | 1 | 2025 | """
This is a pure python implementation of the merge sort algorithm
For doctests run following command:
python -m doctest -v merge_sort.py
or
python3 -m doctest -v merge_sort.py
For manual testing run:
python merge_sort.py
"""
from __future__ import print_function
def merge_sort(collection):
"""Pure implementation of the merge sort algorithm in Python
:param collection: some mutable ordered collection with heterogeneous
comparable items inside
:return: the same collection ordered by ascending
Examples:
>>> merge_sort([0, 5, 3, 2, 2])
[0, 2, 2, 3, 5]
>>> merge_sort([])
[]
>>> merge_sort([-2, -5, -45])
[-45, -5, -2]
"""
length = len(collection)
if length > 1:
midpoint = length // 2
left_half = merge_sort(collection[:midpoint])
right_half = merge_sort(collection[midpoint:])
i = 0
j = 0
k = 0
left_length = len(left_half)
right_length = len(right_half)
while i < left_length and j < right_length:
if left_half[i] < right_half[j]:
collection[k] = left_half[i]
i += 1
else:
collection[k] = right_half[j]
j += 1
k += 1
while i < left_length:
collection[k] = left_half[i]
i += 1
k += 1
while j < right_length:
collection[k] = right_half[j]
j += 1
k += 1
return collection
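# Worked example (added illustration, not part of the original script):
#   merge_sort([5, 1, 4, 2])
#     -> merge_sort([5, 1]) = [1, 5]   and   merge_sort([4, 2]) = [2, 4]
#     -> merging [1, 5] and [2, 4] gives [1, 2, 4, 5]
# Each recursion level does O(n) merge work across O(log n) levels, giving the
# usual O(n log n) running time.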
if __name__ == '__main__':
import sys
# For python 2.x and 3.x compatibility: 3.x has no raw_input builtin
# otherwise 2.x's input builtin function is too "smart"
if sys.version_info.major < 3:
input_function = raw_input
else:
input_function = input
user_input = input_function('Enter numbers separated by a comma:\n')
unsorted = [int(item) for item in user_input.split(',')]
print(merge_sort(unsorted)) | mit | -4,175,166,591,996,086,300 | 26.957143 | 73 | 0.545185 | false |
walterbender/locosugar | toolbar_utils.py | 1 | 5547 | # -*- coding: utf-8 -*-
# Copyright (c) 2011, Walter Bender
# Copyright (c) 2012, Ignacio Rodriguez
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# You should have received a copy of the GNU General Public License
# along with this library; if not, write to the Free Software
# Foundation, 51 Franklin Street, Suite 500 Boston, MA 02110-1335 USA
from gi.repository import Gtk
from sugar3.graphics.radiotoolbutton import RadioToolButton
from sugar3.graphics.toolbutton import ToolButton
from sugar3.graphics.combobox import ComboBox
def combo_factory(combo_array, toolbar, callback, cb_arg=None,
tooltip=None, default=None):
'''Factory for making a toolbar combo box'''
combo = ComboBox()
if tooltip is not None and hasattr(combo, 'set_tooltip_text'):
combo.set_tooltip_text(tooltip)
if cb_arg is not None:
combo.connect('changed', callback, cb_arg)
else:
combo.connect('changed', callback)
for i, selection in enumerate(combo_array):
combo.append_item(i, selection, None)
combo.show()
toolitem = Gtk.ToolItem()
toolitem.add(combo)
if hasattr(toolbar, 'insert'): # the main toolbar
toolbar.insert(toolitem, -1)
else: # or a secondary toolbar
toolbar.props.page.insert(toolitem, -1)
toolitem.show()
if default is not None:
combo.set_active(combo_array.index(default))
return combo
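# Illustrative usage sketch (added note, not part of the original module):
# adding a speed selector to an activity toolbar. The toolbar object and the
# callback name are hypothetical.
#
#     combo = combo_factory(['slow', 'normal', 'fast'], toolbar,
#                           self._speed_cb, tooltip='Game speed',
#                           default='normal')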
def entry_factory(default_string, toolbar, tooltip=None, max=3):
''' Factory for adding a text box to a toolbar '''
entry = Gtk.Entry()
entry.set_text(default_string)
if tooltip is not None and hasattr(entry, 'set_tooltip_text'):
entry.set_tooltip_text(tooltip)
entry.set_width_chars(max)
entry.show()
toolitem = Gtk.ToolItem()
toolitem.add(entry)
if hasattr(toolbar, 'insert'): # the main toolbar
toolbar.insert(toolitem, -1)
else: # or a secondary toolbar
toolbar.props.page.insert(toolitem, -1)
toolitem.show()
return entry
def button_factory(icon_name, toolbar, callback, cb_arg=None, tooltip=None,
accelerator=None):
    '''Factory for making toolbar buttons'''
button = ToolButton(icon_name)
if tooltip is not None:
button.set_tooltip(tooltip)
button.props.sensitive = True
if accelerator is not None:
button.props.accelerator = accelerator
if cb_arg is not None:
button.connect('clicked', callback, cb_arg)
else:
button.connect('clicked', callback)
if hasattr(toolbar, 'insert'): # the main toolbar
toolbar.insert(button, -1)
else: # or a secondary toolbar
toolbar.props.page.insert(button, -1)
button.show()
return button
def radio_factory(name, toolbar, callback, cb_arg=None, tooltip=None,
group=None):
''' Add a radio button to a toolbar '''
button = RadioToolButton(group=group)
button.set_icon_name(name)
if callback is not None:
if cb_arg is None:
button.connect('clicked', callback)
else:
button.connect('clicked', callback, cb_arg)
if hasattr(toolbar, 'insert'): # Add button to the main toolbar...
toolbar.insert(button, -1)
else: # ...or a secondary toolbar.
toolbar.props.page.insert(button, -1)
button.show()
if tooltip is not None:
button.set_tooltip(tooltip)
return button
def label_factory(toolbar, label_text, width=None):
''' Factory for adding a label to a toolbar '''
label = Gtk.Label(label_text)
label.set_line_wrap(True)
if width is not None:
label.set_size_request(width, -1) # doesn't work on XOs
label.show()
toolitem = Gtk.ToolItem()
toolitem.add(label)
if hasattr(toolbar, 'insert'): # the main toolbar
toolbar.insert(toolitem, -1)
else: # or a secondary toolbar
toolbar.props.page.insert(toolitem, -1)
toolitem.show()
return label
def separator_factory(toolbar, expand=False, visible=True):
''' add a separator to a toolbar '''
separator = Gtk.SeparatorToolItem()
separator.props.draw = visible
separator.set_expand(expand)
if hasattr(toolbar, 'insert'): # the main toolbar
toolbar.insert(separator, -1)
else: # or a secondary toolbar
toolbar.props.page.insert(separator, -1)
separator.show()
def image_factory(image, toolbar, tooltip=None):
''' Add an image to the toolbar '''
img = Gtk.Image()
img.set_from_pixbuf(image)
img_tool = Gtk.ToolItem()
img_tool.add(img)
if tooltip is not None:
img.set_tooltip_text(tooltip)
if hasattr(toolbar, 'insert'): # the main toolbar
toolbar.insert(img_tool, -1)
else: # or a secondary toolbar
toolbar.props.page.insert(img_tool, -1)
img_tool.show()
return img
def spin_factory(default, min, max, callback, toolbar):
spin_adj = Gtk.Adjustment(default, min, max, 1, 32, 0)
spin = Gtk.SpinButton(spin_adj, 0, 0)
spin_id = spin.connect('value-changed', callback)
spin.set_numeric(True)
spin.show()
toolitem = Gtk.ToolItem()
toolitem.add(spin)
if hasattr(toolbar, 'insert'): # the main toolbar
toolbar.insert(toolitem, -1)
else:
toolbar.props.page.insert(toolitem, -1)
toolitem.show()
return spin
| gpl-3.0 | -9,041,999,313,022,829,000 | 33.240741 | 75 | 0.658194 | false |
ionux/bitforge | tests/unit.py | 1 | 1117 | from bitforge.unit import Unit
class TestUnit:
def test_btc_accessors(self):
u = Unit(btc = 1.2)
assert u.btc == 1.2
assert u.mbtc == 1200
assert u.bits == 1200000
assert u.satoshis == 120000000
def test_btc_conversion(self):
u = Unit(btc = 1.3)
assert u.mbtc == 1300
assert u.bits == 1300000
assert u.satoshis == 130000000
u = Unit(mbtc = 1.3)
assert u.btc == 0.0013
assert u.bits == 1300
assert u.satoshis == 130000
u = Unit(bits = 1.3)
assert u.btc == 0.0000013
assert u.mbtc == 0.0013
assert u.satoshis == 130
u = Unit(satoshis = 3)
assert u.btc == 0.00000003
assert u.mbtc == 0.00003
assert u.bits == 0.03
# TODO: Review presition
# def test_unit_rates(self):
# u = Unit.from_fiat(1.3, 350)
# assert u.at_rate(350) == 1.3
# u = Unit(btc = 0.0123)
# assert u.at_rate(10) == 0.12
def test_repr(self):
u = Unit(btc = 1.3)
assert repr(u) == '<Unit: 130000000 satoshis>'
| mit | 5,227,069,257,949,842,000 | 24.386364 | 54 | 0.522829 | false |
markbenvenuto/buildbaron | bfg_analyzer.py | 1 | 24487 | #!/usr/bin/env python3
"""
Script to analyze the Jira Build Baron Queue
"""
import argparse
import binascii
import datetime
import dateutil
import dateutil.relativedelta
import hashlib
import json
import os
import pprint
import re
import requests
import stat
import sys
from multiprocessing import cpu_count
if __name__ == "__main__" and __package__ is None:
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(os.path.realpath(__file__)))))
import buildbaron.analyzer.analyzer_config
import buildbaron.analyzer.evergreen
import buildbaron.analyzer.evg_log_file_analyzer
import buildbaron.analyzer.faultinfo
import buildbaron.analyzer.jira_client
import buildbaron.analyzer.log_file_analyzer
import buildbaron.analyzer.logkeeper
import buildbaron.analyzer.mongo_client
import buildbaron.analyzer.parallel_failure_analyzer
import buildbaron.analyzer.timeout_file_analyzer
# URL of the default Jira server.
# If you use .com, it breaks horribly
def ParseJiraTicket(issue, summary, description):
# Parse summary
if "System Failure:" in summary:
type = "system_failure"
elif "Timed Out:" in summary:
type = "timed_out"
elif "Failures" in summary:
type = "test_failure"
elif "Failure" in summary:
type = "test_failure"
elif "Failed" in summary:
type = "task_failure"
else:
raise ValueError("Unknown summary " + str(summary))
suite, build_variant, project, githash = ("unknown", "unknown", "unknown", "unknown")
summary_match = re.match(".*?: (.*) on (.*) \[(.*) @ ([a-zA-Z0-9]+)\]", summary)
if summary_match:
suite, build_variant, project, githash = summary_match.groups()
# Parse Body of description
lines = description.split("\n")
tests = []
for line in lines:
if line.startswith('h2.'):
url_match = re.search("\|(.*)\]", line)
task_url = url_match.group(1)
elif "[Logs|" in line:
log_line_match = re.match("\*(.*)\* - \[Logs\|(.*?)\]", line)
if log_line_match:
test_name = log_line_match.group(1)
log_file = log_line_match.group(2)
tests.append({'name': test_name, 'log_file': log_file})
else:
pass
return bfg_fault_description(issue,
summary,
type,
project,
githash,
task_url,
suite,
build_variant,
tests)
class bfg_fault_description:
"""Parse a fault description into type"""
def __init__(self,
issue,
summary,
type,
project,
githash,
task_url,
suite,
build_variant,
tests):
self.issue = issue
self.summary = summary
self.type = type
self.project = project
self.githash = githash
self.task_url = task_url
self.suite = suite
self.build_variant = build_variant
self.tests = tests
def to_json(self):
return json.dumps(self, cls=BFGCustomEncoder)
class BFGCustomEncoder(json.JSONEncoder):
def default(self, obj):
if isinstance(obj, bfg_fault_description):
return {
"issue": obj.issue,
"summary": obj.summary,
"type": obj.type,
"task_url": obj.task_url,
"project": obj.project,
"githash": obj.githash,
"suite": obj.suite,
"build_variant": obj.build_variant,
"tests": obj.tests
}
# Let the base class default method raise the TypeError
return json.JSONEncoder.default(self, obj)
class BFGCustomDecoder(json.JSONDecoder):
def __init__(self, *args, **kwargs):
json.JSONDecoder.__init__(self, object_hook=self.object_hook, *args, **kwargs)
def object_hook(self, obj):
if 'task_url' not in obj and "project" not in obj:
return obj
        return bfg_fault_description(obj['issue'], obj['summary'], obj['type'], obj['project'],
                                     obj['githash'], obj['task_url'], obj['suite'],
                                     obj['build_variant'], obj['tests'])
class bfg_analyzer(object):
"""description of class"""
__STACK_FRAME_EXTRACTING_REGEX = re.compile(
"([a-zA-Z0-9\./]*)@((?:[a-zA-Z0-9_()]+/?)+\.js):(\d+)(?::\d+)?$")
def __init__(self, jira_client):
self.jira_client = jira_client
self.evg_client = buildbaron.analyzer.evergreen.client()
self.pp = pprint.PrettyPrinter()
def query(self, query_str):
results = self.jira_client.search_issues(query_str, maxResults=100)
print("Result Count %d" % len(results))
bfs = []
for result in results:
bfs.append(ParseJiraTicket(
result.key,
result.fields.summary,
result.fields.description
))
# Save to disk to help investigation of bad results
bfs_str = json.dumps(bfs, cls=BFGCustomEncoder, indent="\t")
with open("bfs.json", "wb") as sjh:
sjh.write(bfs_str.encode())
# Return a list of dictionaries instead of a list of bfg_fault_description
return json.loads(bfs_str)
def check_logs(self, bfs):
summaries = []
for bf in bfs:
summaries.append(self.process_bf(bf))
jira_issue = self.jira_client.get_bfg_issue(bf["issue"])
jira_issue.fields.labels.append("bot-analyzed")
jira_issue.add_field_value("labels", "bot-analyzed")
return summaries
# TODO: parallelize the check_logs function with this since we are network bound
# builds = thread_map( lambda item : process_bf(base_url, item), commits)
def thread_map(func, items):
# We can use a with statement to ensure threads are cleaned up promptly
with concurrent.futures.ThreadPoolExecutor(max_workers=cpu_count() * 2) as executor:
# Start the load operations and mark each future with its URL
future_to_item = {executor.submit(func, item): item for item in items}
results = []
for future in concurrent.futures.as_completed(future_to_item):
item = future_to_item[future]
try:
nf = future.result()
if nf:
results += nf
except Exception as exc:
print('%r generated an exception: %s' % (item, exc))
return results
def create_bf_cache(self, bf):
"""Create a directory to cache the log file in"""
if not os.path.exists("cache"):
os.mkdir("cache")
if not os.path.exists(os.path.join("cache", "bf")):
os.mkdir(os.path.join("cache", "bf"))
m = hashlib.sha1()
m.update(bf["task_url"].encode())
digest = m.digest()
digest64 = binascii.b2a_hex(digest).decode()
bf["hash"] = digest64
path = os.path.join("cache", "bf", digest64)
bf["bf_cache"] = path
if not os.path.exists(path):
os.mkdir(path)
def create_test_cache(self, bf, test):
"""Create a directory to cache the log file in"""
m = hashlib.sha1()
m.update(test["name"].encode())
digest = m.digest()
digest64 = binascii.b2a_hex(digest).decode()
test["hash"] = digest64
path = os.path.join(bf['bf_cache'], digest64)
test["cache"] = path
if not os.path.exists(path):
os.mkdir(path)
def process_bf(self, bf):
"""
Process a log through the log file analyzer
Saves analysis information in cache\XXX\summary.json
"""
self.create_bf_cache(bf)
print("BF: " + str(bf))
summary_json_file = os.path.join(bf["bf_cache"], "summary.json")
# If we've already analyzed this failure, don't do it again.
if os.path.exists(summary_json_file):
with open(summary_json_file, "rb") as summary_file:
return json.loads(summary_file.read().decode('utf-8'))
system_log_url = buildbaron.analyzer.evergreen.task_get_system_raw_log(bf['task_url'])
task_log_file_url = buildbaron.analyzer.evergreen.task_get_task_raw_log(bf["task_url"])
bf['system_log_url'] = system_log_url
bf['task_log_file_url'] = task_log_file_url
# Will be populated with objects like {"test": <test name>, "faults": [...]}
tests_fault_info = []
# Will be populated with fault objects.
extracted_faults = self.process_task_failure(bf)
if bf['type'] == 'test_failure':
# Go through each test
for test in bf['tests']:
tests_fault_info.append({
"test": test["name"],
"faults": self.process_test(bf, test)
})
elif bf['type'] == 'system_failure':
extracted_faults.extend(self.process_system_failure(bf))
elif bf['type'] == 'timed_out':
task_faults, test_faults = self.process_time_out(bf)
extracted_faults.extend(task_faults)
tests_fault_info.extend(test_faults)
try:
summary_obj = {
"bfg_info": bf,
"faults": [fault.to_json() for fault in extracted_faults],
"test_faults": [
{"test": info["test"], "faults": [fault.to_json() for fault in info["faults"]]}
for info in tests_fault_info
],
"backtraces": [],
}
except TypeError:
summary_obj = {
"bfg_info": bf,
"faults": [fault.to_json() for fault in extracted_faults],
"backtraces": [],
}
summary_str = json.dumps(summary_obj)
def flatten(a):
flattened = []
for elem in a:
if type(elem) == list:
flattened.extend(elem)
else:
flattened.append(elem)
return flattened
# Update jira tickets to include new information.
try:
all_faults = (extracted_faults
+ flatten([testinfo["faults"] for testinfo in tests_fault_info]))
except:
all_faults = extracted_faults
for fault in all_faults:
self.jira_client.add_fault_comment(bf["issue"], fault)
if fault.category == "js backtrace":
backtrace = self.build_backtrace(fault, bf["githash"])
self.jira_client.add_github_backtrace_context(bf["issue"], backtrace)
summary_obj["backtraces"].append(backtrace)
with open(summary_json_file, "wb") as sjh:
sjh.write(summary_str.encode())
return summary_obj
def build_backtrace(self, fault, githash):
"""
returns a list of strings representing a backtrace, as well as a parsed version represented
as a list of objects of the form
{
"github_url": "https://github.com/mongodb/mongo/blob/deadbeef/jstests/core/test.js#L42",
"first_line_number": 37,
"line_number": 42,
"frame_number": 0,
"file_path": "jstests/core/test.js",
"file_name": "test.js",
"lines": ["line 37", "line 38", ..., "line 47"]
}
"""
trace = []
# Also populate a plain-text style backtrace, with github links to frames.
n_lines_of_context = 5
stack_lines = fault.context.splitlines()
# Traverse the stack frames in reverse.
for i in range(len(stack_lines) - 1, -1, -1):
line = stack_lines[i].replace("\\", "/") # Normalize separators.
stack_match = bfg_analyzer.__STACK_FRAME_EXTRACTING_REGEX.search(line)
if stack_match is None:
if re.search("failed to load", line) is not None:
continue # skip that line, it's expected.
break # any other line should be the end of the backtrace
(func_name, file_path, line_number) = stack_match.groups()
gui_github_url = (
"https://github.com/mongodb/mongo/blob/{githash}/{file_path}#L{line_number}".format(
githash=githash,
file_path=file_path,
line_number=line_number))
line_number = int(line_number)
# add a {code} frame to the comment, showing the line involved in the stack trace, with
# some context of surrounding lines. Don't do this for the stack frames within
# src/mongo/shell, since they tend not to be as interesting.
if "src/mongo/shell" in file_path:
continue
raw_github_url = (
"https://raw.githubusercontent.com/mongodb/mongo/{githash}/{file_path}".format(
githash=githash,
file_path=file_path))
raw_code = requests.get(raw_github_url).text
start_line = max(0, line_number - n_lines_of_context)
end_line = line_number + n_lines_of_context
code_context = raw_code.splitlines()[start_line:end_line]
file_name = file_path[file_path.rfind("/") + 1:]
trace.append({
"github_url": gui_github_url,
"first_line_number": start_line,
"line_number": line_number,
"frame_number": i,
"file_path": file_path,
"file_name": file_name,
"lines": code_context
})
return trace
def process_system_failure(self, bf):
cache_dir = bf["bf_cache"]
log_file = os.path.join(cache_dir, "test.log")
bf['log_file_url'] = bf['task_log_file_url']
bf['name'] = 'task'
if not os.path.exists(log_file):
self.evg_client.retrieve_file(bf['task_log_file_url'], log_file)
with open(log_file, "rb") as lfh:
log_file_str = lfh.read().decode('utf-8')
analyzer = buildbaron.analyzer.evg_log_file_analyzer.EvgLogFileAnalyzer(log_file_str)
analyzer.analyze()
faults = analyzer.get_faults()
if len(faults) == 0:
print("===========================")
print("No system failure faults detected: " + self.pp.pformat(bf))
print("To Debug: python analyzer" + os.path.sep + "log_file_analyzer.py " + log_file)
print("===========================")
return faults
def process_task_failure(self, bf):
cache_dir = bf["bf_cache"]
log_file = os.path.join(cache_dir, "test.log")
bf['log_file_url'] = bf['task_log_file_url']
bf['name'] = 'task'
if not os.path.exists(log_file):
self.evg_client.retrieve_file(bf['task_log_file_url'], log_file)
with open(log_file, "rb") as lfh:
log_file_str = lfh.read().decode('utf-8')
extracted_faults = []
analyzer = buildbaron.analyzer.evg_log_file_analyzer.EvgLogFileAnalyzer(log_file_str)
analyzer.analyze()
extracted_faults.extend(analyzer.get_faults())
oom_analyzer = self.check_for_oom_killer(bf)
if oom_analyzer is not None:
extracted_faults.extend(oom_analyzer.get_faults())
return extracted_faults
def process_time_out(self, bf):
"""
Returns a list of faults at the task level, and also a list of faults at the test level,
which is populated with test faults if any are determined to have timed out.
"""
cache_dir = bf["bf_cache"]
log_file = os.path.join(cache_dir, "test.log")
bf['log_file_url'] = bf['task_log_file_url']
bf['name'] = 'task'
if not os.path.exists(log_file):
self.evg_client.retrieve_file(bf['task_log_file_url'], log_file)
with open(log_file, "rb") as lfh:
log_file_str = lfh.read().decode('utf-8')
task_faults = []
test_faults = []
print("Checking " + log_file)
analyzer = buildbaron.analyzer.timeout_file_analyzer.TimeOutAnalyzer(log_file_str)
analyzer.analyze()
task_faults.extend(analyzer.get_faults())
incomplete_tests = analyzer.get_incomplete_tests()
if len(incomplete_tests) == 0:
if len(task_faults) == 0:
print("===========================")
print("No faults found for task: " + self.pp.pformat(bf))
print("To Debug: python analyzer" + os.path.sep + "timeout_file_analyzer.py " +
log_file)
print("===========================")
for incomplete_test in incomplete_tests:
jira_issue = self.jira_client.get_bfg_issue(bf["issue"])
timeout_comment = (
"*" +
incomplete_test["name"] +
" timed out* - [Logs|" +
incomplete_test["log_file"] +
"]"
)
try:
if "bot-analyzed" not in jira_issue.fields.labels:
jira_issue.update(
description=jira_issue.fields.description +
"\n{0}\n".format(timeout_comment))
except buildbaron.analyzer.jira_client.JIRAError as e:
print("Error updating jira: " + str(e))
test_faults.extend(self.process_test(bf, incomplete_test))
return task_faults, test_faults
def process_test(self, bf, test):
self.create_test_cache(bf, test)
cache_dir = test["cache"]
log_file = os.path.join(cache_dir, "test.log")
# TODO(CWS) what is this?
nested_test = test
for key in bf.keys():
if key != 'tests' and key != 'name':
nested_test[key] = bf[key]
faults = []
# If logkeeper is down, we will not have a log file :-(
if test["log_file"] is not None and test["log_file"] != "" and "test/None" not in test[
"log_file"] and "log url not available" not in test["log_file"]:
if not os.path.exists(log_file):
buildbaron.analyzer.logkeeper.retieve_raw_log(test["log_file"], log_file)
test["log_file_url"] = buildbaron.analyzer.logkeeper.get_raw_log_url(
test["log_file"])
log_file_stat = os.stat(log_file)
if log_file_stat[stat.ST_SIZE] > 50 * 1024 * 1024:
print("Skipping Large File : " + str(log_file_stat[stat.ST_SIZE]))
return []
else:
test["log_file_url"] = "none"
with open(log_file, "wb") as lfh:
lfh.write("Logkeeper was down\n".encode())
log_file_stat = os.stat(log_file)
if log_file_stat[stat.ST_SIZE] > 50 * 1024 * 1024:
print("Skipping Large File : " + str(log_file_stat[stat.ST_SIZE]) + " at " + str(
log_file))
return []
with open(log_file, "rb") as lfh:
log_file_str = lfh.read().decode('utf-8')
print("Checking Log File")
LFS = buildbaron.analyzer.log_file_analyzer.LogFileSplitter(log_file_str)
analyzer = buildbaron.analyzer.log_file_analyzer.LogFileAnalyzer(LFS.get_streams())
analyzer.analyze()
faults.extend(analyzer.get_faults())
if test["name"].startswith("basic") and test["name"].endswith(".js"):
print("Anlyzing basic.js or basicPlus.js failure")
parallel_analyzer = \
buildbaron.analyzer.parallel_failure_analyzer.ParallelTestFailureAnalyzer(
log_file_str)
parallel_analyzer.analyze()
faults.extend(parallel_analyzer.get_faults())
if len(faults) == 0:
print("===========================")
print("No faults found for test: " + self.pp.pformat(bf))
print("To Debug: python analyzer" + os.path.sep + "log_file_analyzer.py " +
log_file)
print("===========================")
return faults
def check_for_oom_killer(self, bf):
cache_dir = bf["bf_cache"]
log_file = os.path.join(cache_dir, "test.log")
if not os.path.exists(log_file):
self.evg_client.retrieve_file(bf['system_log_url'], log_file)
with open(log_file, "rb") as lfh:
log_file_str = lfh.read().decode('utf-8')
analyzer = buildbaron.analyzer.evg_log_file_analyzer.EvgLogFileAnalyzer(log_file_str)
analyzer.analyze_oom()
if len(analyzer.get_faults()) > 0:
return analyzer
return None
def query_bfg_str(start, end):
# Dates should be formatted as 2017-01-25
return ('project = bfg'
' AND resolution is EMPTY'
' AND created > {createdStart}'
' AND created <= {createdEnd}'
' AND summary !~ "System Failure:"'
' ORDER BY created DESC'.format(
createdStart=start.strftime("%Y-%m-%d"),
createdEnd=end.strftime("%Y-%m-%d")))
def get_last_week_query():
today = datetime.date.today()
    # The start of build baron - if today is Wednesday, returns the prior Wednesday; otherwise
    # returns the Wednesday before last
last_wednesday = today + dateutil.relativedelta.relativedelta(
weekday=dateutil.relativedelta.WE(-2))
# The end of build baron
last_tuesday = today + dateutil.relativedelta.relativedelta(
weekday=dateutil.relativedelta.WE(-1))
return query_bfg_str(last_wednesday, last_tuesday)
def get_this_week_query():
today = datetime.date.today()
# The start of build baron - last Wednesday (or today if today is Wednesday)
next_wednesday = today + dateutil.relativedelta.relativedelta(
weekday=dateutil.relativedelta.WE(-1))
# The end of build baron - this Wednesday
this_tuesday = today + dateutil.relativedelta.relativedelta(
weekday=dateutil.relativedelta.WE(2))
return query_bfg_str(next_wednesday, this_tuesday)
def main():
parser = argparse.ArgumentParser(description='Analyze test failure in jira.')
group = parser.add_argument_group("Jira options")
group.add_argument(
'--jira_server',
type=str,
help="Jira Server to query",
default=buildbaron.analyzer.analyzer_config.jira_server())
group.add_argument(
'--jira_user',
type=str,
help="Jira user name",
default=buildbaron.analyzer.analyzer_config.jira_user())
group = parser.add_mutually_exclusive_group()
group.add_argument(
'--last_week', action='store_true', help="Query of Last week's build baron queue")
group.add_argument(
'--this_week', action='store_true', help="Query of This week's build baron queue")
group.add_argument('--query_str', type=str, help="Any query against implicitly the BFG project")
args = parser.parse_args()
if args.query_str:
query_str = "(PROJECT = BFG) AND (%s)" % args.query_str
elif args.last_week:
query_str = get_last_week_query()
else:
query_str = get_this_week_query()
print("Query: %s" % query_str)
# Connect to mongod
print("Initializing local MongoDB server...")
buildbaron.analyzer.mongo_client.reinit_db()
# Connect to jira
jira_client = buildbaron.analyzer.jira_client.jira_client(args.jira_server, args.jira_user)
# Create our analyzer
bfa = bfg_analyzer(jira_client)
# Fetch desired BFG tickets
bfs = bfa.query(query_str)
# Analyze for failure
failed_bfs = bfa.check_logs(bfs)
print("Total BFs to investigate %d\n" % len(failed_bfs))
failed_bfs_root = {
'query': query_str,
'date': datetime.datetime.now().isoformat(' '),
'bfs': failed_bfs
}
with open("failed_bfs.json", "w", encoding="utf8") as sjh:
json.dump(failed_bfs_root, sjh, indent="\t")
buildbaron.analyzer.mongo_client.load_bfs(failed_bfs)
if __name__ == '__main__':
main()
| apache-2.0 | -1,692,208,233,766,159,000 | 34.283862 | 100 | 0.556663 | false |
jjscarafia/CUPS-Cloud-Print | reportissues.py | 2 | 2412 | #! /bin/sh
"true" '''\'
if command -v python2 > /dev/null; then
exec python2 "$0" "$@"
else
exec python "$0" "$@"
fi
exit $?
'''
# CUPS Cloudprint - Print via Google Cloud Print
# Copyright (C) 2013 Simon Cadman
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
if __name__ == '__main__': # pragma: no cover
import sys
import os
import subprocess
libpath = "/usr/local/share/cloudprint-cups/"
if not os.path.exists(libpath):
libpath = "/usr/share/cloudprint-cups"
sys.path.insert(0, libpath)
from auth import Auth
from printermanager import PrinterManager
from ccputils import Utils
Utils.SetupLogging()
# line below is replaced on commit
CCPVersion = "20140814.2 000000"
Utils.ShowVersion(CCPVersion)
requestors, storage = Auth.SetupAuth(True)
printer_manager = PrinterManager(requestors)
printers = printer_manager.getPrinters()
if printers is None:
print "ERROR: No Printers Found"
sys.exit(1)
for printer in printers:
print printer.getCUPSDriverDescription()
print ""
print printer.getFields()
print printer['capabilities']
print "\n"
ppdname = printer.getPPDName()
p1 = subprocess.Popen(
(os.path.join(libpath, 'dynamicppd.py'), 'cat', ppdname.lstrip('-')),
stdout=subprocess.PIPE)
ppddata = p1.communicate()[0]
p = subprocess.Popen(['cupstestppd', '-'], stdout=subprocess.PIPE, stdin=subprocess.PIPE)
testdata = p.communicate(ppddata)[0]
result = p.returncode
print "Result of cupstestppd was " + str(result)
print "".join(testdata)
if result != 0:
print "cupstestppd errored: "
print ppddata
print "\n"
| gpl-3.0 | -9,007,118,623,980,421,000 | 32.5 | 97 | 0.648425 | false |
gzamboni/sdnResilience | loxi/of14/queue_stats_prop.py | 1 | 3603 | # Copyright (c) 2008 The Board of Trustees of The Leland Stanford Junior University
# Copyright (c) 2011, 2012 Open Networking Foundation
# Copyright (c) 2012, 2013 Big Switch Networks, Inc.
# See the file LICENSE.pyloxi which should have been included in the source distribution
# Automatically generated by LOXI from template module.py
# Do not modify
import struct
import loxi
import util
import loxi.generic_util
import sys
ofp = sys.modules['loxi.of14']
class queue_stats_prop(loxi.OFObject):
subtypes = {}
def __init__(self, type=None):
if type != None:
self.type = type
else:
self.type = 0
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for length at index 1
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return ''.join(packed)
@staticmethod
def unpack(reader):
subtype, = reader.peek('!H', 0)
subclass = queue_stats_prop.subtypes.get(subtype)
if subclass:
return subclass.unpack(reader)
obj = queue_stats_prop()
obj.type = reader.read("!H")[0]
_length = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_length, 4)
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.type != other.type: return False
return True
def pretty_print(self, q):
q.text("queue_stats_prop {")
with q.group():
with q.indent(2):
q.breakable()
q.breakable()
q.text('}')
class experimenter(queue_stats_prop):
subtypes = {}
type = 65535
def __init__(self, experimenter=None, exp_type=None):
if experimenter != None:
self.experimenter = experimenter
else:
self.experimenter = 0
if exp_type != None:
self.exp_type = exp_type
else:
self.exp_type = 0
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for length at index 1
packed.append(struct.pack("!L", self.experimenter))
packed.append(struct.pack("!L", self.exp_type))
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return ''.join(packed)
@staticmethod
def unpack(reader):
subtype, = reader.peek('!L', 4)
subclass = experimenter.subtypes.get(subtype)
if subclass:
return subclass.unpack(reader)
obj = experimenter()
_type = reader.read("!H")[0]
assert(_type == 65535)
_length = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_length, 4)
obj.experimenter = reader.read("!L")[0]
obj.exp_type = reader.read("!L")[0]
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.experimenter != other.experimenter: return False
if self.exp_type != other.exp_type: return False
return True
def pretty_print(self, q):
q.text("experimenter {")
with q.group():
with q.indent(2):
q.breakable()
q.text("exp_type = ");
q.text("%#x" % self.exp_type)
q.breakable()
q.text('}')
queue_stats_prop.subtypes[65535] = experimenter
| gpl-2.0 | 4,146,537,638,850,166,000 | 27.824 | 88 | 0.566472 | false |
googleapis/python-bigquery | samples/snippets/view_test.py | 1 | 3789 | # Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import datetime
import uuid
from google.cloud import bigquery
import pytest
import view
def temp_suffix():
now = datetime.datetime.now()
return f"{now.strftime('%Y%m%d%H%M%S')}_{uuid.uuid4().hex[:8]}"
@pytest.fixture(autouse=True)
def bigquery_client_patch(monkeypatch, bigquery_client):
monkeypatch.setattr(bigquery, "Client", lambda: bigquery_client)
@pytest.fixture(scope="module")
def view_dataset_id(bigquery_client, project_id):
dataset_id = f"{project_id}.view_{temp_suffix()}"
bigquery_client.create_dataset(dataset_id)
yield dataset_id
bigquery_client.delete_dataset(dataset_id, delete_contents=True)
@pytest.fixture(scope="module")
def view_id(bigquery_client, view_dataset_id):
view_id = f"{view_dataset_id}.my_view"
yield view_id
bigquery_client.delete_table(view_id, not_found_ok=True)
@pytest.fixture(scope="module")
def source_dataset_id(bigquery_client, project_id):
dataset_id = f"{project_id}.view_{temp_suffix()}"
bigquery_client.create_dataset(dataset_id)
yield dataset_id
bigquery_client.delete_dataset(dataset_id, delete_contents=True)
@pytest.fixture(scope="module")
def source_table_id(bigquery_client, source_dataset_id):
source_table_id = f"{source_dataset_id}.us_states"
job_config = bigquery.LoadJobConfig(
schema=[
bigquery.SchemaField("name", "STRING"),
bigquery.SchemaField("post_abbr", "STRING"),
],
skip_leading_rows=1,
)
load_job = bigquery_client.load_table_from_uri(
"gs://cloud-samples-data/bigquery/us-states/us-states.csv",
source_table_id,
job_config=job_config,
)
load_job.result()
yield source_table_id
bigquery_client.delete_table(source_table_id, not_found_ok=True)
def test_view(capsys, view_id, view_dataset_id, source_table_id, source_dataset_id):
override_values = {
"view_id": view_id,
"source_id": source_table_id,
}
got = view.create_view(override_values)
assert source_table_id in got.view_query
out, _ = capsys.readouterr()
assert view_id in out
got = view.get_view(override_values)
assert source_table_id in got.view_query
assert "'W%'" in got.view_query
out, _ = capsys.readouterr()
assert view_id in out
assert source_table_id in out
assert "'W%'" in out
got = view.update_view(override_values)
assert source_table_id in got.view_query
assert "'M%'" in got.view_query
out, _ = capsys.readouterr()
assert view_id in out
project_id, dataset_id, table_id = view_id.split(".")
override_values = {
"analyst_group_email": "[email protected]",
"view_dataset_id": view_dataset_id,
"source_dataset_id": source_dataset_id,
"view_reference": {
"projectId": project_id,
"datasetId": dataset_id,
"tableId": table_id,
},
}
view_dataset, source_dataset = view.grant_access(override_values)
assert len(view_dataset.access_entries) != 0
assert len(source_dataset.access_entries) != 0
out, _ = capsys.readouterr()
assert "[email protected]" in out
assert table_id in out
| apache-2.0 | -6,257,050,349,015,454,000 | 31.384615 | 84 | 0.676432 | false |
virtualopensystems/nova | nova/virt/hyperv/driver.py | 1 | 9838 | # Copyright (c) 2010 Cloud.com, Inc
# Copyright (c) 2012 Cloudbase Solutions Srl
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
A Hyper-V Nova Compute driver.
"""
from nova.i18n import _
from nova.openstack.common import log as logging
from nova.virt import driver
from nova.virt.hyperv import hostops
from nova.virt.hyperv import livemigrationops
from nova.virt.hyperv import migrationops
from nova.virt.hyperv import rdpconsoleops
from nova.virt.hyperv import snapshotops
from nova.virt.hyperv import vmops
from nova.virt.hyperv import volumeops
LOG = logging.getLogger(__name__)
class HyperVDriver(driver.ComputeDriver):
def __init__(self, virtapi):
super(HyperVDriver, self).__init__(virtapi)
self._hostops = hostops.HostOps()
self._volumeops = volumeops.VolumeOps()
self._vmops = vmops.VMOps()
self._snapshotops = snapshotops.SnapshotOps()
self._livemigrationops = livemigrationops.LiveMigrationOps()
self._migrationops = migrationops.MigrationOps()
self._rdpconsoleops = rdpconsoleops.RDPConsoleOps()
def init_host(self, host):
pass
def list_instances(self):
return self._vmops.list_instances()
def spawn(self, context, instance, image_meta, injected_files,
admin_password, network_info=None, block_device_info=None):
self._vmops.spawn(context, instance, image_meta, injected_files,
admin_password, network_info, block_device_info)
def reboot(self, context, instance, network_info, reboot_type,
block_device_info=None, bad_volumes_callback=None):
self._vmops.reboot(instance, network_info, reboot_type)
def destroy(self, context, instance, network_info, block_device_info=None,
destroy_disks=True, migrate_data=None):
self._vmops.destroy(instance, network_info, block_device_info,
destroy_disks)
def cleanup(self, context, instance, network_info, block_device_info=None,
destroy_disks=True, migrate_data=None, destroy_vifs=True):
"""Cleanup after instance being destroyed by Hypervisor."""
pass
def get_info(self, instance):
return self._vmops.get_info(instance)
def attach_volume(self, context, connection_info, instance, mountpoint,
disk_bus=None, device_type=None, encryption=None):
return self._volumeops.attach_volume(connection_info,
instance['name'])
def detach_volume(self, connection_info, instance, mountpoint,
encryption=None):
return self._volumeops.detach_volume(connection_info,
instance['name'])
def get_volume_connector(self, instance):
return self._volumeops.get_volume_connector(instance)
def get_available_resource(self, nodename):
return self._hostops.get_available_resource()
def get_host_stats(self, refresh=False):
return self._hostops.get_host_stats(refresh)
def host_power_action(self, host, action):
return self._hostops.host_power_action(host, action)
def snapshot(self, context, instance, image_id, update_task_state):
self._snapshotops.snapshot(context, instance, image_id,
update_task_state)
def pause(self, instance):
self._vmops.pause(instance)
def unpause(self, instance):
self._vmops.unpause(instance)
def suspend(self, instance):
self._vmops.suspend(instance)
def resume(self, context, instance, network_info, block_device_info=None):
self._vmops.resume(instance)
def power_off(self, instance, timeout=0, retry_interval=0):
# TODO(PhilDay): Add support for timeout (clean shutdown)
self._vmops.power_off(instance)
def power_on(self, context, instance, network_info,
block_device_info=None):
self._vmops.power_on(instance)
def live_migration(self, context, instance, dest, post_method,
recover_method, block_migration=False,
migrate_data=None):
self._livemigrationops.live_migration(context, instance, dest,
post_method, recover_method,
block_migration, migrate_data)
def rollback_live_migration_at_destination(self, context, instance,
network_info,
block_device_info,
destroy_disks=True,
migrate_data=None):
self.destroy(context, instance, network_info, block_device_info)
def pre_live_migration(self, context, instance, block_device_info,
network_info, disk_info, migrate_data=None):
self._livemigrationops.pre_live_migration(context, instance,
block_device_info,
network_info)
def post_live_migration_at_destination(self, context, instance,
network_info,
block_migration=False,
block_device_info=None):
self._livemigrationops.post_live_migration_at_destination(
context,
instance,
network_info,
block_migration)
def check_can_live_migrate_destination(self, context, instance,
src_compute_info, dst_compute_info,
block_migration=False,
disk_over_commit=False):
return self._livemigrationops.check_can_live_migrate_destination(
context, instance, src_compute_info, dst_compute_info,
block_migration, disk_over_commit)
def check_can_live_migrate_destination_cleanup(self, context,
dest_check_data):
self._livemigrationops.check_can_live_migrate_destination_cleanup(
context, dest_check_data)
def check_can_live_migrate_source(self, context, instance,
dest_check_data):
return self._livemigrationops.check_can_live_migrate_source(
context, instance, dest_check_data)
def get_instance_disk_info(self, instance_name, block_device_info=None):
pass
def plug_vifs(self, instance, network_info):
"""Plug VIFs into networks."""
msg = _("VIF plugging is not supported by the Hyper-V driver.")
raise NotImplementedError(msg)
def unplug_vifs(self, instance, network_info):
"""Unplug VIFs from networks."""
msg = _("VIF unplugging is not supported by the Hyper-V driver.")
raise NotImplementedError(msg)
def ensure_filtering_rules_for_instance(self, instance, network_info):
LOG.debug("ensure_filtering_rules_for_instance called",
instance=instance)
def unfilter_instance(self, instance, network_info):
LOG.debug("unfilter_instance called", instance=instance)
def migrate_disk_and_power_off(self, context, instance, dest,
flavor, network_info,
block_device_info=None,
timeout=0, retry_interval=0):
# TODO(PhilDay): Add support for timeout (clean shutdown)
return self._migrationops.migrate_disk_and_power_off(context,
instance, dest,
flavor,
network_info,
block_device_info)
def confirm_migration(self, migration, instance, network_info):
self._migrationops.confirm_migration(migration, instance, network_info)
def finish_revert_migration(self, context, instance, network_info,
block_device_info=None, power_on=True):
self._migrationops.finish_revert_migration(context, instance,
network_info,
block_device_info, power_on)
def finish_migration(self, context, migration, instance, disk_info,
network_info, image_meta, resize_instance,
block_device_info=None, power_on=True):
self._migrationops.finish_migration(context, migration, instance,
disk_info, network_info,
image_meta, resize_instance,
block_device_info, power_on)
def get_host_ip_addr(self):
return self._hostops.get_host_ip_addr()
def get_host_uptime(self, host):
return self._hostops.get_host_uptime()
def get_rdp_console(self, context, instance):
return self._rdpconsoleops.get_rdp_console(instance)
| apache-2.0 | 4,809,147,517,281,367,000 | 43.116592 | 79 | 0.582639 | false |
MediaMath/qasino | lib/zmq_requestor.py | 1 | 2310 | # Copyright (C) 2014 MediaMath, Inc. <http://www.mediamath.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from txzmq import ZmqFactory, ZmqEndpoint, ZmqEndpointType, ZmqREQConnection
import logging
import json
from util import Identity
class ZmqRequestor(ZmqREQConnection):
def __init__(self, remote_host, port, zmq_factory, data_manager=None):
self.data_manager = data_manager
self.remote_host = remote_host
endpoint = ZmqEndpoint(ZmqEndpointType.connect, "tcp://%s:%d" % (remote_host, port))
ZmqREQConnection.__init__(self, zmq_factory, endpoint)
def request_metadata(self):
msg = { "op" : "get_table_list", "identity" : Identity.get_identity() }
#logging.info("ZmqRequestor: Requesting table list from %s.", self.remote_host)
deferred = self.sendMsg(json.dumps(msg))
deferred.callback = self.message_received
def send_table(self, table):
deferred = self.sendMsg(table.get_json(op="add_table_data", identity=Identity.get_identity()))
deferred.callback = self.message_received
def message_received(self, msg):
response_meta = json.loads(msg[0])
if response_meta == None or response_meta["response_op"] == None:
logging.error("ZmqRequestor: bad message response received")
elif response_meta["response_op"] == "tables_list":
logging.info("ZmqRequestor: Table list response: %s", json.loads(msg[1]))
elif response_meta["response_op"] == "ok":
logging.info("ZmqRequestor: request OK")
elif response_meta["response_op"] == "error":
logging.info("ZmqRequestor: request ERROR: " + response_meta["error_message"])
else:
logging.error("ZmqRequestor: unknown response: ", response_meta)
| apache-2.0 | 5,312,493,586,066,515,000 | 38.827586 | 102 | 0.679654 | false |
scionrep/scioncc | src/pyon/util/containers.py | 1 | 14330 | """ General purpose util classes and functions """
__author__ = 'Adam R. Smith, Michael Meisinger'
import collections
import datetime
import importlib
import string
import time
import simplejson
import base64
import uuid
import os
import re
from types import NoneType
from copy import deepcopy
DICT_LOCKING_ATTR = "__locked__"
class DotNotationGetItem(object):
""" Drive the behavior for DotList and DotDict lookups by dot notation, JSON-style. """
def _convert(self, val):
""" Convert the type if necessary and return if a conversion happened. """
if isinstance(val, dict) and not isinstance(val, DotDict):
return DotDict(val), True
elif isinstance(val, list) and not isinstance(val, DotList):
return DotList(val), True
return val, False
def __getitem__(self, key):
val = super(DotNotationGetItem, self).__getitem__(key)
val, converted = self._convert(val)
if converted: self[key] = val
return val
def __contains__(self, item):
return hasattr(self, item)
class DotList(DotNotationGetItem, list):
""" Partner class for DotDict; see that for docs. Both are needed to fully support JSON/YAML blocks. """
#def DotListIterator(list.)
def __iter__(self):
""" Monkey-patch the "next" iterator method to return modified versions. This will be slow. """
#it = super(DotList, self).__iter__()
#it_next = getattr(it, 'next')
#setattr(it, 'next', lambda: it_next(it))
#return it
for val in super(DotList, self).__iter__():
val, converted = self._convert(val)
yield val
class DotDict(DotNotationGetItem, dict):
"""
Subclass of dict that will recursively look up attributes with dot notation.
This is primarily for working with JSON-style data in a cleaner way like javascript.
Note that this will instantiate a number of child DotDicts when you first access attributes;
do not use in performance-critical parts of your code.
"""
def __dir__(self):
return [k for k in self.__dict__.keys() + self.keys() if k != DICT_LOCKING_ATTR]
def __getattr__(self, key):
""" Make attempts to lookup by nonexistent attributes also attempt key lookups. """
if self.has_key(key):
return self[key]
if not self.__dict__.has_key(DICT_LOCKING_ATTR):
import sys
import dis
frame = sys._getframe(1)
if '\x00%c' % dis.opmap['STORE_ATTR'] in frame.f_code.co_code:
self[key] = DotDict()
return self[key]
raise AttributeError(key)
def __setattr__(self, key, value):
if key in dir(dict):
raise AttributeError('%s conflicts with builtin.' % key)
if self.__dict__.has_key(DICT_LOCKING_ATTR):
raise AttributeError('Setting %s on a locked DotDict' % key)
if isinstance(value, dict):
self[key] = DotDict(value)
else:
self[key] = value
def copy(self):
return deepcopy(self)
def get_safe(self, qual_key, default=None):
"""
@brief Returns value of qualified key, such as "system.name" or None if not exists.
If default is given, returns the default. No exception thrown.
"""
value = get_safe(self, qual_key)
if value is None:
value = default
return value
def lock(self):
self.__dict__[DICT_LOCKING_ATTR] = True
def clear(self):
if self.__dict__.has_key(DICT_LOCKING_ATTR):
del self.__dict__[DICT_LOCKING_ATTR]
super(DotDict, self).clear()
def pop(self, *args, **kwargs):
if self.__dict__.has_key(DICT_LOCKING_ATTR):
raise AttributeError('Cannot pop on a locked DotDict')
return super(DotDict, self).pop(*args, **kwargs)
def popitem(self):
if self.__dict__.has_key(DICT_LOCKING_ATTR):
raise AttributeError('Cannot popitem on a locked DotDict')
return super(DotDict, self).popitem()
def as_dict(self):
return simple_deepcopy(self)
@classmethod
def fromkeys(cls, seq, value=None):
return DotDict(dict.fromkeys(seq, value))
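# Illustrative DotDict usage (not part of the original module; the config values
# below are made-up examples):
#
#     cfg = DotDict({"server": {"host": "localhost", "port": 5672}})
#     cfg.server.host            # -> "localhost"; nested dicts become DotDicts lazily
#     cfg.client.timeout = 30    # missing intermediate keys are created on attribute assignment
#     cfg.lock()                 # further attribute assignment now raises AttributeError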
class DictDiffer(object):
"""
Calculate the difference between two dictionaries as:
(1) items added
(2) items removed
(3) keys same in both but changed values
(4) keys same in both and unchanged values
"""
def __init__(self, current_dict, past_dict):
self.current_dict, self.past_dict = current_dict, past_dict
self.set_current, self.set_past = set(current_dict.keys()), set(past_dict.keys())
self.intersect = self.set_current.intersection(self.set_past)
def added(self):
return self.set_current - self.intersect
def removed(self):
return self.set_past - self.intersect
def changed(self):
return set(o for o in self.intersect if self.past_dict[o] != self.current_dict[o])
def unchanged(self):
return set(o for o in self.intersect if self.past_dict[o] == self.current_dict[o])
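# Illustrative DictDiffer usage (the example dicts are hypothetical, not from the source):
#
#     diff = DictDiffer({"a": 1, "b": 3, "d": 0}, {"a": 1, "b": 2, "c": 4})
#     diff.added()      # -> set(['d'])
#     diff.removed()    # -> set(['c'])
#     diff.changed()    # -> set(['b'])
#     diff.unchanged()  # -> set(['a'])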
def simple_deepcopy(coll):
""" Performs a recursive deep copy on given collection, only using dict, list and set
collection types and not checking for cycles. """
if isinstance(coll, dict):
return {k: simple_deepcopy(v) for k, v in coll.iteritems()}
elif isinstance(coll, set):
return {simple_deepcopy(v) for v in coll}
elif hasattr(coll, "__iter__"):
return [simple_deepcopy(v) for v in coll]
else:
return coll
# dict_merge from: http://appdelegateinc.com/blog/2011/01/12/merge-deeply-nested-dicts-in-python/
def quacks_like_dict(object):
""" Check if object is dict-like """
return isinstance(object, collections.Mapping)
def dict_merge(base, upd, inplace=False):
""" Merge two deep dicts non-destructively.
Uses a stack to avoid maximum recursion depth exceptions.
@param base the dict to merge into
@param upd the content to merge
@param inplace change base if True, otherwise deepcopy base
@retval the merged dict (base if inplace else a merged deepcopy)
"""
assert quacks_like_dict(base), quacks_like_dict(upd)
dst = base if inplace else deepcopy(base)
stack = [(dst, upd)]
while stack:
current_dst, current_src = stack.pop()
for key in current_src:
if key not in current_dst:
current_dst[key] = current_src[key]
else:
if quacks_like_dict(current_src[key]) and quacks_like_dict(current_dst[key]) :
stack.append((current_dst[key], current_src[key]))
else:
current_dst[key] = current_src[key]
return dst
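# Example of the merge semantics above (hypothetical values, default non-destructive mode):
#
#     base = {"db": {"host": "localhost", "port": 5432}}
#     upd = {"db": {"port": 6432}, "debug": True}
#     dict_merge(base, upd)
#     # -> {"db": {"host": "localhost", "port": 6432}, "debug": True}
#     # base itself stays unchanged unless inplace=True is passed.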
def get_safe(dict_instance, keypath, default=None):
"""
Returns a value with in a nested dict structure from a dot separated
path expression such as "system.server.host" or a list of key entries
@retval Value if found or None
"""
try:
obj = dict_instance
keylist = keypath if type(keypath) is list else keypath.split('.')
for key in keylist:
obj = obj[key]
return obj
except Exception as ex:
return default
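# Example of a dotted-path lookup with get_safe (hypothetical config dict):
#
#     cfg = {"system": {"server": {"host": "localhost"}}}
#     get_safe(cfg, "system.server.host")        # -> "localhost"
#     get_safe(cfg, "system.server.port", 5672)  # -> 5672 (default returned, no exception)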
def named_any(name):
"""
Retrieve a Python object by its fully qualified name from the global Python
module namespace. The first part of the name, that describes a module,
will be discovered and imported. Each subsequent part of the name is
treated as the name of an attribute of the object specified by all of the
name which came before it.
@param name: The name of the object to return.
@return: the Python object identified by 'name'.
"""
if not name:
raise Exception("Empty module name")
names = name.split('.')
module = None
mod_mames = names[:]
obj_names = []
while not module:
if mod_mames:
trialname = '.'.join(mod_mames)
try:
module = importlib.import_module(trialname)
except Exception as ex:
obj_names.append(mod_mames.pop())
else:
if len(names) == 1:
raise Exception("No module named %r" % (name,))
else:
raise Exception('%r does not name an object' % (name,))
obj = module
for n in reversed(obj_names):
obj = getattr(obj, n)
return obj
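# Illustrative named_any lookups (standard-library names used purely as examples):
#
#     dumps = named_any("json.dumps")      # imports the module, then resolves the attribute
#     join = named_any("os.path.join")     # remaining name parts are resolved as attributes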
def for_name(modpath, classname):
"""
Returns a class of "classname" from module "modname".
"""
module = __import__(modpath, fromlist=[classname])
classobj = getattr(module, classname)
return classobj()
def current_time_millis():
return int(round(time.time() * 1000))
get_ion_ts_millis = current_time_millis
def get_ion_ts():
"""
Returns standard ION representation of a global timestamp.
It is defined as a str representing an integer number, the millis in UNIX epoch,
which started 1970-01-01 midnight UTC
"""
return str(current_time_millis())
def get_datetime(ts, local_time=True):
"""
Returns a naive datetime object in either local time or UTC time based on the given ION
timestamp
@param ts ION timestamp (str with millis in epoch)
@param local_time if True, returns local time (default), otherwise UTC
@retval datetime instance, naive
"""
tsf = float(ts) / 1000
return datetime.datetime.fromtimestamp(tsf) if local_time else datetime.datetime.utcfromtimestamp(tsf)
def get_datetime_str(ts, show_millis=False, local_time=True):
"""
Returns a string with date and time representation from an ION timestamp
@param ts ION timestamp (str with millis in epoch)
@param show_millis If True, appends the milli seconds
@param local_time if True, returns local time (default), otherwise UTC
@retval str with ION standard date and time representation
"""
dt = get_datetime(ts, local_time)
dts = str(dt)
period_idx = dts.rfind(".")
if period_idx != -1:
dts = dts[:period_idx+4] if show_millis else dts[:period_idx]
return dts
def parse_ion_ts(ts):
""" Returns a Python timestamp from an ION ts """
return float(ts) / 1000
def is_valid_ts(ts):
""" Check if given ts is string with only digits and length of 13 """
# We assume no timestamps before 2001-09
return isinstance(ts, basestring) and len(ts) == 13 and ts.isdigit() and ts[0] != "0"
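# Example of the ION timestamp helpers above (the concrete value is illustrative):
#
#     ts = get_ion_ts()         # e.g. "1405548123456" - millis since the UNIX epoch, as a str
#     is_valid_ts(ts)           # -> True (13 digits, not starting with "0")
#     get_datetime_str(ts)      # e.g. "2014-07-16 16:42:03" in local time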
def itersubclasses(cls, _seen=None):
"""
itersubclasses(cls)
http://code.activestate.com/recipes/576949-find-all-subclasses-of-a-given-class/
Generator over all subclasses of a given class, in depth first order.
"""
if not isinstance(cls, type):
raise TypeError('itersubclasses must be called with '
'new-style classes, not %.100r' % cls)
if _seen is None: _seen = set()
try:
subs = cls.__subclasses__()
except TypeError: # fails only when cls is type
subs = cls.__subclasses__(cls)
for sub in subs:
if sub not in _seen:
_seen.add(sub)
yield sub
for sub in itersubclasses(sub, _seen):
yield sub
def getleafsubclasses(cls):
"""
Returns all subclasses that have no further subclasses, for the given class
"""
scls = itersubclasses(cls)
return [s for s in scls if not s.__subclasses__()]
# _abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789
BASIC_VALID = "_%s%s" % (string.ascii_letters, string.digits)
# -_.()abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789
NORMAL_VALID = "-_.() %s%s" % (string.ascii_letters, string.digits)
def create_valid_identifier(name, valid_chars=BASIC_VALID, dot_sub=None, ws_sub=None):
if dot_sub:
name = name.replace('.', dot_sub)
if ws_sub:
name = name.replace(' ', ws_sub)
return str(''.join(c for c in name if c in valid_chars))
def create_basic_identifier(name):
return create_valid_identifier(name, dot_sub='_', ws_sub='_')
def is_basic_identifier(name):
return name == create_basic_identifier(name)
def is_valid_identifier(name, valid_chars=BASIC_VALID, dot_sub=None, ws_sub=None):
return name == create_valid_identifier(name, valid_chars=valid_chars, dot_sub=dot_sub, ws_sub=ws_sub)
#Used by json encoder
def ion_object_encoder(obj):
return obj.__dict__
def make_json(data):
result = simplejson.dumps(data, default=ion_object_encoder, indent=2)
return result
#Global utility functions for generating unique names and UUIDs
# get a UUID - URL safe, Base64
def get_a_Uuid():
r_uuid = base64.urlsafe_b64encode(uuid.uuid4().bytes)
return r_uuid.replace('=', '')
# generate a unique identifier based on a UUID and optional information
def create_unique_identifier(prefix=''):
return prefix + '_' + get_a_Uuid()
def get_default_sysname():
return 'ion_%s' % os.uname()[1].replace('.', '_')
def get_default_container_id():
return string.replace('%s_%d' % (os.uname()[1], os.getpid()), ".", "_")
BASIC_TYPE_SET = {str, bool, int, float, long, NoneType}
def recursive_encode(obj, encoding="utf8"):
"""Recursively walks a dict/list collection and in-place encodes any unicode keys and values in
dicts and lists to UTF-8 encoded str"""
if isinstance(obj, dict):
fix_list = None
for k, v in obj.iteritems():
if type(k) is unicode:
if fix_list is None:
fix_list = []
fix_list.append(k)
if type(v) in BASIC_TYPE_SET:
continue
if type(v) is unicode:
obj[k] = v.encode(encoding)
continue
recursive_encode(v, encoding=encoding)
if fix_list:
for k in fix_list:
v = obj.pop(k)
newk = k.encode(encoding)
obj[newk] = v
elif isinstance(obj, list):
for i, v in enumerate(obj):
if type(v) in BASIC_TYPE_SET:
continue
if type(v) is unicode:
obj[i] = v.encode(encoding)
continue
recursive_encode(v, encoding=encoding)
else:
raise RuntimeError("unknown type: %s" % type(obj))
return obj
| bsd-2-clause | 2,506,083,733,855,044,600 | 33.200477 | 108 | 0.625611 | false |
uweschmitt/emzed | libms/WebserviceClients/Metlin.py | 1 | 3159 | import pdb
#encoding:latin-1
import requests
import urllib2
import userConfig
from collections import OrderedDict
from ..DataStructures.Table import Table
class MetlinMatcher(object):
ws_col_names = [ "formula", "mass", "name", "molid"]
ws_col_types = [ str, float, str, int]
ws_col_formats = [ "%s", "%.5f", "%s", "%d" ]
url = "http://metlin.scripps.edu/REST/search/index.php"
info_url = "http://metlin.scripps.edu/metabo_info.php?molid=%d"
batch_size = 90 # should be 500 as metlin promises, but this is false
# the REST webserive of METLIN returns a result set which does not explain
# which combination of theoretical mass and adduct results in a match,
# which is not what we want. eg one gets the same result set for
# masses=[195.0877, 194.07904], adducts=["M"] and for masses=[195.0877],
# adducts = ["M", "M+H"]
# so we start a separate query for mass and each adduct !
@staticmethod
def _query(masses, adduct, ppm):
token = userConfig.getMetlinToken()
if not token:
raise Exception("need metlin token in user config file")
params = OrderedDict()
params["token"] = token # "DqeN7qBNEAzVNm9n"
params["mass[]"] = masses
params["adduct[]"] = [adduct]
params["tolunits"] = "ppm"
params["tolerance"] = ppm
r = requests.get(MetlinMatcher.url, params=params)
if r.status_code != 200:
raise Exception("matlin query %s failed: %s" %
(urllib2.unquote(r.url), r.text))
try:
j = r.json()
except:
raise Exception("invalid answer from %s" % r.url)
ws_col_names = MetlinMatcher.ws_col_names
ws_col_types = MetlinMatcher.ws_col_types
ws_col_formats = MetlinMatcher.ws_col_formats
info_url = MetlinMatcher.info_url
tables = []
for m_z, ji in zip(masses, j):
rows = []
if isinstance(ji, dict):
ji = ji.values()
for jii in ji:
if jii:
rows.append([t(jii[n])\
for t, n in zip(ws_col_types, ws_col_names)])
if rows:
ti = Table(ws_col_names, ws_col_types, ws_col_formats, rows[:])
ti.addColumn("m_z", m_z, insertBefore=0)
ti.addColumn("adduct", adduct, insertBefore=1)
ti.addColumn("link", ti.molid.apply(lambda d: info_url % d))
tables.append(ti)
return tables
@staticmethod
def query(masses, adducts, ppm):
all_tables = []
for adduct in adducts:
for i0 in range(0, len(masses), MetlinMatcher.batch_size):
mass_slice = masses[i0:i0 + MetlinMatcher.batch_size]
tables = MetlinMatcher._query(mass_slice, adduct, ppm)
all_tables.extend(tables)
result_table = all_tables[0]
result_table.append(all_tables[1:])
return result_table
if 0:
t = MetlinMatcher.query(["282.222813", "292.229272"], 50, "-")
t.info()
t._print()
| gpl-3.0 | -3,001,720,266,134,262,300 | 32.967742 | 79 | 0.566318 | false |
KatiRG/flyingpigeon | flyingpigeon/processes/wps_subset_continents.py | 1 | 5476 | import os
import tarfile
from flyingpigeon.subset import clipping
from flyingpigeon.subset import _CONTINENTS_
from pywps.Process import WPSProcess
from flyingpigeon.log import init_process_logger
import logging
logger = logging.getLogger(__name__)
class subset_continentsProcess(WPSProcess):
def __init__(self):
WPSProcess.__init__(
self,
identifier="subset_continents",
title="Subset continents",
version="0.9",
abstract="Returns only the selected polygon for each input dataset",
metadata=[
{"title": "LSCE", "href": "http://www.lsce.ipsl.fr/en/index.php"},
{"title": "Documentation", "href": "http://flyingpigeon.readthedocs.io/en/latest/"},
],
statusSupported=True,
storeSupported=True
)
self.resource = self.addComplexInput(
identifier="resource",
title="Resource",
abstract="NetCDF Files or archive (tar/zip) containing netCDF files",
minOccurs=1,
maxOccurs=1000,
maxmegabites=5000,
formats=[{"mimeType": "application/x-netcdf"},
{"mimeType": "application/x-tar"},
{"mimeType": "application/zip"}],
)
self.region = self.addLiteralInput(
identifier="region",
title="Region",
default='Africa',
type=type(''),
minOccurs=1,
maxOccurs=len(_CONTINENTS_),
allowedValues=_CONTINENTS_ # REGION_EUROPE #COUNTRIES #
)
# self.dimension_map = self.addLiteralInput(
# identifier="dimension_map",
# title="Dimension Map",
# abstract= 'if not ordered in lon/lat a dimension map has to be provided',
# type=type(''),
# minOccurs=0,
# maxOccurs=1
# )
self.mosaic = self.addLiteralInput(
identifier="mosaic",
title="Mosaic",
abstract="If Mosaic is checked, selected polygons will be merged to one Mosaic for each input file",
default=False,
type=type(False),
minOccurs=0,
maxOccurs=1,
)
# self.variable = self.addLiteralInput(
# identifier="variable",
# title="Variable",
# abstract="Variable to be expected in the input files (Variable will be detected if not set)",
# default=None,
# type=type(''),
# minOccurs=0,
# maxOccurs=1,
# )
self.output = self.addComplexOutput(
title="Subsets",
abstract="Tar archive containing the netCDF files",
formats=[{"mimeType": "application/x-tar"}],
asReference=True,
identifier="output",
)
self.output_netcdf = self.addComplexOutput(
title="Subsets for one dataset",
abstract="NetCDF file with subsets of one dataset.",
formats=[{"mimeType": "application/x-netcdf"}],
asReference=True,
identifier="ncout",
)
self.output_log = self.addComplexOutput(
identifier="output_log",
title="Logging information",
abstract="Collected logs during process run.",
formats=[{"mimeType": "text/plain"}],
asReference=True,
)
def execute(self):
from ast import literal_eval
from flyingpigeon.utils import archive, archiveextract
init_process_logger('log.txt')
self.output_log.setValue('log.txt')
ncs = archiveextract(self.getInputValues(identifier='resource'))
mosaic = self.mosaic.getValue()
regions = self.region.getValue()
# variable = self.variable.getValue()
# logger.info('regions: %s' % regions)
# dimension_map = self.dimension_map.getValue()
# if dimension_map != None:
# dimension_map = literal_eval(dimension_map)
logger.info('ncs = %s', ncs)
logger.info('regions = %s', regions)
logger.info('mosaic = %s', mosaic)
# logger.info('dimension_map = %s', dimension_map)
self.status.set('Arguments set for subset process', 10)
logger.debug('starting: regions=%s, num_files=%s' % (len(regions), len(ncs)))
try:
results = clipping(
resource=ncs,
polygons=regions, # self.region.getValue(),
mosaic=mosaic,
spatial_wrapping='wrap',
# variable=variable,
dir_output=os.path.abspath(os.curdir),
# dimension_map=dimension_map,
)
except Exception as e:
msg = 'clipping failed'
logger.exception(msg)
raise Exception(msg)
if not results:
raise Exception('no results produced.')
# prepare tar file
try:
tarf = archive(results)
logger.info('Tar file prepared')
except Exception as e:
msg = 'Tar file preparation failed'
logger.exception(msg)
raise Exception(msg)
self.output.setValue(tarf)
i = next((i for i, x in enumerate(results) if x), None)
self.output_netcdf.setValue(results[i])
self.status.set('done', 100)
| apache-2.0 | -3,787,420,159,431,396,400 | 33.658228 | 112 | 0.546567 | false |
dthain/cctools | chirp/src/bindings/python3/chirp.binding.py | 1 | 22700 | ## @package ChirpPython
#
# Python Chirp bindings.
#
# The objects and methods provided by this package correspond to the native
# C API in @ref chirp_reli.h and chirp_swig_wrap.h
#
# The SWIG-based Python bindings provide a higher-level interface that
# revolves around:
#
# - @ref Chirp.Client
# - @ref Chirp.Stat
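#
# A minimal, illustrative usage sketch (the host:port and path below are
# placeholders for a real Chirp server, not part of the API):
#
# @code
# client = Client('chirp.example.org:9094', timeout=30)
# print(client.whoami())
# for entry in client.ls('/'):
#     print(entry)
# @endcode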
import os
import time
import json
##
# \class Chirp.Client
# Python Client object
#
# This class is used to create a chirp client
class Client(object):
##
# Create a new chirp client
#
# @param self Reference to the current task object.
# @param hostport The host:port of the server.
# @param timeout The time to wait for a server response on every request.
# @param authentication A list of prefered authentications. E.g., ['tickets', 'unix']
# @param tickets A list of ticket filenames.
# @param debug Generate client debug output.
def __init__(self, hostport, timeout=60, authentication=None, tickets=None, debug=False):
self.hostport = hostport
self.timeout = timeout
if debug:
cctools_debug_config('chirp_python_client')
cctools_debug_flags_set('chirp')
if tickets and (authentication is None):
authentication = ['ticket']
self.__set_tickets(tickets)
if authentication is None:
auth_register_all()
else:
for auth in authentication:
auth_register_byname(auth)
self.identity = self.whoami()
if self.identity == '':
raise AuthenticationFailure(authentication)
def __exit__(self, exception_type, exception_value, traceback):
chirp_reli_disconnect(self.hostport)
def __del__(self):
chirp_reli_disconnect(self.hostport)
def __stoptime(self, absolute_stop_time=None, timeout=None):
if timeout is None:
timeout = self.timeout
if absolute_stop_time is None:
absolute_stop_time = time.time() + timeout
return absolute_stop_time
def __set_tickets(self, tickets):
tickets_str = None
if tickets is None:
try:
tickets_str = os.environ['CHIRP_CLIENT_TICKETS']
except KeyError:
tickets_str = None
else:
tickets_str = ','.join(tickets)
if tickets_str is not None:
auth_ticket_load(tickets_str)
##
# Returns a string with the identity of the client according to the server.
#
# @param self Reference to the current task object.
# @param absolute_stop_time If given, maximum number of seconds since
# epoch to wait for a server response.
# (Overrides any timeout.)
# @param timeout If given, maximum number of seconds to
# wait for a server response.
def whoami(self, absolute_stop_time=None, timeout=None):
return chirp_wrap_whoami(self.hostport, self.__stoptime(absolute_stop_time, timeout))
##
# Returns a list of strings describing the ACL of the given directory.
# Throws an IOError on error (no such directory).
#
# @param self Reference to the current task object.
# @param path Target directory.
# @param absolute_stop_time If given, maximum number of seconds since
# epoch to wait for a server response.
# (Overrides any timeout.)
# @param timeout If given, maximum number of seconds to
# wait for a server response.
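#
# For example (the path is illustrative):
# @code
# >>> for line in client.listacl('/users/magrat'):
# ...     print(line)
# @endcode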
def listacl(self, path='/', absolute_stop_time=None, timeout=None):
acls = chirp_wrap_listacl(self.hostport, path, self.__stoptime(absolute_stop_time, timeout))
if acls is None:
raise IOError(path)
return acls.split('\n')
##
# Grants the given rights to the given subject on the given directory.
# Throws a GeneralFailure on error.
#
# @param self Reference to the current task object.
# @param path Target directory.
# @param subject Target subject.
# @param rights Permissions to be granted.
# @param absolute_stop_time If given, maximum number of seconds since
# epoch to wait for a server response.
# (Overrides any timeout.)
# @param timeout If given, maximum number of seconds to
# wait for a server response.
def setacl(self, path, subject, rights, absolute_stop_time=None, timeout=None):
result = chirp_reli_setacl(self.hostport, path, subject, rights, self.__stoptime(absolute_stop_time, timeout))
if result < 0:
raise GeneralFailure('setacl', result, [path, subject, rights])
return result
##
# Resets the ACL of the given directory so that only the calling user holds the given rights.
# Throws a GeneralFailure on error.
#
# @param self Reference to the current task object.
# @param path Target directory.
# @param rights Permissions to be granted.
# @param absolute_stop_time If given, maximum number of seconds since
# epoch to wait for a server response.
# (Overrides any timeout.)
# @param timeout If given, maximum number of seconds to
# wait for a server response.
def resetacl(self, path, rights, absolute_stop_time=None, timeout=None):
result = chirp_wrap_resetacl(self.hostport, path, rights, self.__stoptime(absolute_stop_time, timeout))
if result < 0:
raise GeneralFailure('resetacl', result, [path, rights])
return result
##
# Returns a list of Chirp.Stat objects for the entries in the given path.
# Throws an IOError on error (no such directory).
#
# @param self Reference to the current task object.
# @param path Target file/directory.
# @param absolute_stop_time If given, maximum number of seconds since
# epoch to wait for a server response.
# (Overrides any timeout.)
# @param timeout If given, maximum number of seconds to
# wait for a server response.
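#
# For example (the path is illustrative):
# @code
# >>> for f in client.ls('/users/magrat'):
# ...     print(f.path, f.size)
# @endcode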
def ls(self, path, absolute_stop_time=None, timeout=None):
dr = chirp_reli_opendir(self.hostport, path, self.__stoptime(absolute_stop_time, timeout))
files = []
if dr is None:
raise IOError(path)
while True:
d = chirp_reli_readdir(dr)
if d is None: break
files.append(Stat(d.name, d.info))
return files
##
# Returns a Chirp.Stat object with information on path.
# Throws an IOError on error (e.g., no such path or insufficient permissions).
#
# @param self Reference to the current task object.
# @param path Target file/directory.
# @param absolute_stop_time If given, maximum number of seconds since
# epoch to wait for a server response.
# (Overrides any timeout.)
# @param timeout If given, maximum number of seconds to
# wait for a server response.
def stat(self, path, absolute_stop_time=None, timeout=None):
info = chirp_wrap_stat(self.hostport, path, self.__stoptime(absolute_stop_time, timeout))
if info is None:
raise IOError(path)
return Stat(path, info)
##
# Changes permissions on path.
# Throws a GeneralFailure on error (e.g., no such path or insufficient permissions).
#
# @param self Reference to the current task object.
# @param path Target file/directory.
# @param mode Desired permissions (e.g., 0755)
# @param absolute_stop_time If given, maximum number of seconds since
# epoch to wait for a server response.
# (Overrides any timeout.)
# @param timeout If given, maximum number of seconds to
# wait for a server response.
def chmod(self, path, mode, absolute_stop_time=None, timeout=None):
result = chirp_reli_chmod(self.hostport, path, mode, self.__stoptime(absolute_stop_time, timeout))
if result < 0:
raise GeneralFailure('chmod', result, [path, mode])
return result
##
# Copies local file/directory source to the chirp server as file/directory destination.
# If destination is not given, source name is used.
# Raises Chirp.TransferFailure on error.
#
# @param self Reference to the current task object.
# @param source A local file or directory.
# @param destination File or directory name to use in the server (defaults to source).
# @param absolute_stop_time If given, maximum number of seconds since
# epoch to wait for a server response.
# (Overrides any timeout.)
# @param timeout If given, maximum number of seconds to
# wait for a server response.
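#
# For example (both paths are illustrative):
# @code
# >>> client.put('a.txt', '/users/magrat/a.txt')
# @endcode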
def put(self, source, destination=None, absolute_stop_time=None, timeout=None):
if destination is None:
destination = source
result = chirp_recursive_put(self.hostport,
source, destination,
self.__stoptime(absolute_stop_time, timeout))
if result > -1:
return result
raise TransferFailure('put', result, source, destination)
##
# Copies server file/directory source to the local file/directory destination.
# If destination is not given, source name is used.
# Raises Chirp.TransferFailure on error.
#
# @param self Reference to the current task object.
# @param source A server file or directory.
# @param destination File or directory name to be used locally (defaults to source).
# @param absolute_stop_time If given, maximum number of seconds since
# epoch to wait for a server response.
# (Overrides any timeout.)
# @param timeout If given, maximum number of seconds to
# wait for a server response.
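#
# For example (both paths are illustrative):
# @code
# >>> client.get('/users/magrat/a.txt', 'a_copy.txt')
# @endcode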
def get(self, source, destination=None, absolute_stop_time=None, timeout=None):
if destination is None:
destination = source
result = chirp_recursive_get(self.hostport,
source, destination,
self.__stoptime(absolute_stop_time, timeout))
if result > -1:
return result
raise TransferFailure('get', result, source, destination)
##
# Removes the given file or directory from the server.
# Raises OSError on error.
#
# @param self Reference to the current task object.
# @param path Target file/directory.
# @param absolute_stop_time If given, maximum number of seconds since
# epoch to wait for a server response.
# (Overrides any timeout.)
# @param timeout If given, maximum number of seconds to
# wait for a server response.
def rm(self, path, absolute_stop_time=None, timeout=None):
status = chirp_reli_rmall(self.hostport, path, self.__stoptime(absolute_stop_time, timeout))
if status < 0:
raise OSError
##
# Recursively create the directories in path.
# Raises OSError on error.
#
# @param self Reference to the current task object.
# @param path Target file/directory.
# @param mode Unix permissions for the created directory.
# @param absolute_stop_time If given, maximum number of seconds since
# epoch to wait for a server response.
# (Overrides any timeout.)
# @param timeout If given, maximum number of seconds to
# wait for a server response.
def mkdir(self, path, mode=493, absolute_stop_time=None, timeout=None):
result = chirp_reli_mkdir_recursive(self.hostport, path, mode, self.__stoptime(absolute_stop_time, timeout))
if result < 0:
raise OSError
return result
##
# Computes the checksum of path.
# Raises IOError on error.
#
# @param self Reference to the current task object.
# @param path Target file.
# @param algorithm One of 'md5' or 'sha1' (default).
# @param absolute_stop_time If given, maximum number of seconds since
# epoch to wait for a server response.
# (Overrides any timeout.)
# @param timeout If given, maximum number of seconds to
# wait for a server response.
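#
# For example (the path is illustrative):
# @code
# >>> digest = client.hash('/users/magrat/a.txt', algorithm='sha1')
# @endcode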
def hash(self, path, algorithm='sha1', absolute_stop_time=None, timeout=None):
hash_hex = chirp_wrap_hash(self.hostport, path, algorithm, self.__stoptime(absolute_stop_time, timeout))
if hash_hex is None:
raise IOError
return hash_hex
##
# Creates a chirp job. See http://ccl.cse.nd.edu/software/manuals/chirp.html for details.
#
# @param job_description A dictionary with a job chirp description.
#
# @code
# job_description = {
# 'executable': "/bin/tar",
# 'arguments': [ 'tar', '-cf', 'archive.tar', 'a', 'b' ],
# 'files': { 'task_path': 'a',
# 'serv_path': '/users/magrat/a.txt'
# 'type': 'INPUT' },
# { 'task_path': 'b',
# 'serv_path': '/users/magrat/b.txt'
# 'type': 'INPUT' },
# { 'task_path': 'archive.tar',
# 'serv_path': '/users/magrat/archive.tar'
# 'type': 'OUTPUT' }
# }
# job_id = client.job_create(job_description);
# @endcode
def job_create(self, job_description):
job_json = json.dumps(job_description)
job_id = chirp_wrap_job_create(self.hostport, job_json, self.__stoptime())
if job_id < 0:
raise ChirpJobError('create', job_id, job_json)
return job_id
##
# Kills the jobs identified with the different job ids.
#
# @param job_ids Job ids of the chirp jobs to be killed.
#
def job_kill(self, *job_ids):
ids_str = json.dumps(job_ids)
result = chirp_wrap_job_kill(self.hostport, ids_str, self.__stoptime())
if result < 0:
raise ChirpJobError('kill', result, ids_str)
return result
##
# Commits (starts running) the jobs identified with the different job ids.
#
# @param job_ids Job ids of the chirp jobs to be committed.
#
def job_commit(self, *job_ids):
ids_str = json.dumps(job_ids)
result = chirp_wrap_job_commit(self.hostport, ids_str, self.__stoptime())
if result < 0:
raise ChirpJobError('commit', result, ids_str)
return result
##
# Reaps the jobs identified with the different job ids.
#
# @param job_ids Job ids of the chirp jobs to be reaped.
#
def job_reap(self, *job_ids):
ids_str = json.dumps(job_ids)
result = chirp_wrap_job_reap(self.hostport, ids_str, self.__stoptime())
if result < 0:
raise ChirpJobError('reap', result, ids_str)
return result
##
# Obtains the current status for each job id. The value returned is a
# list which contains a dictionary reference per job id.
#
# @param job_ids Job ids of the chirp jobs to be queried.
#
def job_status(self, *job_ids):
ids_str = json.dumps(job_ids)
status = chirp_wrap_job_status(self.hostport, ids_str, self.__stoptime())
if status is None:
raise ChirpJobError('status', None, ids_str)
return json.loads(status)
##
# Waits up to waiting_time seconds for the job identified by job_id to terminate.
# The return value is the same as for job_status. If the call times out, an empty
# string is returned. If job_id is omitted, job_wait waits for any of the user's jobs.
#
# @param waiting_time maximum number of seconds to wait for a job to finish.
# @param job_id id of the job to wait.
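#
# For example, waiting up to 30 seconds for a job created earlier
# (job_id as returned by job_create):
# @code
# >>> state = client.job_wait(30, job_id=job_id)
# @endcode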
def job_wait(self, waiting_time, job_id=0):
status = chirp_wrap_job_wait(self.hostport, job_id, waiting_time, self.__stoptime())
if status is None:
raise ChirpJobError('status', None, job_id)
return json.loads(status)
##
# Python Stat object
#
# This class is used to record stat information for files/directories of a chirp server.
class Stat(object):
def __init__(self, path, cstat):
self._path = path
self._info = cstat
##
# Target path.
#
# @a Note: This is defined using property decorator. So it must be called without parentheses
# (). For example:
# @code
# >>> print s.path
# @endcode
@property
def path(self):
return self._path
##
# ID of device containing file.
#
# @a Note: This is defined using property decorator. So it must be called without parentheses
# (). For example:
# @code
# >>> print s.device
# @endcode
@property
def device(self):
return self._info.cst_dev
##
# inode number
#
# @a Note: This is defined using property decorator. So it must be called without parentheses
# (). For example:
# @code
# >>> print s.inode
# @endcode
@property
def inode(self):
return self._info.cst_ino
##
# file mode permissions
#
# @a Note: This is defined using property decorator. So it must be called without parentheses
# (). For example:
# @code
# >>> print s.mode
# @endcode
@property
def mode(self):
return self._info.cst_mode
##
# number of hard links
#
# @a Note: This is defined using property decorator. So it must be called without parentheses
# (). For example:
# @code
# >>> print s.nlink
# @endcode
@property
def nlink(self):
return self._info.cst_nlink
##
# user ID of owner
#
# @a Note: This is defined using property decorator. So it must be called without parentheses
# (). For example:
# @code
# >>> print s.uid
# @endcode
@property
def uid(self):
return self._info.cst_uid
##
# group ID of owner
#
# @a Note: This is defined using property decorator. So it must be called without parentheses
# (). For example:
# @code
# >>> print s.gid
# @endcode
@property
def gid(self):
return self._info.cst_gid
##
# device ID if special file
#
# @a Note: This is defined using property decorator. So it must be called without parentheses
# (). For example:
# @code
# >>> print s.rdev
# @endcode
@property
def rdev(self):
return self._info.cst_rdev
##
# total size, in bytes
#
# @a Note: This is defined using property decorator. So it must be called without parentheses
# (). For example:
# @code
# >>> print s.size
# @endcode
@property
def size(self):
return self._info.cst_size
##
# block size for file system I/O
#
# @a Note: This is defined using property decorator. So it must be called without parentheses
# (). For example:
# @code
# >>> print s.block_size
# @endcode
@property
def block_size(self):
return self._info.cst_blksize
##
# number of 512B blocks allocated
#
# @a Note: This is defined using property decorator. So it must be called without parentheses
# (). For example:
# @code
# >>> print s.blocks
# @endcode
@property
def blocks(self):
return self._info.cst_blocks
##
# number of seconds since epoch since last access
#
# @a Note: This is defined using property decorator. So it must be called without parentheses
# (). For example:
# @code
# >>> print s.atime
# @endcode
@property
def atime(self):
return self._info.cst_atime
##
# number of seconds since epoch since last modification
#
# @a Note: This is defined using property decorator. So it must be called without parentheses
# (). For example:
# @code
# >>> print s.mtime
# @endcode
@property
def mtime(self):
return self._info.cst_mtime
##
# number of seconds since epoch since last status change
#
# @a Note: This is defined using property decorator. So it must be called without parentheses
# (). For example:
# @code
# >>> print s.ctime
# @endcode
@property
def ctime(self):
return self._info.cst_ctime
def __repr__(self):
return "%s uid:%d gid:%d size:%d" % (self.path, self.uid, self.gid, self.size)
class AuthenticationFailure(Exception):
pass
class GeneralFailure(Exception):
def __init__(self, action, status, value):
message = "Error with %s(%s) %s" % (action, status, value)
super(GeneralFailure, self).__init__(message)
self.action = action
self.status = status
self.value = value
class TransferFailure(Exception):
def __init__(self, action, status, source, dest):
message = "Error with %s(%s) %s %s" % (action, status, source, dest)
super(TransferFailure, self).__init__(message)
self.action = action
self.status = status
self.source = source
self.dest = dest
class ChirpJobError(Exception):
def __init__(self, action, status, value):
message = "Error with %s(%s) %s" % (action, status, value)
super(ChirpJobError, self).__init__(message)
self.action = action
self.status = status
self.value = value
# @endcode
| gpl-2.0 | -854,934,521,074,259,500 | 33.869432 | 118 | 0.574758 | false |
windskyer/nova | nova/tests/unit/virt/xenapi/test_xenapi.py | 1 | 177617 | # Copyright (c) 2010 Citrix Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Test suite for XenAPI."""
import ast
import base64
import contextlib
import copy
import functools
import os
import re
import uuid
import mock
from mox3 import mox
from oslo_concurrency import lockutils
from oslo_config import cfg
from oslo_config import fixture as config_fixture
from oslo_log import log as logging
from oslo_serialization import jsonutils
from oslo_utils import importutils
import six
import testtools
from nova.compute import api as compute_api
from nova.compute import arch
from nova.compute import hv_type
from nova.compute import power_state
from nova.compute import task_states
from nova.compute import utils as compute_utils
from nova.compute import vm_states
from nova import context
from nova import crypto
from nova import db
from nova import exception
from nova import objects
from nova.objects import base
from nova import test
from nova.tests.unit.db import fakes as db_fakes
from nova.tests.unit import fake_flavor
from nova.tests.unit import fake_instance
from nova.tests.unit import fake_network
from nova.tests.unit import fake_processutils
import nova.tests.unit.image.fake as fake_image
from nova.tests.unit import matchers
from nova.tests.unit.objects import test_aggregate
from nova.tests.unit import utils as test_utils
from nova.tests.unit.virt.xenapi import stubs
from nova.virt import fake
from nova.virt.xenapi import agent
from nova.virt.xenapi.client import session as xenapi_session
from nova.virt.xenapi import driver as xenapi_conn
from nova.virt.xenapi import fake as xenapi_fake
from nova.virt.xenapi import host
from nova.virt.xenapi.image import glance
from nova.virt.xenapi import pool
from nova.virt.xenapi import pool_states
from nova.virt.xenapi import vm_utils
from nova.virt.xenapi import vmops
from nova.virt.xenapi import volume_utils
LOG = logging.getLogger(__name__)
CONF = cfg.CONF
CONF.import_opt('compute_manager', 'nova.service')
CONF.import_opt('network_manager', 'nova.service')
CONF.import_opt('compute_driver', 'nova.virt.driver')
CONF.import_opt('host', 'nova.netconf')
CONF.import_opt('default_availability_zone', 'nova.availability_zones')
CONF.import_opt('login_timeout', 'nova.virt.xenapi.client.session',
group="xenserver")
IMAGE_MACHINE = '1'
IMAGE_KERNEL = '2'
IMAGE_RAMDISK = '3'
IMAGE_RAW = '4'
IMAGE_VHD = '5'
IMAGE_ISO = '6'
IMAGE_IPXE_ISO = '7'
IMAGE_FROM_VOLUME = '8'
IMAGE_FIXTURES = {
IMAGE_MACHINE: {
'image_meta': {'name': 'fakemachine', 'size': 0,
'disk_format': 'ami',
'container_format': 'ami',
'id': 'fake-image'},
},
IMAGE_KERNEL: {
'image_meta': {'name': 'fakekernel', 'size': 0,
'disk_format': 'aki',
'container_format': 'aki',
'id': 'fake-kernel'},
},
IMAGE_RAMDISK: {
'image_meta': {'name': 'fakeramdisk', 'size': 0,
'disk_format': 'ari',
'container_format': 'ari',
'id': 'fake-ramdisk'},
},
IMAGE_RAW: {
'image_meta': {'name': 'fakeraw', 'size': 0,
'disk_format': 'raw',
'container_format': 'bare',
'id': 'fake-image-raw'},
},
IMAGE_VHD: {
'image_meta': {'name': 'fakevhd', 'size': 0,
'disk_format': 'vhd',
'container_format': 'ovf',
'id': 'fake-image-vhd'},
},
IMAGE_ISO: {
'image_meta': {'name': 'fakeiso', 'size': 0,
'disk_format': 'iso',
'container_format': 'bare',
'id': 'fake-image-iso'},
},
IMAGE_IPXE_ISO: {
'image_meta': {'name': 'fake_ipxe_iso', 'size': 0,
'disk_format': 'iso',
'container_format': 'bare',
'id': 'fake-image-pxe',
'properties': {'ipxe_boot': 'true'}},
},
IMAGE_FROM_VOLUME: {
'image_meta': {'name': 'fake_ipxe_iso',
'id': 'fake-image-volume',
'properties': {'foo': 'bar'}},
},
}
def get_session():
return xenapi_session.XenAPISession('test_url', 'root', 'test_pass')
def set_image_fixtures():
image_service = fake_image.FakeImageService()
image_service.images.clear()
for image_id, image_meta in IMAGE_FIXTURES.items():
image_meta = image_meta['image_meta']
image_meta['id'] = image_id
image_service.create(None, image_meta)
def get_fake_device_info():
# FIXME: 'sr_uuid', 'introduce_sr_keys', sr_type and vdi_uuid
# can be removed from the dict when LP bug #1087308 is fixed
fake_vdi_ref = xenapi_fake.create_vdi('fake-vdi', None)
fake_vdi_uuid = xenapi_fake.get_record('VDI', fake_vdi_ref)['uuid']
fake = {'block_device_mapping':
[{'connection_info': {'driver_volume_type': 'iscsi',
'data': {'sr_uuid': 'falseSR',
'introduce_sr_keys': ['sr_type'],
'sr_type': 'iscsi',
'vdi_uuid': fake_vdi_uuid,
'target_discovered': False,
'target_iqn': 'foo_iqn:foo_volid',
'target_portal': 'localhost:3260',
'volume_id': 'foo_volid',
'target_lun': 1,
'auth_password': 'my-p@55w0rd',
'auth_username': 'johndoe',
'auth_method': u'CHAP'}, },
'mount_device': 'vda',
'delete_on_termination': False}, ],
'root_device_name': '/dev/sda',
'ephemerals': [],
'swap': None, }
return fake
def stub_vm_utils_with_vdi_attached_here(function):
"""vm_utils.with_vdi_attached_here needs to be stubbed out because it
calls down to the filesystem to attach a vdi. This provides a
decorator to handle that.
"""
@functools.wraps(function)
def decorated_function(self, *args, **kwargs):
@contextlib.contextmanager
def fake_vdi_attached_here(*args, **kwargs):
fake_dev = 'fakedev'
yield fake_dev
def fake_image_download(*args, **kwargs):
pass
orig_vdi_attached_here = vm_utils.vdi_attached_here
orig_image_download = fake_image._FakeImageService.download
try:
vm_utils.vdi_attached_here = fake_vdi_attached_here
fake_image._FakeImageService.download = fake_image_download
return function(self, *args, **kwargs)
finally:
fake_image._FakeImageService.download = orig_image_download
vm_utils.vdi_attached_here = orig_vdi_attached_here
return decorated_function
def create_instance_with_system_metadata(context, instance_values):
inst = objects.Instance(context=context,
system_metadata={})
for k, v in instance_values.items():
setattr(inst, k, v)
inst.flavor = objects.Flavor.get_by_id(context,
instance_values['instance_type_id'])
inst.old_flavor = None
inst.new_flavor = None
inst.create()
inst.pci_devices = objects.PciDeviceList(objects=[])
return inst
class XenAPIVolumeTestCase(stubs.XenAPITestBaseNoDB):
"""Unit tests for Volume operations."""
def setUp(self):
super(XenAPIVolumeTestCase, self).setUp()
self.fixture = self.useFixture(config_fixture.Config(lockutils.CONF))
self.fixture.config(disable_process_locking=True,
group='oslo_concurrency')
self.flags(firewall_driver='nova.virt.xenapi.firewall.'
'Dom0IptablesFirewallDriver')
self.flags(connection_url='test_url',
connection_password='test_pass',
group='xenserver')
self.instance = fake_instance.fake_db_instance(name='foo')
@classmethod
def _make_connection_info(cls):
target_iqn = 'iqn.2010-10.org.openstack:volume-00000001'
return {'driver_volume_type': 'iscsi',
'data': {'volume_id': 1,
'target_iqn': target_iqn,
'target_portal': '127.0.0.1:3260,fake',
'target_lun': None,
'auth_method': 'CHAP',
'auth_username': 'username',
'auth_password': 'password'}}
def test_attach_volume(self):
# This shows how to test Ops classes' methods.
stubs.stubout_session(self.stubs, stubs.FakeSessionForVolumeTests)
conn = xenapi_conn.XenAPIDriver(fake.FakeVirtAPI(), False)
vm = xenapi_fake.create_vm(self.instance['name'], 'Running')
conn_info = self._make_connection_info()
self.assertIsNone(
conn.attach_volume(None, conn_info, self.instance, '/dev/sdc'))
# check that the VM has a VBD attached to it
# Get XenAPI record for VBD
vbds = xenapi_fake.get_all('VBD')
vbd = xenapi_fake.get_record('VBD', vbds[0])
vm_ref = vbd['VM']
self.assertEqual(vm_ref, vm)
def test_attach_volume_raise_exception(self):
# This shows how to test when exceptions are raised.
stubs.stubout_session(self.stubs,
stubs.FakeSessionForVolumeFailedTests)
conn = xenapi_conn.XenAPIDriver(fake.FakeVirtAPI(), False)
xenapi_fake.create_vm(self.instance['name'], 'Running')
self.assertRaises(exception.VolumeDriverNotFound,
conn.attach_volume,
None, {'driver_volume_type': 'nonexist'},
self.instance, '/dev/sdc')
# FIXME(sirp): convert this to use XenAPITestBaseNoDB
class XenAPIVMTestCase(stubs.XenAPITestBase):
"""Unit tests for VM operations."""
def setUp(self):
super(XenAPIVMTestCase, self).setUp()
self.useFixture(test.SampleNetworks())
self.network = importutils.import_object(CONF.network_manager)
self.fixture = self.useFixture(config_fixture.Config(lockutils.CONF))
self.fixture.config(disable_process_locking=True,
group='oslo_concurrency')
self.flags(instance_name_template='%d',
firewall_driver='nova.virt.xenapi.firewall.'
'Dom0IptablesFirewallDriver')
self.flags(connection_url='test_url',
connection_password='test_pass',
group='xenserver')
db_fakes.stub_out_db_instance_api(self.stubs)
xenapi_fake.create_network('fake', 'fake_br1')
stubs.stubout_session(self.stubs, stubs.FakeSessionForVMTests)
stubs.stubout_get_this_vm_uuid(self.stubs)
stubs.stub_out_vm_methods(self.stubs)
fake_processutils.stub_out_processutils_execute(self.stubs)
self.user_id = 'fake'
self.project_id = 'fake'
self.context = context.RequestContext(self.user_id, self.project_id)
self.conn = xenapi_conn.XenAPIDriver(fake.FakeVirtAPI(), False)
self.conn._session.is_local_connection = False
fake_image.stub_out_image_service(self.stubs)
set_image_fixtures()
stubs.stubout_image_service_download(self.stubs)
stubs.stubout_stream_disk(self.stubs)
def fake_inject_instance_metadata(self, instance, vm):
pass
self.stubs.Set(vmops.VMOps, '_inject_instance_metadata',
fake_inject_instance_metadata)
def fake_safe_copy_vdi(session, sr_ref, instance, vdi_to_copy_ref):
name_label = "fakenamelabel"
disk_type = "fakedisktype"
virtual_size = 777
return vm_utils.create_vdi(
session, sr_ref, instance, name_label, disk_type,
virtual_size)
self.stubs.Set(vm_utils, '_safe_copy_vdi', fake_safe_copy_vdi)
def tearDown(self):
fake_image.FakeImageService_reset()
super(XenAPIVMTestCase, self).tearDown()
def test_init_host(self):
session = get_session()
vm = vm_utils._get_this_vm_ref(session)
# Local root disk
vdi0 = xenapi_fake.create_vdi('compute', None)
vbd0 = xenapi_fake.create_vbd(vm, vdi0)
# Instance VDI
vdi1 = xenapi_fake.create_vdi('instance-aaaa', None,
other_config={'nova_instance_uuid': 'aaaa'})
xenapi_fake.create_vbd(vm, vdi1)
# Only looks like instance VDI
vdi2 = xenapi_fake.create_vdi('instance-bbbb', None)
vbd2 = xenapi_fake.create_vbd(vm, vdi2)
self.conn.init_host(None)
self.assertEqual(set(xenapi_fake.get_all('VBD')), set([vbd0, vbd2]))
def test_instance_exists(self):
self.mox.StubOutWithMock(vm_utils, 'lookup')
vm_utils.lookup(mox.IgnoreArg(), 'foo').AndReturn(True)
self.mox.ReplayAll()
self.stubs.Set(objects.Instance, 'name', 'foo')
instance = objects.Instance(uuid='fake-uuid')
self.assertTrue(self.conn.instance_exists(instance))
def test_instance_not_exists(self):
self.mox.StubOutWithMock(vm_utils, 'lookup')
vm_utils.lookup(mox.IgnoreArg(), 'bar').AndReturn(None)
self.mox.ReplayAll()
self.stubs.Set(objects.Instance, 'name', 'bar')
instance = objects.Instance(uuid='fake-uuid')
self.assertFalse(self.conn.instance_exists(instance))
def test_list_instances_0(self):
instances = self.conn.list_instances()
self.assertEqual(instances, [])
def test_list_instance_uuids_0(self):
instance_uuids = self.conn.list_instance_uuids()
self.assertEqual(instance_uuids, [])
def test_list_instance_uuids(self):
uuids = []
for x in range(1, 4):
instance = self._create_instance()
uuids.append(instance['uuid'])
instance_uuids = self.conn.list_instance_uuids()
self.assertEqual(len(uuids), len(instance_uuids))
self.assertEqual(set(uuids), set(instance_uuids))
def test_get_rrd_server(self):
self.flags(connection_url='myscheme://myaddress/',
group='xenserver')
server_info = vm_utils._get_rrd_server()
self.assertEqual(server_info[0], 'myscheme')
self.assertEqual(server_info[1], 'myaddress')
expected_raw_diagnostics = {
'vbd_xvdb_write': '0.0',
'memory_target': '4294967296.0000',
'memory_internal_free': '1415564.0000',
'memory': '4294967296.0000',
'vbd_xvda_write': '0.0',
'cpu0': '0.0042',
'vif_0_tx': '287.4134',
'vbd_xvda_read': '0.0',
'vif_0_rx': '1816.0144',
'vif_2_rx': '0.0',
'vif_2_tx': '0.0',
'vbd_xvdb_read': '0.0',
'last_update': '1328795567',
}
def test_get_diagnostics(self):
def fake_get_rrd(host, vm_uuid):
path = os.path.dirname(os.path.realpath(__file__))
with open(os.path.join(path, 'vm_rrd.xml')) as f:
return re.sub(r'\s', '', f.read())
self.stubs.Set(vm_utils, '_get_rrd', fake_get_rrd)
expected = self.expected_raw_diagnostics
instance = self._create_instance()
actual = self.conn.get_diagnostics(instance)
self.assertThat(actual, matchers.DictMatches(expected))
def test_get_instance_diagnostics(self):
def fake_get_rrd(host, vm_uuid):
path = os.path.dirname(os.path.realpath(__file__))
with open(os.path.join(path, 'vm_rrd.xml')) as f:
return re.sub(r'\s', '', f.read())
self.stubs.Set(vm_utils, '_get_rrd', fake_get_rrd)
expected = {
'config_drive': False,
'state': 'running',
'driver': 'xenapi',
'version': '1.0',
'uptime': 0,
'hypervisor_os': None,
'cpu_details': [{'time': 0}, {'time': 0},
{'time': 0}, {'time': 0}],
'nic_details': [{'mac_address': '00:00:00:00:00:00',
'rx_drop': 0,
'rx_errors': 0,
'rx_octets': 0,
'rx_packets': 0,
'tx_drop': 0,
'tx_errors': 0,
'tx_octets': 0,
'tx_packets': 0}],
'disk_details': [{'errors_count': 0,
'id': '',
'read_bytes': 0,
'read_requests': 0,
'write_bytes': 0,
'write_requests': 0}],
'memory_details': {'maximum': 8192, 'used': 0}}
instance = self._create_instance(obj=True)
actual = self.conn.get_instance_diagnostics(instance)
self.assertEqual(expected, actual.serialize())
def test_get_vnc_console(self):
instance = self._create_instance(obj=True)
session = get_session()
conn = xenapi_conn.XenAPIDriver(fake.FakeVirtAPI(), False)
vm_ref = vm_utils.lookup(session, instance['name'])
console = conn.get_vnc_console(self.context, instance)
# Note(sulo): We don't care about session id in test
# they will always differ so strip that out
actual_path = console.internal_access_path.split('&')[0]
expected_path = "/console?ref=%s" % str(vm_ref)
self.assertEqual(expected_path, actual_path)
def test_get_vnc_console_for_rescue(self):
instance = self._create_instance(obj=True)
conn = xenapi_conn.XenAPIDriver(fake.FakeVirtAPI(), False)
rescue_vm = xenapi_fake.create_vm(instance['name'] + '-rescue',
'Running')
# Set instance state to rescued
instance['vm_state'] = 'rescued'
console = conn.get_vnc_console(self.context, instance)
# Note(sulo): We don't care about session id in test
# they will always differ so strip that out
actual_path = console.internal_access_path.split('&')[0]
expected_path = "/console?ref=%s" % str(rescue_vm)
self.assertEqual(expected_path, actual_path)
def test_get_vnc_console_instance_not_ready(self):
instance = self._create_instance(obj=True, spawn=False)
instance.vm_state = 'building'
conn = xenapi_conn.XenAPIDriver(fake.FakeVirtAPI(), False)
self.assertRaises(exception.InstanceNotFound,
conn.get_vnc_console, self.context, instance)
def test_get_vnc_console_rescue_not_ready(self):
instance = self._create_instance(obj=True, spawn=False)
instance.vm_state = 'rescued'
conn = xenapi_conn.XenAPIDriver(fake.FakeVirtAPI(), False)
self.assertRaises(exception.InstanceNotReady,
conn.get_vnc_console, self.context, instance)
def test_instance_snapshot_fails_with_no_primary_vdi(self):
def create_bad_vbd(session, vm_ref, vdi_ref, userdevice,
vbd_type='disk', read_only=False, bootable=False,
osvol=False):
vbd_rec = {'VM': vm_ref,
'VDI': vdi_ref,
'userdevice': 'fake',
'currently_attached': False}
vbd_ref = xenapi_fake._create_object('VBD', vbd_rec)
xenapi_fake.after_VBD_create(vbd_ref, vbd_rec)
return vbd_ref
self.stubs.Set(vm_utils, 'create_vbd', create_bad_vbd)
stubs.stubout_instance_snapshot(self.stubs)
# Stub out the firewall driver, as the previous stub setup alters
# XML-RPC result parsing
stubs.stubout_firewall_driver(self.stubs, self.conn)
instance = self._create_instance()
image_id = "my_snapshot_id"
self.assertRaises(exception.NovaException, self.conn.snapshot,
self.context, instance, image_id,
lambda *args, **kwargs: None)
def test_instance_snapshot(self):
expected_calls = [
{'args': (),
'kwargs':
{'task_state': task_states.IMAGE_PENDING_UPLOAD}},
{'args': (),
'kwargs':
{'task_state': task_states.IMAGE_UPLOADING,
'expected_state': task_states.IMAGE_PENDING_UPLOAD}}]
func_call_matcher = matchers.FunctionCallMatcher(expected_calls)
image_id = "my_snapshot_id"
stubs.stubout_instance_snapshot(self.stubs)
stubs.stubout_is_snapshot(self.stubs)
# Stub out the firewall driver, as the previous stub setup alters
# XML-RPC result parsing
stubs.stubout_firewall_driver(self.stubs, self.conn)
instance = self._create_instance()
self.fake_upload_called = False
def fake_image_upload(_self, ctx, session, inst, img_id, vdi_uuids):
self.fake_upload_called = True
self.assertEqual(ctx, self.context)
self.assertEqual(inst, instance)
self.assertIsInstance(vdi_uuids, list)
self.assertEqual(img_id, image_id)
self.stubs.Set(glance.GlanceStore, 'upload_image',
fake_image_upload)
self.conn.snapshot(self.context, instance, image_id,
func_call_matcher.call)
# Ensure VM was torn down
vm_labels = []
for vm_ref in xenapi_fake.get_all('VM'):
vm_rec = xenapi_fake.get_record('VM', vm_ref)
if not vm_rec["is_control_domain"]:
vm_labels.append(vm_rec["name_label"])
self.assertEqual(vm_labels, [instance['name']])
# Ensure VBDs were torn down
vbd_labels = []
for vbd_ref in xenapi_fake.get_all('VBD'):
vbd_rec = xenapi_fake.get_record('VBD', vbd_ref)
vbd_labels.append(vbd_rec["vm_name_label"])
self.assertEqual(vbd_labels, [instance['name']])
# Ensure task states changed in correct order
self.assertIsNone(func_call_matcher.match())
# Ensure VDIs were torn down
for vdi_ref in xenapi_fake.get_all('VDI'):
vdi_rec = xenapi_fake.get_record('VDI', vdi_ref)
name_label = vdi_rec["name_label"]
self.assertFalse(name_label.endswith('snapshot'))
self.assertTrue(self.fake_upload_called)
def create_vm_record(self, conn, os_type, name):
instances = conn.list_instances()
self.assertEqual(instances, [name])
# Get Nova record for VM
vm_info = conn.get_info({'name': name})
# Get XenAPI record for VM
vms = [rec for ref, rec
in six.iteritems(xenapi_fake.get_all_records('VM'))
if not rec['is_control_domain']]
vm = vms[0]
self.vm_info = vm_info
self.vm = vm
def check_vm_record(self, conn, instance_type_id, check_injection):
flavor = db.flavor_get(conn, instance_type_id)
mem_kib = int(flavor['memory_mb']) << 10
mem_bytes = str(mem_kib << 10)
vcpus = flavor['vcpus']
vcpu_weight = flavor['vcpu_weight']
self.assertEqual(self.vm_info.max_mem_kb, mem_kib)
self.assertEqual(self.vm_info.mem_kb, mem_kib)
self.assertEqual(self.vm['memory_static_max'], mem_bytes)
self.assertEqual(self.vm['memory_dynamic_max'], mem_bytes)
self.assertEqual(self.vm['memory_dynamic_min'], mem_bytes)
self.assertEqual(self.vm['VCPUs_max'], str(vcpus))
self.assertEqual(self.vm['VCPUs_at_startup'], str(vcpus))
if vcpu_weight is None:
self.assertEqual(self.vm['VCPUs_params'], {})
else:
self.assertEqual(self.vm['VCPUs_params'],
{'weight': str(vcpu_weight), 'cap': '0'})
# Check that the VM is running according to Nova
self.assertEqual(self.vm_info.state, power_state.RUNNING)
# Check that the VM is running according to XenAPI.
self.assertEqual(self.vm['power_state'], 'Running')
if check_injection:
xenstore_data = self.vm['xenstore_data']
self.assertNotIn('vm-data/hostname', xenstore_data)
key = 'vm-data/networking/DEADBEEF0001'
xenstore_value = xenstore_data[key]
tcpip_data = ast.literal_eval(xenstore_value)
self.assertJsonEqual({'broadcast': '192.168.1.255',
'dns': ['192.168.1.4', '192.168.1.3'],
'gateway': '192.168.1.1',
'gateway_v6': '2001:db8:0:1::1',
'ip6s': [{'enabled': '1',
'ip': '2001:db8:0:1:dcad:beff:feef:1',
'netmask': 64,
'gateway': '2001:db8:0:1::1'}],
'ips': [{'enabled': '1',
'ip': '192.168.1.100',
'netmask': '255.255.255.0',
'gateway': '192.168.1.1'},
{'enabled': '1',
'ip': '192.168.1.101',
'netmask': '255.255.255.0',
'gateway': '192.168.1.1'}],
'label': 'test1',
'mac': 'DE:AD:BE:EF:00:01'}, tcpip_data)
def check_vm_params_for_windows(self):
self.assertEqual(self.vm['platform']['nx'], 'true')
self.assertEqual(self.vm['HVM_boot_params'], {'order': 'dc'})
self.assertEqual(self.vm['HVM_boot_policy'], 'BIOS order')
# check that these are not set
self.assertEqual(self.vm['PV_args'], '')
self.assertEqual(self.vm['PV_bootloader'], '')
self.assertEqual(self.vm['PV_kernel'], '')
self.assertEqual(self.vm['PV_ramdisk'], '')
def check_vm_params_for_linux(self):
self.assertEqual(self.vm['platform']['nx'], 'false')
self.assertEqual(self.vm['PV_args'], '')
self.assertEqual(self.vm['PV_bootloader'], 'pygrub')
# check that these are not set
self.assertEqual(self.vm['PV_kernel'], '')
self.assertEqual(self.vm['PV_ramdisk'], '')
self.assertEqual(self.vm['HVM_boot_params'], {})
self.assertEqual(self.vm['HVM_boot_policy'], '')
def check_vm_params_for_linux_with_external_kernel(self):
self.assertEqual(self.vm['platform']['nx'], 'false')
self.assertEqual(self.vm['PV_args'], 'root=/dev/xvda1')
self.assertNotEqual(self.vm['PV_kernel'], '')
self.assertNotEqual(self.vm['PV_ramdisk'], '')
# check that these are not set
self.assertEqual(self.vm['HVM_boot_params'], {})
self.assertEqual(self.vm['HVM_boot_policy'], '')
def _list_vdis(self):
session = get_session()
return session.call_xenapi('VDI.get_all')
def _list_vms(self):
session = get_session()
return session.call_xenapi('VM.get_all')
def _check_vdis(self, start_list, end_list):
for vdi_ref in end_list:
if vdi_ref not in start_list:
vdi_rec = xenapi_fake.get_record('VDI', vdi_ref)
# If the cache is turned on then the base disk will be
# there even after the cleanup
if 'other_config' in vdi_rec:
if 'image-id' not in vdi_rec['other_config']:
self.fail('Found unexpected VDI:%s' % vdi_ref)
else:
self.fail('Found unexpected VDI:%s' % vdi_ref)
def _test_spawn(self, image_ref, kernel_id, ramdisk_id,
instance_type_id="3", os_type="linux",
hostname="test", architecture="x86-64", instance_id=1,
injected_files=None, check_injection=False,
create_record=True, empty_dns=False,
block_device_info=None,
key_data=None):
if injected_files is None:
injected_files = []
# Fake out inject_instance_metadata
def fake_inject_instance_metadata(self, instance, vm):
pass
self.stubs.Set(vmops.VMOps, '_inject_instance_metadata',
fake_inject_instance_metadata)
if create_record:
instance = objects.Instance(context=self.context)
instance.project_id = self.project_id
instance.user_id = self.user_id
instance.image_ref = image_ref
instance.kernel_id = kernel_id
instance.ramdisk_id = ramdisk_id
instance.root_gb = 20
instance.ephemeral_gb = 0
instance.instance_type_id = instance_type_id
instance.os_type = os_type
instance.hostname = hostname
instance.key_data = key_data
instance.architecture = architecture
instance.system_metadata = {}
flavor = objects.Flavor.get_by_id(self.context,
instance_type_id)
if instance_type_id == 5:
# NOTE(danms): xenapi test stubs have flavor 5 with no
# vcpu_weight
flavor.vcpu_weight = None
instance.flavor = flavor
instance.create()
else:
instance = objects.Instance.get_by_id(self.context, instance_id,
expected_attrs=['flavor'])
network_info = fake_network.fake_get_instance_nw_info(self.stubs)
if empty_dns:
# NOTE(tr3buchet): this is a terrible way to do this...
network_info[0]['network']['subnets'][0]['dns'] = []
image_meta = IMAGE_FIXTURES[image_ref]["image_meta"]
self.conn.spawn(self.context, instance, image_meta, injected_files,
'herp', network_info, block_device_info)
self.create_vm_record(self.conn, os_type, instance['name'])
self.check_vm_record(self.conn, instance_type_id, check_injection)
self.assertEqual(instance['os_type'], os_type)
self.assertEqual(instance['architecture'], architecture)
def test_spawn_ipxe_iso_success(self):
self.mox.StubOutWithMock(vm_utils, 'get_sr_path')
vm_utils.get_sr_path(mox.IgnoreArg()).AndReturn('/sr/path')
self.flags(ipxe_network_name='test1',
ipxe_boot_menu_url='http://boot.example.com',
ipxe_mkisofs_cmd='/root/mkisofs',
group='xenserver')
self.mox.StubOutWithMock(self.conn._session, 'call_plugin_serialized')
self.conn._session.call_plugin_serialized(
'ipxe', 'inject', '/sr/path', mox.IgnoreArg(),
'http://boot.example.com', '192.168.1.100', '255.255.255.0',
'192.168.1.1', '192.168.1.3', '/root/mkisofs')
self.mox.ReplayAll()
self._test_spawn(IMAGE_IPXE_ISO, None, None)
def test_spawn_ipxe_iso_no_network_name(self):
self.flags(ipxe_network_name=None,
ipxe_boot_menu_url='http://boot.example.com',
group='xenserver')
# call_plugin_serialized shouldn't be called
self.mox.StubOutWithMock(self.conn._session, 'call_plugin_serialized')
self.mox.ReplayAll()
self._test_spawn(IMAGE_IPXE_ISO, None, None)
def test_spawn_ipxe_iso_no_boot_menu_url(self):
self.flags(ipxe_network_name='test1',
ipxe_boot_menu_url=None,
group='xenserver')
# call_plugin_serialized shouldn't be called
self.mox.StubOutWithMock(self.conn._session, 'call_plugin_serialized')
self.mox.ReplayAll()
self._test_spawn(IMAGE_IPXE_ISO, None, None)
def test_spawn_ipxe_iso_unknown_network_name(self):
self.flags(ipxe_network_name='test2',
ipxe_boot_menu_url='http://boot.example.com',
group='xenserver')
# call_plugin_serialized shouldn't be called
self.mox.StubOutWithMock(self.conn._session, 'call_plugin_serialized')
self.mox.ReplayAll()
self._test_spawn(IMAGE_IPXE_ISO, None, None)
def test_spawn_empty_dns(self):
# Test spawning with an empty dns list.
self._test_spawn(IMAGE_VHD, None, None,
os_type="linux", architecture="x86-64",
empty_dns=True)
self.check_vm_params_for_linux()
def test_spawn_not_enough_memory(self):
self.assertRaises(exception.InsufficientFreeMemory,
self._test_spawn,
'1', 2, 3, "4") # m1.xlarge
def test_spawn_fail_cleanup_1(self):
"""Simulates an error while downloading an image.
Verifies that the VM and VDIs created are properly cleaned up.
"""
vdi_recs_start = self._list_vdis()
start_vms = self._list_vms()
stubs.stubout_fetch_disk_image(self.stubs, raise_failure=True)
self.assertRaises(xenapi_fake.Failure,
self._test_spawn, '1', 2, 3)
# No additional VDI should be found.
vdi_recs_end = self._list_vdis()
end_vms = self._list_vms()
self._check_vdis(vdi_recs_start, vdi_recs_end)
# No additional VMs should be found.
self.assertEqual(start_vms, end_vms)
def test_spawn_fail_cleanup_2(self):
"""Simulates an error while creating VM record.
Verifies that the VM and VDIs created are properly cleaned up.
"""
vdi_recs_start = self._list_vdis()
start_vms = self._list_vms()
stubs.stubout_create_vm(self.stubs)
self.assertRaises(xenapi_fake.Failure,
self._test_spawn, '1', 2, 3)
# No additional VDI should be found.
vdi_recs_end = self._list_vdis()
end_vms = self._list_vms()
self._check_vdis(vdi_recs_start, vdi_recs_end)
# No additional VMs should be found.
self.assertEqual(start_vms, end_vms)
def test_spawn_fail_cleanup_3(self):
"""Simulates an error while attaching disks.
Verifies that the VM and VDIs created are properly cleaned up.
"""
stubs.stubout_attach_disks(self.stubs)
vdi_recs_start = self._list_vdis()
start_vms = self._list_vms()
self.assertRaises(xenapi_fake.Failure,
self._test_spawn, '1', 2, 3)
# No additional VDI should be found.
vdi_recs_end = self._list_vdis()
end_vms = self._list_vms()
self._check_vdis(vdi_recs_start, vdi_recs_end)
# No additional VMs should be found.
self.assertEqual(start_vms, end_vms)
def test_spawn_raw_glance(self):
self._test_spawn(IMAGE_RAW, None, None, os_type=None)
self.check_vm_params_for_windows()
def test_spawn_vhd_glance_linux(self):
self._test_spawn(IMAGE_VHD, None, None,
os_type="linux", architecture="x86-64")
self.check_vm_params_for_linux()
def test_spawn_vhd_glance_windows(self):
self._test_spawn(IMAGE_VHD, None, None,
os_type="windows", architecture="i386",
instance_type_id=5)
self.check_vm_params_for_windows()
def test_spawn_iso_glance(self):
self._test_spawn(IMAGE_ISO, None, None,
os_type="windows", architecture="i386")
self.check_vm_params_for_windows()
def test_spawn_glance(self):
def fake_fetch_disk_image(context, session, instance, name_label,
image_id, image_type):
sr_ref = vm_utils.safe_find_sr(session)
image_type_str = vm_utils.ImageType.to_string(image_type)
vdi_ref = vm_utils.create_vdi(session, sr_ref, instance,
name_label, image_type_str, "20")
vdi_role = vm_utils.ImageType.get_role(image_type)
vdi_uuid = session.call_xenapi("VDI.get_uuid", vdi_ref)
return {vdi_role: dict(uuid=vdi_uuid, file=None)}
self.stubs.Set(vm_utils, '_fetch_disk_image',
fake_fetch_disk_image)
self._test_spawn(IMAGE_MACHINE,
IMAGE_KERNEL,
IMAGE_RAMDISK)
self.check_vm_params_for_linux_with_external_kernel()
def test_spawn_boot_from_volume_no_glance_image_meta(self):
dev_info = get_fake_device_info()
self._test_spawn(IMAGE_FROM_VOLUME, None, None,
block_device_info=dev_info)
def test_spawn_boot_from_volume_with_image_meta(self):
dev_info = get_fake_device_info()
self._test_spawn(IMAGE_VHD, None, None,
block_device_info=dev_info)
@testtools.skipIf(test_utils.is_osx(),
'IPv6 pretty-printing broken on OSX, see bug 1409135')
def test_spawn_netinject_file(self):
self.flags(flat_injected=True)
db_fakes.stub_out_db_instance_api(self.stubs, injected=True)
self._tee_executed = False
def _tee_handler(cmd, **kwargs):
actual = kwargs.get('process_input', None)
expected = """\
# Injected by Nova on instance boot
#
# This file describes the network interfaces available on your system
# and how to activate them. For more information, see interfaces(5).
# The loopback network interface
auto lo
iface lo inet loopback
auto eth0
iface eth0 inet static
hwaddress ether DE:AD:BE:EF:00:01
address 192.168.1.100
netmask 255.255.255.0
broadcast 192.168.1.255
gateway 192.168.1.1
dns-nameservers 192.168.1.3 192.168.1.4
iface eth0 inet6 static
hwaddress ether DE:AD:BE:EF:00:01
address 2001:db8:0:1:dcad:beff:feef:1
netmask 64
gateway 2001:db8:0:1::1
"""
self.assertEqual(expected, actual)
self._tee_executed = True
return '', ''
def _readlink_handler(cmd_parts, **kwargs):
return os.path.realpath(cmd_parts[2]), ''
fake_processutils.fake_execute_set_repliers([
# Capture the tee .../etc/network/interfaces command
(r'tee.*interfaces', _tee_handler),
(r'readlink -nm.*', _readlink_handler),
])
self._test_spawn(IMAGE_MACHINE,
IMAGE_KERNEL,
IMAGE_RAMDISK,
check_injection=True)
self.assertTrue(self._tee_executed)
@testtools.skipIf(test_utils.is_osx(),
'IPv6 pretty-printing broken on OSX, see bug 1409135')
def test_spawn_netinject_xenstore(self):
db_fakes.stub_out_db_instance_api(self.stubs, injected=True)
self._tee_executed = False
def _mount_handler(cmd, *ignore_args, **ignore_kwargs):
# When mounting, create real files under the mountpoint to simulate
# files in the mounted filesystem
# mount point will be the last item of the command list
self._tmpdir = cmd[len(cmd) - 1]
LOG.debug('Creating files in %s to simulate guest agent',
self._tmpdir)
os.makedirs(os.path.join(self._tmpdir, 'usr', 'sbin'))
# Touch the file using open
open(os.path.join(self._tmpdir, 'usr', 'sbin',
'xe-update-networking'), 'w').close()
return '', ''
def _umount_handler(cmd, *ignore_args, **ignore_kwargs):
# Umount would normally make files in the mounted filesystem
# disappear, so do that here
LOG.debug('Removing simulated guest agent files in %s',
self._tmpdir)
os.remove(os.path.join(self._tmpdir, 'usr', 'sbin',
'xe-update-networking'))
os.rmdir(os.path.join(self._tmpdir, 'usr', 'sbin'))
os.rmdir(os.path.join(self._tmpdir, 'usr'))
return '', ''
def _tee_handler(cmd, *ignore_args, **ignore_kwargs):
self._tee_executed = True
return '', ''
fake_processutils.fake_execute_set_repliers([
(r'mount', _mount_handler),
(r'umount', _umount_handler),
(r'tee.*interfaces', _tee_handler)])
self._test_spawn('1', 2, 3, check_injection=True)
# tee must not run in this case, where an injection-capable
# guest agent is detected
self.assertFalse(self._tee_executed)
def test_spawn_injects_auto_disk_config_to_xenstore(self):
instance = self._create_instance(spawn=False, obj=True)
self.mox.StubOutWithMock(self.conn._vmops, '_inject_auto_disk_config')
self.conn._vmops._inject_auto_disk_config(instance, mox.IgnoreArg())
self.mox.ReplayAll()
self.conn.spawn(self.context, instance,
IMAGE_FIXTURES['1']["image_meta"], [], 'herp', '')
def test_spawn_vlanmanager(self):
self.flags(network_manager='nova.network.manager.VlanManager',
vlan_interface='fake0')
def dummy(*args, **kwargs):
pass
self.stubs.Set(vmops.VMOps, '_create_vifs', dummy)
# Reset network table
xenapi_fake.reset_table('network')
# Instance 2 will use vlan network (see db/fakes.py)
ctxt = self.context.elevated()
inst2 = self._create_instance(False, obj=True)
networks = self.network.db.network_get_all(ctxt)
with mock.patch('nova.objects.network.Network._from_db_object'):
for network in networks:
self.network.set_network_host(ctxt, network)
self.network.allocate_for_instance(ctxt,
instance_id=inst2.id,
instance_uuid=inst2.uuid,
host=CONF.host,
vpn=None,
rxtx_factor=3,
project_id=self.project_id,
macs=None)
self._test_spawn(IMAGE_MACHINE,
IMAGE_KERNEL,
IMAGE_RAMDISK,
instance_id=inst2.id,
create_record=False)
# TODO(salvatore-orlando): a complete test here would require
# a check for making sure the bridge for the VM's VIF is
# consistent with bridge specified in nova db
def test_spawn_with_network_qos(self):
self._create_instance()
for vif_ref in xenapi_fake.get_all('VIF'):
vif_rec = xenapi_fake.get_record('VIF', vif_ref)
self.assertEqual(vif_rec['qos_algorithm_type'], 'ratelimit')
self.assertEqual(vif_rec['qos_algorithm_params']['kbps'],
str(3 * 10 * 1024))
def test_spawn_ssh_key_injection(self):
# Test spawning with key_data on an instance. Should use
# agent file injection.
self.flags(use_agent_default=True,
group='xenserver')
actual_injected_files = []
def fake_inject_file(self, method, args):
path = base64.b64decode(args['b64_path'])
contents = base64.b64decode(args['b64_contents'])
actual_injected_files.append((path, contents))
return jsonutils.dumps({'returncode': '0', 'message': 'success'})
self.stubs.Set(stubs.FakeSessionForVMTests,
'_plugin_agent_inject_file', fake_inject_file)
def fake_encrypt_text(sshkey, new_pass):
self.assertEqual("ssh-rsa fake_keydata", sshkey)
return "fake"
self.stubs.Set(crypto, 'ssh_encrypt_text', fake_encrypt_text)
expected_data = ('\n# The following ssh key was injected by '
'Nova\nssh-rsa fake_keydata\n')
injected_files = [('/root/.ssh/authorized_keys', expected_data)]
self._test_spawn(IMAGE_VHD, None, None,
os_type="linux", architecture="x86-64",
key_data='ssh-rsa fake_keydata')
self.assertEqual(actual_injected_files, injected_files)
def test_spawn_ssh_key_injection_non_rsa(self):
# Test spawning with key_data on an instance. Should use
# agent file injection.
self.flags(use_agent_default=True,
group='xenserver')
actual_injected_files = []
def fake_inject_file(self, method, args):
path = base64.b64decode(args['b64_path'])
contents = base64.b64decode(args['b64_contents'])
actual_injected_files.append((path, contents))
return jsonutils.dumps({'returncode': '0', 'message': 'success'})
self.stubs.Set(stubs.FakeSessionForVMTests,
'_plugin_agent_inject_file', fake_inject_file)
def fake_encrypt_text(sshkey, new_pass):
raise NotImplementedError("Should not be called")
self.stubs.Set(crypto, 'ssh_encrypt_text', fake_encrypt_text)
expected_data = ('\n# The following ssh key was injected by '
'Nova\nssh-dsa fake_keydata\n')
injected_files = [('/root/.ssh/authorized_keys', expected_data)]
self._test_spawn(IMAGE_VHD, None, None,
os_type="linux", architecture="x86-64",
key_data='ssh-dsa fake_keydata')
self.assertEqual(actual_injected_files, injected_files)
def test_spawn_injected_files(self):
# Test spawning with injected_files.
self.flags(use_agent_default=True,
group='xenserver')
actual_injected_files = []
def fake_inject_file(self, method, args):
path = base64.b64decode(args['b64_path'])
contents = base64.b64decode(args['b64_contents'])
actual_injected_files.append((path, contents))
return jsonutils.dumps({'returncode': '0', 'message': 'success'})
self.stubs.Set(stubs.FakeSessionForVMTests,
'_plugin_agent_inject_file', fake_inject_file)
injected_files = [('/tmp/foo', 'foobar')]
self._test_spawn(IMAGE_VHD, None, None,
os_type="linux", architecture="x86-64",
injected_files=injected_files)
self.check_vm_params_for_linux()
self.assertEqual(actual_injected_files, injected_files)
@mock.patch('nova.db.agent_build_get_by_triple')
def test_spawn_agent_upgrade(self, mock_get):
self.flags(use_agent_default=True,
group='xenserver')
mock_get.return_value = {"version": "1.1.0", "architecture": "x86-64",
"hypervisor": "xen", "os": "windows",
"url": "url", "md5hash": "asdf",
'created_at': None, 'updated_at': None,
'deleted_at': None, 'deleted': False,
'id': 1}
self._test_spawn(IMAGE_VHD, None, None,
os_type="linux", architecture="x86-64")
@mock.patch('nova.db.agent_build_get_by_triple')
def test_spawn_agent_upgrade_fails_silently(self, mock_get):
mock_get.return_value = {"version": "1.1.0", "architecture": "x86-64",
"hypervisor": "xen", "os": "windows",
"url": "url", "md5hash": "asdf",
'created_at': None, 'updated_at': None,
'deleted_at': None, 'deleted': False,
'id': 1}
self._test_spawn_fails_silently_with(exception.AgentError,
method="_plugin_agent_agentupdate", failure="fake_error")
def test_spawn_with_resetnetwork_alternative_returncode(self):
self.flags(use_agent_default=True,
group='xenserver')
def fake_resetnetwork(self, method, args):
fake_resetnetwork.called = True
# NOTE(johngarbutt): as returned by FreeBSD and Gentoo
return jsonutils.dumps({'returncode': '500',
'message': 'success'})
self.stubs.Set(stubs.FakeSessionForVMTests,
'_plugin_agent_resetnetwork', fake_resetnetwork)
fake_resetnetwork.called = False
self._test_spawn(IMAGE_VHD, None, None,
os_type="linux", architecture="x86-64")
self.assertTrue(fake_resetnetwork.called)
def _test_spawn_fails_silently_with(self, expected_exception_cls,
method="_plugin_agent_version",
failure=None, value=None):
self.flags(use_agent_default=True,
agent_version_timeout=0,
group='xenserver')
def fake_agent_call(self, method, args):
if failure:
raise xenapi_fake.Failure([failure])
else:
return value
self.stubs.Set(stubs.FakeSessionForVMTests,
method, fake_agent_call)
called = {}
def fake_add_instance_fault(*args, **kwargs):
called["fake_add_instance_fault"] = args[2]
self.stubs.Set(compute_utils, 'add_instance_fault_from_exc',
fake_add_instance_fault)
self._test_spawn(IMAGE_VHD, None, None,
os_type="linux", architecture="x86-64")
actual_exception = called["fake_add_instance_fault"]
self.assertIsInstance(actual_exception, expected_exception_cls)
def test_spawn_fails_silently_with_agent_timeout(self):
self._test_spawn_fails_silently_with(exception.AgentTimeout,
failure="TIMEOUT:fake")
def test_spawn_fails_silently_with_agent_not_implemented(self):
self._test_spawn_fails_silently_with(exception.AgentNotImplemented,
failure="NOT IMPLEMENTED:fake")
def test_spawn_fails_silently_with_agent_error(self):
self._test_spawn_fails_silently_with(exception.AgentError,
failure="fake_error")
def test_spawn_fails_silently_with_agent_bad_return(self):
error = jsonutils.dumps({'returncode': -1, 'message': 'fake'})
self._test_spawn_fails_silently_with(exception.AgentError,
value=error)
def test_spawn_sets_last_dom_id(self):
self._test_spawn(IMAGE_VHD, None, None,
os_type="linux", architecture="x86-64")
self.assertEqual(self.vm['domid'],
self.vm['other_config']['last_dom_id'])
def test_rescue(self):
instance = self._create_instance(spawn=False, obj=True)
xenapi_fake.create_vm(instance['name'], 'Running')
session = get_session()
vm_ref = vm_utils.lookup(session, instance['name'])
swap_vdi_ref = xenapi_fake.create_vdi('swap', None)
root_vdi_ref = xenapi_fake.create_vdi('root', None)
eph1_vdi_ref = xenapi_fake.create_vdi('eph', None)
eph2_vdi_ref = xenapi_fake.create_vdi('eph', None)
vol_vdi_ref = xenapi_fake.create_vdi('volume', None)
xenapi_fake.create_vbd(vm_ref, swap_vdi_ref, userdevice=2)
xenapi_fake.create_vbd(vm_ref, root_vdi_ref, userdevice=0)
xenapi_fake.create_vbd(vm_ref, eph1_vdi_ref, userdevice=4)
xenapi_fake.create_vbd(vm_ref, eph2_vdi_ref, userdevice=5)
xenapi_fake.create_vbd(vm_ref, vol_vdi_ref, userdevice=6,
other_config={'osvol': True})
conn = xenapi_conn.XenAPIDriver(fake.FakeVirtAPI(), False)
image_meta = {'id': IMAGE_VHD,
'disk_format': 'vhd',
'properties': {'vm_mode': 'xen'}}
conn.rescue(self.context, instance, [], image_meta, '')
vm = xenapi_fake.get_record('VM', vm_ref)
rescue_name = "%s-rescue" % vm["name_label"]
rescue_ref = vm_utils.lookup(session, rescue_name)
rescue_vm = xenapi_fake.get_record('VM', rescue_ref)
vdi_refs = {}
for vbd_ref in rescue_vm['VBDs']:
vbd = xenapi_fake.get_record('VBD', vbd_ref)
vdi_refs[vbd['VDI']] = vbd['userdevice']
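        # In the rescue VM the original root disk is re-attached as a
        # secondary disk (userdevice 1), swap and ephemeral disks keep
        # their original positions and the attached volume (osvol) is
        # deliberately not carried over.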
self.assertEqual('1', vdi_refs[root_vdi_ref])
self.assertEqual('2', vdi_refs[swap_vdi_ref])
self.assertEqual('4', vdi_refs[eph1_vdi_ref])
self.assertEqual('5', vdi_refs[eph2_vdi_ref])
self.assertNotIn(vol_vdi_ref, vdi_refs)
def test_rescue_preserve_disk_on_failure(self):
        # Test that the original disk is preserved if rescue setup fails
        # (bug #1227898).
instance = self._create_instance(obj=True)
session = get_session()
image_meta = {'id': IMAGE_VHD,
'disk_format': 'vhd',
'properties': {'vm_mode': 'xen'}}
vm_ref = vm_utils.lookup(session, instance['name'])
vdi_ref, vdi_rec = vm_utils.get_vdi_for_vm_safely(session, vm_ref)
# raise an error in the spawn setup process and trigger the
# undo manager logic:
def fake_start(*args, **kwargs):
raise test.TestingException('Start Error')
self.stubs.Set(self.conn._vmops, '_start', fake_start)
self.assertRaises(test.TestingException, self.conn.rescue,
self.context, instance, [], image_meta, '')
# confirm original disk still exists:
vdi_ref2, vdi_rec2 = vm_utils.get_vdi_for_vm_safely(session, vm_ref)
self.assertEqual(vdi_ref, vdi_ref2)
self.assertEqual(vdi_rec['uuid'], vdi_rec2['uuid'])
def test_unrescue(self):
instance = self._create_instance(obj=True)
conn = xenapi_conn.XenAPIDriver(fake.FakeVirtAPI(), False)
# Unrescue expects the original instance to be powered off
conn.power_off(instance)
xenapi_fake.create_vm(instance['name'] + '-rescue', 'Running')
conn.unrescue(instance, None)
def test_unrescue_not_in_rescue(self):
instance = self._create_instance(obj=True)
conn = xenapi_conn.XenAPIDriver(fake.FakeVirtAPI(), False)
# Ensure that it will not unrescue a non-rescued instance.
self.assertRaises(exception.InstanceNotInRescueMode, conn.unrescue,
instance, None)
def test_finish_revert_migration(self):
instance = self._create_instance()
class VMOpsMock(object):
def __init__(self):
self.finish_revert_migration_called = False
def finish_revert_migration(self, context, instance, block_info,
power_on):
self.finish_revert_migration_called = True
conn = xenapi_conn.XenAPIDriver(fake.FakeVirtAPI(), False)
conn._vmops = VMOpsMock()
conn.finish_revert_migration(self.context, instance, None)
self.assertTrue(conn._vmops.finish_revert_migration_called)
def test_reboot_hard(self):
instance = self._create_instance()
conn = xenapi_conn.XenAPIDriver(fake.FakeVirtAPI(), False)
conn.reboot(self.context, instance, None, "HARD")
def test_poll_rebooting_instances(self):
self.mox.StubOutWithMock(compute_api.API, 'reboot')
compute_api.API.reboot(mox.IgnoreArg(), mox.IgnoreArg(),
mox.IgnoreArg())
self.mox.ReplayAll()
instance = self._create_instance()
instances = [instance]
conn = xenapi_conn.XenAPIDriver(fake.FakeVirtAPI(), False)
conn.poll_rebooting_instances(60, instances)
def test_reboot_soft(self):
instance = self._create_instance()
conn = xenapi_conn.XenAPIDriver(fake.FakeVirtAPI(), False)
conn.reboot(self.context, instance, None, "SOFT")
def test_reboot_halted(self):
session = get_session()
instance = self._create_instance(spawn=False)
conn = xenapi_conn.XenAPIDriver(fake.FakeVirtAPI(), False)
xenapi_fake.create_vm(instance['name'], 'Halted')
conn.reboot(self.context, instance, None, "SOFT")
vm_ref = vm_utils.lookup(session, instance['name'])
vm = xenapi_fake.get_record('VM', vm_ref)
self.assertEqual(vm['power_state'], 'Running')
def test_reboot_unknown_state(self):
instance = self._create_instance(spawn=False)
conn = xenapi_conn.XenAPIDriver(fake.FakeVirtAPI(), False)
xenapi_fake.create_vm(instance['name'], 'Unknown')
self.assertRaises(xenapi_fake.Failure, conn.reboot, self.context,
instance, None, "SOFT")
def test_reboot_rescued(self):
instance = self._create_instance()
instance['vm_state'] = vm_states.RESCUED
conn = xenapi_conn.XenAPIDriver(fake.FakeVirtAPI(), False)
real_result = vm_utils.lookup(conn._session, instance['name'])
self.mox.StubOutWithMock(vm_utils, 'lookup')
vm_utils.lookup(conn._session, instance['name'],
True).AndReturn(real_result)
self.mox.ReplayAll()
conn.reboot(self.context, instance, None, "SOFT")
def test_get_console_output_succeeds(self):
def fake_get_console_output(instance):
self.assertEqual("instance", instance)
return "console_log"
self.stubs.Set(self.conn._vmops, 'get_console_output',
fake_get_console_output)
self.assertEqual(self.conn.get_console_output('context', "instance"),
"console_log")
def _test_maintenance_mode(self, find_host, find_aggregate):
real_call_xenapi = self.conn._session.call_xenapi
instance = self._create_instance(spawn=True)
api_calls = {}
# Record all the xenapi calls, and return a fake list of hosts
# for the host.get_all call
def fake_call_xenapi(method, *args):
api_calls[method] = args
if method == 'host.get_all':
return ['foo', 'bar', 'baz']
return real_call_xenapi(method, *args)
self.stubs.Set(self.conn._session, 'call_xenapi', fake_call_xenapi)
def fake_aggregate_get(context, host, key):
if find_aggregate:
return [test_aggregate.fake_aggregate]
else:
return []
self.stubs.Set(db, 'aggregate_get_by_host',
fake_aggregate_get)
def fake_host_find(context, session, src, dst):
if find_host:
return 'bar'
else:
raise exception.NoValidHost("I saw this one coming...")
self.stubs.Set(host, '_host_find', fake_host_find)
result = self.conn.host_maintenance_mode('bar', 'on_maintenance')
self.assertEqual(result, 'on_maintenance')
# We expect the VM.pool_migrate call to have been called to
# migrate our instance to the 'bar' host
vm_ref = vm_utils.lookup(self.conn._session, instance['name'])
host_ref = "foo"
expected = (vm_ref, host_ref, {"live": "true"})
self.assertEqual(api_calls.get('VM.pool_migrate'), expected)
instance = db.instance_get_by_uuid(self.context, instance['uuid'])
self.assertEqual(instance['vm_state'], vm_states.ACTIVE)
self.assertEqual(instance['task_state'], task_states.MIGRATING)
def test_maintenance_mode(self):
self._test_maintenance_mode(True, True)
def test_maintenance_mode_no_host(self):
self.assertRaises(exception.NoValidHost,
self._test_maintenance_mode, False, True)
def test_maintenance_mode_no_aggregate(self):
self.assertRaises(exception.NotFound,
self._test_maintenance_mode, True, False)
def test_uuid_find(self):
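        # _uuid_find maps a templated instance name back to its uuid by
        # listing the instances on the host and matching the name.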
self.mox.StubOutWithMock(db, 'instance_get_all_by_host')
fake_inst = fake_instance.fake_db_instance(id=123)
fake_inst2 = fake_instance.fake_db_instance(id=456)
db.instance_get_all_by_host(self.context, fake_inst['host'],
columns_to_join=None,
use_slave=False
).AndReturn([fake_inst, fake_inst2])
self.mox.ReplayAll()
expected_name = CONF.instance_name_template % fake_inst['id']
inst_uuid = host._uuid_find(self.context, fake_inst['host'],
expected_name)
self.assertEqual(inst_uuid, fake_inst['uuid'])
def test_session_virtapi(self):
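        # On a slave host _get_host_uuid goes through the aggregate
        # lookup; the lookup is stubbed to raise here, proving that code
        # path is actually taken.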
was = {'called': False}
def fake_aggregate_get_by_host(self, *args, **kwargs):
was['called'] = True
raise test.TestingException()
self.stubs.Set(db, "aggregate_get_by_host",
fake_aggregate_get_by_host)
self.stubs.Set(self.conn._session, "is_slave", True)
self.assertRaises(test.TestingException,
self.conn._session._get_host_uuid)
self.assertTrue(was['called'])
def test_session_handles_aggregate_metadata(self):
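        # On a slave host the host uuid is read from the aggregate's
        # metadetails entry keyed by CONF.host.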
def fake_aggregate_get(context, host, key):
agg = copy.copy(test_aggregate.fake_aggregate)
agg['metadetails'][CONF.host] = 'this_should_be_metadata'
return [agg]
self.stubs.Set(db, 'aggregate_get_by_host',
fake_aggregate_get)
self.stubs.Set(self.conn._session, "is_slave", True)
self.assertEqual('this_should_be_metadata',
self.conn._session._get_host_uuid())
def test_per_instance_usage_running(self):
instance = self._create_instance(spawn=True)
flavor = objects.Flavor.get_by_id(self.context, 3)
expected = {instance['uuid']: {'memory_mb': flavor['memory_mb'],
'uuid': instance['uuid']}}
actual = self.conn.get_per_instance_usage()
self.assertEqual(expected, actual)
# Paused instances still consume resources:
self.conn.pause(instance)
actual = self.conn.get_per_instance_usage()
self.assertEqual(expected, actual)
def test_per_instance_usage_suspended(self):
# Suspended instances do not consume memory:
instance = self._create_instance(spawn=True)
self.conn.suspend(self.context, instance)
actual = self.conn.get_per_instance_usage()
self.assertEqual({}, actual)
def test_per_instance_usage_halted(self):
instance = self._create_instance(spawn=True, obj=True)
self.conn.power_off(instance)
actual = self.conn.get_per_instance_usage()
self.assertEqual({}, actual)
def _create_instance(self, spawn=True, obj=False, **attrs):
"""Creates and spawns a test instance."""
instance_values = {
'uuid': str(uuid.uuid4()),
'display_name': 'host-',
'project_id': self.project_id,
'user_id': self.user_id,
'image_ref': 1,
'kernel_id': 2,
'ramdisk_id': 3,
'root_gb': 80,
'ephemeral_gb': 0,
'instance_type_id': '3', # m1.large
'os_type': 'linux',
'vm_mode': 'hvm',
'architecture': 'x86-64'}
instance_values.update(attrs)
instance = create_instance_with_system_metadata(self.context,
instance_values)
network_info = fake_network.fake_get_instance_nw_info(self.stubs)
image_meta = {'id': IMAGE_VHD,
'disk_format': 'vhd'}
if spawn:
self.conn.spawn(self.context, instance, image_meta, [], 'herp',
network_info)
if obj:
return instance
return base.obj_to_primitive(instance)
def test_destroy_clean_up_kernel_and_ramdisk(self):
def fake_lookup_kernel_ramdisk(session, vm_ref):
return "kernel", "ramdisk"
self.stubs.Set(vm_utils, "lookup_kernel_ramdisk",
fake_lookup_kernel_ramdisk)
def fake_destroy_kernel_ramdisk(session, instance, kernel, ramdisk):
fake_destroy_kernel_ramdisk.called = True
self.assertEqual("kernel", kernel)
self.assertEqual("ramdisk", ramdisk)
fake_destroy_kernel_ramdisk.called = False
self.stubs.Set(vm_utils, "destroy_kernel_ramdisk",
fake_destroy_kernel_ramdisk)
instance = self._create_instance(spawn=True, obj=True)
network_info = fake_network.fake_get_instance_nw_info(self.stubs)
self.conn.destroy(self.context, instance, network_info)
vm_ref = vm_utils.lookup(self.conn._session, instance['name'])
self.assertIsNone(vm_ref)
self.assertTrue(fake_destroy_kernel_ramdisk.called)
class XenAPIDiffieHellmanTestCase(test.NoDBTestCase):
"""Unit tests for Diffie-Hellman code."""
def setUp(self):
super(XenAPIDiffieHellmanTestCase, self).setUp()
self.alice = agent.SimpleDH()
self.bob = agent.SimpleDH()
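        # alice and bob are independent SimpleDH endpoints; exchanging
        # public keys should let both sides derive the same shared secret.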
def test_shared(self):
alice_pub = self.alice.get_public()
bob_pub = self.bob.get_public()
alice_shared = self.alice.compute_shared(bob_pub)
bob_shared = self.bob.compute_shared(alice_pub)
self.assertEqual(alice_shared, bob_shared)
def _test_encryption(self, message):
enc = self.alice.encrypt(message)
self.assertFalse(enc.endswith('\n'))
dec = self.bob.decrypt(enc)
self.assertEqual(dec, message)
def test_encrypt_simple_message(self):
self._test_encryption('This is a simple message.')
def test_encrypt_message_with_newlines_at_end(self):
self._test_encryption('This message has a newline at the end.\n')
def test_encrypt_many_newlines_at_end(self):
self._test_encryption('Message with lotsa newlines.\n\n\n')
def test_encrypt_newlines_inside_message(self):
self._test_encryption('Message\nwith\ninterior\nnewlines.')
def test_encrypt_with_leading_newlines(self):
self._test_encryption('\n\nMessage with leading newlines.')
def test_encrypt_really_long_message(self):
self._test_encryption(''.join(['abcd' for i in range(1024)]))
# FIXME(sirp): convert this to use XenAPITestBaseNoDB
class XenAPIMigrateInstance(stubs.XenAPITestBase):
"""Unit test for verifying migration-related actions."""
REQUIRES_LOCKING = True
def setUp(self):
super(XenAPIMigrateInstance, self).setUp()
self.flags(connection_url='test_url',
connection_password='test_pass',
group='xenserver')
self.flags(firewall_driver='nova.virt.xenapi.firewall.'
'Dom0IptablesFirewallDriver')
stubs.stubout_session(self.stubs, stubs.FakeSessionForVMTests)
db_fakes.stub_out_db_instance_api(self.stubs)
xenapi_fake.create_network('fake', 'fake_br1')
self.user_id = 'fake'
self.project_id = 'fake'
self.context = context.RequestContext(self.user_id, self.project_id)
self.instance_values = {
'project_id': self.project_id,
'user_id': self.user_id,
'image_ref': 1,
'kernel_id': None,
'ramdisk_id': None,
'root_gb': 80,
'ephemeral_gb': 0,
'instance_type_id': '3', # m1.large
'os_type': 'linux',
'architecture': 'x86-64'}
migration_values = {
'source_compute': 'nova-compute',
'dest_compute': 'nova-compute',
'dest_host': '10.127.5.114',
'status': 'post-migrating',
'instance_uuid': '15f23e6a-cc6e-4d22-b651-d9bdaac316f7',
'old_instance_type_id': 5,
'new_instance_type_id': 1
}
self.migration = db.migration_create(
context.get_admin_context(), migration_values)
fake_processutils.stub_out_processutils_execute(self.stubs)
stubs.stub_out_migration_methods(self.stubs)
stubs.stubout_get_this_vm_uuid(self.stubs)
def fake_inject_instance_metadata(self, instance, vm):
pass
self.stubs.Set(vmops.VMOps, '_inject_instance_metadata',
fake_inject_instance_metadata)
def test_migrate_disk_and_power_off(self):
instance = db.instance_create(self.context, self.instance_values)
xenapi_fake.create_vm(instance['name'], 'Running')
flavor = fake_flavor.fake_flavor_obj(self.context, root_gb=80,
ephemeral_gb=0)
conn = xenapi_conn.XenAPIDriver(fake.FakeVirtAPI(), False)
vm_ref = vm_utils.lookup(conn._session, instance['name'])
self.mox.StubOutWithMock(volume_utils, 'is_booted_from_volume')
volume_utils.is_booted_from_volume(conn._session, vm_ref)
self.mox.ReplayAll()
conn.migrate_disk_and_power_off(self.context, instance,
'127.0.0.1', flavor, None)
def test_migrate_disk_and_power_off_passes_exceptions(self):
instance = db.instance_create(self.context, self.instance_values)
xenapi_fake.create_vm(instance['name'], 'Running')
flavor = fake_flavor.fake_flavor_obj(self.context, root_gb=80,
ephemeral_gb=0)
def fake_raise(*args, **kwargs):
raise exception.MigrationError(reason='test failure')
self.stubs.Set(vmops.VMOps, "_migrate_disk_resizing_up", fake_raise)
conn = xenapi_conn.XenAPIDriver(fake.FakeVirtAPI(), False)
self.assertRaises(exception.MigrationError,
conn.migrate_disk_and_power_off,
self.context, instance,
'127.0.0.1', flavor, None)
def test_migrate_disk_and_power_off_throws_on_zero_gb_resize_down(self):
instance = db.instance_create(self.context, self.instance_values)
flavor = fake_flavor.fake_flavor_obj(self.context, root_gb=0,
ephemeral_gb=0)
conn = xenapi_conn.XenAPIDriver(fake.FakeVirtAPI(), False)
self.assertRaises(exception.ResizeError,
conn.migrate_disk_and_power_off,
self.context, instance,
'fake_dest', flavor, None)
def test_migrate_disk_and_power_off_with_zero_gb_old_and_new_works(self):
flavor = fake_flavor.fake_flavor_obj(self.context, root_gb=0,
ephemeral_gb=0)
values = copy.copy(self.instance_values)
values["root_gb"] = 0
values["ephemeral_gb"] = 0
instance = db.instance_create(self.context, values)
xenapi_fake.create_vm(instance['name'], 'Running')
conn = xenapi_conn.XenAPIDriver(fake.FakeVirtAPI(), False)
vm_ref = vm_utils.lookup(conn._session, instance['name'])
self.mox.StubOutWithMock(volume_utils, 'is_booted_from_volume')
volume_utils.is_booted_from_volume(conn._session, vm_ref)
self.mox.ReplayAll()
conn.migrate_disk_and_power_off(self.context, instance,
'127.0.0.1', flavor, None)
def _test_revert_migrate(self, power_on):
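        # Drive finish_migration (resize) followed by
        # finish_revert_migration, checking that VDI_resize_online is
        # used, the VM is only started when power_on is True and the
        # revert hook fires.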
instance = create_instance_with_system_metadata(self.context,
self.instance_values)
self.called = False
self.fake_vm_start_called = False
self.fake_finish_revert_migration_called = False
context = 'fake_context'
def fake_vm_start(*args, **kwargs):
self.fake_vm_start_called = True
def fake_vdi_resize(*args, **kwargs):
self.called = True
def fake_finish_revert_migration(*args, **kwargs):
self.fake_finish_revert_migration_called = True
self.stubs.Set(stubs.FakeSessionForVMTests,
"VDI_resize_online", fake_vdi_resize)
self.stubs.Set(vmops.VMOps, '_start', fake_vm_start)
self.stubs.Set(vmops.VMOps, 'finish_revert_migration',
fake_finish_revert_migration)
stubs.stubout_session(self.stubs, stubs.FakeSessionForVMTests,
product_version=(4, 0, 0),
product_brand='XenServer')
self.mox.StubOutWithMock(volume_utils, 'is_booted_from_volume')
conn = xenapi_conn.XenAPIDriver(fake.FakeVirtAPI(), False)
network_info = fake_network.fake_get_instance_nw_info(self.stubs)
image_meta = {'id': instance['image_ref'], 'disk_format': 'vhd'}
base = xenapi_fake.create_vdi('hurr', 'fake')
base_uuid = xenapi_fake.get_record('VDI', base)['uuid']
cow = xenapi_fake.create_vdi('durr', 'fake')
cow_uuid = xenapi_fake.get_record('VDI', cow)['uuid']
conn.finish_migration(self.context, self.migration, instance,
dict(base_copy=base_uuid, cow=cow_uuid),
network_info, image_meta, resize_instance=True,
block_device_info=None, power_on=power_on)
self.assertEqual(self.called, True)
self.assertEqual(self.fake_vm_start_called, power_on)
conn.finish_revert_migration(context, instance, network_info)
self.assertEqual(self.fake_finish_revert_migration_called, True)
def test_revert_migrate_power_on(self):
self._test_revert_migrate(True)
def test_revert_migrate_power_off(self):
self._test_revert_migrate(False)
def _test_finish_migrate(self, power_on):
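        # Same flow as _test_revert_migrate but without the revert step:
        # finish_migration should resize the VDI online and only start
        # the VM when power_on is requested.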
instance = create_instance_with_system_metadata(self.context,
self.instance_values)
self.called = False
self.fake_vm_start_called = False
def fake_vm_start(*args, **kwargs):
self.fake_vm_start_called = True
def fake_vdi_resize(*args, **kwargs):
self.called = True
self.stubs.Set(vmops.VMOps, '_start', fake_vm_start)
self.stubs.Set(stubs.FakeSessionForVMTests,
"VDI_resize_online", fake_vdi_resize)
stubs.stubout_session(self.stubs, stubs.FakeSessionForVMTests,
product_version=(4, 0, 0),
product_brand='XenServer')
conn = xenapi_conn.XenAPIDriver(fake.FakeVirtAPI(), False)
network_info = fake_network.fake_get_instance_nw_info(self.stubs)
image_meta = {'id': instance['image_ref'], 'disk_format': 'vhd'}
conn.finish_migration(self.context, self.migration, instance,
dict(base_copy='hurr', cow='durr'),
network_info, image_meta, resize_instance=True,
block_device_info=None, power_on=power_on)
self.assertEqual(self.called, True)
self.assertEqual(self.fake_vm_start_called, power_on)
def test_finish_migrate_power_on(self):
self._test_finish_migrate(True)
def test_finish_migrate_power_off(self):
self._test_finish_migrate(False)
def test_finish_migrate_no_local_storage(self):
values = copy.copy(self.instance_values)
values["root_gb"] = 0
values["ephemeral_gb"] = 0
instance = create_instance_with_system_metadata(self.context, values)
def fake_vdi_resize(*args, **kwargs):
raise Exception("This shouldn't be called")
self.stubs.Set(stubs.FakeSessionForVMTests,
"VDI_resize_online", fake_vdi_resize)
conn = xenapi_conn.XenAPIDriver(fake.FakeVirtAPI(), False)
network_info = fake_network.fake_get_instance_nw_info(self.stubs)
image_meta = {'id': instance['image_ref'], 'disk_format': 'vhd'}
conn.finish_migration(self.context, self.migration, instance,
dict(base_copy='hurr', cow='durr'),
network_info, image_meta, resize_instance=True)
def test_finish_migrate_no_resize_vdi(self):
instance = create_instance_with_system_metadata(self.context,
self.instance_values)
def fake_vdi_resize(*args, **kwargs):
raise Exception("This shouldn't be called")
self.stubs.Set(stubs.FakeSessionForVMTests,
"VDI_resize_online", fake_vdi_resize)
conn = xenapi_conn.XenAPIDriver(fake.FakeVirtAPI(), False)
network_info = fake_network.fake_get_instance_nw_info(self.stubs)
        # Whether to resize is decided by the compute layer; here
        # finish_migration is called with resize_instance=False.
image_meta = {'id': instance['image_ref'], 'disk_format': 'vhd'}
conn.finish_migration(self.context, self.migration, instance,
dict(base_copy='hurr', cow='durr'),
network_info, image_meta, resize_instance=False)
@stub_vm_utils_with_vdi_attached_here
def test_migrate_too_many_partitions_no_resize_down(self):
instance_values = self.instance_values
instance = db.instance_create(self.context, instance_values)
xenapi_fake.create_vm(instance['name'], 'Running')
flavor = db.flavor_get_by_name(self.context, 'm1.small')
flavor = fake_flavor.fake_flavor_obj(self.context, **flavor)
conn = xenapi_conn.XenAPIDriver(fake.FakeVirtAPI(), False)
def fake_get_partitions(partition):
return [(1, 2, 3, 4, "", ""), (1, 2, 3, 4, "", "")]
self.stubs.Set(vm_utils, '_get_partitions', fake_get_partitions)
self.assertRaises(exception.InstanceFaultRollback,
conn.migrate_disk_and_power_off,
self.context, instance,
'127.0.0.1', flavor, None)
@stub_vm_utils_with_vdi_attached_here
def test_migrate_bad_fs_type_no_resize_down(self):
instance_values = self.instance_values
instance = db.instance_create(self.context, instance_values)
xenapi_fake.create_vm(instance['name'], 'Running')
flavor = db.flavor_get_by_name(self.context, 'm1.small')
flavor = fake_flavor.fake_flavor_obj(self.context, **flavor)
conn = xenapi_conn.XenAPIDriver(fake.FakeVirtAPI(), False)
def fake_get_partitions(partition):
return [(1, 2, 3, "ext2", "", "boot")]
self.stubs.Set(vm_utils, '_get_partitions', fake_get_partitions)
self.assertRaises(exception.InstanceFaultRollback,
conn.migrate_disk_and_power_off,
self.context, instance,
'127.0.0.1', flavor, None)
def test_migrate_rollback_when_resize_down_fs_fails(self):
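        # If migrate_vhd fails part way through a resize down, the newly
        # resized VDI must be destroyed, the original VM restored and the
        # failure re-raised as InstanceFaultRollback.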
conn = xenapi_conn.XenAPIDriver(fake.FakeVirtAPI(), False)
vmops = conn._vmops
self.mox.StubOutWithMock(vmops, '_resize_ensure_vm_is_shutdown')
self.mox.StubOutWithMock(vmops, '_apply_orig_vm_name_label')
self.mox.StubOutWithMock(vm_utils, 'resize_disk')
self.mox.StubOutWithMock(vm_utils, 'migrate_vhd')
self.mox.StubOutWithMock(vm_utils, 'destroy_vdi')
self.mox.StubOutWithMock(vm_utils, 'get_vdi_for_vm_safely')
self.mox.StubOutWithMock(vmops, '_restore_orig_vm_and_cleanup_orphan')
instance = objects.Instance(context=self.context,
auto_disk_config=True, uuid='uuid')
instance.obj_reset_changes()
vm_ref = "vm_ref"
dest = "dest"
flavor = "type"
sr_path = "sr_path"
vmops._resize_ensure_vm_is_shutdown(instance, vm_ref)
vmops._apply_orig_vm_name_label(instance, vm_ref)
old_vdi_ref = "old_ref"
vm_utils.get_vdi_for_vm_safely(vmops._session, vm_ref).AndReturn(
(old_vdi_ref, None))
new_vdi_ref = "new_ref"
new_vdi_uuid = "new_uuid"
vm_utils.resize_disk(vmops._session, instance, old_vdi_ref,
flavor).AndReturn((new_vdi_ref, new_vdi_uuid))
vm_utils.migrate_vhd(vmops._session, instance, new_vdi_uuid, dest,
sr_path, 0).AndRaise(
exception.ResizeError(reason="asdf"))
vm_utils.destroy_vdi(vmops._session, new_vdi_ref)
vmops._restore_orig_vm_and_cleanup_orphan(instance)
self.mox.ReplayAll()
with mock.patch.object(instance, 'save') as mock_save:
self.assertRaises(exception.InstanceFaultRollback,
vmops._migrate_disk_resizing_down, self.context,
instance, dest, flavor, vm_ref, sr_path)
self.assertEqual(3, mock_save.call_count)
self.assertEqual(60.0, instance.progress)
def test_resize_ensure_vm_is_shutdown_cleanly(self):
conn = xenapi_conn.XenAPIDriver(fake.FakeVirtAPI(), False)
vmops = conn._vmops
fake_instance = {'uuid': 'uuid'}
self.mox.StubOutWithMock(vm_utils, 'is_vm_shutdown')
self.mox.StubOutWithMock(vm_utils, 'clean_shutdown_vm')
self.mox.StubOutWithMock(vm_utils, 'hard_shutdown_vm')
vm_utils.is_vm_shutdown(vmops._session, "ref").AndReturn(False)
vm_utils.clean_shutdown_vm(vmops._session, fake_instance,
"ref").AndReturn(True)
self.mox.ReplayAll()
vmops._resize_ensure_vm_is_shutdown(fake_instance, "ref")
def test_resize_ensure_vm_is_shutdown_forced(self):
conn = xenapi_conn.XenAPIDriver(fake.FakeVirtAPI(), False)
vmops = conn._vmops
fake_instance = {'uuid': 'uuid'}
self.mox.StubOutWithMock(vm_utils, 'is_vm_shutdown')
self.mox.StubOutWithMock(vm_utils, 'clean_shutdown_vm')
self.mox.StubOutWithMock(vm_utils, 'hard_shutdown_vm')
vm_utils.is_vm_shutdown(vmops._session, "ref").AndReturn(False)
vm_utils.clean_shutdown_vm(vmops._session, fake_instance,
"ref").AndReturn(False)
vm_utils.hard_shutdown_vm(vmops._session, fake_instance,
"ref").AndReturn(True)
self.mox.ReplayAll()
vmops._resize_ensure_vm_is_shutdown(fake_instance, "ref")
def test_resize_ensure_vm_is_shutdown_fails(self):
conn = xenapi_conn.XenAPIDriver(fake.FakeVirtAPI(), False)
vmops = conn._vmops
fake_instance = {'uuid': 'uuid'}
self.mox.StubOutWithMock(vm_utils, 'is_vm_shutdown')
self.mox.StubOutWithMock(vm_utils, 'clean_shutdown_vm')
self.mox.StubOutWithMock(vm_utils, 'hard_shutdown_vm')
vm_utils.is_vm_shutdown(vmops._session, "ref").AndReturn(False)
vm_utils.clean_shutdown_vm(vmops._session, fake_instance,
"ref").AndReturn(False)
vm_utils.hard_shutdown_vm(vmops._session, fake_instance,
"ref").AndReturn(False)
self.mox.ReplayAll()
self.assertRaises(exception.ResizeError,
vmops._resize_ensure_vm_is_shutdown, fake_instance, "ref")
def test_resize_ensure_vm_is_shutdown_already_shutdown(self):
conn = xenapi_conn.XenAPIDriver(fake.FakeVirtAPI(), False)
vmops = conn._vmops
fake_instance = {'uuid': 'uuid'}
self.mox.StubOutWithMock(vm_utils, 'is_vm_shutdown')
self.mox.StubOutWithMock(vm_utils, 'clean_shutdown_vm')
self.mox.StubOutWithMock(vm_utils, 'hard_shutdown_vm')
vm_utils.is_vm_shutdown(vmops._session, "ref").AndReturn(True)
self.mox.ReplayAll()
vmops._resize_ensure_vm_is_shutdown(fake_instance, "ref")
class XenAPIImageTypeTestCase(test.NoDBTestCase):
"""Test ImageType class."""
def test_to_string(self):
# Can convert from type id to type string.
self.assertEqual(
vm_utils.ImageType.to_string(vm_utils.ImageType.KERNEL),
vm_utils.ImageType.KERNEL_STR)
def _assert_role(self, expected_role, image_type_id):
self.assertEqual(
expected_role,
vm_utils.ImageType.get_role(image_type_id))
def test_get_image_role_kernel(self):
self._assert_role('kernel', vm_utils.ImageType.KERNEL)
def test_get_image_role_ramdisk(self):
self._assert_role('ramdisk', vm_utils.ImageType.RAMDISK)
def test_get_image_role_disk(self):
self._assert_role('root', vm_utils.ImageType.DISK)
def test_get_image_role_disk_raw(self):
self._assert_role('root', vm_utils.ImageType.DISK_RAW)
def test_get_image_role_disk_vhd(self):
self._assert_role('root', vm_utils.ImageType.DISK_VHD)
class XenAPIDetermineDiskImageTestCase(test.NoDBTestCase):
"""Unit tests for code that detects the ImageType."""
def assert_disk_type(self, image_meta, expected_disk_type):
actual = vm_utils.determine_disk_image_type(image_meta)
self.assertEqual(expected_disk_type, actual)
def test_machine(self):
image_meta = objects.ImageMeta.from_dict(
{'disk_format': 'ami'})
self.assert_disk_type(image_meta, vm_utils.ImageType.DISK)
def test_raw(self):
image_meta = objects.ImageMeta.from_dict(
{'disk_format': 'raw'})
self.assert_disk_type(image_meta, vm_utils.ImageType.DISK_RAW)
def test_vhd(self):
image_meta = objects.ImageMeta.from_dict(
{'disk_format': 'vhd'})
self.assert_disk_type(image_meta, vm_utils.ImageType.DISK_VHD)
# FIXME(sirp): convert this to use XenAPITestBaseNoDB
class XenAPIHostTestCase(stubs.XenAPITestBase):
"""Tests HostState, which holds metrics from XenServer that get
reported back to the Schedulers.
"""
def setUp(self):
super(XenAPIHostTestCase, self).setUp()
self.flags(connection_url='test_url',
connection_password='test_pass',
group='xenserver')
stubs.stubout_session(self.stubs, stubs.FakeSessionForVMTests)
self.context = context.get_admin_context()
self.flags(use_local=True, group='conductor')
self.conn = xenapi_conn.XenAPIDriver(fake.FakeVirtAPI(), False)
self.instance = fake_instance.fake_db_instance(name='foo')
def test_host_state(self):
stats = self.conn.host_state.get_host_stats(False)
# Values from fake.create_local_srs (ext SR)
self.assertEqual(stats['disk_total'], 40000)
self.assertEqual(stats['disk_used'], 20000)
# Values from fake._plugin_xenhost_host_data
self.assertEqual(stats['host_memory_total'], 10)
self.assertEqual(stats['host_memory_overhead'], 20)
self.assertEqual(stats['host_memory_free'], 30)
self.assertEqual(stats['host_memory_free_computed'], 40)
self.assertEqual(stats['hypervisor_hostname'], 'fake-xenhost')
self.assertEqual(stats['host_cpu_info']['cpu_count'], 4)
self.assertThat({
'vendor': 'GenuineIntel',
'model': 'Intel(R) Xeon(R) CPU X3430 @ 2.40GHz',
'topology': {
'sockets': 1,
'cores': 4,
'threads': 1,
},
'features': [
'fpu', 'de', 'tsc', 'msr', 'pae', 'mce',
'cx8', 'apic', 'sep', 'mtrr', 'mca',
'cmov', 'pat', 'clflush', 'acpi', 'mmx',
'fxsr', 'sse', 'sse2', 'ss', 'ht',
'nx', 'constant_tsc', 'nonstop_tsc',
'aperfmperf', 'pni', 'vmx', 'est', 'ssse3',
'sse4_1', 'sse4_2', 'popcnt', 'hypervisor',
'ida', 'tpr_shadow', 'vnmi', 'flexpriority',
'ept', 'vpid',
]},
matchers.DictMatches(stats['cpu_model']))
# No VMs running
self.assertEqual(stats['vcpus_used'], 0)
def test_host_state_vcpus_used(self):
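        # vcpus_used only counts running VMs: 0 before any VM exists and
        # 4 once the fake running VM has been created.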
stats = self.conn.host_state.get_host_stats(True)
self.assertEqual(stats['vcpus_used'], 0)
xenapi_fake.create_vm(self.instance['name'], 'Running')
stats = self.conn.host_state.get_host_stats(True)
self.assertEqual(stats['vcpus_used'], 4)
def test_pci_passthrough_devices(self):
stats = self.conn.host_state.get_host_stats(False)
self.assertEqual(len(stats['pci_passthrough_devices']), 2)
def test_host_state_missing_sr(self):
# Must trigger construction of 'host_state' property
# before introducing the stub which raises the error
hs = self.conn.host_state
def fake_safe_find_sr(session):
raise exception.StorageRepositoryNotFound('not there')
self.stubs.Set(vm_utils, 'safe_find_sr', fake_safe_find_sr)
self.assertRaises(exception.StorageRepositoryNotFound,
hs.get_host_stats,
refresh=True)
def _test_host_action(self, method, action, expected=None):
result = method('host', action)
if not expected:
expected = action
self.assertEqual(result, expected)
def _test_host_action_no_param(self, method, action, expected=None):
result = method(action)
if not expected:
expected = action
self.assertEqual(result, expected)
def test_host_reboot(self):
self._test_host_action_no_param(self.conn.host_power_action, 'reboot')
def test_host_shutdown(self):
self._test_host_action_no_param(self.conn.host_power_action,
'shutdown')
def test_host_startup(self):
self.assertRaises(NotImplementedError,
self.conn.host_power_action, 'startup')
def test_host_maintenance_on(self):
self._test_host_action(self.conn.host_maintenance_mode,
True, 'on_maintenance')
def test_host_maintenance_off(self):
self._test_host_action(self.conn.host_maintenance_mode,
False, 'off_maintenance')
def test_set_enable_host_enable(self):
_create_service_entries(self.context, values={'nova': ['fake-mini']})
self._test_host_action_no_param(self.conn.set_host_enabled,
True, 'enabled')
service = db.service_get_by_host_and_binary(self.context, 'fake-mini',
'nova-compute')
self.assertEqual(service.disabled, False)
def test_set_enable_host_disable(self):
_create_service_entries(self.context, values={'nova': ['fake-mini']})
self._test_host_action_no_param(self.conn.set_host_enabled,
False, 'disabled')
service = db.service_get_by_host_and_binary(self.context, 'fake-mini',
'nova-compute')
self.assertEqual(service.disabled, True)
def test_get_host_uptime(self):
result = self.conn.get_host_uptime()
self.assertEqual(result, 'fake uptime')
def test_supported_instances_is_included_in_host_state(self):
stats = self.conn.host_state.get_host_stats(False)
self.assertIn('supported_instances', stats)
def test_supported_instances_is_calculated_by_to_supported_instances(self):
def to_supported_instances(somedata):
return "SOMERETURNVALUE"
self.stubs.Set(host, 'to_supported_instances', to_supported_instances)
stats = self.conn.host_state.get_host_stats(False)
self.assertEqual("SOMERETURNVALUE", stats['supported_instances'])
def test_update_stats_caches_hostname(self):
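        # The hypervisor hostname picked up by the first stats update is
        # cached; repeated refreshes keep returning 'foo'.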
self.mox.StubOutWithMock(host, 'call_xenhost')
self.mox.StubOutWithMock(vm_utils, 'scan_default_sr')
self.mox.StubOutWithMock(vm_utils, 'list_vms')
self.mox.StubOutWithMock(self.conn._session, 'call_xenapi')
data = {'disk_total': 0,
'disk_used': 0,
'disk_available': 0,
'supported_instances': 0,
'host_capabilities': [],
'host_hostname': 'foo',
'vcpus_used': 0,
}
sr_rec = {
'physical_size': 0,
'physical_utilisation': 0,
'virtual_allocation': 0,
}
for i in range(3):
host.call_xenhost(mox.IgnoreArg(), 'host_data', {}).AndReturn(data)
vm_utils.scan_default_sr(self.conn._session).AndReturn("ref")
vm_utils.list_vms(self.conn._session).AndReturn([])
self.conn._session.call_xenapi('SR.get_record', "ref").AndReturn(
sr_rec)
if i == 2:
# On the third call (the second below) change the hostname
data = dict(data, host_hostname='bar')
self.mox.ReplayAll()
stats = self.conn.host_state.get_host_stats(refresh=True)
self.assertEqual('foo', stats['hypervisor_hostname'])
stats = self.conn.host_state.get_host_stats(refresh=True)
self.assertEqual('foo', stats['hypervisor_hostname'])
class ToSupportedInstancesTestCase(test.NoDBTestCase):
def test_default_return_value(self):
self.assertEqual([],
host.to_supported_instances(None))
def test_return_value(self):
self.assertEqual([(arch.X86_64, hv_type.XEN, 'xen')],
host.to_supported_instances([u'xen-3.0-x86_64']))
def test_invalid_values_do_not_break(self):
self.assertEqual([(arch.X86_64, hv_type.XEN, 'xen')],
host.to_supported_instances([u'xen-3.0-x86_64', 'spam']))
def test_multiple_values(self):
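        # The capability string is '<vm mode>-<version>-<arch>'; e.g.
        # 'hvm-3.0-x86_32' maps to (arch.I686, hv_type.XEN, 'hvm').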
self.assertEqual(
[
(arch.X86_64, hv_type.XEN, 'xen'),
(arch.I686, hv_type.XEN, 'hvm')
],
host.to_supported_instances([u'xen-3.0-x86_64', 'hvm-3.0-x86_32'])
)
# FIXME(sirp): convert this to use XenAPITestBaseNoDB
class XenAPIAutoDiskConfigTestCase(stubs.XenAPITestBase):
def setUp(self):
super(XenAPIAutoDiskConfigTestCase, self).setUp()
self.flags(connection_url='test_url',
connection_password='test_pass',
group='xenserver')
self.flags(firewall_driver='nova.virt.xenapi.firewall.'
'Dom0IptablesFirewallDriver')
stubs.stubout_session(self.stubs, stubs.FakeSessionForVMTests)
self.conn = xenapi_conn.XenAPIDriver(fake.FakeVirtAPI(), False)
self.user_id = 'fake'
self.project_id = 'fake'
self.instance_values = {
'project_id': self.project_id,
'user_id': self.user_id,
'image_ref': 1,
'kernel_id': 2,
'ramdisk_id': 3,
'root_gb': 80,
'ephemeral_gb': 0,
'instance_type_id': '3', # m1.large
'os_type': 'linux',
'architecture': 'x86-64'}
self.context = context.RequestContext(self.user_id, self.project_id)
def fake_create_vbd(session, vm_ref, vdi_ref, userdevice,
vbd_type='disk', read_only=False, bootable=True,
osvol=False):
pass
self.stubs.Set(vm_utils, 'create_vbd', fake_create_vbd)
def assertIsPartitionCalled(self, called):
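        # Helper: attach disks for a freshly created instance and assert
        # whether the partition/filesystem resize helper was invoked.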
marker = {"partition_called": False}
def fake_resize_part_and_fs(dev, start, old_sectors, new_sectors,
flags):
marker["partition_called"] = True
self.stubs.Set(vm_utils, "_resize_part_and_fs",
fake_resize_part_and_fs)
context.RequestContext(self.user_id, self.project_id)
session = get_session()
disk_image_type = vm_utils.ImageType.DISK_VHD
instance = create_instance_with_system_metadata(self.context,
self.instance_values)
vm_ref = xenapi_fake.create_vm(instance['name'], 'Halted')
vdi_ref = xenapi_fake.create_vdi(instance['name'], 'fake')
vdi_uuid = session.call_xenapi('VDI.get_record', vdi_ref)['uuid']
vdis = {'root': {'uuid': vdi_uuid, 'ref': vdi_ref}}
image_meta = {'id': 'null',
'disk_format': 'vhd',
'properties': {'vm_mode': 'xen'}}
self.conn._vmops._attach_disks(instance, image_meta, vm_ref,
instance['name'], vdis, disk_image_type, "fake_nw_inf")
self.assertEqual(marker["partition_called"], called)
def test_instance_not_auto_disk_config(self):
"""Should not partition unless instance is marked as
auto_disk_config.
"""
self.instance_values['auto_disk_config'] = False
self.assertIsPartitionCalled(False)
@stub_vm_utils_with_vdi_attached_here
def test_instance_auto_disk_config_fails_safe_two_partitions(self):
# Should not partition unless fail safes pass.
self.instance_values['auto_disk_config'] = True
def fake_get_partitions(dev):
            return [(1, 0, 100, 'ext4', "", ""), (2, 100, 200, 'ext4', "", "")]
self.stubs.Set(vm_utils, "_get_partitions",
fake_get_partitions)
self.assertIsPartitionCalled(False)
@stub_vm_utils_with_vdi_attached_here
def test_instance_auto_disk_config_fails_safe_badly_numbered(self):
# Should not partition unless fail safes pass.
self.instance_values['auto_disk_config'] = True
def fake_get_partitions(dev):
return [(2, 100, 200, 'ext4', "", "")]
self.stubs.Set(vm_utils, "_get_partitions",
fake_get_partitions)
self.assertIsPartitionCalled(False)
@stub_vm_utils_with_vdi_attached_here
def test_instance_auto_disk_config_fails_safe_bad_fstype(self):
# Should not partition unless fail safes pass.
self.instance_values['auto_disk_config'] = True
def fake_get_partitions(dev):
return [(1, 100, 200, 'asdf', "", "")]
self.stubs.Set(vm_utils, "_get_partitions",
fake_get_partitions)
self.assertIsPartitionCalled(False)
@stub_vm_utils_with_vdi_attached_here
def test_instance_auto_disk_config_passes_fail_safes(self):
"""Should partition if instance is marked as auto_disk_config=True and
virt-layer specific fail-safe checks pass.
"""
self.instance_values['auto_disk_config'] = True
def fake_get_partitions(dev):
return [(1, 0, 100, 'ext4', "", "boot")]
self.stubs.Set(vm_utils, "_get_partitions",
fake_get_partitions)
self.assertIsPartitionCalled(True)
# FIXME(sirp): convert this to use XenAPITestBaseNoDB
class XenAPIGenerateLocal(stubs.XenAPITestBase):
"""Test generating of local disks, like swap and ephemeral."""
def setUp(self):
super(XenAPIGenerateLocal, self).setUp()
self.flags(connection_url='test_url',
connection_password='test_pass',
group='xenserver')
self.flags(firewall_driver='nova.virt.xenapi.firewall.'
'Dom0IptablesFirewallDriver')
stubs.stubout_session(self.stubs, stubs.FakeSessionForVMTests)
db_fakes.stub_out_db_instance_api(self.stubs)
self.conn = xenapi_conn.XenAPIDriver(fake.FakeVirtAPI(), False)
self.user_id = 'fake'
self.project_id = 'fake'
self.instance_values = {
'project_id': self.project_id,
'user_id': self.user_id,
'image_ref': 1,
'kernel_id': 2,
'ramdisk_id': 3,
'root_gb': 80,
'ephemeral_gb': 0,
'instance_type_id': '3', # m1.large
'os_type': 'linux',
'architecture': 'x86-64'}
self.context = context.RequestContext(self.user_id, self.project_id)
def fake_create_vbd(session, vm_ref, vdi_ref, userdevice,
vbd_type='disk', read_only=False, bootable=True,
osvol=False, empty=False, unpluggable=True):
return session.call_xenapi('VBD.create', {'VM': vm_ref,
'VDI': vdi_ref})
self.stubs.Set(vm_utils, 'create_vbd', fake_create_vbd)
def assertCalled(self, instance,
disk_image_type=vm_utils.ImageType.DISK_VHD):
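        # Helper: create a halted VM with a root (or ISO) VDI, attach the
        # disks and assert that the stubbed generate_* helper flagged
        # self.called.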
context.RequestContext(self.user_id, self.project_id)
session = get_session()
vm_ref = xenapi_fake.create_vm(instance['name'], 'Halted')
vdi_ref = xenapi_fake.create_vdi(instance['name'], 'fake')
vdi_uuid = session.call_xenapi('VDI.get_record', vdi_ref)['uuid']
vdi_key = 'root'
if disk_image_type == vm_utils.ImageType.DISK_ISO:
vdi_key = 'iso'
vdis = {vdi_key: {'uuid': vdi_uuid, 'ref': vdi_ref}}
self.called = False
image_meta = {'id': 'null',
'disk_format': 'vhd',
'properties': {'vm_mode': 'xen'}}
self.conn._vmops._attach_disks(instance, image_meta, vm_ref,
instance['name'], vdis, disk_image_type, "fake_nw_inf")
self.assertTrue(self.called)
def test_generate_swap(self):
# Test swap disk generation.
instance_values = dict(self.instance_values, instance_type_id=5)
instance = create_instance_with_system_metadata(self.context,
instance_values)
def fake_generate_swap(*args, **kwargs):
self.called = True
self.stubs.Set(vm_utils, 'generate_swap', fake_generate_swap)
self.assertCalled(instance)
def test_generate_ephemeral(self):
# Test ephemeral disk generation.
instance_values = dict(self.instance_values, instance_type_id=4)
instance = create_instance_with_system_metadata(self.context,
instance_values)
def fake_generate_ephemeral(*args):
self.called = True
self.stubs.Set(vm_utils, 'generate_ephemeral', fake_generate_ephemeral)
self.assertCalled(instance)
def test_generate_iso_blank_root_disk(self):
instance_values = dict(self.instance_values, instance_type_id=4)
instance_values.pop('kernel_id')
instance_values.pop('ramdisk_id')
instance = create_instance_with_system_metadata(self.context,
instance_values)
def fake_generate_ephemeral(*args):
pass
self.stubs.Set(vm_utils, 'generate_ephemeral', fake_generate_ephemeral)
def fake_generate_iso(*args):
self.called = True
self.stubs.Set(vm_utils, 'generate_iso_blank_root_disk',
fake_generate_iso)
self.assertCalled(instance, vm_utils.ImageType.DISK_ISO)
class XenAPIBWCountersTestCase(stubs.XenAPITestBaseNoDB):
FAKE_VMS = {'test1:ref': dict(name_label='test1',
other_config=dict(nova_uuid='hash'),
domid='12',
_vifmap={'0': "a:b:c:d...",
'1': "e:f:12:q..."}),
'test2:ref': dict(name_label='test2',
other_config=dict(nova_uuid='hash'),
domid='42',
_vifmap={'0': "a:3:c:d...",
'1': "e:f:42:q..."}),
}
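    # Each fake VM carries its domid and a _vifmap from VIF index to MAC
    # address; _fake_fetch_bandwidth returns counters keyed by domid and
    # VIF index, and get_all_bw_counters is expected to join the two into
    # one entry per MAC address.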
def setUp(self):
super(XenAPIBWCountersTestCase, self).setUp()
self.stubs.Set(vm_utils, 'list_vms',
XenAPIBWCountersTestCase._fake_list_vms)
self.flags(connection_url='test_url',
connection_password='test_pass',
group='xenserver')
self.flags(firewall_driver='nova.virt.xenapi.firewall.'
'Dom0IptablesFirewallDriver')
stubs.stubout_session(self.stubs, stubs.FakeSessionForVMTests)
self.conn = xenapi_conn.XenAPIDriver(fake.FakeVirtAPI(), False)
def _fake_get_vif_device_map(vm_rec):
return vm_rec['_vifmap']
self.stubs.Set(self.conn._vmops, "_get_vif_device_map",
_fake_get_vif_device_map)
@classmethod
def _fake_list_vms(cls, session):
return six.iteritems(cls.FAKE_VMS)
@staticmethod
def _fake_fetch_bandwidth_mt(session):
return {}
@staticmethod
def _fake_fetch_bandwidth(session):
return {'42':
{'0': {'bw_in': 21024, 'bw_out': 22048},
'1': {'bw_in': 231337, 'bw_out': 221212121}},
'12':
{'0': {'bw_in': 1024, 'bw_out': 2048},
'1': {'bw_in': 31337, 'bw_out': 21212121}},
}
def test_get_all_bw_counters(self):
instances = [dict(name='test1', uuid='1-2-3'),
dict(name='test2', uuid='4-5-6')]
self.stubs.Set(vm_utils, 'fetch_bandwidth',
self._fake_fetch_bandwidth)
result = self.conn.get_all_bw_counters(instances)
self.assertEqual(len(result), 4)
self.assertIn(dict(uuid='1-2-3',
mac_address="a:b:c:d...",
bw_in=1024,
bw_out=2048), result)
self.assertIn(dict(uuid='1-2-3',
mac_address="e:f:12:q...",
bw_in=31337,
bw_out=21212121), result)
self.assertIn(dict(uuid='4-5-6',
mac_address="a:3:c:d...",
bw_in=21024,
bw_out=22048), result)
self.assertIn(dict(uuid='4-5-6',
mac_address="e:f:42:q...",
bw_in=231337,
bw_out=221212121), result)
def test_get_all_bw_counters_in_failure_case(self):
"""Test that get_all_bw_conters returns an empty list when
no data returned from Xenserver. c.f. bug #910045.
"""
instances = [dict(name='instance-0001', uuid='1-2-3-4-5')]
self.stubs.Set(vm_utils, 'fetch_bandwidth',
self._fake_fetch_bandwidth_mt)
result = self.conn.get_all_bw_counters(instances)
self.assertEqual(result, [])
# TODO(salvatore-orlando): this class and
# nova.tests.unit.virt.test_libvirt.IPTablesFirewallDriverTestCase
# share a lot of code. Consider abstracting common code in a base
# class for firewall driver testing.
#
# FIXME(sirp): convert this to use XenAPITestBaseNoDB
class XenAPIDom0IptablesFirewallTestCase(stubs.XenAPITestBase):
REQUIRES_LOCKING = True
_in_rules = [
'# Generated by iptables-save v1.4.10 on Sat Feb 19 00:03:19 2011',
'*nat',
':PREROUTING ACCEPT [1170:189210]',
':INPUT ACCEPT [844:71028]',
':OUTPUT ACCEPT [5149:405186]',
':POSTROUTING ACCEPT [5063:386098]',
'# Completed on Mon Dec 6 11:54:13 2010',
'# Generated by iptables-save v1.4.4 on Mon Dec 6 11:54:13 2010',
'*mangle',
':INPUT ACCEPT [969615:281627771]',
':FORWARD ACCEPT [0:0]',
':OUTPUT ACCEPT [915599:63811649]',
':nova-block-ipv4 - [0:0]',
'[0:0] -A INPUT -i virbr0 -p tcp -m tcp --dport 67 -j ACCEPT ',
'[0:0] -A FORWARD -d 192.168.122.0/24 -o virbr0 -m state --state RELATED'
',ESTABLISHED -j ACCEPT ',
'[0:0] -A FORWARD -s 192.168.122.0/24 -i virbr0 -j ACCEPT ',
'[0:0] -A FORWARD -i virbr0 -o virbr0 -j ACCEPT ',
'[0:0] -A FORWARD -o virbr0 -j REJECT '
'--reject-with icmp-port-unreachable ',
'[0:0] -A FORWARD -i virbr0 -j REJECT '
'--reject-with icmp-port-unreachable ',
'COMMIT',
'# Completed on Mon Dec 6 11:54:13 2010',
'# Generated by iptables-save v1.4.4 on Mon Dec 6 11:54:13 2010',
'*filter',
':INPUT ACCEPT [969615:281627771]',
':FORWARD ACCEPT [0:0]',
':OUTPUT ACCEPT [915599:63811649]',
':nova-block-ipv4 - [0:0]',
'[0:0] -A INPUT -i virbr0 -p tcp -m tcp --dport 67 -j ACCEPT ',
'[0:0] -A FORWARD -d 192.168.122.0/24 -o virbr0 -m state --state RELATED'
',ESTABLISHED -j ACCEPT ',
'[0:0] -A FORWARD -s 192.168.122.0/24 -i virbr0 -j ACCEPT ',
'[0:0] -A FORWARD -i virbr0 -o virbr0 -j ACCEPT ',
'[0:0] -A FORWARD -o virbr0 -j REJECT '
'--reject-with icmp-port-unreachable ',
'[0:0] -A FORWARD -i virbr0 -j REJECT '
'--reject-with icmp-port-unreachable ',
'COMMIT',
'# Completed on Mon Dec 6 11:54:13 2010',
]
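    # Snapshot of iptables-save output used as the pre-existing ruleset;
    # the driver is expected to preserve every non-nova rule while adding
    # its own per-instance and security-group chains.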
_in6_filter_rules = [
'# Generated by ip6tables-save v1.4.4 on Tue Jan 18 23:47:56 2011',
'*filter',
':INPUT ACCEPT [349155:75810423]',
':FORWARD ACCEPT [0:0]',
':OUTPUT ACCEPT [349256:75777230]',
'COMMIT',
'# Completed on Tue Jan 18 23:47:56 2011',
]
def setUp(self):
super(XenAPIDom0IptablesFirewallTestCase, self).setUp()
self.flags(connection_url='test_url',
connection_password='test_pass',
group='xenserver')
self.flags(instance_name_template='%d',
firewall_driver='nova.virt.xenapi.firewall.'
'Dom0IptablesFirewallDriver')
self.user_id = 'mappin'
self.project_id = 'fake'
stubs.stubout_session(self.stubs, stubs.FakeSessionForFirewallTests,
test_case=self)
self.context = context.RequestContext(self.user_id, self.project_id)
self.network = importutils.import_object(CONF.network_manager)
self.conn = xenapi_conn.XenAPIDriver(fake.FakeVirtAPI(), False)
self.fw = self.conn._vmops.firewall_driver
def _create_instance_ref(self):
return db.instance_create(self.context,
{'user_id': self.user_id,
'project_id': self.project_id,
'instance_type_id': 1})
def _create_test_security_group(self):
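        # Security group with three rules (ICMP any type, ICMP echo
        # request and TCP ports 80-81), matching what
        # _validate_security_group asserts on.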
admin_ctxt = context.get_admin_context()
secgroup = db.security_group_create(admin_ctxt,
{'user_id': self.user_id,
'project_id': self.project_id,
'name': 'testgroup',
'description': 'test group'})
db.security_group_rule_create(admin_ctxt,
{'parent_group_id': secgroup['id'],
'protocol': 'icmp',
'from_port': -1,
'to_port': -1,
'cidr': '192.168.11.0/24'})
db.security_group_rule_create(admin_ctxt,
{'parent_group_id': secgroup['id'],
'protocol': 'icmp',
'from_port': 8,
'to_port': -1,
'cidr': '192.168.11.0/24'})
db.security_group_rule_create(admin_ctxt,
{'parent_group_id': secgroup['id'],
'protocol': 'tcp',
'from_port': 80,
'to_port': 81,
'cidr': '192.168.10.0/24'})
return secgroup
def _validate_security_group(self):
in_rules = filter(lambda l: not l.startswith('#'),
self._in_rules)
for rule in in_rules:
if 'nova' not in rule:
self.assertIn(rule, self._out_rules,
'Rule went missing: %s' % rule)
instance_chain = None
for rule in self._out_rules:
            # This is pretty crude, but it'll do for now: the last two
            # octets of the destination address vary.
if re.search('-d 192.168.[0-9]{1,3}.[0-9]{1,3} -j', rule):
instance_chain = rule.split(' ')[-1]
break
self.assertTrue(instance_chain, "The instance chain wasn't added")
security_group_chain = None
for rule in self._out_rules:
# This is pretty crude, but it'll do for now
if '-A %s -j' % instance_chain in rule:
security_group_chain = rule.split(' ')[-1]
break
self.assertTrue(security_group_chain,
"The security group chain wasn't added")
        regex = re.compile(r'\[0\:0\] -A .* -j ACCEPT -p icmp'
                           ' -s 192.168.11.0/24')
self.assertTrue(len(filter(regex.match, self._out_rules)) > 0,
"ICMP acceptance rule wasn't added")
        regex = re.compile(r'\[0\:0\] -A .* -j ACCEPT -p icmp -m icmp'
                           ' --icmp-type 8 -s 192.168.11.0/24')
self.assertTrue(len(filter(regex.match, self._out_rules)) > 0,
"ICMP Echo Request acceptance rule wasn't added")
        regex = re.compile(r'\[0\:0\] -A .* -j ACCEPT -p tcp --dport 80:81'
                           ' -s 192.168.10.0/24')
self.assertTrue(len(filter(regex.match, self._out_rules)) > 0,
"TCP port 80/81 acceptance rule wasn't added")
def test_static_filters(self):
instance_ref = self._create_instance_ref()
src_instance_ref = self._create_instance_ref()
admin_ctxt = context.get_admin_context()
secgroup = self._create_test_security_group()
src_secgroup = db.security_group_create(admin_ctxt,
{'user_id': self.user_id,
'project_id': self.project_id,
'name': 'testsourcegroup',
'description': 'src group'})
db.security_group_rule_create(admin_ctxt,
{'parent_group_id': secgroup['id'],
'protocol': 'tcp',
'from_port': 80,
'to_port': 81,
'group_id': src_secgroup['id']})
db.instance_add_security_group(admin_ctxt, instance_ref['uuid'],
secgroup['id'])
db.instance_add_security_group(admin_ctxt, src_instance_ref['uuid'],
src_secgroup['id'])
instance_ref = db.instance_get(admin_ctxt, instance_ref['id'])
src_instance_ref = db.instance_get(admin_ctxt, src_instance_ref['id'])
network_model = fake_network.fake_get_instance_nw_info(self.stubs, 1)
from nova.compute import utils as compute_utils # noqa
self.stubs.Set(compute_utils, 'get_nw_info_for_instance',
lambda instance: network_model)
self.fw.prepare_instance_filter(instance_ref, network_model)
self.fw.apply_instance_filter(instance_ref, network_model)
self._validate_security_group()
# Extra test for TCP acceptance rules
for ip in network_model.fixed_ips():
if ip['version'] != 4:
continue
            regex = re.compile(r'\[0\:0\] -A .* -j ACCEPT -p tcp'
                               ' --dport 80:81 -s %s' % ip['address'])
self.assertTrue(len(filter(regex.match, self._out_rules)) > 0,
"TCP port 80/81 acceptance rule wasn't added")
db.instance_destroy(admin_ctxt, instance_ref['uuid'])
def test_filters_for_instance_with_ip_v6(self):
self.flags(use_ipv6=True)
network_info = fake_network.fake_get_instance_nw_info(self.stubs, 1)
rulesv4, rulesv6 = self.fw._filters_for_instance("fake", network_info)
self.assertEqual(len(rulesv4), 2)
self.assertEqual(len(rulesv6), 1)
def test_filters_for_instance_without_ip_v6(self):
self.flags(use_ipv6=False)
network_info = fake_network.fake_get_instance_nw_info(self.stubs, 1)
rulesv4, rulesv6 = self.fw._filters_for_instance("fake", network_info)
self.assertEqual(len(rulesv4), 2)
self.assertEqual(len(rulesv6), 0)
def test_multinic_iptables(self):
ipv4_rules_per_addr = 1
ipv4_addr_per_network = 2
ipv6_rules_per_addr = 1
ipv6_addr_per_network = 1
networks_count = 5
instance_ref = self._create_instance_ref()
_get_instance_nw_info = fake_network.fake_get_instance_nw_info
network_info = _get_instance_nw_info(self.stubs,
networks_count,
ipv4_addr_per_network)
network_info[0]['network']['subnets'][0]['meta']['dhcp_server'] = \
'1.1.1.1'
ipv4_len = len(self.fw.iptables.ipv4['filter'].rules)
ipv6_len = len(self.fw.iptables.ipv6['filter'].rules)
inst_ipv4, inst_ipv6 = self.fw.instance_rules(instance_ref,
network_info)
self.fw.prepare_instance_filter(instance_ref, network_info)
ipv4 = self.fw.iptables.ipv4['filter'].rules
ipv6 = self.fw.iptables.ipv6['filter'].rules
ipv4_network_rules = len(ipv4) - len(inst_ipv4) - ipv4_len
ipv6_network_rules = len(ipv6) - len(inst_ipv6) - ipv6_len
# Extra rules are for the DHCP request
rules = (ipv4_rules_per_addr * ipv4_addr_per_network *
networks_count) + 2
self.assertEqual(ipv4_network_rules, rules)
self.assertEqual(ipv6_network_rules,
ipv6_rules_per_addr * ipv6_addr_per_network * networks_count)
def test_do_refresh_security_group_rules(self):
admin_ctxt = context.get_admin_context()
instance_ref = self._create_instance_ref()
network_info = fake_network.fake_get_instance_nw_info(self.stubs, 1, 1)
secgroup = self._create_test_security_group()
db.instance_add_security_group(admin_ctxt, instance_ref['uuid'],
secgroup['id'])
self.fw.prepare_instance_filter(instance_ref, network_info)
self.fw.instance_info[instance_ref['id']] = (instance_ref,
network_info)
self._validate_security_group()
# add a rule to the security group
db.security_group_rule_create(admin_ctxt,
{'parent_group_id': secgroup['id'],
'protocol': 'udp',
'from_port': 200,
'to_port': 299,
'cidr': '192.168.99.0/24'})
# validate the extra rule
self.fw.refresh_security_group_rules(secgroup)
        regex = re.compile(r'\[0\:0\] -A .* -j ACCEPT -p udp --dport 200:299'
                           ' -s 192.168.99.0/24')
self.assertTrue(len(filter(regex.match, self._out_rules)) > 0,
"Rules were not updated properly. "
"The rule for UDP acceptance is missing")
def test_provider_firewall_rules(self):
# setup basic instance data
instance_ref = self._create_instance_ref()
# FRAGILE: as in libvirt tests
# peeks at how the firewall names chains
chain_name = 'inst-%s' % instance_ref['id']
network_info = fake_network.fake_get_instance_nw_info(self.stubs, 1, 1)
self.fw.prepare_instance_filter(instance_ref, network_info)
self.assertIn('provider', self.fw.iptables.ipv4['filter'].chains)
rules = [rule for rule in self.fw.iptables.ipv4['filter'].rules
if rule.chain == 'provider']
self.assertEqual(0, len(rules))
admin_ctxt = context.get_admin_context()
# add a rule and send the update message, check for 1 rule
db.provider_fw_rule_create(admin_ctxt,
{'protocol': 'tcp',
'cidr': '10.99.99.99/32',
'from_port': 1,
'to_port': 65535})
self.fw.refresh_provider_fw_rules()
rules = [rule for rule in self.fw.iptables.ipv4['filter'].rules
if rule.chain == 'provider']
self.assertEqual(1, len(rules))
# Add another, refresh, and make sure number of rules goes to two
provider_fw1 = db.provider_fw_rule_create(admin_ctxt,
{'protocol': 'udp',
'cidr': '10.99.99.99/32',
'from_port': 1,
'to_port': 65535})
self.fw.refresh_provider_fw_rules()
rules = [rule for rule in self.fw.iptables.ipv4['filter'].rules
if rule.chain == 'provider']
self.assertEqual(2, len(rules))
# create the instance filter and make sure it has a jump rule
self.fw.prepare_instance_filter(instance_ref, network_info)
self.fw.apply_instance_filter(instance_ref, network_info)
inst_rules = [rule for rule in self.fw.iptables.ipv4['filter'].rules
if rule.chain == chain_name]
jump_rules = [rule for rule in inst_rules if '-j' in rule.rule]
provjump_rules = []
# IptablesTable doesn't make rules unique internally
for rule in jump_rules:
if 'provider' in rule.rule and rule not in provjump_rules:
provjump_rules.append(rule)
self.assertEqual(1, len(provjump_rules))
# remove a rule from the db, cast to compute to refresh rule
db.provider_fw_rule_destroy(admin_ctxt, provider_fw1['id'])
self.fw.refresh_provider_fw_rules()
rules = [rule for rule in self.fw.iptables.ipv4['filter'].rules
if rule.chain == 'provider']
self.assertEqual(1, len(rules))
class XenAPISRSelectionTestCase(stubs.XenAPITestBaseNoDB):
"""Unit tests for testing we find the right SR."""
def test_safe_find_sr_raise_exception(self):
        # Ensure StorageRepositoryNotFound is raised when the filter is wrong.
self.flags(sr_matching_filter='yadayadayada', group='xenserver')
stubs.stubout_session(self.stubs, stubs.FakeSessionForVMTests)
session = get_session()
self.assertRaises(exception.StorageRepositoryNotFound,
vm_utils.safe_find_sr, session)
def test_safe_find_sr_local_storage(self):
# Ensure the default local-storage is found.
self.flags(sr_matching_filter='other-config:i18n-key=local-storage',
group='xenserver')
stubs.stubout_session(self.stubs, stubs.FakeSessionForVMTests)
session = get_session()
# This test is only guaranteed if there is one host in the pool
self.assertEqual(len(xenapi_fake.get_all('host')), 1)
host_ref = xenapi_fake.get_all('host')[0]
pbd_refs = xenapi_fake.get_all('PBD')
for pbd_ref in pbd_refs:
pbd_rec = xenapi_fake.get_record('PBD', pbd_ref)
if pbd_rec['host'] != host_ref:
continue
sr_rec = xenapi_fake.get_record('SR', pbd_rec['SR'])
if sr_rec['other_config']['i18n-key'] == 'local-storage':
local_sr = pbd_rec['SR']
expected = vm_utils.safe_find_sr(session)
self.assertEqual(local_sr, expected)
def test_safe_find_sr_by_other_criteria(self):
# Ensure the SR is found when using a different filter.
self.flags(sr_matching_filter='other-config:my_fake_sr=true',
group='xenserver')
stubs.stubout_session(self.stubs, stubs.FakeSessionForVMTests)
session = get_session()
host_ref = xenapi_fake.get_all('host')[0]
local_sr = xenapi_fake.create_sr(name_label='Fake Storage',
type='lvm',
other_config={'my_fake_sr': 'true'},
host_ref=host_ref)
expected = vm_utils.safe_find_sr(session)
self.assertEqual(local_sr, expected)
def test_safe_find_sr_default(self):
# Ensure the default SR is found regardless of other-config.
self.flags(sr_matching_filter='default-sr:true',
group='xenserver')
stubs.stubout_session(self.stubs, stubs.FakeSessionForVMTests)
session = get_session()
pool_ref = session.call_xenapi('pool.get_all')[0]
expected = vm_utils.safe_find_sr(session)
self.assertEqual(session.call_xenapi('pool.get_default_SR', pool_ref),
expected)
def _create_service_entries(context, values={'avail_zone1': ['fake_host1',
'fake_host2'],
'avail_zone2': ['fake_host3'], }):
for avail_zone, hosts in six.iteritems(values):
for service_host in hosts:
db.service_create(context,
{'host': service_host,
'binary': 'nova-compute',
'topic': 'compute',
'report_count': 0})
return values
# FIXME(sirp): convert this to use XenAPITestBaseNoDB
class XenAPIAggregateTestCase(stubs.XenAPITestBase):
"""Unit tests for aggregate operations."""
def setUp(self):
super(XenAPIAggregateTestCase, self).setUp()
self.flags(connection_url='http://test_url',
connection_username='test_user',
connection_password='test_pass',
group='xenserver')
self.flags(instance_name_template='%d',
firewall_driver='nova.virt.xenapi.firewall.'
'Dom0IptablesFirewallDriver',
host='host',
compute_driver='xenapi.XenAPIDriver',
default_availability_zone='avail_zone1')
self.flags(use_local=True, group='conductor')
host_ref = xenapi_fake.get_all('host')[0]
stubs.stubout_session(self.stubs, stubs.FakeSessionForVMTests)
self.context = context.get_admin_context()
self.conn = xenapi_conn.XenAPIDriver(fake.FakeVirtAPI(), False)
self.compute = importutils.import_object(CONF.compute_manager)
self.api = compute_api.AggregateAPI()
values = {'name': 'test_aggr',
'metadata': {'availability_zone': 'test_zone',
pool_states.POOL_FLAG: 'XenAPI'}}
self.aggr = objects.Aggregate(context=self.context, id=1,
**values)
self.fake_metadata = {pool_states.POOL_FLAG: 'XenAPI',
'master_compute': 'host',
'availability_zone': 'fake_zone',
pool_states.KEY: pool_states.ACTIVE,
'host': xenapi_fake.get_record('host',
host_ref)['uuid']}
def test_pool_add_to_aggregate_called_by_driver(self):
calls = []
def pool_add_to_aggregate(context, aggregate, host, slave_info=None):
self.assertEqual("CONTEXT", context)
self.assertEqual("AGGREGATE", aggregate)
self.assertEqual("HOST", host)
self.assertEqual("SLAVEINFO", slave_info)
calls.append(pool_add_to_aggregate)
self.stubs.Set(self.conn._pool,
"add_to_aggregate",
pool_add_to_aggregate)
self.conn.add_to_aggregate("CONTEXT", "AGGREGATE", "HOST",
slave_info="SLAVEINFO")
self.assertIn(pool_add_to_aggregate, calls)
def test_pool_remove_from_aggregate_called_by_driver(self):
calls = []
def pool_remove_from_aggregate(context, aggregate, host,
slave_info=None):
self.assertEqual("CONTEXT", context)
self.assertEqual("AGGREGATE", aggregate)
self.assertEqual("HOST", host)
self.assertEqual("SLAVEINFO", slave_info)
calls.append(pool_remove_from_aggregate)
self.stubs.Set(self.conn._pool,
"remove_from_aggregate",
pool_remove_from_aggregate)
self.conn.remove_from_aggregate("CONTEXT", "AGGREGATE", "HOST",
slave_info="SLAVEINFO")
self.assertIn(pool_remove_from_aggregate, calls)
def test_add_to_aggregate_for_first_host_sets_metadata(self):
def fake_init_pool(id, name):
fake_init_pool.called = True
self.stubs.Set(self.conn._pool, "_init_pool", fake_init_pool)
aggregate = self._aggregate_setup()
self.conn._pool.add_to_aggregate(self.context, aggregate, "host")
result = db.aggregate_get(self.context, aggregate['id'])
self.assertTrue(fake_init_pool.called)
self.assertThat(self.fake_metadata,
matchers.DictMatches(result['metadetails']))
def test_join_slave(self):
# Ensure join_slave gets called when the request gets to master.
def fake_join_slave(id, compute_uuid, host, url, user, password):
fake_join_slave.called = True
self.stubs.Set(self.conn._pool, "_join_slave", fake_join_slave)
aggregate = self._aggregate_setup(hosts=['host', 'host2'],
metadata=self.fake_metadata)
self.conn._pool.add_to_aggregate(self.context, aggregate, "host2",
dict(compute_uuid='fake_uuid',
url='fake_url',
user='fake_user',
passwd='fake_pass',
xenhost_uuid='fake_uuid'))
self.assertTrue(fake_join_slave.called)
def test_add_to_aggregate_first_host(self):
def fake_pool_set_name_label(self, session, pool_ref, name):
fake_pool_set_name_label.called = True
self.stubs.Set(xenapi_fake.SessionBase, "pool_set_name_label",
fake_pool_set_name_label)
self.conn._session.call_xenapi("pool.create", {"name": "asdf"})
metadata = {'availability_zone': 'fake_zone',
pool_states.POOL_FLAG: "XenAPI",
pool_states.KEY: pool_states.CREATED}
aggregate = objects.Aggregate(context=self.context)
aggregate.name = 'fake_aggregate'
aggregate.metadata = dict(metadata)
aggregate.create()
aggregate.add_host('host')
self.assertEqual(["host"], aggregate.hosts)
self.assertEqual(metadata, aggregate.metadata)
self.conn._pool.add_to_aggregate(self.context, aggregate, "host")
self.assertTrue(fake_pool_set_name_label.called)
def test_remove_from_aggregate_called(self):
def fake_remove_from_aggregate(context, aggregate, host):
fake_remove_from_aggregate.called = True
self.stubs.Set(self.conn._pool,
"remove_from_aggregate",
fake_remove_from_aggregate)
self.conn.remove_from_aggregate(None, None, None)
self.assertTrue(fake_remove_from_aggregate.called)
def test_remove_from_empty_aggregate(self):
result = self._aggregate_setup()
self.assertRaises(exception.InvalidAggregateActionDelete,
self.conn._pool.remove_from_aggregate,
self.context, result, "test_host")
def test_remove_slave(self):
# Ensure eject slave gets called.
def fake_eject_slave(id, compute_uuid, host_uuid):
fake_eject_slave.called = True
self.stubs.Set(self.conn._pool, "_eject_slave", fake_eject_slave)
self.fake_metadata['host2'] = 'fake_host2_uuid'
aggregate = self._aggregate_setup(hosts=['host', 'host2'],
metadata=self.fake_metadata, aggr_state=pool_states.ACTIVE)
self.conn._pool.remove_from_aggregate(self.context, aggregate, "host2")
self.assertTrue(fake_eject_slave.called)
def test_remove_master_solo(self):
# Ensure metadata are cleared after removal.
def fake_clear_pool(id):
fake_clear_pool.called = True
self.stubs.Set(self.conn._pool, "_clear_pool", fake_clear_pool)
aggregate = self._aggregate_setup(metadata=self.fake_metadata)
self.conn._pool.remove_from_aggregate(self.context, aggregate, "host")
result = db.aggregate_get(self.context, aggregate['id'])
self.assertTrue(fake_clear_pool.called)
self.assertThat({'availability_zone': 'fake_zone',
pool_states.POOL_FLAG: 'XenAPI',
pool_states.KEY: pool_states.ACTIVE},
matchers.DictMatches(result['metadetails']))
def test_remove_master_non_empty_pool(self):
# Ensure InvalidAggregateActionDelete is raised when removing the master
# from a pool that still contains other hosts.
aggregate = self._aggregate_setup(hosts=['host', 'host2'],
metadata=self.fake_metadata)
self.assertRaises(exception.InvalidAggregateActionDelete,
self.conn._pool.remove_from_aggregate,
self.context, aggregate, "host")
def _aggregate_setup(self, aggr_name='fake_aggregate',
aggr_zone='fake_zone',
aggr_state=pool_states.CREATED,
hosts=['host'], metadata=None):
aggregate = objects.Aggregate(context=self.context)
aggregate.name = aggr_name
aggregate.metadata = {'availability_zone': aggr_zone,
pool_states.POOL_FLAG: 'XenAPI',
pool_states.KEY: aggr_state,
}
if metadata:
aggregate.metadata.update(metadata)
aggregate.create()
for aggregate_host in hosts:
aggregate.add_host(aggregate_host)
return aggregate
def test_add_host_to_aggregate_invalid_changing_status(self):
"""Ensure InvalidAggregateActionAdd is raised when adding host while
aggregate is not ready.
"""
aggregate = self._aggregate_setup(aggr_state=pool_states.CHANGING)
ex = self.assertRaises(exception.InvalidAggregateActionAdd,
self.conn.add_to_aggregate, self.context,
aggregate, 'host')
self.assertIn('setup in progress', str(ex))
def test_add_host_to_aggregate_invalid_dismissed_status(self):
"""Ensure InvalidAggregateActionAdd is raised when aggregate is
deleted.
"""
aggregate = self._aggregate_setup(aggr_state=pool_states.DISMISSED)
ex = self.assertRaises(exception.InvalidAggregateActionAdd,
self.conn.add_to_aggregate, self.context,
aggregate, 'fake_host')
self.assertIn('aggregate deleted', str(ex))
def test_add_host_to_aggregate_invalid_error_status(self):
"""Ensure InvalidAggregateActionAdd is raised when aggregate is
in error.
"""
aggregate = self._aggregate_setup(aggr_state=pool_states.ERROR)
ex = self.assertRaises(exception.InvalidAggregateActionAdd,
self.conn.add_to_aggregate, self.context,
aggregate, 'fake_host')
self.assertIn('aggregate in error', str(ex))
def test_remove_host_from_aggregate_error(self):
# Ensure we can remove a host from an aggregate even if in error.
values = _create_service_entries(self.context)
fake_zone = list(values.keys())[0]
aggr = self.api.create_aggregate(self.context,
'fake_aggregate', fake_zone)
# let's mock the fact that the aggregate is ready!
metadata = {pool_states.POOL_FLAG: "XenAPI",
pool_states.KEY: pool_states.ACTIVE}
db.aggregate_metadata_add(self.context, aggr['id'], metadata)
for aggregate_host in values[fake_zone]:
aggr = self.api.add_host_to_aggregate(self.context,
aggr['id'], aggregate_host)
# let's mock the fact that the aggregate is in error!
expected = self.api.remove_host_from_aggregate(self.context,
aggr['id'],
values[fake_zone][0])
self.assertEqual(len(aggr['hosts']) - 1, len(expected['hosts']))
self.assertEqual(expected['metadata'][pool_states.KEY],
pool_states.ACTIVE)
def test_remove_host_from_aggregate_invalid_dismissed_status(self):
"""Ensure InvalidAggregateActionDelete is raised when aggregate is
deleted.
"""
aggregate = self._aggregate_setup(aggr_state=pool_states.DISMISSED)
self.assertRaises(exception.InvalidAggregateActionDelete,
self.conn.remove_from_aggregate, self.context,
aggregate, 'fake_host')
def test_remove_host_from_aggregate_invalid_changing_status(self):
"""Ensure InvalidAggregateActionDelete is raised when aggregate is
changing.
"""
aggregate = self._aggregate_setup(aggr_state=pool_states.CHANGING)
self.assertRaises(exception.InvalidAggregateActionDelete,
self.conn.remove_from_aggregate, self.context,
aggregate, 'fake_host')
def test_add_aggregate_host_raise_err(self):
# Ensure the undo operation works correctly on add.
def fake_driver_add_to_aggregate(context, aggregate, host, **_ignore):
raise exception.AggregateError(
aggregate_id='', action='', reason='')
self.stubs.Set(self.compute.driver, "add_to_aggregate",
fake_driver_add_to_aggregate)
metadata = {pool_states.POOL_FLAG: "XenAPI",
pool_states.KEY: pool_states.ACTIVE}
self.aggr.metadata = metadata
self.aggr.hosts = ['fake_host']
self.assertRaises(exception.AggregateError,
self.compute.add_aggregate_host,
self.context, host="fake_host",
aggregate=self.aggr,
slave_info=None)
self.assertEqual(self.aggr.metadata[pool_states.KEY],
pool_states.ERROR)
self.assertEqual(self.aggr.hosts, ['fake_host'])
class MockComputeAPI(object):
def __init__(self):
self._mock_calls = []
def add_aggregate_host(self, ctxt, aggregate,
host_param, host, slave_info):
self._mock_calls.append((
self.add_aggregate_host, ctxt, aggregate,
host_param, host, slave_info))
def remove_aggregate_host(self, ctxt, aggregate_id, host_param,
host, slave_info):
self._mock_calls.append((
self.remove_aggregate_host, ctxt, aggregate_id,
host_param, host, slave_info))
class StubDependencies(object):
"""Stub dependencies for ResourcePool."""
def __init__(self):
self.compute_rpcapi = MockComputeAPI()
def _is_hv_pool(self, *_ignore):
return True
def _get_metadata(self, *_ignore):
return {
pool_states.KEY: {},
'master_compute': 'master'
}
def _create_slave_info(self, *ignore):
return "SLAVE_INFO"
class ResourcePoolWithStubs(StubDependencies, pool.ResourcePool):
"""A ResourcePool, use stub dependencies."""
class HypervisorPoolTestCase(test.NoDBTestCase):
fake_aggregate = {
'id': 98,
'hosts': [],
'metadata': {
'master_compute': 'master',
pool_states.POOL_FLAG: {},
pool_states.KEY: {}
}
}
def test_slave_asks_master_to_add_slave_to_pool(self):
slave = ResourcePoolWithStubs()
slave.add_to_aggregate("CONTEXT", self.fake_aggregate, "slave")
self.assertIn(
(slave.compute_rpcapi.add_aggregate_host,
"CONTEXT", jsonutils.to_primitive(self.fake_aggregate),
"slave", "master", "SLAVE_INFO"),
slave.compute_rpcapi._mock_calls)
def test_slave_asks_master_to_remove_slave_from_pool(self):
slave = ResourcePoolWithStubs()
slave.remove_from_aggregate("CONTEXT", self.fake_aggregate, "slave")
self.assertIn(
(slave.compute_rpcapi.remove_aggregate_host,
"CONTEXT", 98, "slave", "master", "SLAVE_INFO"),
slave.compute_rpcapi._mock_calls)
class SwapXapiHostTestCase(test.NoDBTestCase):
def test_swapping(self):
self.assertEqual(
"http://otherserver:8765/somepath",
pool.swap_xapi_host(
"http://someserver:8765/somepath", 'otherserver'))
def test_no_port(self):
self.assertEqual(
"http://otherserver/somepath",
pool.swap_xapi_host(
"http://someserver/somepath", 'otherserver'))
def test_no_path(self):
self.assertEqual(
"http://otherserver",
pool.swap_xapi_host(
"http://someserver", 'otherserver'))
class XenAPILiveMigrateTestCase(stubs.XenAPITestBaseNoDB):
"""Unit tests for live_migration."""
def setUp(self):
super(XenAPILiveMigrateTestCase, self).setUp()
self.flags(connection_url='test_url',
connection_password='test_pass',
group='xenserver')
self.flags(firewall_driver='nova.virt.xenapi.firewall.'
'Dom0IptablesFirewallDriver',
host='host')
db_fakes.stub_out_db_instance_api(self.stubs)
self.context = context.get_admin_context()
def test_live_migration_calls_vmops(self):
stubs.stubout_session(self.stubs, stubs.FakeSessionForVMTests)
self.conn = xenapi_conn.XenAPIDriver(fake.FakeVirtAPI(), False)
def fake_live_migrate(context, instance_ref, dest, post_method,
recover_method, block_migration, migrate_data):
fake_live_migrate.called = True
self.stubs.Set(self.conn._vmops, "live_migrate", fake_live_migrate)
self.conn.live_migration(None, None, None, None, None)
self.assertTrue(fake_live_migrate.called)
def test_pre_live_migration(self):
# ensure method is present
stubs.stubout_session(self.stubs, stubs.FakeSessionForVMTests)
self.conn = xenapi_conn.XenAPIDriver(fake.FakeVirtAPI(), False)
self.conn.pre_live_migration(None, None, None, None, None)
def test_post_live_migration_at_destination(self):
# ensure method is present
stubs.stubout_session(self.stubs, stubs.FakeSessionForVMTests)
self.conn = xenapi_conn.XenAPIDriver(fake.FakeVirtAPI(), False)
fake_instance = {"name": "name"}
fake_network_info = "network_info"
def fake_fw(instance, network_info):
self.assertEqual(instance, fake_instance)
self.assertEqual(network_info, fake_network_info)
fake_fw.call_count += 1
def fake_create_kernel_and_ramdisk(context, session, instance,
name_label):
return "fake-kernel-file", "fake-ramdisk-file"
fake_fw.call_count = 0
_vmops = self.conn._vmops
self.stubs.Set(_vmops.firewall_driver,
'setup_basic_filtering', fake_fw)
self.stubs.Set(_vmops.firewall_driver,
'prepare_instance_filter', fake_fw)
self.stubs.Set(_vmops.firewall_driver,
'apply_instance_filter', fake_fw)
self.stubs.Set(vm_utils, "create_kernel_and_ramdisk",
fake_create_kernel_and_ramdisk)
def fake_get_vm_opaque_ref(instance):
fake_get_vm_opaque_ref.called = True
self.stubs.Set(_vmops, "_get_vm_opaque_ref", fake_get_vm_opaque_ref)
fake_get_vm_opaque_ref.called = False
def fake_strip_base_mirror_from_vdis(session, vm_ref):
fake_strip_base_mirror_from_vdis.called = True
self.stubs.Set(vm_utils, "strip_base_mirror_from_vdis",
fake_strip_base_mirror_from_vdis)
fake_strip_base_mirror_from_vdis.called = False
self.conn.post_live_migration_at_destination(None, fake_instance,
fake_network_info, None)
self.assertEqual(fake_fw.call_count, 3)
self.assertTrue(fake_get_vm_opaque_ref.called)
self.assertTrue(fake_strip_base_mirror_from_vdis.called)
def test_check_can_live_migrate_destination_with_block_migration(self):
stubs.stubout_session(self.stubs, stubs.FakeSessionForVMTests)
self.conn = xenapi_conn.XenAPIDriver(fake.FakeVirtAPI(), False)
self.stubs.Set(vm_utils, "safe_find_sr", lambda _x: "asdf")
expected = {'block_migration': True,
'migrate_data': {
'migrate_send_data': "fake_migrate_data",
'destination_sr_ref': 'asdf'
}
}
result = self.conn.check_can_live_migrate_destination(self.context,
{'host': 'host'},
{}, {},
True, False)
self.assertEqual(expected, result)
def test_check_live_migrate_destination_verifies_ip(self):
stubs.stubout_session(self.stubs, stubs.FakeSessionForVMTests)
self.conn = xenapi_conn.XenAPIDriver(fake.FakeVirtAPI(), False)
for pif_ref in xenapi_fake.get_all('PIF'):
pif_rec = xenapi_fake.get_record('PIF', pif_ref)
pif_rec['IP'] = ''
pif_rec['IPv6'] = ''
self.stubs.Set(vm_utils, "safe_find_sr", lambda _x: "asdf")
self.assertRaises(exception.MigrationError,
self.conn.check_can_live_migrate_destination,
self.context, {'host': 'host'},
{}, {},
True, False)
def test_check_can_live_migrate_destination_block_migration_fails(self):
stubs.stubout_session(self.stubs,
stubs.FakeSessionForFailedMigrateTests)
self.conn = xenapi_conn.XenAPIDriver(fake.FakeVirtAPI(), False)
self.assertRaises(exception.MigrationError,
self.conn.check_can_live_migrate_destination,
self.context, {'host': 'host'},
{}, {},
True, False)
def _add_default_live_migrate_stubs(self, conn):
def fake_generate_vdi_map(destination_sr_ref, _vm_ref):
pass
def fake_get_iscsi_srs(destination_sr_ref, _vm_ref):
return []
def fake_get_vm_opaque_ref(instance):
return "fake_vm"
def fake_lookup_kernel_ramdisk(session, vm):
return ("fake_PV_kernel", "fake_PV_ramdisk")
self.stubs.Set(conn._vmops, "_generate_vdi_map",
fake_generate_vdi_map)
self.stubs.Set(conn._vmops, "_get_iscsi_srs",
fake_get_iscsi_srs)
self.stubs.Set(conn._vmops, "_get_vm_opaque_ref",
fake_get_vm_opaque_ref)
self.stubs.Set(vm_utils, "lookup_kernel_ramdisk",
fake_lookup_kernel_ramdisk)
def test_check_can_live_migrate_source_with_block_migrate(self):
stubs.stubout_session(self.stubs, stubs.FakeSessionForVMTests)
self.conn = xenapi_conn.XenAPIDriver(fake.FakeVirtAPI(), False)
self._add_default_live_migrate_stubs(self.conn)
dest_check_data = {'block_migration': True,
'migrate_data': {
'destination_sr_ref': None,
'migrate_send_data': None
}}
result = self.conn.check_can_live_migrate_source(self.context,
{'host': 'host'},
dest_check_data)
self.assertEqual(dest_check_data, result)
def test_check_can_live_migrate_source_with_block_migrate_iscsi(self):
stubs.stubout_session(self.stubs, stubs.FakeSessionForVMTests)
self.conn = xenapi_conn.XenAPIDriver(fake.FakeVirtAPI(), False)
self._add_default_live_migrate_stubs(self.conn)
def fake_get_iscsi_srs(destination_sr_ref, _vm_ref):
return ['sr_ref']
self.stubs.Set(self.conn._vmops, "_get_iscsi_srs",
fake_get_iscsi_srs)
def fake_make_plugin_call(plugin, method, **args):
return "true"
self.stubs.Set(self.conn._vmops, "_make_plugin_call",
fake_make_plugin_call)
dest_check_data = {'block_migration': True,
'migrate_data': {
'destination_sr_ref': None,
'migrate_send_data': None
}}
result = self.conn.check_can_live_migrate_source(self.context,
{'host': 'host'},
dest_check_data)
self.assertEqual(dest_check_data, result)
def test_check_can_live_migrate_source_with_block_iscsi_fails(self):
stubs.stubout_session(self.stubs, stubs.FakeSessionForVMTests)
self.conn = xenapi_conn.XenAPIDriver(fake.FakeVirtAPI(), False)
self._add_default_live_migrate_stubs(self.conn)
def fake_get_iscsi_srs(destination_sr_ref, _vm_ref):
return ['sr_ref']
self.stubs.Set(self.conn._vmops, "_get_iscsi_srs",
fake_get_iscsi_srs)
def fake_make_plugin_call(plugin, method, **args):
return {'returncode': 'error', 'message': 'Plugin not found'}
self.stubs.Set(self.conn._vmops, "_make_plugin_call",
fake_make_plugin_call)
self.assertRaises(exception.MigrationError,
self.conn.check_can_live_migrate_source,
self.context, {'host': 'host'},
{})
def test_check_can_live_migrate_source_with_block_migrate_fails(self):
stubs.stubout_session(self.stubs,
stubs.FakeSessionForFailedMigrateTests)
self.conn = xenapi_conn.XenAPIDriver(fake.FakeVirtAPI(), False)
self._add_default_live_migrate_stubs(self.conn)
dest_check_data = {'block_migration': True,
'migrate_data': {
'destination_sr_ref': None,
'migrate_send_data': None
}}
self.assertRaises(exception.MigrationError,
self.conn.check_can_live_migrate_source,
self.context,
{'host': 'host'},
dest_check_data)
def test_check_can_live_migrate_works(self):
stubs.stubout_session(self.stubs, stubs.FakeSessionForVMTests)
self.conn = xenapi_conn.XenAPIDriver(fake.FakeVirtAPI(), False)
def fake_aggregate_get_by_host(context, host, key=None):
self.assertEqual(CONF.host, host)
return [dict(test_aggregate.fake_aggregate,
metadetails={"host": "test_host_uuid"})]
self.stubs.Set(db, "aggregate_get_by_host",
fake_aggregate_get_by_host)
self.conn.check_can_live_migrate_destination(self.context,
{'host': 'host'}, False, False)
def test_check_can_live_migrate_fails(self):
stubs.stubout_session(self.stubs, stubs.FakeSessionForVMTests)
self.conn = xenapi_conn.XenAPIDriver(fake.FakeVirtAPI(), False)
def fake_aggregate_get_by_host(context, host, key=None):
self.assertEqual(CONF.host, host)
return [dict(test_aggregate.fake_aggregate,
metadetails={"dest_other": "test_host_uuid"})]
self.stubs.Set(db, "aggregate_get_by_host",
fake_aggregate_get_by_host)
self.assertRaises(exception.MigrationError,
self.conn.check_can_live_migrate_destination,
self.context, {'host': 'host'}, None, None)
def test_live_migration(self):
stubs.stubout_session(self.stubs, stubs.FakeSessionForVMTests)
self.conn = xenapi_conn.XenAPIDriver(fake.FakeVirtAPI(), False)
def fake_get_vm_opaque_ref(instance):
return "fake_vm"
self.stubs.Set(self.conn._vmops, "_get_vm_opaque_ref",
fake_get_vm_opaque_ref)
def fake_get_host_opaque_ref(context, destination_hostname):
return "fake_host"
self.stubs.Set(self.conn._vmops, "_get_host_opaque_ref",
fake_get_host_opaque_ref)
def post_method(context, instance, destination_hostname,
block_migration, migrate_data):
post_method.called = True
self.conn.live_migration(self.conn, None, None, post_method, None)
self.assertTrue(post_method.called, "post_method.called")
def test_live_migration_on_failure(self):
stubs.stubout_session(self.stubs, stubs.FakeSessionForVMTests)
self.conn = xenapi_conn.XenAPIDriver(fake.FakeVirtAPI(), False)
def fake_get_vm_opaque_ref(instance):
return "fake_vm"
self.stubs.Set(self.conn._vmops, "_get_vm_opaque_ref",
fake_get_vm_opaque_ref)
def fake_get_host_opaque_ref(context, destination_hostname):
return "fake_host"
self.stubs.Set(self.conn._vmops, "_get_host_opaque_ref",
fake_get_host_opaque_ref)
def fake_call_xenapi(*args):
raise NotImplementedError()
self.stubs.Set(self.conn._vmops._session, "call_xenapi",
fake_call_xenapi)
def recover_method(context, instance, destination_hostname,
block_migration):
recover_method.called = True
self.assertRaises(NotImplementedError, self.conn.live_migration,
self.conn, None, None, None, recover_method)
self.assertTrue(recover_method.called, "recover_method.called")
def test_live_migration_calls_post_migration(self):
stubs.stubout_session(self.stubs, stubs.FakeSessionForVMTests)
self.conn = xenapi_conn.XenAPIDriver(fake.FakeVirtAPI(), False)
self._add_default_live_migrate_stubs(self.conn)
def post_method(context, instance, destination_hostname,
block_migration, migrate_data):
post_method.called = True
# pass block_migration = True and migrate data
migrate_data = {"destination_sr_ref": "foo",
"migrate_send_data": "bar"}
self.conn.live_migration(self.conn, None, None, post_method, None,
True, migrate_data)
self.assertTrue(post_method.called, "post_method.called")
def test_live_migration_block_cleans_srs(self):
stubs.stubout_session(self.stubs, stubs.FakeSessionForVMTests)
self.conn = xenapi_conn.XenAPIDriver(fake.FakeVirtAPI(), False)
self._add_default_live_migrate_stubs(self.conn)
def fake_get_iscsi_srs(context, instance):
return ['sr_ref']
self.stubs.Set(self.conn._vmops, "_get_iscsi_srs",
fake_get_iscsi_srs)
def fake_forget_sr(context, instance):
fake_forget_sr.called = True
self.stubs.Set(volume_utils, "forget_sr",
fake_forget_sr)
def post_method(context, instance, destination_hostname,
block_migration, migrate_data):
post_method.called = True
migrate_data = {"destination_sr_ref": "foo",
"migrate_send_data": "bar"}
self.conn.live_migration(self.conn, None, None, post_method, None,
True, migrate_data)
self.assertTrue(post_method.called, "post_method.called")
self.assertTrue(fake_forget_sr.called, "forget_sr.called")
def test_live_migration_with_block_migration_raises_invalid_param(self):
stubs.stubout_session(self.stubs, stubs.FakeSessionForVMTests)
self.conn = xenapi_conn.XenAPIDriver(fake.FakeVirtAPI(), False)
self._add_default_live_migrate_stubs(self.conn)
def recover_method(context, instance, destination_hostname,
block_migration):
recover_method.called = True
# pass block_migration = True and no migrate data
self.assertRaises(exception.InvalidParameterValue,
self.conn.live_migration, self.conn,
None, None, None, recover_method, True, None)
self.assertTrue(recover_method.called, "recover_method.called")
def test_live_migration_with_block_migration_fails_migrate_send(self):
stubs.stubout_session(self.stubs,
stubs.FakeSessionForFailedMigrateTests)
self.conn = xenapi_conn.XenAPIDriver(fake.FakeVirtAPI(), False)
self._add_default_live_migrate_stubs(self.conn)
def recover_method(context, instance, destination_hostname,
block_migration):
recover_method.called = True
# pass block_migration = True and migrate data
migrate_data = dict(destination_sr_ref='foo', migrate_send_data='bar')
self.assertRaises(exception.MigrationError,
self.conn.live_migration, self.conn,
None, None, None, recover_method, True, migrate_data)
self.assertTrue(recover_method.called, "recover_method.called")
def test_live_migrate_block_migration_xapi_call_parameters(self):
fake_vdi_map = object()
class Session(xenapi_fake.SessionBase):
def VM_migrate_send(self_, session, vmref, migrate_data, islive,
vdi_map, vif_map, options):
self.assertEqual('SOMEDATA', migrate_data)
self.assertEqual(fake_vdi_map, vdi_map)
stubs.stubout_session(self.stubs, Session)
conn = xenapi_conn.XenAPIDriver(fake.FakeVirtAPI(), False)
self._add_default_live_migrate_stubs(conn)
def fake_generate_vdi_map(destination_sr_ref, _vm_ref):
return fake_vdi_map
self.stubs.Set(conn._vmops, "_generate_vdi_map",
fake_generate_vdi_map)
def dummy_callback(*args, **kwargs):
pass
conn.live_migration(
self.context, instance=dict(name='ignore'), dest=None,
post_method=dummy_callback, recover_method=dummy_callback,
block_migration="SOMEDATA",
migrate_data=dict(migrate_send_data='SOMEDATA',
destination_sr_ref="TARGET_SR_OPAQUE_REF"))
def test_live_migrate_pool_migration_xapi_call_parameters(self):
class Session(xenapi_fake.SessionBase):
def VM_pool_migrate(self_, session, vm_ref, host_ref, options):
self.assertEqual("fake_ref", host_ref)
self.assertEqual({"live": "true"}, options)
raise IOError()
stubs.stubout_session(self.stubs, Session)
conn = xenapi_conn.XenAPIDriver(fake.FakeVirtAPI(), False)
self._add_default_live_migrate_stubs(conn)
def fake_get_host_opaque_ref(context, destination):
return "fake_ref"
self.stubs.Set(conn._vmops, "_get_host_opaque_ref",
fake_get_host_opaque_ref)
def dummy_callback(*args, **kwargs):
pass
self.assertRaises(IOError, conn.live_migration,
self.context, instance=dict(name='ignore'), dest=None,
post_method=dummy_callback, recover_method=dummy_callback,
block_migration=False, migrate_data={})
def test_generate_vdi_map(self):
stubs.stubout_session(self.stubs, xenapi_fake.SessionBase)
conn = xenapi_conn.XenAPIDriver(fake.FakeVirtAPI(), False)
vm_ref = "fake_vm_ref"
def fake_find_sr(_session):
self.assertEqual(conn._session, _session)
return "source_sr_ref"
self.stubs.Set(vm_utils, "safe_find_sr", fake_find_sr)
def fake_get_instance_vdis_for_sr(_session, _vm_ref, _sr_ref):
self.assertEqual(conn._session, _session)
self.assertEqual(vm_ref, _vm_ref)
self.assertEqual("source_sr_ref", _sr_ref)
return ["vdi0", "vdi1"]
self.stubs.Set(vm_utils, "get_instance_vdis_for_sr",
fake_get_instance_vdis_for_sr)
result = conn._vmops._generate_vdi_map("dest_sr_ref", vm_ref)
self.assertEqual({"vdi0": "dest_sr_ref",
"vdi1": "dest_sr_ref"}, result)
def test_rollback_live_migration_at_destination(self):
stubs.stubout_session(self.stubs, xenapi_fake.SessionBase)
conn = xenapi_conn.XenAPIDriver(fake.FakeVirtAPI(), False)
with mock.patch.object(conn, "destroy") as mock_destroy:
conn.rollback_live_migration_at_destination("context",
"instance", [], {'block_device_mapping': []})
self.assertFalse(mock_destroy.called)
class XenAPIInjectMetadataTestCase(stubs.XenAPITestBaseNoDB):
def setUp(self):
super(XenAPIInjectMetadataTestCase, self).setUp()
self.flags(connection_url='test_url',
connection_password='test_pass',
group='xenserver')
self.flags(firewall_driver='nova.virt.xenapi.firewall.'
'Dom0IptablesFirewallDriver')
stubs.stubout_session(self.stubs, stubs.FakeSessionForVMTests)
self.conn = xenapi_conn.XenAPIDriver(fake.FakeVirtAPI(), False)
self.xenstore = dict(persist={}, ephem={})
self.called_fake_get_vm_opaque_ref = False
def fake_get_vm_opaque_ref(inst, instance):
self.called_fake_get_vm_opaque_ref = True
if instance["uuid"] == "not_found":
raise exception.NotFound
self.assertEqual(instance, {'uuid': 'fake'})
return 'vm_ref'
def fake_add_to_param_xenstore(inst, vm_ref, key, val):
self.assertEqual(vm_ref, 'vm_ref')
self.xenstore['persist'][key] = val
def fake_remove_from_param_xenstore(inst, vm_ref, key):
self.assertEqual(vm_ref, 'vm_ref')
if key in self.xenstore['persist']:
del self.xenstore['persist'][key]
def fake_write_to_xenstore(inst, instance, path, value, vm_ref=None):
self.assertEqual(instance, {'uuid': 'fake'})
self.assertEqual(vm_ref, 'vm_ref')
self.xenstore['ephem'][path] = jsonutils.dumps(value)
def fake_delete_from_xenstore(inst, instance, path, vm_ref=None):
self.assertEqual(instance, {'uuid': 'fake'})
self.assertEqual(vm_ref, 'vm_ref')
if path in self.xenstore['ephem']:
del self.xenstore['ephem'][path]
self.stubs.Set(vmops.VMOps, '_get_vm_opaque_ref',
fake_get_vm_opaque_ref)
self.stubs.Set(vmops.VMOps, '_add_to_param_xenstore',
fake_add_to_param_xenstore)
self.stubs.Set(vmops.VMOps, '_remove_from_param_xenstore',
fake_remove_from_param_xenstore)
self.stubs.Set(vmops.VMOps, '_write_to_xenstore',
fake_write_to_xenstore)
self.stubs.Set(vmops.VMOps, '_delete_from_xenstore',
fake_delete_from_xenstore)
def test_inject_instance_metadata(self):
# Add some system_metadata to ensure it doesn't get added
# to xenstore
instance = dict(metadata=[{'key': 'a', 'value': 1},
{'key': 'b', 'value': 2},
{'key': 'c', 'value': 3},
# Check xenstore key sanitizing
{'key': 'hi.there', 'value': 4},
{'key': 'hi!t.e/e', 'value': 5}],
# Check xenstore key sanitizing
system_metadata=[{'key': 'sys_a', 'value': 1},
{'key': 'sys_b', 'value': 2},
{'key': 'sys_c', 'value': 3}],
uuid='fake')
self.conn._vmops._inject_instance_metadata(instance, 'vm_ref')
self.assertEqual(self.xenstore, {
'persist': {
'vm-data/user-metadata/a': '1',
'vm-data/user-metadata/b': '2',
'vm-data/user-metadata/c': '3',
'vm-data/user-metadata/hi_there': '4',
'vm-data/user-metadata/hi_t_e_e': '5',
},
'ephem': {},
})
def test_change_instance_metadata_add(self):
# Test XenStore key sanitizing here, too.
diff = {'test.key': ['+', 4]}
instance = {'uuid': 'fake'}
self.xenstore = {
'persist': {
'vm-data/user-metadata/a': '1',
'vm-data/user-metadata/b': '2',
'vm-data/user-metadata/c': '3',
},
'ephem': {
'vm-data/user-metadata/a': '1',
'vm-data/user-metadata/b': '2',
'vm-data/user-metadata/c': '3',
},
}
self.conn._vmops.change_instance_metadata(instance, diff)
self.assertEqual(self.xenstore, {
'persist': {
'vm-data/user-metadata/a': '1',
'vm-data/user-metadata/b': '2',
'vm-data/user-metadata/c': '3',
'vm-data/user-metadata/test_key': '4',
},
'ephem': {
'vm-data/user-metadata/a': '1',
'vm-data/user-metadata/b': '2',
'vm-data/user-metadata/c': '3',
'vm-data/user-metadata/test_key': '4',
},
})
def test_change_instance_metadata_update(self):
diff = dict(b=['+', 4])
instance = {'uuid': 'fake'}
self.xenstore = {
'persist': {
'vm-data/user-metadata/a': '1',
'vm-data/user-metadata/b': '2',
'vm-data/user-metadata/c': '3',
},
'ephem': {
'vm-data/user-metadata/a': '1',
'vm-data/user-metadata/b': '2',
'vm-data/user-metadata/c': '3',
},
}
self.conn._vmops.change_instance_metadata(instance, diff)
self.assertEqual(self.xenstore, {
'persist': {
'vm-data/user-metadata/a': '1',
'vm-data/user-metadata/b': '4',
'vm-data/user-metadata/c': '3',
},
'ephem': {
'vm-data/user-metadata/a': '1',
'vm-data/user-metadata/b': '4',
'vm-data/user-metadata/c': '3',
},
})
def test_change_instance_metadata_delete(self):
diff = dict(b=['-'])
instance = {'uuid': 'fake'}
self.xenstore = {
'persist': {
'vm-data/user-metadata/a': '1',
'vm-data/user-metadata/b': '2',
'vm-data/user-metadata/c': '3',
},
'ephem': {
'vm-data/user-metadata/a': '1',
'vm-data/user-metadata/b': '2',
'vm-data/user-metadata/c': '3',
},
}
self.conn._vmops.change_instance_metadata(instance, diff)
self.assertEqual(self.xenstore, {
'persist': {
'vm-data/user-metadata/a': '1',
'vm-data/user-metadata/c': '3',
},
'ephem': {
'vm-data/user-metadata/a': '1',
'vm-data/user-metadata/c': '3',
},
})
def test_change_instance_metadata_not_found(self):
instance = {'uuid': 'not_found'}
self.conn._vmops.change_instance_metadata(instance, "fake_diff")
self.assertTrue(self.called_fake_get_vm_opaque_ref)
class XenAPISessionTestCase(test.NoDBTestCase):
def _get_mock_xapisession(self, software_version):
class MockXapiSession(xenapi_session.XenAPISession):
def __init__(_ignore):
"Skip the superclass's dirty init"
def _get_software_version(_ignore):
return software_version
return MockXapiSession()
def test_local_session(self):
session = self._get_mock_xapisession({})
session.is_local_connection = True
session.XenAPI = self.mox.CreateMockAnything()
session.XenAPI.xapi_local().AndReturn("local_connection")
self.mox.ReplayAll()
self.assertEqual("local_connection",
session._create_session("unix://local"))
def test_remote_session(self):
session = self._get_mock_xapisession({})
session.is_local_connection = False
session.XenAPI = self.mox.CreateMockAnything()
session.XenAPI.Session("url").AndReturn("remote_connection")
self.mox.ReplayAll()
self.assertEqual("remote_connection", session._create_session("url"))
def test_get_product_version_product_brand_does_not_fail(self):
session = self._get_mock_xapisession({
'build_number': '0',
'date': '2012-08-03',
'hostname': 'komainu',
'linux': '3.2.0-27-generic',
'network_backend': 'bridge',
'platform_name': 'XCP_Kronos',
'platform_version': '1.6.0',
'xapi': '1.3',
'xen': '4.1.2',
'xencenter_max': '1.10',
'xencenter_min': '1.10'
})
self.assertEqual(
((1, 6, 0), None),
session._get_product_version_and_brand()
)
def test_get_product_version_product_brand_xs_6(self):
session = self._get_mock_xapisession({
'product_brand': 'XenServer',
'product_version': '6.0.50',
'platform_version': '0.0.1'
})
self.assertEqual(
((6, 0, 50), 'XenServer'),
session._get_product_version_and_brand()
)
def test_verify_plugin_version_same(self):
session = self._get_mock_xapisession({})
session.PLUGIN_REQUIRED_VERSION = '2.4'
self.mox.StubOutWithMock(session, 'call_plugin_serialized')
session.call_plugin_serialized('nova_plugin_version', 'get_version',
).AndReturn("2.4")
self.mox.ReplayAll()
session._verify_plugin_version()
def test_verify_plugin_version_compatible(self):
session = self._get_mock_xapisession({})
session.XenAPI = xenapi_fake.FakeXenAPI()
session.PLUGIN_REQUIRED_VERSION = '2.4'
self.mox.StubOutWithMock(session, 'call_plugin_serialized')
session.call_plugin_serialized('nova_plugin_version', 'get_version',
).AndReturn("2.5")
self.mox.ReplayAll()
session._verify_plugin_version()
def test_verify_plugin_version_bad_maj(self):
session = self._get_mock_xapisession({})
session.XenAPI = xenapi_fake.FakeXenAPI()
session.PLUGIN_REQUIRED_VERSION = '2.4'
self.mox.StubOutWithMock(session, 'call_plugin_serialized')
session.call_plugin_serialized('nova_plugin_version', 'get_version',
).AndReturn("3.0")
self.mox.ReplayAll()
self.assertRaises(xenapi_fake.Failure, session._verify_plugin_version)
def test_verify_plugin_version_bad_min(self):
session = self._get_mock_xapisession({})
session.XenAPI = xenapi_fake.FakeXenAPI()
session.PLUGIN_REQUIRED_VERSION = '2.4'
self.mox.StubOutWithMock(session, 'call_plugin_serialized')
session.call_plugin_serialized('nova_plugin_version', 'get_version',
).AndReturn("2.3")
self.mox.ReplayAll()
self.assertRaises(xenapi_fake.Failure, session._verify_plugin_version)
def test_verify_current_version_matches(self):
session = self._get_mock_xapisession({})
# Import the plugin to extract its version
path = os.path.dirname(__file__)
rel_path_elem = "../../../../../plugins/xenserver/xenapi/etc/xapi.d/" \
"plugins/nova_plugin_version"
for elem in rel_path_elem.split('/'):
path = os.path.join(path, elem)
path = os.path.realpath(path)
plugin_version = None
with open(path) as plugin_file:
for line in plugin_file:
if "PLUGIN_VERSION = " in line:
plugin_version = line.strip()[17:].strip('"')
self.assertEqual(session.PLUGIN_REQUIRED_VERSION,
plugin_version)
class XenAPIFakeTestCase(test.NoDBTestCase):
def test_query_matches(self):
record = {'a': '1', 'b': '2', 'c_d': '3'}
tests = {'field "a"="1"': True,
'field "b"="2"': True,
'field "b"="4"': False,
'not field "b"="4"': True,
'field "a"="1" and field "b"="4"': False,
'field "a"="1" or field "b"="4"': True,
'field "c__d"="3"': True,
'field \'b\'=\'2\'': True,
}
for query in tests.keys():
expected = tests[query]
fail_msg = "for test '%s'" % query
self.assertEqual(xenapi_fake._query_matches(record, query),
expected, fail_msg)
def test_query_bad_format(self):
record = {'a': '1', 'b': '2', 'c': '3'}
tests = ['"a"="1" or "b"="4"',
'a=1',
]
for query in tests:
fail_msg = "for test '%s'" % query
self.assertFalse(xenapi_fake._query_matches(record, query),
fail_msg)
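# ---------------------------------------------------------------------------
# Editorial sketch -- not part of the original Nova test module. It
# approximates the tiny query language accepted by xenapi_fake._query_matches,
# purely to make the truth tables in XenAPIFakeTestCase above concrete. It
# relies on the module's existing `import re`. The real fake may parse
# queries differently; in particular the '__' -> '_' field-name mapping and
# the 'or'/'and' handling are assumptions inferred from the test cases, and
# queries whose values contain ' and ' / ' or ' are not handled.
# ---------------------------------------------------------------------------
def _sketch_query_matches(record, query):
    """Return True if `record` satisfies `query` (minimal approximation)."""
    if ' or ' in query:
        return any(_sketch_query_matches(record, part)
                   for part in query.split(' or '))
    if ' and ' in query:
        return all(_sketch_query_matches(record, part)
                   for part in query.split(' and '))
    if query.startswith('not '):
        return not _sketch_query_matches(record, query[len('not '):])
    match = re.match(r'field ["\'](.+?)["\']=["\'](.*?)["\']$', query)
    if not match:
        return False  # malformed queries never match
    key = match.group(1).replace('__', '_')  # assumed field-name mapping
    return record.get(key) == match.group(2)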
| gpl-2.0 | 8,827,661,696,526,691,000 | 41.512446 | 79 | 0.562598 | false |
RudolfCardinal/crate | crate_anon/nlp_manager/cloud_request_sender.py | 1 | 12012 | #!/usr/bin/env python
"""
crate_anon/nlp_manager/cloud_request_sender.py
===============================================================================
Copyright (C) 2015-2021 Rudolf Cardinal ([email protected]).
This file is part of CRATE.
CRATE is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
CRATE is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with CRATE. If not, see <http://www.gnu.org/licenses/>.
===============================================================================
**CloudRequestSender class.**
"""
# =============================================================================
# Imports
# =============================================================================
from enum import auto, Enum
import logging
from typing import (
Any, Dict, List, Optional, Tuple, Generator, TYPE_CHECKING,
)
from crate_anon.nlp_manager.constants import (
DEFAULT_REPORT_EVERY_NLP,
)
from crate_anon.nlp_manager.input_field_config import (
InputFieldConfig,
FN_SRCDB,
FN_SRCTABLE,
FN_SRCPKFIELD,
FN_SRCPKVAL,
FN_SRCPKSTR,
FN_SRCFIELD,
)
from crate_anon.nlp_manager.models import FN_SRCHASH
from crate_anon.nlp_manager.cloud_request import (
CloudRequestProcess,
RecordNotPrintable,
RecordsPerRequestExceeded,
RequestTooLong,
)
from crate_anon.nlp_manager.cloud_run_info import CloudRunInfo
if TYPE_CHECKING:
from http.cookiejar import CookieJar
log = logging.getLogger(__name__)
# =============================================================================
# CloudRequestSender
# =============================================================================
class CloudRequestSender(object):
"""
Class to encapsulate the sending of NLP requests to a cloud NLP server.
"""
class State(Enum):
"""
Request state.
"""
BUILDING_REQUEST = auto()
SENDING_REQUEST = auto()
FINISHED = auto()
def __init__(
self,
text_generator: Generator[Tuple[str, Dict[str, Any]], None, None],
crinfo: CloudRunInfo,
ifconfig: InputFieldConfig,
report_every: int = DEFAULT_REPORT_EVERY_NLP,
incremental: bool = False,
queue: bool = True) -> None:
"""
Args:
text_generator:
Generator that generates text strings from the source
database. See
:meth:`crate_anon.nlp_manager.input_field_config.InputFieldConfig.gen_text`.
crinfo:
A :class:`crate_anon.nlp_manager.cloud_run_info.CloudRunInfo`
object.
ifconfig:
An
:class:`crate_anon.nlp_manager.input_field_config.InputFieldConfig`
object.
report_every:
Report to the log every *n* requests.
incremental:
Process in incremental mode (ignoring source records that have
not changed since last time)?
queue:
Queue the requests for back-end processing (rather than waiting
for an immediate reply)?
"""
self._text_generator = text_generator
self._crinfo = crinfo
self._ifconfig = ifconfig
self._report_every = report_every
self._incremental = incremental
self._queue = queue
self._global_recnum = -1
self._requests = [] # type: List[CloudRequestProcess]
self._cookies = None # type: Optional[CookieJar]
self._request_count = 0 # number of requests sent
self._text = None # type: Optional[str]
self._other_values = None # type: Optional[Dict[str, Any]]
self._request_is_empty = True
self._need_new_record = True
self._need_new_request = True
self._num_recs_processed = 0
self._state = self.State.BUILDING_REQUEST
self._request = None # type: Optional[CloudRequestProcess]
def send_requests(
self,
global_recnum: int) -> Tuple[List[CloudRequestProcess], bool, int]:
"""
Sends off a series of cloud requests and returns them as a list.
``self._queue`` determines whether these are queued requests or not.
Also returns whether the generator for the text is empty.
Return tuple is: ``requests, some_records_processed, global_recnum``.
"""
self._global_recnum = global_recnum
self._requests = []
self._cookies = None
self._request_count = 1
self._text = None
self._other_values = None
self._request_is_empty = True
self._need_new_record = True
self._need_new_request = True
# Check processors are available
available_procs = self._crinfo.get_remote_processors()
if not available_procs:
return [], False, self._global_recnum
self._num_recs_processed = 0
self._state = self.State.BUILDING_REQUEST
# If we've reached the limit of records before commit, return to
# outer function in order to process and commit (or write to file if
# it's a queued request)
while self._state != self.State.FINISHED:
if self._state == self.State.BUILDING_REQUEST:
self._build_request()
if self._state == self.State.SENDING_REQUEST:
self._send_request()
return self._requests, self._num_recs_processed > 0, self._global_recnum # noqa
def _build_request(self) -> None:
"""
Adds another record to the outbound request, until the request is
fully built. Updates our state to reflect what needs to happen next.
"""
if self._need_new_record:
try:
self._get_next_record()
except StopIteration:
self._update_state_for_no_more_records()
return
hasher = self._crinfo.nlpdef.hash
srchash = hasher(self._text)
if self._incremental and self._record_already_processed(srchash):
return
self._num_recs_processed += 1
self._other_values[FN_SRCHASH] = srchash
if self._need_new_request:
self._request = self._get_new_cloud_request()
self._request_is_empty = True
self._need_new_request = False
self._need_new_record = True
# Add the text to the cloud request with the appropriate metadata
try:
self._request.add_text(
self._text, self._other_values
)
# added OK, request now has some text
self._request_is_empty = False
except RecordNotPrintable:
# Text contained no printable characters. Skip it.
pass
except (RecordsPerRequestExceeded, RequestTooLong) as e:
if isinstance(e, RequestTooLong) and self._request_is_empty:
# Get some new text next time
log.warning("Skipping text that's too long to send")
else:
# Try same text again with a fresh request
self._need_new_record = False
self._state = self.State.SENDING_REQUEST
if self._record_limit_reached():
self._state = self.State.SENDING_REQUEST
def _get_new_cloud_request(self) -> CloudRequestProcess:
"""
Creates and returns a new
:class:`crate_anon.nlp_manager.cloud_request.CloudRequestProcess`
object.
"""
return CloudRequestProcess(self._crinfo)
def _update_state_for_no_more_records(self) -> None:
"""
No more input records are available. This means either (a) we've sent
all our requests and have finished, or (b) we're building our last
request and we need to send it. Set the state accordingly.
"""
if self._request_is_empty or self._need_new_request:
# Nothing more to send
self._state = self.State.FINISHED
return
# Send last request
self._state = self.State.SENDING_REQUEST
def _record_already_processed(self, srchash: str) -> bool:
"""
Has this source record (identified by its PK and its hash) already been
processed? (If so, then in incremental mode, we can skip it.)
"""
pkval = self._other_values[FN_SRCPKVAL]
pkstr = self._other_values[FN_SRCPKSTR]
progrec = self._ifconfig.get_progress_record(pkval, pkstr)
if progrec is not None:
if progrec.srchash == srchash:
log.debug("Record previously processed; skipping")
return True
log.debug("Record has changed")
else:
log.debug("Record is new")
return False
def _record_limit_reached(self) -> bool:
"""
Have we processed as many records as we're allowed before we should
COMMIT to the database?
"""
limit_before_commit = self._crinfo.cloudcfg.limit_before_commit
return self._num_recs_processed >= limit_before_commit
def _get_next_record(self) -> None:
"""
Reads the next text record and metadata into ``self._text`` and
``self._other_values``.
Raises:
:exc:`StopIteration` if there are no more records
"""
self._text, self._other_values = next(self._text_generator)
self._global_recnum += 1
pkval = self._other_values[FN_SRCPKVAL]
pkstr = self._other_values[FN_SRCPKSTR]
# 'ifconfig.get_progress_record' expects pkstr to be None if it's
# empty
if not pkstr:
pkstr = None
if self._report_every and self._global_recnum % self._report_every == 0: # noqa
# total number of records in table
totalcount = self._ifconfig.get_count()
log.info(
"Processing {db}.{t}.{c}, PK: {pkf}={pkv} "
"(record {g_recnum}/{totalcount})".format(
db=self._other_values[FN_SRCDB],
t=self._other_values[FN_SRCTABLE],
c=self._other_values[FN_SRCFIELD],
pkf=self._other_values[FN_SRCPKFIELD],
pkv=pkstr if pkstr else pkval,
g_recnum=self._global_recnum,
totalcount=totalcount
)
)
def _send_request(self) -> None:
"""
Send a pending request to the remote NLP server.
Update the state afterwards.
"""
self._request.send_process_request(
queue=self._queue,
cookies=self._cookies,
include_text_in_reply=self._crinfo.cloudcfg.has_gate_processors
)
# If there's a connection error, we only get this far if we
# didn't choose to stop at failure
if self._request.request_failed:
log.warning("Continuing after failed request.")
else:
if self._request.cookies:
self._cookies = self._request.cookies
log.info(f"Sent request to be processed: #{self._request_count} "
f"of this block")
self._request_count += 1
self._requests.append(self._request)
if self._record_limit_reached():
self._state = self.State.FINISHED
return
self._state = self.State.BUILDING_REQUEST
self._need_new_request = True
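# ---------------------------------------------------------------------------
# Illustrative usage sketch (editorial addition, not part of the original
# module). It shows the outer loop implied by the send_requests() docstring:
# each call processes at most the cloud config's "limit before commit"
# records, so the caller keeps calling (processing/committing in between)
# until no records were processed. The starting value of global_recnum and
# the commit step are assumptions for illustration only; this helper is
# defined but never called here.
# ---------------------------------------------------------------------------
def _example_drive_sender(
        text_generator: Generator[Tuple[str, Dict[str, Any]], None, None],
        crinfo: CloudRunInfo,
        ifconfig: InputFieldConfig,
        incremental: bool = False) -> List[CloudRequestProcess]:
    """
    Hypothetical driver: collect every outbound cloud request for one
    input field configuration.
    """
    sender = CloudRequestSender(
        text_generator=text_generator,
        crinfo=crinfo,
        ifconfig=ifconfig,
        incremental=incremental,
        queue=True,
    )
    all_requests = []  # type: List[CloudRequestProcess]
    global_recnum = -1  # assumed starting record number
    while True:
        requests, processed_some, global_recnum = sender.send_requests(
            global_recnum)
        all_requests.extend(requests)
        # ... a real caller would process/commit these requests here,
        # e.g. write queue files or handle immediate replies ...
        if not processed_some:
            break
    return all_requests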
| gpl-3.0 | -5,249,729,148,744,573,000 | 34.75 | 93 | 0.565851 | false |
tchellomello/home-assistant | homeassistant/components/vesync/switch.py | 1 | 3309 | """Support for VeSync switches."""
import logging
from homeassistant.components.switch import SwitchEntity
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from .common import VeSyncDevice
from .const import DOMAIN, VS_DISCOVERY, VS_DISPATCHERS, VS_SWITCHES
_LOGGER = logging.getLogger(__name__)
DEV_TYPE_TO_HA = {
"wifi-switch-1.3": "outlet",
"ESW03-USA": "outlet",
"ESW01-EU": "outlet",
"ESW15-USA": "outlet",
"ESWL01": "switch",
"ESWL03": "switch",
"ESO15-TB": "outlet",
}
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up switches."""
async def async_discover(devices):
"""Add new devices to platform."""
_async_setup_entities(devices, async_add_entities)
disp = async_dispatcher_connect(
hass, VS_DISCOVERY.format(VS_SWITCHES), async_discover
)
hass.data[DOMAIN][VS_DISPATCHERS].append(disp)
_async_setup_entities(hass.data[DOMAIN][VS_SWITCHES], async_add_entities)
return True
@callback
def _async_setup_entities(devices, async_add_entities):
"""Check if device is online and add entity."""
dev_list = []
for dev in devices:
if DEV_TYPE_TO_HA.get(dev.device_type) == "outlet":
dev_list.append(VeSyncSwitchHA(dev))
elif DEV_TYPE_TO_HA.get(dev.device_type) == "switch":
dev_list.append(VeSyncLightSwitch(dev))
else:
_LOGGER.warning(
"%s - Unknown device type - %s", dev.device_name, dev.device_type
)
continue
async_add_entities(dev_list, update_before_add=True)
class VeSyncBaseSwitch(VeSyncDevice, SwitchEntity):
"""Base class for VeSync switch Device Representations."""
def turn_on(self, **kwargs):
"""Turn the device on."""
self.device.turn_on()
class VeSyncSwitchHA(VeSyncBaseSwitch, SwitchEntity):
"""Representation of a VeSync switch."""
def __init__(self, plug):
"""Initialize the VeSync switch device."""
super().__init__(plug)
self.smartplug = plug
@property
def device_state_attributes(self):
"""Return the state attributes of the device."""
attr = {}
if hasattr(self.smartplug, "weekly_energy_total"):
attr["voltage"] = self.smartplug.voltage
attr["weekly_energy_total"] = self.smartplug.weekly_energy_total
attr["monthly_energy_total"] = self.smartplug.monthly_energy_total
attr["yearly_energy_total"] = self.smartplug.yearly_energy_total
return attr
@property
def current_power_w(self):
"""Return the current power usage in W."""
return self.smartplug.power
@property
def today_energy_kwh(self):
"""Return the today total energy usage in kWh."""
return self.smartplug.energy_today
def update(self):
"""Update outlet details and energy usage."""
self.smartplug.update()
self.smartplug.update_energy()
class VeSyncLightSwitch(VeSyncBaseSwitch, SwitchEntity):
"""Handle representation of VeSync Light Switch."""
def __init__(self, switch):
"""Initialize Light Switch device class."""
super().__init__(switch)
self.switch = switch
| apache-2.0 | -3,332,572,542,167,740,000 | 29.925234 | 81 | 0.642188 | false |
openqt/algorithms | projecteuler/pe368-a-kempner-like-series.py | 1 | 1219 | #!/usr/bin/env python
# coding=utf-8
"""368. A Kempner-like series
https://projecteuler.net/problem=368
The **harmonic series** $1 + \dfrac{1}{2} + \dfrac{1}{3} + \dfrac{1}{4} + ...$
is well known to be divergent.
If we however omit from this series every term where the denominator has a 9
in it, the series remarkably enough converges to approximately 22.9206766193.
This modified harmonic series is called the **Kempner** series.
Let us now consider another modified harmonic series by omitting from the
harmonic series every term where the denominator has 3 or more equal
consecutive digits. One can verify that out of the first 1200 terms of the
harmonic series, only 20 terms will be omitted.
These 20 omitted terms are:
$$\dfrac{1}{111}, \dfrac{1}{222}, \dfrac{1}{333}, \dfrac{1}{444},
\dfrac{1}{555}, \dfrac{1}{666}, \dfrac{1}{777}, \dfrac{1}{888},
\dfrac{1}{999}, \dfrac{1}{1000}, \dfrac{1}{1110}, \\\\\ \dfrac{1}{1111},
\dfrac{1}{1112}, \dfrac{1}{1113}, \dfrac{1}{1114}, \dfrac{1}{1115},
\dfrac{1}{1116}, \dfrac{1}{1117}, \dfrac{1}{1118}, \dfrac{1}{1119}$$
This series converges as well.
Find the value the series converges to.
Give your answer rounded to 10 digits behind the decimal point.
"""
| gpl-3.0 | 5,967,106,988,400,226,000 | 39.633333 | 79 | 0.701395 | false |