repo_name: string (5-92 chars) | path: string (4-232 chars) | copies: 19 classes | size: string (4-7 chars) | content: string (721-1.04M chars) | license: 15 classes | hash: int64 | line_mean: float64 (6.51-99.9) | line_max: int64 (15-997) | alpha_frac: float64 (0.25-0.97) | autogenerated: bool (1 class) |
---|---|---|---|---|---|---|---|---|---|---|
pombredanne/django-avocado | avocado/modeltree.py | 1 | 22222 | import inspect
from django.db import models
from django.db.models import Q
from django.core.exceptions import ImproperlyConfigured
from avocado.conf import settings
__all__ = ('ModelTree',)
DEFAULT_MODELTREE_ALIAS = 'default'
class ModelTreeNode(object):
def __init__(self, model, parent=None, rel_type=None, rel_reversed=None,
related_name=None, accessor_name=None, depth=0):
"""Defines attributes of a `model' and the relationship to the parent
model.
`name' - the `model's class name
`db_table' - the model's database table name
`pk_field' - the model's primary key field
`parent' - a reference to the parent ModelTreeNode
`parent_model' - a reference to the `parent' model, since it may be
None
`rel_type' - denotes the _kind_ of relationship with the
following possibilities: 'manytomany', 'onetoone', or 'foreignkey'.
`rel_reversed' - denotes whether this node was derived from a
forward relationship (an attribute lives on the parent model) or
a reverse relationship (an attribute lives on this model).
`related_name' - is the query string representation which is used
when querying via the ORM.
`accessor_name' - can be used when accessing the model object's
attributes e.g. getattr(obj, accessor_name). this is relative to
the parent model.
`depth' - the depth of this node relative to the root (zero-based
index)
`children' - a list containing the child nodes
"""
self.model = model
self.name = model.__name__
self.db_table = model._meta.db_table
self.pk_field = model._meta.pk.column
self.parent = parent
self.parent_model = parent and parent.model or None
self.rel_type = rel_type
self.rel_reversed = rel_reversed
self.related_name = related_name
self.accessor_name = accessor_name
self.depth = depth
self.children = []
def __str__(self):
return '%s via %s' % (self.name, self.parent_model.__name__)
def _get_m2m_db_table(self):
f = getattr(self.parent_model, self.accessor_name)
if self.rel_reversed:
return f.related.field.m2m_db_table()
else:
return f.field.m2m_db_table()
m2m_db_table = property(_get_m2m_db_table)
def _get_m2m_field(self):
f = getattr(self.parent_model, self.accessor_name)
if self.rel_reversed:
return f.related.field.m2m_column_name()
else:
return f.field.m2m_column_name()
m2m_field = property(_get_m2m_field)
def _get_m2m_reverse_field(self):
f = getattr(self.parent_model, self.accessor_name)
if self.rel_reversed:
return f.related.field.m2m_reverse_name()
else:
return f.field.m2m_reverse_name()
m2m_reverse_field = property(_get_m2m_reverse_field)
def _get_foreignkey_field(self):
f = getattr(self.parent_model, self.accessor_name)
if self.rel_reversed:
return f.related.field.column
else:
return f.field.column
foreignkey_field = property(_get_foreignkey_field)
def _get_join_connections(self):
"""Returns a list of connections that need to be added to a
QuerySet object that properly joins this model and the parent.
"""
if not hasattr(self, '_join_connections'):
connections = []
# setup initial FROM clause
connections.append((None, self.parent.db_table, None, None))
# setup two connections for m2m
if self.rel_type == 'manytomany':
c1 = (
self.parent.db_table,
self.m2m_db_table,
self.parent.pk_field,
self.rel_reversed and self.m2m_reverse_field or \
self.m2m_field,
)
c2 = (
self.m2m_db_table,
self.db_table,
self.rel_reversed and self.m2m_field or \
self.m2m_reverse_field,
self.pk_field,
)
connections.append(c1)
connections.append(c2)
else:
c1 = (
self.parent.db_table,
self.db_table,
self.rel_reversed and self.parent.pk_field or \
self.foreignkey_field,
self.rel_reversed and self.foreignkey_field or \
self.parent.pk_field,
)
connections.append(c1)
self._join_connections = connections
return self._join_connections
join_connections = property(_get_join_connections)
def remove_child(self, model):
for i, cnode in enumerate(self.children):
if cnode.model is model:
return self.children.pop(i)
class ModelTree(object):
"""A class to handle building and parsing a tree structure given a model.
`root_model' - the model of interest in which everything is relatively
defined
`exclude' - a list of models that are not to be added to the tree
"""
def __init__(self, root_model, exclude=(), routes=()):
self.root_model = self._get_model(root_model)
self.exclude = map(self._get_model, exclude)
self._rts, self._tos = self._build_routes(routes)
self._tree_hash = {}
def check(self, queryset):
if queryset.model is self.root_model:
return True
return False
def _get_model(self, label):
# model class
if inspect.isclass(label) and issubclass(label, models.Model):
return label
# passed as a label string
elif isinstance(label, basestring):
app_label, model_label = label.lower().split('.')
model = models.get_model(app_label, model_label)
if model:
return model
raise TypeError, 'model "%s" could not be found' % label
def _build_routes(self, routes):
"""
Routes provide a means of specifying JOINs between two tables.
The minimum information necessary to define an explicit JOIN is as
follows:
'from_label' - defines the model on the right side of the join
'to_label' - defines the model on the left side of the join
'join_field' - defines the field in which the join will occur
'symmetrical' - defines whether the same join will be constructed
if the 'from_model' and 'to_model' are reversed
"""
rts = {}
tos = {}
for route in routes:
# unpack
from_label, to_label, join_field, symmetrical = route
# get models
from_model = self._get_model(from_label)
to_model = self._get_model(to_label)
# get field
if join_field is not None:
model_name, field_name = join_field.split('.')
model_name = model_name.lower()
if model_name == from_model.__name__.lower():
field = from_model._meta.get_field_by_name(field_name)[0]
elif model_name == to_model.__name__.lower():
field = to_model._meta.get_field_by_name(field_name)[0]
else:
raise TypeError, 'model for join_field, "%s", does not match' % field_name
if field is None:
raise TypeError, 'field "%s" not found' % field_name
else:
field = None
if field:
rts[(from_model, to_model)] = field
if symmetrical:
rts[(to_model, from_model)] = field
else:
tos[to_model] = from_model
return rts, tos
def _filter_one2one(self, field):
"""Tests if this field is a OneToOneField. If a route exists for this
field's model and its target model, ensure this is the field that
should be used to join the two tables.
"""
if isinstance(field, models.OneToOneField):
# route has been defined with a specific field required
tup = (field.model, field.rel.to)
# skip if not the correct field
if self._rts.has_key(tup) and self._rts.get(tup) is not field:
return
return field
def _filter_related_one2one(self, rel):
"""Tests if this RelatedObject represents a OneToOneField. If a route
exists for this field's model and its target model, ensure this is
the field that should be used to join the two tables.
"""
field = rel.field
if isinstance(field, models.OneToOneField):
# route has been defined with a specific field required
tup = (rel.model, field.model)
# skip if not the correct field
if self._rts.has_key(tup) and self._rts.get(tup) is not field:
return
return rel
def _filter_fk(self, field):
"""Tests if this field is a ForeignKey. If a route exists for this
field's model and its target model, ensure this is the field that
should be used to join the two tables.
"""
if isinstance(field, models.ForeignKey):
# route has been defined with a specific field required
tup = (field.model, field.rel.to)
# skip if not the correct field
if self._rts.has_key(tup) and self._rts.get(tup) is not field:
return
return field
def _filter_related_fk(self, rel):
"""Tests if this RelatedObject represents a ForeignKey. If a route
exists for this field's model and its target model, ensure this is
the field that should be used to join the two tables.
"""
field = rel.field
if isinstance(field, models.ForeignKey):
# route has been defined with a specific field required
tup = (rel.model, field.model)
# skip if not the correct field
if self._rts.has_key(tup) and self._rts.get(tup) is not field:
return
return rel
def _filter_m2m(self, field):
"""Tests if this field is a ManyToManyField. If a route exists for this
field's model and its target model, ensure this is the field that
should be used to join the two tables.
"""
if isinstance(field, models.ManyToManyField):
# route has been defined with a specific field required
tup = (field.model, field.rel.to)
# skip if not the correct field
if self._rts.has_key(tup) and self._rts.get(tup) is not field:
return
return field
def _filter_related_m2m(self, rel):
"""Tests if this RelatedObject represents a ManyToManyField. If a route
exists for this field's model and its target model, ensure this is
the field that should be used to join the two tables.
"""
field = rel.field
if isinstance(field, models.ManyToManyField):
# route has been defined with a specific field required
tup = (rel.model, field.model)
# skip if not the correct field
if self._rts.has_key(tup) and self._rts.get(tup) is not field:
return
return rel
def _add_node(self, parent, model, rel_type, rel_reversed, related_name,
accessor_name, depth):
"""Adds a node to the tree only if a node of the same `model' does not
already exist in the tree with smaller depth. If the node is added, the
tree traversal continues finding the node's relations.
Conditions in which the node will fail to be added:
- the model is excluded completely
- the model is going back the same path it came from
- the model is circling back to the root_model
- the model does not come from the parent.model (via _tos)
"""
exclude = set(self.exclude + [parent.parent_model, self.root_model])
# ignore excluded models and prevent circular paths
if model in exclude:
return
# if a route exists, only allow the model to be added if coming from the
# specified parent.model
if self._tos.has_key(model) and self._tos.get(model) is not parent.model:
return
node_hash = self._tree_hash.get(model, None)
# don't add node if a path with a shorter depth exists. this is applied
# after the correct join has been determined. generally if a route is
# defined for relation, this will never be an issue since there would
# only be one path available. if a route is not defined, the shorter
# path will be found
if not node_hash or node_hash['depth'] > depth:
if node_hash:
node_hash['parent'].remove_child(model)
node = ModelTreeNode(model, parent, rel_type, rel_reversed,
related_name, accessor_name, depth)
self._tree_hash[model] = {'parent': parent, 'depth': depth,
'node': node}
node = self._find_relations(node, depth)
parent.children.append(node)
del node
def _find_relations(self, node, depth=0):
"""Finds all relations given a node.
NOTE: the many-to-many relations are evaluated first to prevent
'through' models being bound as a ForeignKey relationship.
"""
depth += 1
model = node.model
# determine relational fields to determine paths
forward_fields = model._meta.fields
reverse_fields = model._meta.get_all_related_objects()
forward_o2o = filter(self._filter_one2one, forward_fields)
reverse_o2o = filter(self._filter_related_one2one, reverse_fields)
forward_fk = filter(self._filter_fk, forward_fields)
reverse_fk = filter(self._filter_related_fk, reverse_fields)
forward_m2m = filter(self._filter_m2m, model._meta.many_to_many)
reverse_m2m = filter(self._filter_related_m2m, model._meta.get_all_related_many_to_many_objects())
# iterate m2m relations
for f in forward_m2m:
kwargs = {
'parent': node,
'model': f.rel.to,
'rel_type': 'manytomany',
'rel_reversed': False,
'related_name': f.name,
'accessor_name': f.name,
'depth': depth,
}
self._add_node(**kwargs)
# iterate over related m2m fields
for r in reverse_m2m:
kwargs = {
'parent': node,
'model': r.model,
'rel_type': 'manytomany',
'rel_reversed': True,
'related_name': r.field.related_query_name(),
'accessor_name': r.get_accessor_name(),
'depth': depth,
}
self._add_node(**kwargs)
# iterate over one2one fields
for f in forward_o2o:
kwargs = {
'parent': node,
'model': f.rel.to,
'rel_type': 'onetoone',
'rel_reversed': False,
'related_name': f.name,
'accessor_name': f.name,
'depth': depth,
}
self._add_node(**kwargs)
# iterate over related one2one fields
for r in reverse_o2o:
kwargs = {
'parent': node,
'model': r.model,
'rel_type': 'onetoone',
'rel_reversed': True,
'related_name': r.field.related_query_name(),
'accessor_name': r.get_accessor_name(),
'depth': depth,
}
self._add_node(**kwargs)
# iterate over fk fields
for f in forward_fk:
kwargs = {
'parent': node,
'model': f.rel.to,
'rel_type': 'foreignkey',
'rel_reversed': False,
'related_name': f.name,
'accessor_name': f.name,
'depth': depth,
}
self._add_node(**kwargs)
# iterate over related foreign keys
for r in reverse_fk:
kwargs = {
'parent': node,
'model': r.model,
'rel_type': 'foreignkey',
'rel_reversed': True,
'related_name': r.field.related_query_name(),
'accessor_name': r.get_accessor_name(),
'depth': depth,
}
self._add_node(**kwargs)
return node
def _get_root_node(self):
"Sets the `root_node' and implicitly builds the entire tree."
if not hasattr(self, '_root_node'):
node = ModelTreeNode(self.root_model)
self._root_node = self._find_relations(node)
self._tree_hash[self.root_model] = {'parent': None, 'depth': 0,
'node': self._root_node}
return self._root_node
root_node = property(_get_root_node)
def _find_path(self, model, node, node_path=[]):
if node.model == model:
return node_path
for cnode in node.children:
mpath = self._find_path(model, cnode, node_path + [cnode])
if mpath:
return mpath
def path_to(self, model):
"Returns a list of nodes thats defines the path of traversal."
model = self._get_model(model)
return self._find_path(model, self.root_node)
def path_to_with_root(self, model):
"""Returns a list of nodes thats defines the path of traversal
including the root node.
"""
model = self._get_model(model)
return self._find_path(model, self.root_node, [self.root_node])
def get_node_by_model(self, model):
"Finds the node with the specified model."
model = self._get_model(model)
if not self._tree_hash:
self.root_node
val = self._tree_hash.get(model, None)
if val is None:
return
return val['node']
def query_string(self, node_path, field_name, operator=None):
"Returns a query string given a path"
toks = [n.related_name for n in node_path] + [field_name]
if operator is not None:
toks.append(operator)
return str('__'.join(toks))
def q(self, node_path, field_name, value, operator=None):
"Returns a Q object."
key = self.query_string(node_path, field_name, operator)
return Q(**{key: value})
def accessor_names(self, node_path):
"""Returns a list of the accessor names given a list of nodes. This is
most useful when needing to dynamically access attributes starting from
an instance of the `root_node' object.
"""
return [n.accessor_name for n in node_path]
def get_all_join_connections(self, node_path):
"""Returns a list of JOIN connections that can be manually applied to a
QuerySet object, e.g.:
queryset = SomeModel.objects.all()
modeltree = ModelTree(SomeModel)
nodes = modeltree.path_to(SomeOtherModel)
conns = modeltree.get_all_join_connections(nodes)
for c in conns:
queryset.query.join(c, promote=True)
This allows the ORM to handle setting up the JOINs, which may be
different depending on the QuerySet being altered.
"""
connections = []
for i,node in enumerate(node_path):
if i == 0:
connections.extend(node.join_connections)
else:
connections.extend(node.join_connections[1:])
return connections
def add_joins(self, model, queryset, **kwargs):
model = self._get_model(model)
clone = queryset._clone()
nodes = self.path_to(model)
conns = self.get_all_join_connections(nodes)
for c in conns:
clone.query.join(c, **kwargs)
return clone
def print_path(self, node=None, depth=0):
"Traverses the entire tree and prints a hierarchical view to stdout."
if node is None:
node = self.root_node
if node:
print '- ' * depth * 2, '"%s"' % node.name, 'at a depth of', node.depth
if node.children:
depth += 1
for x in node.children:
self.print_path(x, depth)
def get_accessor_pairs(self, node_path):
"Used for testing purposes."
accessor_names = self.accessor_names(node_path)
node_path = node_path[:-1] # don't need the last item
if len(node_path) == 0 or node_path[0] is not self.root_node:
node_path = [self.root_node] + node_path
else:
accessor_names = accessor_names[1:]
return zip(node_path, accessor_names)
def get_queryset(self):
"Returns a QuerySet relative to the ``root_model``."
return self.root_model.objects.all()
class LazyModelTree(object):
def __init__(self, modeltrees):
self.modeltrees = modeltrees
self._modeltrees = {}
def __getitem__(self, alias):
if not self.modeltrees:
raise ImproperlyConfigured, 'You must at least specify the "%s" ' \
'modeltree config' % DEFAULT_MODELTREE_ALIAS
if alias not in self._modeltrees:
try:
kwargs = self.modeltrees[alias]
except KeyError:
raise KeyError, 'No modeltree settings defined for "%s"' % alias
self._modeltrees[alias] = ModelTree(**kwargs)
return self._modeltrees[alias]
trees = LazyModelTree(settings.MODELTREES)
| bsd-3-clause | -7,547,567,735,134,848,000 | 36.160535 | 106 | 0.563451 | false |
chemiron/aiopool | aiopool/fork.py | 1 | 6082 | import asyncio
import logging
import os
import signal
from struct import Struct
import time
from .base import (WorkerProcess, ChildProcess,
IDLE_CHECK, IDLE_TIME)
MSG_HEAD = 0x0
MSG_PING = 0x1
MSG_PONG = 0x2
MSG_CLOSE = 0x3
PACK_MSG = Struct('!BB').pack
UNPACK_MSG = Struct('!BB').unpack
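# wire format: every message is two bytes, a constant header byte (MSG_HEAD)
# followed by the message type (MSG_PING / MSG_PONG / MSG_CLOSE)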
logger = logging.getLogger(__name__)
class ConnectionClosedError(Exception):
pass
@asyncio.coroutine
def connect_write_pipe(file):
loop = asyncio.get_event_loop()
transport, _ = yield from loop.connect_write_pipe(asyncio.Protocol, file)
return PipeWriter(transport)
@asyncio.coroutine
def connect_read_pipe(file):
loop = asyncio.get_event_loop()
pipe_reader = PipeReader(loop=loop)
transport, _ = yield from loop.connect_read_pipe(
lambda: PipeReadProtocol(pipe_reader), file)
pipe_reader.transport = transport
return pipe_reader
class PipeWriter:
def __init__(self, transport):
self.transport = transport
def _send(self, msg):
self.transport.write(PACK_MSG(MSG_HEAD, msg))
def ping(self):
self._send(MSG_PING)
def pong(self):
self._send(MSG_PONG)
def stop(self):
self._send(MSG_CLOSE)
def close(self):
if self.transport is not None:
self.transport.close()
class PipeReadProtocol(asyncio.Protocol):
def __init__(self, reader):
self.reader = reader
def data_received(self, data):
self.reader.feed(data)
def connection_lost(self, exc):
self.reader.close()
class PipeReader:
closed = False
transport = None
def __init__(self, loop):
self.loop = loop
self._waiters = asyncio.Queue()
def close(self):
self.closed = True
while not self._waiters.empty():
waiter = self._waiters.get_nowait()
if not waiter.done():
waiter.set_exception(ConnectionClosedError())
if self.transport is not None:
self.transport.close()
def feed(self, data):
asyncio.async(self._feed_waiter(data))
@asyncio.coroutine
def _feed_waiter(self, data):
waiter = yield from self._waiters.get()
waiter.set_result(data)
@asyncio.coroutine
def read(self):
if self.closed:
raise ConnectionClosedError()
waiter = asyncio.Future(loop=self.loop)
yield from self._waiters.put(waiter)
data = yield from waiter
hdr, msg = UNPACK_MSG(data)
if hdr == MSG_HEAD:
return msg
class ForkChild(ChildProcess):
_heartbeat_task = None
def __init__(self, parent_read, parent_write, loader, **options):
ChildProcess.__init__(self, loader, **options)
self.parent_read = parent_read
self.parent_write = parent_write
@asyncio.coroutine
def on_start(self):
self._heartbeat_task = asyncio.Task(self.heartbeat())
def stop(self):
if self._heartbeat_task is not None:
self._heartbeat_task.cancel()
ChildProcess.stop(self)
@asyncio.coroutine
def heartbeat(self):
# setup pipes
reader = yield from connect_read_pipe(
os.fdopen(self.parent_read, 'rb'))
writer = yield from connect_write_pipe(
os.fdopen(self.parent_write, 'wb'))
while True:
try:
msg = yield from reader.read()
except ConnectionClosedError:
logger.info('Parent is dead, {} stopping...'
''.format(os.getpid()))
break
if msg == MSG_PING:
writer.pong()
elif msg == MSG_CLOSE:
break
reader.close()
writer.close()
self.stop()
class ForkWorker(WorkerProcess):
pid = ping = None
reader = writer = None
chat_task = heartbeat_task = None
def start_child(self):
parent_read, child_write = os.pipe()
child_read, parent_write = os.pipe()
pid = os.fork()
if pid:
# parent
os.close(parent_read)
os.close(parent_write)
asyncio.async(self.connect(pid, child_write, child_read))
else:
# child
os.close(child_write)
os.close(child_read)
# cleanup after fork
asyncio.set_event_loop(None)
# setup process
process = ForkChild(parent_read, parent_write, self.loader)
process.start()
def kill_child(self):
self.chat_task.cancel()
self.heartbeat_task.cancel()
self.reader.close()
self.writer.close()
try:
os.kill(self.pid, signal.SIGTERM)
os.waitpid(self.pid, 0)
except ProcessLookupError:
pass
@asyncio.coroutine
def heartbeat(self, writer):
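# ping the child every `idle_check` seconds; if no pong has arrived within
# `idle_time` (self.ping is refreshed in chat()), restart the child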
idle_time = self.options.get('idle_time', IDLE_TIME)
idle_check = self.options.get('idle_check', IDLE_CHECK)
while True:
yield from asyncio.sleep(idle_check)
if (time.monotonic() - self.ping) < idle_time:
writer.ping()
else:
self.restart()
return
@asyncio.coroutine
def chat(self, reader):
while True:
try:
msg = yield from reader.read()
except ConnectionClosedError:
self.restart()
return
if msg == MSG_PONG:
self.ping = time.monotonic()
@asyncio.coroutine
def connect(self, pid, up_write, down_read):
# setup pipes
reader = yield from connect_read_pipe(
os.fdopen(down_read, 'rb'))
writer = yield from connect_write_pipe(
os.fdopen(up_write, 'wb'))
# store info
self.pid = pid
self.ping = time.monotonic()
self.reader = reader
self.writer = writer
self.chat_task = asyncio.Task(self.chat(reader))
self.heartbeat_task = asyncio.Task(self.heartbeat(writer))
| mit | 982,928,408,585,555,200 | 24.447699 | 77 | 0.57366 | false |
hypebeast/etapi | etapi/utils.py | 1 | 1765 | # -*- coding: utf-8 -*-
'''Helper utilities and decorators.'''
import time
from datetime import datetime
from flask import flash
def flash_errors(form, category="warning"):
'''Flash all errors for a form.'''
for field, errors in form.errors.items():
for error in errors:
flash("{0} - {1}"
.format(getattr(form, field).label.text, error), category)
def pretty_date(dt, default=None):
"""
Returns string representing "time since" e.g.
3 days ago, 5 hours ago etc.
Ref: https://bitbucket.org/danjac/newsmeme/src/a281babb9ca3/newsmeme/
"""
if default is None:
default = 'just now'
now = datetime.utcnow()
diff = now - dt
periods = (
(diff.days / 365, 'year', 'years'),
(diff.days / 30, 'month', 'months'),
(diff.days / 7, 'week', 'weeks'),
(diff.days, 'day', 'days'),
(diff.seconds / 3600, 'hour', 'hours'),
(diff.seconds / 60, 'minute', 'minutes'),
(diff.seconds, 'second', 'seconds'),
)
for period, singular, plural in periods:
if not period:
continue
if period == 1:
return u'%d %s ago' % (period, singular)
else:
return u'%d %s ago' % (period, plural)
return default
def pretty_seconds_to_hhmmss(seconds):
if not seconds:
return None
m, s = divmod(seconds, 60)
h, m = divmod(m, 60)
return "%d h %d m %s s" % (h, m, s)
def pretty_seconds_to_hhmm(seconds):
if not seconds:
return None
m, s = divmod(seconds, 60)
h, m = divmod(m, 60)
return "%d h %d m" % (h, m)
def pretty_seconds_to_hh(seconds):
if not seconds:
return None
m, s = divmod(seconds, 60)
h, m = divmod(m, 60)
return "%d h" % (h)
| bsd-3-clause | -7,001,446,943,259,879,000 | 24.955882 | 78 | 0.549575 | false |
DBeath/flask-feedrsub | tests/period_test.py | 1 | 1488 | from datetime import datetime
from dateutil.relativedelta import relativedelta
from feedrsub.database import db
from feedrsub.models.period import PERIOD, Period
from feedrsub.models.populate_db import populate_periods
def test_populate_periods(session):
populate_periods()
daily = Period.query.filter_by(name=PERIOD.DAILY).first()
assert daily.name == PERIOD.DAILY
immediate = Period.query.filter_by(name=PERIOD.IMMEDIATE).first()
assert immediate.name == PERIOD.IMMEDIATE
weekly = Period.query.filter_by(name=PERIOD.WEEKLY).first()
assert weekly.name == PERIOD.WEEKLY
monthly = Period.query.filter_by(name=PERIOD.MONTHLY).first()
assert monthly.name == PERIOD.MONTHLY
def test_period_creation(session):
period_desc = "A Yearly period"
period_name = "YEARLY"
period = Period(period_name, period_desc)
db.session.add(period)
db.session.commit()
yearly = Period.query.filter_by(name=period_name).first()
assert yearly.name == period_name
assert yearly.description == period_desc
def test_get_from_date_with_name(session):
now = datetime.utcnow()
past = now - relativedelta(days=1)
from_date = Period.get_from_date(PERIOD.DAILY, now)
assert from_date == past
def test_get_from_date_with_period(session):
now = datetime.utcnow()
past = now - relativedelta(days=1)
period = Period(name=PERIOD.DAILY)
from_date = Period.get_from_date(period, now)
assert from_date == past
| mit | 7,589,990,972,609,637,000 | 27.615385 | 69 | 0.715054 | false |
nigelb/Static-UPnP | examples/Chromecast/StaticUPnP_StaticServices.py | 1 | 3345 | # static_upnp responds to upnp search requests with statically configures responses.
# Copyright (C) 2016 NigelB
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
import socket
from dnslib import DNSQuestion, QTYPE
from static_upnp.chromecast_helpers import get_chromecast_uuid, get_date, get_chromecast_mdns_response
from static_upnp.chromecast_helpers import get_service_descriptor, get_chromecast_friendly_name
from static_upnp.mDNS import StaticMDNDService
from static_upnp.static import StaticService
OK = """HTTP/1.1 200 OK
CACHE-CONTROL: max-age={max_age}
DATE: {date}
EXT:
LOCATION: http://{ip}:{port}/ssdp/device-desc.xml
OPT: "http://schemas.upnp.org/upnp/1/0/"; ns=01
01-NLS: 161d2e68-1dd2-11b2-9fd5-f9d9dc2ad10b
SERVER: Linux/3.8.13+, UPnP/1.0, Portable SDK for UPnP devices/1.6.18
X-User-Agent: redsonic
ST: {st}
USN: {usn}
BOOTID.UPNP.ORG: 4
CONFIGID.UPNP.ORG: 2
"""
NOTIFY = """NOTIFY * HTTP/1.1
HOST: 239.255.255.250:1900
CACHE-CONTROL: max-age=1800
LOCATION: http://{ip}:{port}/ssdp/device-desc.xml
NT: {st}
NTS: {nts}
OPT: "http://schemas.upnp.org/upnp/1/0/"; ns=01
01-NLS: 161d2e68-1dd2-11b2-9fd5-f9d9dc2ad10b
SERVER: Linux/3.8.13+, UPnP/1.0, Portable SDK for UPnP devices/1.6.18
X-User-Agent: redsonic
USN: {uuid}
"""
chromecast_ip = socket.gethostbyname_ex("Chromecast")[2][0]
chromecast_port = 8008
chromecast_service_descriptor = get_service_descriptor(chromecast_ip, chromecast_port)
chromecast_uuid = get_chromecast_uuid(chromecast_service_descriptor)
chromecast_friendly_name = get_chromecast_friendly_name(chromecast_service_descriptor)
chromecast_bs = "XXXXXXXXXXXX"
chromecast_cd = "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX"
services = [
StaticService({
"ip": chromecast_ip,
"port": chromecast_port,
"uuid": chromecast_uuid,
"max_age": "1800",
"date": get_date
}, 1024,
OK=OK,
NOTIFY=NOTIFY,
services=[
{
"st": "upnp:rootdevice",
"usn": "uuid:{uuid}::{st}"
},
{
"st": "uuid:{uuid}",
"usn": "uuid:{uuid}"
},
{
"st": "urn:dial-multiscreen-org:device:dial:1",
"usn": "uuid:{uuid}::{st}"
},
{
"st": "urn:dial-multiscreen-org:service:dial:1",
"usn": "uuid:{uuid}::{st}"
},
])
]
mdns_services=[StaticMDNDService(
response_generator=lambda query: get_chromecast_mdns_response(query, chromecast_ip, chromecast_uuid, chromecast_friendly_name, chromecast_bs, chromecast_cd),
dns_question=DNSQuestion(qname="_googlecast._tcp.local", qtype=QTYPE.PTR, qclass=32769)
)]
| gpl-2.0 | 871,185,580,901,961,200 | 31.794118 | 161 | 0.676233 | false |
ponty/MyElectronicProjects | pavement.py | 1 | 1718 | from easyprocess import Proc
from paver.easy import *
import paver.doctools
import paver.virtual
import paver.misctasks
from paved import *
from paved.dist import *
from paved.util import *
from paved.docs import *
from paved.pycheck import *
from paved.pkg import *
options(
sphinx=Bunch(
docroot='docs',
builddir="_build",
),
# pdf=Bunch(
# builddir='_build',
# builder='latex',
# ),
)
options.paved.clean.rmdirs += ['.tox',
'dist',
'build',
]
options.paved.clean.patterns += ['*.pickle',
'*.doctree',
'*.gz',
'nosetests.xml',
'sloccount.sc',
'*.pdf', '*.tex',
'*_sch_*.png',
'*_brd_*.png',
'*.b#*', '*.s#*', # eagle
#'*.pro',
'*.hex',
'*.zip',
'distribute_setup.py',
'*.bak',
# kicad
'$savepcb.brd',
'*.erc',
'*.000',
]
options.paved.dist.manifest.include.remove('distribute_setup.py')
options.paved.dist.manifest.include.remove('paver-minilib.zip')
@task
@needs(
# 'clean',
'cog',
'html',
'pdf',
)
def alltest():
'all tasks to check'
pass
| bsd-2-clause | 2,015,404,359,761,255,000 | 25.430769 | 65 | 0.360885 | false |
GoogleCloudPlatform/python-docs-samples | appengine/standard/endpoints-frameworks-v2/quickstart/main_test.py | 1 | 1894 | # Copyright 2016 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from endpoints import message_types
import mock
import main
def test_list_greetings(testbed):
api = main.GreetingApi()
response = api.list_greetings(message_types.VoidMessage())
assert len(response.items) == 2
def test_get_greeting(testbed):
api = main.GreetingApi()
request = main.GreetingApi.get_greeting.remote.request_type(id=1)
response = api.get_greeting(request)
assert response.message == 'goodbye world!'
def test_multiply_greeting(testbed):
api = main.GreetingApi()
request = main.GreetingApi.multiply_greeting.remote.request_type(
times=4,
message='help I\'m trapped in a test case.')
response = api.multiply_greeting(request)
assert response.message == 'help I\'m trapped in a test case.' * 4
def test_authed_greet(testbed):
api = main.AuthedGreetingApi()
with mock.patch('main.endpoints.get_current_user') as user_mock:
user_mock.return_value = None
response = api.greet(message_types.VoidMessage())
assert response.message == 'Hello, Anonymous'
user_mock.return_value = mock.Mock()
user_mock.return_value.email.return_value = '[email protected]'
response = api.greet(message_types.VoidMessage())
assert response.message == 'Hello, [email protected]'
| apache-2.0 | -4,486,417,427,585,140,700 | 34.074074 | 74 | 0.712777 | false |
OSU-CS-325/Project_Two_Coin_Change | run-files/analysisQ7.py | 1 | 2957 | import sys
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import random
import datetime
# Import the three change making algorithms
sys.path.insert(0, "../divide-conquer/")
sys.path.insert(0, "../dynamic-programming")
sys.path.insert(0, "../greedy")
from changeslow import changeslow
from changegreedy import changegreedy
from changedp import changedp
### QUESTION 7 ###
def Q7(slow, minChange, maxChange):
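# times changegreedy/changedp (and optionally changeslow) over numExp random
# problems for each coin-array length, then plots average runtime vs len(V)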
lenV = []
runtimeGreedy = []
runtimeDP = []
runtimeSlow = []
numExp = 10
maxRange = 1000
if (slow):
maxRange = 10 # some much smaller number
for i in range(1, maxRange): # V can be of length 1 to (maxRange - 1)
print "\n------ running V length=" + str(i) + "------"
lenV.append(i)
#print "lenV:", lenV
runtimeGreedy.append(0)
runtimeDP.append(0)
runtimeSlow.append(0)
for j in range(numExp): # run numExp experiments for this length of V
print "\n ---- running experiment=" + str(j + 1) + " ----"
coinArray = []
for k in range(i): # generate V of size i [1, rand, ..., rand, max=1 + 5*(maxRange-2)]
if (k == 0):
coinArray.append(1)
else:
randFrom = coinArray[len(coinArray) - 1] + 1
randTo = coinArray[len(coinArray) - 1] + 5
coinArray.append(random.randint(randFrom, randTo))
change = random.randint(minChange, maxChange)
#print " coinArray:", coinArray
#print " change:", change
print " running greedy..."
start = datetime.datetime.now()
_, _ = changegreedy(coinArray, change)
end = datetime.datetime.now()
delta = end - start
delta = int(delta.total_seconds() * 1000000)
print " " + str(delta)
runtimeGreedy[i - 1] += delta
print " running DP..."
start = datetime.datetime.now()
_, _ = changedp(coinArray, change)
end = datetime.datetime.now()
delta = end - start
delta = int(delta.total_seconds() * 1000000)
print " " + str(delta)
runtimeDP[i - 1] += delta
if (slow):
print " running slow..."
start = datetime.datetime.now()
_, _ = changeslow(coinArray, change)
end = datetime.datetime.now()
delta = end - start
delta = int(delta.total_seconds() * 1000000)
print " " + str(delta)
runtimeSlow[i - 1] += delta
runtimeGreedy[i - 1] /= numExp
runtimeDP[i - 1] /= numExp
if (slow):
runtimeSlow[i - 1] /= numExp
plt.figure(21)
plt.plot(lenV, runtimeGreedy, 'b-', linewidth=2.0, label='Greedy')
plt.plot(lenV, runtimeDP, 'r--', linewidth=2.0, label='DP')
if (slow):
plt.plot(lenV, runtimeSlow, 'g-.', linewidth=2.0, label='Slow')
plt.legend(loc='upper left')
plt.title('Runtime vs len(V[]) for randomized V[] and A')
plt.ylabel('Avg. Runtime (10^-6 sec)')
plt.xlabel('len(V[])')
plt.grid(True)
if (slow):
plt.savefig('img/Q7slow_runtime.png', bbox_inches='tight')
else:
plt.savefig('img/Q7_runtime.png', bbox_inches='tight')
def main():
Q7(False, 100, 100)
#Q7(True)
if __name__ == "__main__":
main()
| mit | 992,061,664,401,384,600 | 26.37963 | 89 | 0.631721 | false |
looker/sentry | tests/sentry/integrations/github/test_issues.py | 1 | 6535 | from __future__ import absolute_import
import responses
from mock import patch
from exam import fixture
from django.test import RequestFactory
from sentry.integrations.github.integration import GitHubIntegration
from sentry.models import Integration, ExternalIssue
from sentry.testutils import TestCase
from sentry.utils import json
class GitHubIssueBasicTest(TestCase):
@fixture
def request(self):
return RequestFactory()
def setUp(self):
self.user = self.create_user()
self.organization = self.create_organization(owner=self.user)
self.model = Integration.objects.create(
provider='github',
external_id='github_external_id',
name='getsentry',
)
self.model.add_organization(self.organization.id)
self.integration = GitHubIntegration(self.model, self.organization.id)
@responses.activate
@patch('sentry.integrations.github.client.get_jwt', return_value='jwt_token_1')
def test_get_allowed_assignees(self, mock_get_jwt):
responses.add(
responses.POST,
'https://api.github.com/installations/github_external_id/access_tokens',
json={'token': 'token_1', 'expires_at': '2018-10-11T22:14:10Z'}
)
responses.add(
responses.GET,
'https://api.github.com/repos/getsentry/sentry/assignees',
json=[{'login': 'MeredithAnya'}]
)
repo = 'getsentry/sentry'
assert self.integration.get_allowed_assignees(repo) == (
('', 'Unassigned'),
('MeredithAnya', 'MeredithAnya')
)
request = responses.calls[0].request
assert request.headers['Authorization'] == 'Bearer jwt_token_1'
request = responses.calls[1].request
assert request.headers['Authorization'] == 'token token_1'
@responses.activate
@patch('sentry.integrations.github.client.get_jwt', return_value='jwt_token_1')
def test_create_issue(self, mock_get_jwt):
responses.add(
responses.POST,
'https://api.github.com/installations/github_external_id/access_tokens',
json={'token': 'token_1', 'expires_at': '2018-10-11T22:14:10Z'}
)
responses.add(
responses.POST,
'https://api.github.com/repos/getsentry/sentry/issues',
json={'number': 321, 'title': 'hello', 'body': 'This is the description'}
)
form_data = {
'repo': 'getsentry/sentry',
'title': 'hello',
'description': 'This is the description',
}
assert self.integration.create_issue(form_data) == {
'key': 321,
'description': 'This is the description',
'title': 'hello',
'repo': 'getsentry/sentry',
}
request = responses.calls[0].request
assert request.headers['Authorization'] == 'Bearer jwt_token_1'
request = responses.calls[1].request
assert request.headers['Authorization'] == 'token token_1'
payload = json.loads(request.body)
assert payload == {'body': 'This is the description', 'assignee': None, 'title': 'hello'}
@responses.activate
@patch('sentry.integrations.github.client.get_jwt', return_value='jwt_token_1')
def test_get_repo_issues(self, mock_get_jwt):
responses.add(
responses.POST,
'https://api.github.com/installations/github_external_id/access_tokens',
json={'token': 'token_1', 'expires_at': '2018-10-11T22:14:10Z'}
)
responses.add(
responses.GET,
'https://api.github.com/repos/getsentry/sentry/issues',
json=[{'number': 321, 'title': 'hello', 'body': 'This is the description'}]
)
repo = 'getsentry/sentry'
assert self.integration.get_repo_issues(repo) == ((321, '#321 hello'),)
request = responses.calls[0].request
assert request.headers['Authorization'] == 'Bearer jwt_token_1'
request = responses.calls[1].request
assert request.headers['Authorization'] == 'token token_1'
@responses.activate
@patch('sentry.integrations.github.client.get_jwt', return_value='jwt_token_1')
def test_link_issue(self, mock_get_jwt):
issue_id = 321
responses.add(
responses.POST,
'https://api.github.com/installations/github_external_id/access_tokens',
json={'token': 'token_1', 'expires_at': '2018-10-11T22:14:10Z'}
)
responses.add(
responses.GET,
'https://api.github.com/repos/getsentry/sentry/issues/321',
json={'number': issue_id, 'title': 'hello', 'body': 'This is the description'}
)
data = {
'repo': 'getsentry/sentry',
'externalIssue': issue_id,
'comment': 'hello',
}
assert self.integration.get_issue(issue_id, data=data) == {
'key': issue_id,
'description': 'This is the description',
'title': 'hello',
'repo': 'getsentry/sentry',
}
request = responses.calls[0].request
assert request.headers['Authorization'] == 'Bearer jwt_token_1'
request = responses.calls[1].request
assert request.headers['Authorization'] == 'token token_1'
@responses.activate
@patch('sentry.integrations.github.client.get_jwt', return_value='jwt_token_1')
def test_after_link_issue(self, mock_get_jwt):
responses.add(
responses.POST,
'https://api.github.com/installations/github_external_id/access_tokens',
json={'token': 'token_1', 'expires_at': '2018-10-11T22:14:10Z'}
)
responses.add(
responses.POST,
'https://api.github.com/repos/getsentry/sentry/issues/321/comments',
json={'body': 'hello'}
)
data = {'comment': 'hello'}
external_issue = ExternalIssue.objects.create(
organization_id=self.organization.id,
integration_id=self.model.id,
key='hello#321',
)
self.integration.after_link_issue(external_issue, data=data)
request = responses.calls[0].request
assert request.headers['Authorization'] == 'Bearer jwt_token_1'
request = responses.calls[1].request
assert request.headers['Authorization'] == 'token token_1'
payload = json.loads(request.body)
assert payload == {'body': 'hello'}
| bsd-3-clause | -1,895,879,553,764,790,500 | 35.305556 | 97 | 0.59847 | false |
mikedh/trimesh | trimesh/creation.py | 1 | 40606 | """
creation.py
--------------
Create meshes from primitives, or with operations.
"""
from .base import Trimesh
from .constants import log, tol
from .geometry import faces_to_edges, align_vectors, plane_transform
from . import util
from . import grouping
from . import triangles
from . import transformations as tf
import numpy as np
import collections
try:
# shapely is a soft dependency
from shapely.geometry import Polygon
from shapely.wkb import loads as load_wkb
except BaseException as E:
# shapely will sometimes raise OSErrors
# on import rather than just ImportError
from . import exceptions
# re-raise the exception when someone tries
# to use the module that they don't have
Polygon = exceptions.closure(E)
load_wkb = exceptions.closure(E)
def revolve(linestring,
angle=None,
sections=None,
transform=None,
**kwargs):
"""
Revolve a 2D line string around the 2D Y axis, with a result with
the 2D Y axis pointing along the 3D Z axis.
This function is intended to handle the complexity of indexing
and is intended to be used to create all radially symmetric primitives,
eventually including cylinders, annular cylinders, capsules, cones,
and UV spheres.
Note that if your linestring is closed, it needs to be counterclockwise
if you would like face winding and normals facing outwards.
Parameters
-------------
linestring : (n, 2) float
Lines in 2D which will be revolved
angle : None or float
Angle in radians to revolve curve by
sections : None or int
Number of sections result should have
If not specified default is 32 per revolution
transform : None or (4, 4) float
Transform to apply to mesh after construction
**kwargs : dict
Passed to Trimesh constructor
Returns
--------------
revolved : Trimesh
Mesh representing revolved result
"""
linestring = np.asanyarray(linestring, dtype=np.float64)
# linestring must be ordered 2D points
if len(linestring.shape) != 2 or linestring.shape[1] != 2:
raise ValueError('linestring must be 2D!')
if angle is None:
# default to closing the revolution
angle = np.pi * 2
closed = True
else:
# check passed angle value
closed = angle >= ((np.pi * 2) - 1e-8)
if sections is None:
# default to 32 sections for a full revolution
sections = int(angle / (np.pi * 2) * 32)
# change to face count
sections += 1
# create equally spaced angles
theta = np.linspace(0, angle, sections)
# 2D points around the revolution
points = np.column_stack((np.cos(theta), np.sin(theta)))
# how many points per slice
per = len(linestring)
# use the 2D X component as radius
radius = linestring[:, 0]
# use the 2D Y component as the height along revolution
height = linestring[:, 1]
# a lot of tiling to get our 3D vertices
vertices = np.column_stack((
np.tile(points, (1, per)).reshape((-1, 2)) *
np.tile(radius, len(points)).reshape((-1, 1)),
np.tile(height, len(points))))
if closed:
# should be a duplicate set of vertices
assert np.allclose(vertices[:per],
vertices[-per:])
# chop off duplicate vertices
vertices = vertices[:-per]
if transform is not None:
# apply transform to vertices
vertices = tf.transform_points(vertices, transform)
# how many slices of the pie
slices = len(theta) - 1
# start with a quad for every segment
# this is a superset which will then be reduced
quad = np.array([0, per, 1,
1, per, per + 1])
# stack the faces for a single slice of the revolution
single = np.tile(quad, per).reshape((-1, 3))
# `per` is basically the stride of the vertices
single += np.tile(np.arange(per), (2, 1)).T.reshape((-1, 1))
# remove any zero-area triangle
# this covers many cases without having to think too much
single = single[triangles.area(vertices[single]) > tol.merge]
# how much to offset each slice
# note arange multiplied by vertex stride
# but tiled by the number of faces we actually have
offset = np.tile(np.arange(slices) * per,
(len(single), 1)).T.reshape((-1, 1))
# stack a single slice into N slices
stacked = np.tile(single.ravel(), slices).reshape((-1, 3))
if tol.strict:
# make sure we didn't screw up stacking operation
assert np.allclose(stacked.reshape((-1, single.shape[0], 3)) - single, 0)
# offset stacked and wrap vertices
faces = (stacked + offset) % len(vertices)
# create the mesh from our vertices and faces
mesh = Trimesh(vertices=vertices, faces=faces,
**kwargs)
# strict checks run only in unit tests
if (tol.strict and
np.allclose(radius[[0, -1]], 0.0) or
np.allclose(linestring[0], linestring[-1])):
# if revolved curve starts and ends with zero radius
# it should really be a valid volume, unless the sign
# reversed on the input linestring
assert mesh.is_volume
return mesh
def extrude_polygon(polygon,
height,
transform=None,
triangle_args=None,
**kwargs):
"""
Extrude a 2D shapely polygon into a 3D mesh
Parameters
----------
polygon : shapely.geometry.Polygon
2D geometry to extrude
height : float
Distance to extrude polygon along Z
triangle_args : str or None
Passed to triangle
**kwargs:
passed to Trimesh
Returns
----------
mesh : trimesh.Trimesh
Resulting extrusion as watertight body
"""
# create a triangulation from the polygon
vertices, faces = triangulate_polygon(
polygon, triangle_args=triangle_args, **kwargs)
# extrude that triangulation along Z
mesh = extrude_triangulation(vertices=vertices,
faces=faces,
height=height,
transform=transform,
**kwargs)
return mesh
def sweep_polygon(polygon,
path,
angles=None,
**kwargs):
"""
Extrude a 2D shapely polygon into a 3D mesh along an
arbitrary 3D path. Doesn't handle sharp curvature well.
Parameters
----------
polygon : shapely.geometry.Polygon
Profile to sweep along path
path : (n, 3) float
A path in 3D
angles : (n,) float
Optional rotation angle relative to prior vertex
at each vertex
Returns
-------
mesh : trimesh.Trimesh
Geometry of result
"""
path = np.asanyarray(path, dtype=np.float64)
if not util.is_shape(path, (-1, 3)):
raise ValueError('Path must be (n, 3)!')
# Extract 2D vertices and triangulation
verts_2d = np.array(polygon.exterior)[:-1]
base_verts_2d, faces_2d = triangulate_polygon(polygon, **kwargs)
n = len(verts_2d)
# Create basis for first planar polygon cap
x, y, z = util.generate_basis(path[0] - path[1])
tf_mat = np.ones((4, 4))
tf_mat[:3, :3] = np.c_[x, y, z]
tf_mat[:3, 3] = path[0]
# Compute 3D locations of those vertices
verts_3d = np.c_[verts_2d, np.zeros(n)]
verts_3d = tf.transform_points(verts_3d, tf_mat)
base_verts_3d = np.c_[base_verts_2d,
np.zeros(len(base_verts_2d))]
base_verts_3d = tf.transform_points(base_verts_3d,
tf_mat)
# keep matching sequence of vertices and 0- indexed faces
vertices = [base_verts_3d]
faces = [faces_2d]
# Compute plane normals for each turn --
# each turn induces a plane halfway between the two vectors
v1s = util.unitize(path[1:-1] - path[:-2])
v2s = util.unitize(path[1:-1] - path[2:])
norms = np.cross(np.cross(v1s, v2s), v1s + v2s)
norms[(norms == 0.0).all(1)] = v1s[(norms == 0.0).all(1)]
norms = util.unitize(norms)
final_v1 = util.unitize(path[-1] - path[-2])
norms = np.vstack((norms, final_v1))
v1s = np.vstack((v1s, final_v1))
# Create all side walls by projecting the 3d vertices into each plane
# in succession
for i in range(len(norms)):
verts_3d_prev = verts_3d
# Rotate if needed
if angles is not None:
tf_mat = tf.rotation_matrix(angles[i],
norms[i],
path[i])
verts_3d_prev = tf.transform_points(verts_3d_prev,
tf_mat)
# Project vertices onto plane in 3D
ds = np.einsum('ij,j->i', (path[i + 1] - verts_3d_prev), norms[i])
ds = ds / np.dot(v1s[i], norms[i])
verts_3d_new = np.einsum('i,j->ij', ds, v1s[i]) + verts_3d_prev
# Add to face and vertex lists
new_faces = [[i + n, (i + 1) % n, i] for i in range(n)]
new_faces.extend([[(i - 1) % n + n, i + n, i] for i in range(n)])
# save faces and vertices into a sequence
faces.append(np.array(new_faces))
vertices.append(np.vstack((verts_3d, verts_3d_new)))
verts_3d = verts_3d_new
# do the main stack operation from a sequence to (n,3) arrays
# doing one vstack provides a substantial speedup by
# avoiding a bunch of temporary allocations
vertices, faces = util.append_faces(vertices, faces)
# Create final cap
x, y, z = util.generate_basis(path[-1] - path[-2])
vecs = verts_3d - path[-1]
coords = np.c_[np.einsum('ij,j->i', vecs, x),
np.einsum('ij,j->i', vecs, y)]
base_verts_2d, faces_2d = triangulate_polygon(Polygon(coords))
base_verts_3d = (np.einsum('i,j->ij', base_verts_2d[:, 0], x) +
np.einsum('i,j->ij', base_verts_2d[:, 1], y)) + path[-1]
faces = np.vstack((faces, faces_2d + len(vertices)))
vertices = np.vstack((vertices, base_verts_3d))
return Trimesh(vertices, faces)
def extrude_triangulation(vertices,
faces,
height,
transform=None,
**kwargs):
"""
Extrude a 2D triangulation into a watertight mesh.
Parameters
----------
vertices : (n, 2) float
2D vertices
faces : (m, 3) int
Triangle indexes of vertices
height : float
Distance to extrude triangulation
**kwargs : dict
Passed to Trimesh constructor
Returns
---------
mesh : trimesh.Trimesh
Mesh created from extrusion
"""
vertices = np.asanyarray(vertices, dtype=np.float64)
height = float(height)
faces = np.asanyarray(faces, dtype=np.int64)
if not util.is_shape(vertices, (-1, 2)):
raise ValueError('Vertices must be (n,2)')
if not util.is_shape(faces, (-1, 3)):
raise ValueError('Faces must be (n,3)')
if np.abs(height) < tol.merge:
raise ValueError('Height must be nonzero!')
# make sure triangulation winding is pointing up
normal_test = triangles.normals(
[util.stack_3D(vertices[faces[0]])])[0]
normal_dot = np.dot(normal_test,
[0.0, 0.0, np.sign(height)])[0]
# make sure the triangulation is aligned with the sign of
# the height we've been passed
if normal_dot < 0.0:
faces = np.fliplr(faces)
# stack the (n,3) faces into (3*n, 2) edges
edges = faces_to_edges(faces)
edges_sorted = np.sort(edges, axis=1)
# edges which only occur once are on the boundary of the polygon
# since the triangulation may have subdivided the boundary of the
# shapely polygon, we need to find it again
edges_unique = grouping.group_rows(
edges_sorted, require_count=1)
# (n, 2, 2) set of line segments (positions, not references)
boundary = vertices[edges[edges_unique]]
# we are creating two vertical triangles for every 2D line segment
# on the boundary of the 2D triangulation
vertical = np.tile(boundary.reshape((-1, 2)), 2).reshape((-1, 2))
vertical = np.column_stack((vertical,
np.tile([0, height, 0, height],
len(boundary))))
vertical_faces = np.tile([3, 1, 2, 2, 1, 0],
(len(boundary), 1))
vertical_faces += np.arange(len(boundary)).reshape((-1, 1)) * 4
vertical_faces = vertical_faces.reshape((-1, 3))
# stack the (n,2) vertices with zeros to make them (n, 3)
vertices_3D = util.stack_3D(vertices)
# a sequence of zero-indexed faces, which will then be appended
# with offsets to create the final mesh
faces_seq = [faces[:, ::-1],
faces.copy(),
vertical_faces]
vertices_seq = [vertices_3D,
vertices_3D.copy() + [0.0, 0, height],
vertical]
# append sequences into flat nicely indexed arrays
vertices, faces = util.append_faces(vertices_seq, faces_seq)
if transform is not None:
# apply transform here to avoid later bookkeeping
vertices = tf.transform_points(
vertices, transform)
# if the transform flips the winding flip faces back
# so that the normals will be facing outwards
if tf.flips_winding(transform):
# fliplr makes arrays non-contiguous
faces = np.ascontiguousarray(np.fliplr(faces))
# create mesh object with passed keywords
mesh = Trimesh(vertices=vertices,
faces=faces,
**kwargs)
# only check in strict mode (unit tests)
if tol.strict:
assert mesh.volume > 0.0
return mesh
def triangulate_polygon(polygon,
triangle_args=None,
engine=None,
**kwargs):
"""
Given a shapely polygon create a triangulation using a
python interface to `triangle.c` or mapbox-earcut.
> pip install triangle
> pip install mapbox_earcut
Parameters
---------
polygon : Shapely.geometry.Polygon
Polygon object to be triangulated
triangle_args : str or None
Passed to triangle.triangulate i.e: 'p', 'pq30'
engine : None or str
Any value other than 'earcut' will use `triangle`
Returns
--------------
vertices : (n, 2) float
Points in space
faces : (n, 3) int
Index of vertices that make up triangles
"""
if engine == 'earcut':
from mapbox_earcut import triangulate_float64
# get vertices as sequence where exterior is the first value
vertices = [np.array(polygon.exterior)]
vertices.extend(np.array(i) for i in polygon.interiors)
# record the index from the length of each vertex array
rings = np.cumsum([len(v) for v in vertices])
# stack vertices into (n, 2) float array
vertices = np.vstack(vertices)
# run triangulation
faces = triangulate_float64(vertices, rings).reshape(
(-1, 3)).astype(np.int64).reshape((-1, 3))
return vertices, faces
# do the import here for soft requirement
from triangle import triangulate
# set default triangulation arguments if not specified
if triangle_args is None:
triangle_args = 'p'
# turn the polygon in to vertices, segments, and hole points
arg = _polygon_to_kwargs(polygon)
# run the triangulation
result = triangulate(arg, triangle_args)
return result['vertices'], result['triangles']
def _polygon_to_kwargs(polygon):
"""
Given a shapely polygon generate the data to pass to
the triangle mesh generator
Parameters
---------
polygon : Shapely.geometry.Polygon
Input geometry
Returns
--------
result : dict
Has keys: vertices, segments, holes
"""
if not polygon.is_valid:
raise ValueError('invalid shapely polygon passed!')
def round_trip(start, length):
"""
Given a start index and length, create a series of (n, 2) edges which
create a closed traversal.
Examples
---------
start, length = 0, 3
returns: [(0,1), (1,2), (2,0)]
"""
tiled = np.tile(np.arange(start, start + length).reshape((-1, 1)), 2)
tiled = tiled.reshape(-1)[1:-1].reshape((-1, 2))
tiled = np.vstack((tiled, [tiled[-1][-1], tiled[0][0]]))
return tiled
def add_boundary(boundary, start):
# coords is an (n, 2) ordered list of points on the polygon boundary
# the first and last points are the same, and there are no
# guarantees on points not being duplicated (which will
# later cause meshpy/triangle to shit a brick)
coords = np.array(boundary.coords)
# find indices points which occur only once, and sort them
# to maintain order
unique = np.sort(grouping.unique_rows(coords)[0])
cleaned = coords[unique]
vertices.append(cleaned)
facets.append(round_trip(start, len(cleaned)))
# holes require points inside the region of the hole, which we find
# by creating a polygon from the cleaned boundary region, and then
# using a representative point. You could do things like take the mean of
# the points, but this is more robust (to things like concavity), if
# slower.
test = Polygon(cleaned)
holes.append(np.array(test.representative_point().coords)[0])
return len(cleaned)
# sequence of (n,2) points in space
vertices = collections.deque()
# sequence of (n,2) indices of vertices
facets = collections.deque()
# list of (2) vertices in interior of hole regions
holes = collections.deque()
start = add_boundary(polygon.exterior, 0)
for interior in polygon.interiors:
try:
start += add_boundary(interior, start)
except BaseException:
log.warning('invalid interior, continuing')
continue
# create clean (n,2) float array of vertices
# and (m, 2) int array of facets
# by stacking the sequence of (p,2) arrays
vertices = np.vstack(vertices)
facets = np.vstack(facets).tolist()
# shapely polygons can include a Z component
# strip it out for the triangulation
if vertices.shape[1] == 3:
vertices = vertices[:, :2]
result = {'vertices': vertices,
'segments': facets}
# holes in meshpy lingo are a (h, 2) list of (x,y) points
# which are inside the region of the hole
# we added a hole for the exterior, which we slice away here
holes = np.array(holes)[1:]
if len(holes) > 0:
result['holes'] = holes
return result
def box(extents=None, transform=None, **kwargs):
"""
Return a cuboid.
Parameters
------------
extents : float, or (3,) float
Edge lengths
transform: (4, 4) float
Transformation matrix
**kwargs:
passed to Trimesh to create box
Returns
------------
geometry : trimesh.Trimesh
Mesh of a cuboid
"""
# vertices of the cube
vertices = np.array([0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 1,
1, 1, 0, 0, 1, 0, 1, 1, 1, 0, 1, 1, 1],
order='C',
dtype=np.float64).reshape((-1, 3))
vertices -= 0.5
# resize cube based on passed extents
if extents is not None:
extents = np.asanyarray(extents, dtype=np.float64)
if extents.shape != (3,):
raise ValueError('Extents must be (3,)!')
vertices *= extents
else:
extents = np.asarray((1.0, 1.0, 1.0), dtype=np.float64)
# hardcoded face indices
faces = [1, 3, 0, 4, 1, 0, 0, 3, 2, 2, 4, 0, 1, 7, 3, 5, 1, 4,
5, 7, 1, 3, 7, 2, 6, 4, 2, 2, 7, 6, 6, 5, 4, 7, 5, 6]
faces = np.array(faces, order='C', dtype=np.int64).reshape((-1, 3))
face_normals = [-1, 0, 0, 0, -1, 0, -1, 0, 0, 0, 0, -1, 0, 0, 1, 0, -1,
0, 0, 0, 1, 0, 1, 0, 0, 0, -1, 0, 1, 0, 1, 0, 0, 1, 0, 0]
face_normals = np.asanyarray(face_normals,
order='C',
dtype=np.float64).reshape(-1, 3)
if 'metadata' not in kwargs:
kwargs['metadata'] = dict()
kwargs['metadata'].update(
{'shape': 'box',
'extents': extents})
box = Trimesh(vertices=vertices,
faces=faces,
face_normals=face_normals,
process=False,
**kwargs)
# do the transform here to preserve face normals
if transform is not None:
box.apply_transform(transform)
return box
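# Illustrative usage sketch (relies only on `np` and the function above):
#   b = box(extents=[2.0, 1.0, 0.5])
#   # a watertight cuboid's volume should equal the product of its extents
#   assert np.isclose(b.volume, 2.0 * 1.0 * 0.5)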
def icosahedron():
"""
Create an icosahedron, a 20 faced polyhedron.
Returns
-------------
ico : trimesh.Trimesh
      Icosahedron centered at the origin.
"""
t = (1.0 + 5.0**.5) / 2.0
vertices = [-1, t, 0, 1, t, 0, -1, -t, 0, 1, -t, 0, 0, -1, t, 0, 1, t,
0, -1, -t, 0, 1, -t, t, 0, -1, t, 0, 1, -t, 0, -1, -t, 0, 1]
faces = [0, 11, 5, 0, 5, 1, 0, 1, 7, 0, 7, 10, 0, 10, 11,
1, 5, 9, 5, 11, 4, 11, 10, 2, 10, 7, 6, 7, 1, 8,
3, 9, 4, 3, 4, 2, 3, 2, 6, 3, 6, 8, 3, 8, 9,
4, 9, 5, 2, 4, 11, 6, 2, 10, 8, 6, 7, 9, 8, 1]
# scale vertices so each vertex radius is 1.0
vertices = np.reshape(vertices, (-1, 3)) / np.sqrt(2.0 + t)
faces = np.reshape(faces, (-1, 3))
mesh = Trimesh(vertices=vertices,
faces=faces,
process=False)
return mesh
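# Illustrative usage sketch: an icosahedron has 12 unit-radius vertices
# and 20 triangular faces.
#   ico = icosahedron()
#   assert ico.vertices.shape == (12, 3) and ico.faces.shape == (20, 3)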
def icosphere(subdivisions=3, radius=1.0, color=None):
"""
    Create an icosphere centered at the origin.
Parameters
----------
subdivisions : int
How many times to subdivide the mesh.
Note that the number of faces will grow as function of
4 ** subdivisions, so you probably want to keep this under ~5
radius : float
Desired radius of sphere
color: (3,) float or uint8
Desired color of sphere
Returns
---------
ico : trimesh.Trimesh
Meshed sphere
"""
def refine_spherical():
vectors = ico.vertices
scalar = (vectors ** 2).sum(axis=1)**.5
unit = vectors / scalar.reshape((-1, 1))
offset = radius - scalar
ico.vertices += unit * offset.reshape((-1, 1))
ico = icosahedron()
ico._validate = False
for j in range(subdivisions):
ico = ico.subdivide()
refine_spherical()
ico._validate = True
if color is not None:
ico.visual.face_colors = color
ico.metadata.update({'shape': 'sphere',
'radius': radius})
return ico
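# Illustrative usage sketch: after refinement every vertex should lie on
# the requested radius.
#   s = icosphere(subdivisions=2, radius=3.0)
#   assert np.allclose(np.linalg.norm(s.vertices, axis=1), 3.0)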
def uv_sphere(radius=1.0,
count=[32, 32],
theta=None,
phi=None):
"""
Create a UV sphere (latitude + longitude) centered at the
origin. Roughly one order of magnitude faster than an
icosphere but slightly uglier.
Parameters
----------
radius : float
Radius of sphere
count : (2,) int
Number of latitude and longitude lines
theta : (n,) float
Optional theta angles in radians
phi : (n,) float
Optional phi angles in radians
Returns
----------
mesh : trimesh.Trimesh
Mesh of UV sphere with specified parameters
"""
count = np.array(count, dtype=np.int64)
count += np.mod(count, 2)
count[1] *= 2
# generate vertices on a sphere using spherical coordinates
if theta is None:
theta = np.linspace(0, np.pi, count[0])
if phi is None:
phi = np.linspace(0, np.pi * 2, count[1])[:-1]
spherical = np.dstack((np.tile(phi, (len(theta), 1)).T,
np.tile(theta, (len(phi), 1)))).reshape((-1, 2))
vertices = util.spherical_to_vector(spherical) * radius
# generate faces by creating a bunch of pie wedges
c = len(theta)
# a quad face as two triangles
pairs = np.array([[c, 0, 1],
[c + 1, c, 1]])
# increment both triangles in each quad face by the same offset
incrementor = np.tile(np.arange(c - 1), (2, 1)).T.reshape((-1, 1))
# create the faces for a single pie wedge of the sphere
strip = np.tile(pairs, (c - 1, 1))
strip += incrementor
# the first and last faces will be degenerate since the first
# and last vertex are identical in the two rows
strip = strip[1:-1]
# tile pie wedges into a sphere
faces = np.vstack([strip + (i * c) for i in range(len(phi))])
# poles are repeated in every strip, so a mask to merge them
mask = np.arange(len(vertices))
# the top pole are all the same vertex
mask[0::c] = 0
# the bottom pole are all the same vertex
mask[c - 1::c] = c - 1
# faces masked to remove the duplicated pole vertices
# and mod to wrap to fill in the last pie wedge
faces = mask[np.mod(faces, len(vertices))]
# we save a lot of time by not processing again
# since we did some bookkeeping mesh is watertight
mesh = Trimesh(vertices=vertices, faces=faces, process=False,
metadata={'shape': 'sphere',
'radius': radius})
return mesh
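# Illustrative usage sketch (count values are arbitrary): the pole merging
# above means the result should already be watertight.
#   s = uv_sphere(radius=2.0, count=[24, 24])
#   assert s.is_watertight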
def capsule(height=1.0,
radius=1.0,
count=[32, 32]):
"""
Create a mesh of a capsule, or a cylinder with hemispheric ends.
Parameters
----------
height : float
Center to center distance of two spheres
radius : float
Radius of the cylinder and hemispheres
count : (2,) int
Number of sections on latitude and longitude
Returns
----------
capsule : trimesh.Trimesh
Capsule geometry with:
- cylinder axis is along Z
- one hemisphere is centered at the origin
- other hemisphere is centered along the Z axis at height
"""
height = float(height)
radius = float(radius)
count = np.array(count, dtype=np.int64)
count += np.mod(count, 2)
# create a theta where there is a double band around the equator
# so that we can offset the top and bottom of a sphere to
# get a nicely meshed capsule
theta = np.linspace(0, np.pi, count[0])
center = np.clip(np.arctan(tol.merge / radius),
tol.merge, np.inf)
offset = np.array([-center, center]) + (np.pi / 2)
theta = np.insert(theta,
int(len(theta) / 2),
offset)
capsule = uv_sphere(radius=radius,
count=count,
theta=theta)
top = capsule.vertices[:, 2] > tol.zero
capsule.vertices[top] += [0, 0, height]
capsule.metadata.update({'shape': 'capsule',
'height': height,
'radius': radius})
return capsule
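# Illustrative usage sketch: the Z extent of the bounds should equal the
# center-to-center height plus one radius at each end.
#   c = capsule(height=2.0, radius=0.5)
#   assert np.isclose(c.bounds[1][2] - c.bounds[0][2], 2.0 + 2 * 0.5)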
def cone(radius,
height,
sections=None,
transform=None,
**kwargs):
"""
Create a mesh of a cone along Z centered at the origin.
Parameters
----------
radius : float
The radius of the cylinder
height : float
The height of the cylinder
sections : int or None
How many pie wedges per revolution
transform : (4, 4) float or None
Transform to apply after creation
**kwargs : dict
Passed to Trimesh constructor
Returns
----------
cone: trimesh.Trimesh
Resulting mesh of a cone
"""
# create the 2D outline of a cone
linestring = [[0, 0],
[radius, 0],
[0, height]]
# revolve the profile to create a cone
if 'metadata' not in kwargs:
kwargs['metadata'] = dict()
kwargs['metadata'].update(
{'shape': 'cone',
'radius': radius,
'height': height})
cone = revolve(linestring=linestring,
sections=sections,
transform=transform,
**kwargs)
return cone
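# Illustrative usage sketch: with many sections the mesh volume should
# approach the analytic value pi * r ** 2 * h / 3.
#   c = cone(radius=1.0, height=2.0, sections=128)
#   assert abs(c.volume - np.pi * 1.0 ** 2 * 2.0 / 3.0) < 1e-2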
def cylinder(radius,
height=None,
sections=None,
segment=None,
transform=None,
**kwargs):
"""
Create a mesh of a cylinder along Z centered at the origin.
Parameters
----------
radius : float
The radius of the cylinder
height : float or None
The height of the cylinder
sections : int or None
How many pie wedges should the cylinder have
segment : (2, 3) float
Endpoints of axis, overrides transform and height
transform : (4, 4) float
Transform to apply
**kwargs:
passed to Trimesh to create cylinder
Returns
----------
cylinder: trimesh.Trimesh
Resulting mesh of a cylinder
"""
if segment is not None:
# override transform and height with the segment
transform, height = _segment_to_cylinder(segment=segment)
if height is None:
raise ValueError('either `height` or `segment` must be passed!')
half = abs(float(height)) / 2.0
# create a profile to revolve
linestring = [[0, -half],
[radius, -half],
[radius, half],
[0, half]]
if 'metadata' not in kwargs:
kwargs['metadata'] = dict()
kwargs['metadata'].update(
{'shape': 'cylinder',
'height': height,
'radius': radius})
# generate cylinder through simple revolution
return revolve(linestring=linestring,
sections=sections,
transform=transform,
**kwargs)
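# Illustrative usage sketch: passing `segment` overrides transform and
# height, yielding a cylinder whose axis spans the two endpoints.
#   c = cylinder(radius=0.25, segment=[[0, 0, 0], [0, 0, 3.0]], sections=64)
#   assert np.isclose(c.bounds[1][2] - c.bounds[0][2], 3.0)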
def annulus(r_min,
r_max,
height=None,
sections=None,
transform=None,
segment=None,
**kwargs):
"""
Create a mesh of an annular cylinder along Z centered at the origin.
Parameters
----------
r_min : float
The inner radius of the annular cylinder
r_max : float
The outer radius of the annular cylinder
height : float
The height of the annular cylinder
sections : int or None
How many pie wedges should the annular cylinder have
transform : (4, 4) float or None
Transform to apply to move result from the origin
segment : None or (2, 3) float
Override transform and height with a line segment
**kwargs:
passed to Trimesh to create annulus
Returns
----------
annulus : trimesh.Trimesh
Mesh of annular cylinder
"""
if segment is not None:
# override transform and height with the segment if passed
transform, height = _segment_to_cylinder(segment=segment)
if height is None:
raise ValueError('either `height` or `segment` must be passed!')
r_min = abs(float(r_min))
# if center radius is zero this is a cylinder
if r_min < tol.merge:
return cylinder(radius=r_max,
height=height,
sections=sections,
transform=transform)
r_max = abs(float(r_max))
# we're going to center at XY plane so take half the height
half = abs(float(height)) / 2.0
# create counter-clockwise rectangle
linestring = [[r_min, -half],
[r_max, -half],
[r_max, half],
[r_min, half],
[r_min, -half]]
if 'metadata' not in kwargs:
kwargs['metadata'] = dict()
kwargs['metadata'].update(
{'shape': 'annulus',
'r_min': r_min,
'r_max': r_max,
'height': height})
# revolve the curve
annulus = revolve(linestring=linestring,
sections=sections,
transform=transform,
**kwargs)
return annulus
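# Illustrative usage sketch: with many sections the volume should approach
# pi * (r_max ** 2 - r_min ** 2) * height.
#   a = annulus(r_min=0.5, r_max=1.0, height=2.0, sections=128)
#   assert abs(a.volume - np.pi * (1.0 ** 2 - 0.5 ** 2) * 2.0) < 1e-2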
def _segment_to_cylinder(segment):
"""
Convert a line segment to a transform and height for a cylinder
or cylinder-like primitive.
Parameters
-----------
segment : (2, 3) float
3D line segment in space
Returns
-----------
transform : (4, 4) float
Matrix to move a Z-extruded origin cylinder to segment
height : float
The height of the cylinder needed
"""
segment = np.asanyarray(segment, dtype=np.float64)
if segment.shape != (2, 3):
raise ValueError('segment must be 2 3D points!')
vector = segment[1] - segment[0]
# override height with segment length
height = np.linalg.norm(vector)
# point in middle of line
midpoint = segment[0] + (vector * 0.5)
# align Z with our desired direction
rotation = align_vectors([0, 0, 1], vector)
# translate to midpoint of segment
translation = tf.translation_matrix(midpoint)
# compound the rotation and translation
transform = np.dot(translation, rotation)
return transform, height
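# Illustrative usage sketch: for a vertical segment the height is the
# segment length and the transform translates to its midpoint.
#   T, h = _segment_to_cylinder([[0, 0, 0], [0, 0, 5.0]])
#   assert np.isclose(h, 5.0) and np.allclose(T[:3, 3], [0, 0, 2.5])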
def random_soup(face_count=100):
"""
Return random triangles as a Trimesh
Parameters
-----------
face_count : int
Number of faces desired in mesh
Returns
-----------
soup : trimesh.Trimesh
Geometry with face_count random faces
"""
vertices = np.random.random((face_count * 3, 3)) - 0.5
faces = np.arange(face_count * 3).reshape((-1, 3))
soup = Trimesh(vertices=vertices, faces=faces)
return soup
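# Illustrative usage sketch: every face gets its own three vertices, so the
# "soup" has no shared topology and is not watertight.
#   s = random_soup(face_count=10)
#   assert len(s.faces) == 10 and not s.is_watertight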
def axis(origin_size=0.04,
transform=None,
origin_color=None,
axis_radius=None,
axis_length=None):
"""
Return an XYZ axis marker as a Trimesh, which represents position
and orientation. If you set the origin size the other parameters
will be set relative to it.
Parameters
----------
transform : (4, 4) float
Transformation matrix
origin_size : float
Radius of sphere that represents the origin
    origin_color : (3,) or (4,) float or uint8
Color of the origin
axis_radius : float
Radius of cylinder that represents x, y, z axis
axis_length: float
Length of cylinder that represents x, y, z axis
Returns
-------
marker : trimesh.Trimesh
Mesh geometry of axis indicators
"""
# the size of the ball representing the origin
origin_size = float(origin_size)
    # set the transform and derive the remaining parameters
    # from the origin size if they were not specified
if transform is None:
transform = np.eye(4)
if origin_color is None:
origin_color = [255, 255, 255, 255]
if axis_radius is None:
axis_radius = origin_size / 5.0
if axis_length is None:
axis_length = origin_size * 10.0
# generate a ball for the origin
axis_origin = uv_sphere(radius=origin_size,
count=[10, 10])
axis_origin.apply_transform(transform)
# apply color to the origin ball
axis_origin.visual.face_colors = origin_color
# create the cylinder for the z-axis
translation = tf.translation_matrix(
[0, 0, axis_length / 2])
z_axis = cylinder(
radius=axis_radius,
height=axis_length,
transform=transform.dot(translation))
# XYZ->RGB, Z is blue
z_axis.visual.face_colors = [0, 0, 255]
# create the cylinder for the y-axis
translation = tf.translation_matrix(
[0, 0, axis_length / 2])
rotation = tf.rotation_matrix(np.radians(-90),
[1, 0, 0])
y_axis = cylinder(
radius=axis_radius,
height=axis_length,
transform=transform.dot(rotation).dot(translation))
# XYZ->RGB, Y is green
y_axis.visual.face_colors = [0, 255, 0]
# create the cylinder for the x-axis
translation = tf.translation_matrix(
[0, 0, axis_length / 2])
rotation = tf.rotation_matrix(np.radians(90),
[0, 1, 0])
x_axis = cylinder(
radius=axis_radius,
height=axis_length,
transform=transform.dot(rotation).dot(translation))
# XYZ->RGB, X is red
x_axis.visual.face_colors = [255, 0, 0]
# append the sphere and three cylinders
marker = util.concatenate([axis_origin,
x_axis,
y_axis,
z_axis])
return marker
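# Illustrative usage sketch: the marker is a single concatenated mesh
# (origin ball plus three colored axis cylinders); `scene` below is a
# hypothetical trimesh.Scene, not defined in this module.
#   m = axis(origin_size=0.02)
#   scene.add_geometry(m)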
def camera_marker(camera,
marker_height=0.4,
origin_size=None):
"""
Create a visual marker for a camera object, including an axis and FOV.
Parameters
---------------
camera : trimesh.scene.Camera
Camera object with FOV and transform defined
marker_height : float
How far along the camera Z should FOV indicators be
origin_size : float
Sphere radius of the origin (default: marker_height / 10.0)
Returns
------------
meshes : list
Contains Trimesh and Path3D objects which can be visualized
"""
# create sane origin size from marker height
if origin_size is None:
origin_size = marker_height / 10.0
# append the visualizations to an array
meshes = [axis(origin_size=origin_size)]
try:
# path is a soft dependency
from .path.exchange.load import load_path
except ImportError:
# they probably don't have shapely installed
log.warning('unable to create FOV visualization!',
exc_info=True)
return meshes
# calculate vertices from camera FOV angles
x = marker_height * np.tan(np.deg2rad(camera.fov[0]) / 2.0)
y = marker_height * np.tan(np.deg2rad(camera.fov[1]) / 2.0)
z = marker_height
# combine the points into the vertices of an FOV visualization
points = np.array(
[(0, 0, 0),
(-x, -y, z),
(x, -y, z),
(x, y, z),
(-x, y, z)],
dtype=float)
# create line segments for the FOV visualization
# a segment from the origin to each bound of the FOV
segments = np.column_stack(
(np.zeros_like(points), points)).reshape(
(-1, 3))
# add a loop for the outside of the FOV then reshape
# the whole thing into multiple line segments
segments = np.vstack((segments,
points[[1, 2,
2, 3,
3, 4,
4, 1]])).reshape((-1, 2, 3))
# add a single Path3D object for all line segments
meshes.append(load_path(segments))
return meshes
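# Illustrative usage sketch; the Camera constructor arguments are an
# assumption about trimesh.scene.Camera, not guaranteed by this module.
#   from trimesh.scene import Camera
#   cam = Camera(resolution=(320, 240), fov=(60.0, 45.0))
#   geom = camera_marker(cam, marker_height=0.2)
#   # geom[0] is the axis mesh; geom[1] (if shapely is available) is the FOV wireframe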
def truncated_prisms(tris, origin=None, normal=None):
"""
Return a mesh consisting of multiple watertight prisms below
a list of triangles, truncated by a specified plane.
Parameters
-------------
    tris : (n, 3, 3) float
Triangles in space
origin : None or (3,) float
Origin of truncation plane
normal : None or (3,) float
Unit normal vector of truncation plane
Returns
-----------
mesh : trimesh.Trimesh
Triangular mesh
"""
if origin is None:
transform = np.eye(4)
else:
transform = plane_transform(origin=origin, normal=normal)
# transform the triangles to the specified plane
transformed = tf.transform_points(
tris.reshape((-1, 3)), transform).reshape((-1, 9))
# stack triangles such that every other one is repeated
vs = np.column_stack((transformed, transformed)).reshape((-1, 3, 3))
# set the Z value of the second triangle to zero
vs[1::2, :, 2] = 0
# reshape triangles to a flat array of points and transform back to original frame
vertices = tf.transform_points(
vs.reshape((-1, 3)), matrix=np.linalg.inv(transform))
# face indexes for a *single* truncated triangular prism
f = np.array([[2, 1, 0],
[3, 4, 5],
[0, 1, 4],
[1, 2, 5],
[2, 0, 3],
[4, 3, 0],
[5, 4, 1],
[3, 5, 2]])
# find the projection of each triangle with the normal vector
cross = np.dot([0, 0, 1], triangles.cross(transformed.reshape((-1, 3, 3))).T)
# stack faces into one prism per triangle
f_seq = np.tile(f, (len(transformed), 1)).reshape((-1, len(f), 3))
# if the normal of the triangle was positive flip the winding
f_seq[cross > 0] = np.fliplr(f)
# offset stacked faces to create correct indices
faces = (f_seq + (np.arange(len(f_seq)) * 6).reshape((-1, 1, 1))).reshape((-1, 3))
# create a mesh from the data
mesh = Trimesh(vertices=vertices, faces=faces, process=False)
return mesh
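# Illustrative usage sketch: a single triangle hovering at z=1, truncated by
# the default z=0 plane, should become one watertight triangular prism.
#   t = np.array([[[0, 0, 1.0], [1, 0, 1.0], [0, 1, 1.0]]])
#   prisms = truncated_prisms(t)
#   assert prisms.is_watertight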
| mit | 7,771,530,752,950,177,000 | 30.973228 | 86 | 0.580727 | false |
stormvirux/vturra-cli | vturra/asys.py | 1 | 1936 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
# from scipy import stats
# import statsmodels.api as sm
# from numpy.random import randn
import matplotlib as mpl
# import seaborn as sns
# sns.set_color_palette("deep", desat=.6)
mpl.rc("figure", figsize=(8, 4))
def Compavg():
data=Total()
markMax=[]
markAvg=[]
N = 5
ind = np.arange(N)
width = 0.35
fig = plt.figure()
ax = fig.add_subplot(111)
markMax.extend((data["Total"].max(),data["Total.1"].max(),data["Total.2"].max(),data["Total.3"].max(),data["Total.4"].max()))
markAvg.extend((data["Total"].mean(),data["Total.1"].mean(),data["Total.2"].mean(),data["Total.3"].mean(),data["Total.4"].mean()))
rects1 = ax.bar(ind, markMax, width, color='black')
rects2 = ax.bar(ind+width, markAvg, width, color='green')
ax.set_xlim(-width,len(ind)+width)
ax.set_ylim(0,120)
ax.set_ylabel('Marks')
ax.set_title('Max, Mean and Your Marks')
xTickMarks = ['Subject'+str(i) for i in range(1,6)]
ax.set_xticks(ind+width)
xtickNames = ax.set_xticklabels(xTickMarks)
plt.setp(xtickNames, rotation=10, fontsize=10)
ax.legend( (rects1[0], rects2[0]), ('Max', 'Mean') )
plt.show()
def compSub():
# max_data = np.r_[data["Total"]].max()
# bins = np.linspace(0, max_data, max_data + 1)
data=Total()
plt.hist(data['Total'],linewidth=0, alpha=.7)
plt.hist(data['Total.1'],linewidth=0,alpha=.7)
plt.hist(data['Total.2'],linewidth=0,alpha=.7)
plt.hist(data['Total.3'],linewidth=0,alpha=.7)
plt.hist(data['Total.4'],linewidth=0,alpha=.7)
plt.title("Total marks Histogram")
plt.xlabel("Value")
plt.ylabel("Frequency")
plt.show()
def Total():
data=pd.read_csv("output10cs.csv")
df3=data[['Total','Total.1','Total.2','Total.3','Total.4','Total.5','Total.6','Total.7']]
data["Main Total"]=df3.sum(axis=1)
data = data.dropna()
data.reset_index(drop=True)
return data
#compSub()
# Compavg()
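# Illustrative usage sketch (assumes "output10cs.csv" is present with the
# Total ... Total.7 columns referenced above):
#   data = Total()
#   print(data["Main Total"].describe())
#   Compavg()  # grouped bar chart of max vs. mean totals per subject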
| mit | 5,560,022,484,515,166,000 | 29.730159 | 131 | 0.66064 | false |
hall1467/wikidata_usage_tracking | wbc_usage/utilities/determine_wikis.py | 1 | 2123 | """
Prints all wikis to stdout.
Usage:
determine_wikis (-h|--help)
determine_wikis [--debug]
[--verbose]
Options:
-h, --help This help message is printed
--debug Print debug logging to stderr
    --verbose           Print progress information to stderr
"""
import logging
import mwapi
import sys
import json
import docopt
logger = logging.getLogger(__name__)
def main(argv=None):
args = docopt.docopt(__doc__, argv=argv)
logging.basicConfig(
level=logging.WARNING if not args['--debug'] else logging.DEBUG,
format='%(asctime)s %(levelname)s:%(name)s -- %(message)s'
)
verbose = args['--verbose']
run(verbose)
# Contacts the MediaWiki API to return the list of wikis
# Code credit: https://github.com/WikiEducationFoundation/academic_classification/blob/master/pageclassifier/revgather.py
def run(verbose):
session = mwapi.Session(
'https://en.wikipedia.org',
user_agent='hall1467'
)
results = session.get(
action='sitematrix'
)
for database_dictionary in extract_query_results(results):
if verbose:
sys.stderr.write("Printing json for the database: " +
database_dictionary['dbname'] + "\n")
sys.stderr.flush()
sys.stdout.write(json.dumps(database_dictionary) + "\n")
# Code credit: https://github.com/WikiEducationFoundation/academic_classification/blob/master/pageclassifier/revgather.py
def extract_query_results(results):
results = results['sitematrix']
for entry in results:
if entry == 'count':
continue
if entry == 'specials':
for special_entry in results[entry]:
yield ({
"dbname" : special_entry['dbname'],
"wikiurl" : special_entry['url']
})
continue
for wiki in results[entry]['site']:
yield {
"dbname" : wiki['dbname'],
"wikiurl" : wiki['url']
}
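# Illustrative invocation sketch (assumes the module calls main() when run
# as a script; the output filename is arbitrary):
#   python determine_wikis.py --verbose > wikis.jsonl
# each stdout line is a JSON object such as
#   {"dbname": "enwiki", "wikiurl": "https://en.wikipedia.org"}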
| mit | 7,319,689,185,539,322,000 | 25.5375 | 121 | 0.563354 | false |
simod/geonode | geonode/layers/views.py | 1 | 58343 | # -*- coding: utf-8 -*-
#########################################################################
#
# Copyright (C) 2016 OSGeo
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#########################################################################
import os
import sys
import logging
import shutil
import base64
import traceback
import uuid
import decimal
import re
from django.db.models import Q
from celery.exceptions import TimeoutError
from django.contrib.gis.geos import GEOSGeometry
from django.template.response import TemplateResponse
from requests import Request
from itertools import chain
from six import string_types
from owslib.wfs import WebFeatureService
from owslib.feature.schema import get_schema
from guardian.shortcuts import get_perms
from django.contrib import messages
from django.contrib.auth.decorators import login_required
from django.contrib.auth import get_user_model
from django.core.urlresolvers import reverse
from django.http import HttpResponse, HttpResponseRedirect
from django.shortcuts import render
from django.conf import settings
from django.utils.translation import ugettext as _
from geonode import geoserver, qgis_server
try:
import json
except ImportError:
from django.utils import simplejson as json
from django.utils.html import escape
from django.template.defaultfilters import slugify
from django.forms.models import inlineformset_factory
from django.db import transaction
from django.db.models import F
from django.forms.utils import ErrorList
from geonode.services.models import Service
from geonode.layers.forms import LayerForm, LayerUploadForm, NewLayerUploadForm, LayerAttributeForm
from geonode.base.forms import CategoryForm, TKeywordForm
from geonode.layers.models import Layer, Attribute, UploadSession
from geonode.base.enumerations import CHARSETS
from geonode.base.models import TopicCategory
from geonode.groups.models import GroupProfile
from geonode.utils import (resolve_object,
default_map_config,
check_ogc_backend,
llbbox_to_mercator,
bbox_to_projection,
GXPLayer,
GXPMap)
from geonode.layers.utils import file_upload, is_raster, is_vector
from geonode.people.forms import ProfileForm, PocForm
from geonode.security.views import _perms_info_json
from geonode.documents.models import get_related_documents
from geonode.utils import build_social_links
from geonode.base.views import batch_modify
from geonode.base.models import Thesaurus
from geonode.maps.models import Map
from geonode.geoserver.helpers import (gs_catalog,
ogc_server_settings,
set_layer_style) # cascading_delete
from .tasks import delete_layer
if check_ogc_backend(geoserver.BACKEND_PACKAGE):
from geonode.geoserver.helpers import _render_thumbnail
if check_ogc_backend(qgis_server.BACKEND_PACKAGE):
from geonode.qgis_server.models import QGISServerLayer
CONTEXT_LOG_FILE = ogc_server_settings.LOG_FILE
logger = logging.getLogger("geonode.layers.views")
DEFAULT_SEARCH_BATCH_SIZE = 10
MAX_SEARCH_BATCH_SIZE = 25
GENERIC_UPLOAD_ERROR = _("There was an error while attempting to upload your data. \
Please try again, or contact an administrator if the problem continues.")
METADATA_UPLOADED_PRESERVE_ERROR = _("Note: this layer's original metadata was \
populated and preserved by importing a metadata XML file. This metadata cannot be edited.")
_PERMISSION_MSG_DELETE = _("You are not permitted to delete this layer")
_PERMISSION_MSG_GENERIC = _('You do not have permissions for this layer.')
_PERMISSION_MSG_MODIFY = _("You are not permitted to modify this layer")
_PERMISSION_MSG_METADATA = _(
"You are not permitted to modify this layer's metadata")
_PERMISSION_MSG_VIEW = _("You are not permitted to view this layer")
def log_snippet(log_file):
if not log_file or not os.path.isfile(log_file):
return "No log file at %s" % log_file
with open(log_file, "r") as f:
f.seek(0, 2) # Seek @ EOF
fsize = f.tell() # Get Size
f.seek(max(fsize - 10024, 0), 0) # Set pos @ last n chars
return f.read()
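# Illustrative usage sketch (the log path is hypothetical):
#   tail = log_snippet('/var/log/geoserver/geoserver.log')
#   # returns roughly the last 10 KB of the file, or a "No log file at ..." message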
def _resolve_layer(request, alternate, permission='base.view_resourcebase',
msg=_PERMISSION_MSG_GENERIC, **kwargs):
"""
Resolve the layer by the provided typename (which may include service name) and check the optional permission.
"""
service_typename = alternate.split(":", 1)
if Service.objects.filter(name=service_typename[0]).exists():
service = Service.objects.filter(name=service_typename[0])
return resolve_object(
request,
Layer,
{
'alternate': service_typename[1] if service[0].method != "C" else alternate},
permission=permission,
permission_msg=msg,
**kwargs)
else:
return resolve_object(request,
Layer,
{'alternate': alternate},
permission=permission,
permission_msg=msg,
**kwargs)
# Basic Layer Views #
@login_required
def layer_upload(request, template='upload/layer_upload.html'):
if request.method == 'GET':
mosaics = Layer.objects.filter(is_mosaic=True).order_by('name')
ctx = {
'mosaics': mosaics,
'charsets': CHARSETS,
'is_layer': True,
}
return render(request, template, context=ctx)
elif request.method == 'POST':
name = None
form = NewLayerUploadForm(request.POST, request.FILES)
tempdir = None
saved_layer = None
errormsgs = []
out = {'success': False}
if form.is_valid():
title = form.cleaned_data["layer_title"]
# Replace dots in filename - GeoServer REST API upload bug
# and avoid any other invalid characters.
# Use the title if possible, otherwise default to the filename
if title is not None and len(title) > 0:
name_base = title
else:
name_base, __ = os.path.splitext(
form.cleaned_data["base_file"].name)
title = slugify(name_base.replace(".", "_"))
name = slugify(name_base.replace(".", "_"))
if form.cleaned_data["abstract"] is not None and len(
form.cleaned_data["abstract"]) > 0:
abstract = form.cleaned_data["abstract"]
else:
abstract = "No abstract provided."
try:
# Moved this inside the try/except block because it can raise
# exceptions when unicode characters are present.
# This should be followed up in upstream Django.
tempdir, base_file = form.write_files()
if not form.cleaned_data["style_upload_form"]:
saved_layer = file_upload(
base_file,
name=name,
user=request.user,
overwrite=False,
charset=form.cleaned_data["charset"],
abstract=abstract,
title=title,
metadata_uploaded_preserve=form.cleaned_data[
"metadata_uploaded_preserve"],
metadata_upload_form=form.cleaned_data["metadata_upload_form"])
else:
saved_layer = Layer.objects.get(alternate=title)
if not saved_layer:
msg = 'Failed to process. Could not find matching layer.'
raise Exception(msg)
sld = open(base_file).read()
set_layer_style(saved_layer, title, base_file, sld)
except Exception as e:
exception_type, error, tb = sys.exc_info()
logger.exception(e)
out['success'] = False
try:
out['errors'] = u''.join(error).encode('utf-8')
except BaseException:
try:
out['errors'] = str(error)
except BaseException:
try:
tb = traceback.format_exc()
out['errors'] = tb
except BaseException:
pass
# Assign the error message to the latest UploadSession from
# that user.
latest_uploads = UploadSession.objects.filter(
user=request.user).order_by('-date')
if latest_uploads.count() > 0:
upload_session = latest_uploads[0]
upload_session.error = str(error)
upload_session.traceback = traceback.format_exc(tb)
upload_session.context = log_snippet(CONTEXT_LOG_FILE)
upload_session.save()
out['traceback'] = upload_session.traceback
out['context'] = upload_session.context
out['upload_session'] = upload_session.id
else:
out['success'] = True
if hasattr(saved_layer, 'info'):
out['info'] = saved_layer.info
out['url'] = reverse(
'layer_detail', args=[
saved_layer.service_typename])
if hasattr(saved_layer, 'bbox_string'):
out['bbox'] = saved_layer.bbox_string
if hasattr(saved_layer, 'srid'):
out['crs'] = {
'type': 'name',
'properties': saved_layer.srid
}
out['ogc_backend'] = settings.OGC_SERVER['default']['BACKEND']
upload_session = saved_layer.upload_session
if upload_session:
upload_session.processed = True
upload_session.save()
permissions = form.cleaned_data["permissions"]
if permissions is not None and len(permissions.keys()) > 0:
saved_layer.set_permissions(permissions)
saved_layer.handle_moderated_uploads()
finally:
if tempdir is not None:
shutil.rmtree(tempdir)
else:
for e in form.errors.values():
errormsgs.extend([escape(v) for v in e])
out['errors'] = form.errors
out['errormsgs'] = errormsgs
if out['success']:
status_code = 200
else:
status_code = 400
if settings.MONITORING_ENABLED:
if saved_layer or name:
layer_name = saved_layer.alternate if hasattr(
saved_layer, 'alternate') else name
request.add_resource('layer', layer_name)
return HttpResponse(
json.dumps(out),
content_type='application/json',
status=status_code)
def layer_detail(request, layername, template='layers/layer_detail.html'):
layer = _resolve_layer(
request,
layername,
'base.view_resourcebase',
_PERMISSION_MSG_VIEW)
# assert False, str(layer_bbox)
config = layer.attribute_config()
# Add required parameters for GXP lazy-loading
layer_bbox = layer.bbox[0:4]
bbox = layer_bbox[:]
bbox[0] = float(layer_bbox[0])
bbox[1] = float(layer_bbox[2])
bbox[2] = float(layer_bbox[1])
bbox[3] = float(layer_bbox[3])
def decimal_encode(bbox):
import decimal
_bbox = []
for o in [float(coord) for coord in bbox]:
if isinstance(o, decimal.Decimal):
                o = float(o)
_bbox.append(o)
return _bbox
def sld_definition(style):
from urllib import quote
_sld = {
"title": style.sld_title or style.name,
"legend": {
"height": "40",
"width": "22",
"href": layer.ows_url +
"?service=wms&request=GetLegendGraphic&format=image%2Fpng&width=20&height=20&layer=" +
quote(layer.service_typename, safe=''),
"format": "image/png"
},
"name": style.name
}
return _sld
if hasattr(layer, 'srid'):
config['crs'] = {
'type': 'name',
'properties': layer.srid
}
# Add required parameters for GXP lazy-loading
attribution = "%s %s" % (layer.owner.first_name,
layer.owner.last_name) if layer.owner.first_name or layer.owner.last_name else str(
layer.owner)
srs = getattr(settings, 'DEFAULT_MAP_CRS', 'EPSG:3857')
srs_srid = int(srs.split(":")[1]) if srs != "EPSG:900913" else 3857
config["attribution"] = "<span class='gx-attribution-title'>%s</span>" % attribution
config["format"] = getattr(
settings, 'DEFAULT_LAYER_FORMAT', 'image/png')
config["title"] = layer.title
config["wrapDateLine"] = True
config["visibility"] = True
config["srs"] = srs
config["bbox"] = decimal_encode(
bbox_to_projection([float(coord) for coord in layer_bbox] + [layer.srid, ],
target_srid=int(srs.split(":")[1]))[:4])
config["capability"] = {
"abstract": layer.abstract,
"name": layer.alternate,
"title": layer.title,
"queryable": True,
"storeType": layer.storeType,
"bbox": {
layer.srid: {
"srs": layer.srid,
"bbox": decimal_encode(bbox)
},
srs: {
"srs": srs,
"bbox": decimal_encode(
bbox_to_projection([float(coord) for coord in layer_bbox] + [layer.srid, ],
target_srid=srs_srid)[:4])
},
"EPSG:4326": {
"srs": "EPSG:4326",
"bbox": decimal_encode(bbox) if layer.srid == 'EPSG:4326' else
decimal_encode(bbox_to_projection(
[float(coord) for coord in layer_bbox] + [layer.srid, ], target_srid=4326)[:4])
},
"EPSG:900913": {
"srs": "EPSG:900913",
"bbox": decimal_encode(bbox) if layer.srid == 'EPSG:900913' else
decimal_encode(bbox_to_projection(
[float(coord) for coord in layer_bbox] + [layer.srid, ], target_srid=3857)[:4])
}
},
"srs": {
srs: True
},
"formats": ["image/png", "application/atom xml", "application/atom+xml", "application/json;type=utfgrid",
"application/openlayers", "application/pdf", "application/rss xml", "application/rss+xml",
"application/vnd.google-earth.kml", "application/vnd.google-earth.kml xml",
"application/vnd.google-earth.kml+xml", "application/vnd.google-earth.kml+xml;mode=networklink",
"application/vnd.google-earth.kmz", "application/vnd.google-earth.kmz xml",
"application/vnd.google-earth.kmz+xml", "application/vnd.google-earth.kmz;mode=networklink",
"atom", "image/geotiff", "image/geotiff8", "image/gif", "image/gif;subtype=animated",
"image/jpeg", "image/png8", "image/png; mode=8bit", "image/svg", "image/svg xml",
"image/svg+xml", "image/tiff", "image/tiff8", "image/vnd.jpeg-png",
"kml", "kmz", "openlayers", "rss", "text/html; subtype=openlayers", "utfgrid"],
"attribution": {
"title": attribution
},
"infoFormats": ["text/plain", "application/vnd.ogc.gml", "text/xml", "application/vnd.ogc.gml/3.1.1",
"text/xml; subtype=gml/3.1.1", "text/html", "application/json"],
"styles": [sld_definition(s) for s in layer.styles.all()],
"prefix": layer.alternate.split(":")[0] if ":" in layer.alternate else "",
"keywords": [k.name for k in layer.keywords.all()] if layer.keywords else [],
"llbbox": decimal_encode(bbox) if layer.srid == 'EPSG:4326' else
decimal_encode(bbox_to_projection(
[float(coord) for coord in layer_bbox] + [layer.srid, ], target_srid=4326)[:4])
}
all_times = None
if check_ogc_backend(geoserver.BACKEND_PACKAGE):
from geonode.geoserver.views import get_capabilities
workspace, layername = layer.alternate.split(
":") if ":" in layer.alternate else (None, layer.alternate)
# WARNING Please make sure to have enabled DJANGO CACHE as per
# https://docs.djangoproject.com/en/2.0/topics/cache/#filesystem-caching
wms_capabilities_resp = get_capabilities(
request, layer.id, tolerant=True)
if wms_capabilities_resp.status_code >= 200 and wms_capabilities_resp.status_code < 400:
wms_capabilities = wms_capabilities_resp.getvalue()
if wms_capabilities:
import xml.etree.ElementTree as ET
e = ET.fromstring(wms_capabilities)
for atype in e.findall(
"./[Name='%s']/Extent[@name='time']" % (layername)):
dim_name = atype.get('name')
if dim_name:
dim_name = str(dim_name).lower()
if dim_name == 'time':
dim_values = atype.text
if dim_values:
all_times = dim_values.split(",")
break
if all_times:
config["capability"]["dimensions"] = {
"time": {
"name": "time",
"units": "ISO8601",
"unitsymbol": None,
"nearestVal": False,
"multipleVal": False,
"current": False,
"default": "current",
"values": all_times
}
}
if layer.storeType == "remoteStore":
service = layer.remote_service
source_params = {}
if service.type in ('REST_MAP', 'REST_IMG'):
source_params = {
"ptype": service.ptype,
"remote": True,
"url": service.service_url,
"name": service.name,
"title": "[R] %s" % service.title}
maplayer = GXPLayer(
name=layer.alternate,
ows_url=layer.ows_url,
layer_params=json.dumps(config),
source_params=json.dumps(source_params)
)
else:
maplayer = GXPLayer(
name=layer.alternate,
ows_url=layer.ows_url,
layer_params=json.dumps(config)
)
# Update count for popularity ranking,
    # but do not include admins or resource owners
layer.view_count_up(request.user)
# center/zoom don't matter; the viewer will center on the layer bounds
map_obj = GXPMap(
sender=Layer,
projection=getattr(
settings,
'DEFAULT_MAP_CRS',
'EPSG:3857'))
NON_WMS_BASE_LAYERS = [
la for la in default_map_config(request)[1] if la.ows_url is None]
metadata = layer.link_set.metadata().filter(
name__in=settings.DOWNLOAD_FORMATS_METADATA)
granules = None
all_granules = None
all_times = None
filter = None
if layer.is_mosaic:
try:
cat = gs_catalog
cat._cache.clear()
store = cat.get_store(layer.name)
coverages = cat.mosaic_coverages(store)
filter = None
try:
if request.GET["filter"]:
filter = request.GET["filter"]
except BaseException:
pass
offset = 10 * (request.page - 1)
granules = cat.mosaic_granules(
coverages['coverages']['coverage'][0]['name'],
store,
limit=10,
offset=offset,
filter=filter)
all_granules = cat.mosaic_granules(
coverages['coverages']['coverage'][0]['name'], store, filter=filter)
except BaseException:
granules = {"features": []}
all_granules = {"features": []}
if check_ogc_backend(geoserver.BACKEND_PACKAGE):
from geonode.geoserver.views import get_capabilities
workspace, layername = layer.alternate.split(
":") if ":" in layer.alternate else (None, layer.alternate)
# WARNING Please make sure to have enabled DJANGO CACHE as per
# https://docs.djangoproject.com/en/2.0/topics/cache/#filesystem-caching
wms_capabilities_resp = get_capabilities(
request, layer.id, tolerant=True)
if wms_capabilities_resp.status_code >= 200 and wms_capabilities_resp.status_code < 400:
wms_capabilities = wms_capabilities_resp.getvalue()
if wms_capabilities:
import xml.etree.ElementTree as ET
e = ET.fromstring(wms_capabilities)
for atype in e.findall(
"./[Name='%s']/Extent[@name='time']" % (layername)):
dim_name = atype.get('name')
if dim_name:
dim_name = str(dim_name).lower()
if dim_name == 'time':
dim_values = atype.text
if dim_values:
all_times = dim_values.split(",")
break
group = None
if layer.group:
try:
group = GroupProfile.objects.get(slug=layer.group.name)
except GroupProfile.DoesNotExist:
group = None
# a flag to be used for qgis server
show_popup = False
if 'show_popup' in request.GET and request.GET["show_popup"]:
show_popup = True
context_dict = {
'resource': layer,
'group': group,
'perms_list': get_perms(request.user, layer.get_self_resource()),
"permissions_json": _perms_info_json(layer),
"documents": get_related_documents(layer),
"metadata": metadata,
"is_layer": True,
"wps_enabled": settings.OGC_SERVER['default']['WPS_ENABLED'],
"granules": granules,
"all_granules": all_granules,
"all_times": all_times,
"show_popup": show_popup,
"filter": filter,
"storeType": layer.storeType,
# "online": (layer.remote_service.probe == 200) if layer.storeType == "remoteStore" else True
}
if request and 'access_token' in request.session:
access_token = request.session['access_token']
else:
u = uuid.uuid1()
access_token = u.hex
context_dict["viewer"] = json.dumps(map_obj.viewer_json(
request, * (NON_WMS_BASE_LAYERS + [maplayer])))
context_dict["preview"] = getattr(
settings,
'GEONODE_CLIENT_LAYER_PREVIEW_LIBRARY',
'geoext')
context_dict["crs"] = getattr(
settings,
'DEFAULT_MAP_CRS',
'EPSG:3857')
# provide bbox in EPSG:4326 for leaflet
if context_dict["preview"] == 'leaflet':
srid, wkt = layer.geographic_bounding_box.split(';')
srid = re.findall(r'\d+', srid)
geom = GEOSGeometry(wkt, srid=int(srid[0]))
geom.transform(4326)
context_dict["layer_bbox"] = ','.join([str(c) for c in geom.extent])
if layer.storeType == 'dataStore':
links = layer.link_set.download().filter(
Q(name__in=settings.DOWNLOAD_FORMATS_VECTOR) |
Q(link_type='original'))
else:
links = layer.link_set.download().filter(
Q(name__in=settings.DOWNLOAD_FORMATS_RASTER) |
Q(link_type='original'))
links_view = [item for idx, item in enumerate(links) if
item.url and 'wms' in item.url or 'gwc' in item.url]
links_download = [item for idx, item in enumerate(
links) if item.url and 'wms' not in item.url and 'gwc' not in item.url]
for item in links_view:
if item.url and access_token and 'access_token' not in item.url:
params = {'access_token': access_token}
item.url = Request('GET', item.url, params=params).prepare().url
for item in links_download:
if item.url and access_token and 'access_token' not in item.url:
params = {'access_token': access_token}
item.url = Request('GET', item.url, params=params).prepare().url
if request.user.has_perm('view_resourcebase', layer.get_self_resource()):
context_dict["links"] = links_view
if request.user.has_perm(
'download_resourcebase',
layer.get_self_resource()):
if layer.storeType == 'dataStore':
links = layer.link_set.download().filter(
name__in=settings.DOWNLOAD_FORMATS_VECTOR)
else:
links = layer.link_set.download().filter(
name__in=settings.DOWNLOAD_FORMATS_RASTER)
context_dict["links_download"] = links_download
if settings.SOCIAL_ORIGINS:
context_dict["social_links"] = build_social_links(request, layer)
layers_names = layer.alternate
try:
if settings.DEFAULT_WORKSPACE and settings.DEFAULT_WORKSPACE in layers_names:
workspace, name = layers_names.split(':', 1)
else:
name = layers_names
except BaseException:
logger.error("Can not identify workspace type and layername")
context_dict["layer_name"] = json.dumps(layers_names)
try:
# get type of layer (raster or vector)
if layer.storeType == 'coverageStore':
context_dict["layer_type"] = "raster"
elif layer.storeType == 'dataStore':
if layer.has_time:
context_dict["layer_type"] = "vector_time"
else:
context_dict["layer_type"] = "vector"
location = "{location}{service}".format(** {
'location': settings.OGC_SERVER['default']['LOCATION'],
'service': 'wms',
})
# get schema for specific layer
username = settings.OGC_SERVER['default']['USER']
password = settings.OGC_SERVER['default']['PASSWORD']
schema = get_schema(
location,
name,
username=username,
password=password)
# get the name of the column which holds the geometry
if 'the_geom' in schema['properties']:
schema['properties'].pop('the_geom', None)
elif 'geom' in schema['properties']:
schema['properties'].pop("geom", None)
# filter the schema dict based on the values of layers_attributes
layer_attributes_schema = []
for key in schema['properties'].keys():
layer_attributes_schema.append(key)
filtered_attributes = layer_attributes_schema
context_dict["schema"] = schema
context_dict["filtered_attributes"] = filtered_attributes
except BaseException:
logger.error(
"Possible error with OWSLib. Turning all available properties to string")
if settings.GEOTIFF_IO_ENABLED:
from geonode.contrib.geotiffio import create_geotiff_io_url
context_dict["link_geotiff_io"] = create_geotiff_io_url(layer, access_token)
# maps owned by user needed to fill the "add to existing map section" in template
if request.user.is_authenticated():
context_dict["maps"] = Map.objects.filter(owner=request.user)
return TemplateResponse(
request, template, context=context_dict)
# Loads the data using the OWS lib when the "Do you want to filter it"
# button is clicked.
def load_layer_data(request, template='layers/layer_detail.html'):
context_dict = {}
data_dict = json.loads(request.POST.get('json_data'))
layername = data_dict['layer_name']
filtered_attributes = [x for x in data_dict['filtered_attributes'] if '/load_layer_data' not in x]
workspace, name = layername.split(':')
location = "{location}{service}".format(** {
'location': settings.OGC_SERVER['default']['LOCATION'],
'service': 'wms',
})
try:
# TODO: should be improved by using OAuth2 token (or at least user
# related to it) instead of super-powers
username = settings.OGC_SERVER['default']['USER']
password = settings.OGC_SERVER['default']['PASSWORD']
wfs = WebFeatureService(
location,
version='1.1.0',
username=username,
password=password)
response = wfs.getfeature(
typename=name,
propertyname=filtered_attributes,
outputFormat='application/json')
x = response.read()
x = json.loads(x)
features_response = json.dumps(x)
decoded = json.loads(features_response)
decoded_features = decoded['features']
properties = {}
for key in decoded_features[0]['properties']:
properties[key] = []
        # loop over the decoded features and, for each property key, add any
        # non-empty values to the dictionary built above
from collections import Iterable
for i in range(len(decoded_features)):
for key, value in decoded_features[i]['properties'].iteritems():
if value != '' and isinstance(value, (string_types, int, float)) and (
(isinstance(value, Iterable) and '/load_layer_data' not in value) or value):
properties[key].append(value)
for key in properties:
properties[key] = list(set(properties[key]))
properties[key].sort()
context_dict["feature_properties"] = properties
except BaseException:
import traceback
traceback.print_exc()
logger.error("Possible error with OWSLib.")
return HttpResponse(json.dumps(context_dict),
content_type="application/json")
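# Illustrative client-side sketch: this view expects a POST with a
# `json_data` field whose value is a JSON string; the layer name and
# attribute list below are hypothetical.
#   payload = {'json_data': json.dumps({
#       'layer_name': 'geonode:roads',
#       'filtered_attributes': ['name', 'surface_type']})}
#   # e.g. POSTed to the URL wired to load_layer_data in urls.py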
def layer_feature_catalogue(
request,
layername,
template='../../catalogue/templates/catalogue/feature_catalogue.xml'):
layer = _resolve_layer(request, layername)
if layer.storeType != 'dataStore':
out = {
'success': False,
'errors': 'layer is not a feature type'
}
return HttpResponse(
json.dumps(out),
content_type='application/json',
status=400)
attributes = []
for attrset in layer.attribute_set.order_by('display_order'):
attr = {
'name': attrset.attribute,
'type': attrset.attribute_type
}
attributes.append(attr)
context_dict = {
'layer': layer,
'attributes': attributes,
'metadata': settings.PYCSW['CONFIGURATION']['metadata:main']
}
return render(
request,
template,
context=context_dict,
content_type='application/xml')
@login_required
def layer_metadata(
request,
layername,
template='layers/layer_metadata.html',
ajax=True):
layer = _resolve_layer(
request,
layername,
'base.change_resourcebase_metadata',
_PERMISSION_MSG_METADATA)
layer_attribute_set = inlineformset_factory(
Layer,
Attribute,
extra=0,
form=LayerAttributeForm,
)
topic_category = layer.category
poc = layer.poc
metadata_author = layer.metadata_author
# assert False, str(layer_bbox)
config = layer.attribute_config()
# Add required parameters for GXP lazy-loading
layer_bbox = layer.bbox
bbox = [float(coord) for coord in list(layer_bbox[0:4])]
if hasattr(layer, 'srid'):
config['crs'] = {
'type': 'name',
'properties': layer.srid
}
config["srs"] = getattr(settings, 'DEFAULT_MAP_CRS', 'EPSG:3857')
config["bbox"] = bbox if config["srs"] != 'EPSG:3857' \
else llbbox_to_mercator([float(coord) for coord in bbox])
config["title"] = layer.title
config["queryable"] = True
if layer.storeType == "remoteStore":
service = layer.remote_service
source_params = {}
if service.type in ('REST_MAP', 'REST_IMG'):
source_params = {
"ptype": service.ptype,
"remote": True,
"url": service.service_url,
"name": service.name,
"title": "[R] %s" % service.title}
maplayer = GXPLayer(
name=layer.alternate,
ows_url=layer.ows_url,
layer_params=json.dumps(config),
source_params=json.dumps(source_params)
)
else:
maplayer = GXPLayer(
name=layer.alternate,
ows_url=layer.ows_url,
layer_params=json.dumps(config))
# Update count for popularity ranking,
    # but do not include admins or resource owners
if request.user != layer.owner and not request.user.is_superuser:
Layer.objects.filter(
id=layer.id).update(popular_count=F('popular_count') + 1)
# center/zoom don't matter; the viewer will center on the layer bounds
map_obj = GXPMap(
projection=getattr(
settings,
'DEFAULT_MAP_CRS',
'EPSG:3857'))
NON_WMS_BASE_LAYERS = [
la for la in default_map_config(request)[1] if la.ows_url is None]
if request.method == "POST":
if layer.metadata_uploaded_preserve: # layer metadata cannot be edited
out = {
'success': False,
'errors': METADATA_UPLOADED_PRESERVE_ERROR
}
return HttpResponse(
json.dumps(out),
content_type='application/json',
status=400)
layer_form = LayerForm(request.POST, instance=layer, prefix="resource")
if not layer_form.is_valid():
out = {
'success': False,
'errors': layer_form.errors
}
return HttpResponse(
json.dumps(out),
content_type='application/json',
status=400)
attribute_form = layer_attribute_set(
request.POST,
instance=layer,
prefix="layer_attribute_set",
queryset=Attribute.objects.order_by('display_order'))
category_form = CategoryForm(request.POST, prefix="category_choice_field", initial=int(
request.POST["category_choice_field"]) if "category_choice_field" in request.POST else None)
tkeywords_form = TKeywordForm(
request.POST,
prefix="tkeywords")
else:
layer_form = LayerForm(instance=layer, prefix="resource")
attribute_form = layer_attribute_set(
instance=layer,
prefix="layer_attribute_set",
queryset=Attribute.objects.order_by('display_order'))
category_form = CategoryForm(
prefix="category_choice_field",
initial=topic_category.id if topic_category else None)
# Keywords from THESAURI management
layer_tkeywords = layer.tkeywords.all()
tkeywords_list = ''
lang = 'en' # TODO: use user's language
if layer_tkeywords and len(layer_tkeywords) > 0:
tkeywords_ids = layer_tkeywords.values_list('id', flat=True)
if hasattr(settings, 'THESAURI'):
for el in settings.THESAURI:
thesaurus_name = el['name']
try:
t = Thesaurus.objects.get(identifier=thesaurus_name)
for tk in t.thesaurus.filter(pk__in=tkeywords_ids):
tkl = tk.keyword.filter(lang=lang)
if len(tkl) > 0:
tkl_ids = ",".join(
map(str, tkl.values_list('id', flat=True)))
tkeywords_list += "," + \
tkl_ids if len(
tkeywords_list) > 0 else tkl_ids
except BaseException:
tb = traceback.format_exc()
logger.error(tb)
tkeywords_form = TKeywordForm(
prefix="tkeywords",
initial={'tkeywords': tkeywords_list})
if request.method == "POST" and layer_form.is_valid() and attribute_form.is_valid(
) and category_form.is_valid() and tkeywords_form.is_valid():
new_poc = layer_form.cleaned_data['poc']
new_author = layer_form.cleaned_data['metadata_author']
if new_poc is None:
if poc is None:
poc_form = ProfileForm(
request.POST,
prefix="poc",
instance=poc)
else:
poc_form = ProfileForm(request.POST, prefix="poc")
if poc_form.is_valid():
if len(poc_form.cleaned_data['profile']) == 0:
# FIXME use form.add_error in django > 1.7
errors = poc_form._errors.setdefault(
'profile', ErrorList())
errors.append(
_('You must set a point of contact for this resource'))
poc = None
if poc_form.has_changed and poc_form.is_valid():
new_poc = poc_form.save()
if new_author is None:
if metadata_author is None:
author_form = ProfileForm(request.POST, prefix="author",
instance=metadata_author)
else:
author_form = ProfileForm(request.POST, prefix="author")
if author_form.is_valid():
if len(author_form.cleaned_data['profile']) == 0:
# FIXME use form.add_error in django > 1.7
errors = author_form._errors.setdefault(
'profile', ErrorList())
errors.append(
_('You must set an author for this resource'))
metadata_author = None
if author_form.has_changed and author_form.is_valid():
new_author = author_form.save()
new_category = TopicCategory.objects.get(
id=category_form.cleaned_data['category_choice_field'])
for form in attribute_form.cleaned_data:
la = Attribute.objects.get(id=int(form['id'].id))
la.description = form["description"]
la.attribute_label = form["attribute_label"]
la.visible = True if form["attribute_label"] else False # form["visible"]
la.display_order = form["display_order"]
la.save()
if new_poc is not None or new_author is not None:
if new_poc is not None:
layer.poc = new_poc
if new_author is not None:
layer.metadata_author = new_author
new_keywords = layer_form.cleaned_data['keywords']
if new_keywords is not None:
layer.keywords.clear()
layer.keywords.add(*new_keywords)
new_regions = [x.strip() for x in layer_form.cleaned_data['regions']]
if new_regions is not None:
layer.regions.clear()
layer.regions.add(*new_regions)
the_layer = layer_form.instance
the_layer.save()
up_sessions = UploadSession.objects.filter(layer=the_layer.id)
if up_sessions.count() > 0 and up_sessions[0].user != the_layer.owner:
up_sessions.update(user=the_layer.owner)
if new_category is not None:
Layer.objects.filter(id=the_layer.id).update(
category=new_category
)
if getattr(settings, 'SLACK_ENABLED', False):
try:
from geonode.contrib.slack.utils import build_slack_message_layer, send_slack_messages
send_slack_messages(
build_slack_message_layer(
"layer_edit", the_layer))
except BaseException:
logger.error("Could not send slack message.")
if not ajax:
return HttpResponseRedirect(
reverse(
'layer_detail',
args=(
layer.service_typename,
)))
message = layer.alternate
try:
# Keywords from THESAURI management
tkeywords_to_add = []
tkeywords_cleaned = tkeywords_form.clean()
if tkeywords_cleaned and len(tkeywords_cleaned) > 0:
tkeywords_ids = []
for i, val in enumerate(tkeywords_cleaned):
try:
cleaned_data = [value for key, value in tkeywords_cleaned[i].items(
) if 'tkeywords-tkeywords' in key.lower() and 'autocomplete' not in key.lower()]
tkeywords_ids.extend(map(int, cleaned_data[0]))
except BaseException:
pass
if hasattr(settings, 'THESAURI'):
for el in settings.THESAURI:
thesaurus_name = el['name']
try:
t = Thesaurus.objects.get(
identifier=thesaurus_name)
for tk in t.thesaurus.all():
tkl = tk.keyword.filter(pk__in=tkeywords_ids)
if len(tkl) > 0:
tkeywords_to_add.append(tkl[0].keyword_id)
except BaseException:
tb = traceback.format_exc()
logger.error(tb)
layer.tkeywords.add(*tkeywords_to_add)
except BaseException:
tb = traceback.format_exc()
logger.error(tb)
return HttpResponse(json.dumps({'message': message}))
if settings.ADMIN_MODERATE_UPLOADS:
if not request.user.is_superuser:
layer_form.fields['is_published'].widget.attrs.update(
{'disabled': 'true'})
can_change_metadata = request.user.has_perm(
'change_resourcebase_metadata',
layer.get_self_resource())
try:
is_manager = request.user.groupmember_set.all().filter(role='manager').exists()
except BaseException:
is_manager = False
if not is_manager or not can_change_metadata:
layer_form.fields['is_approved'].widget.attrs.update(
{'disabled': 'true'})
if poc is not None:
layer_form.fields['poc'].initial = poc.id
poc_form = ProfileForm(prefix="poc")
poc_form.hidden = True
else:
poc_form = ProfileForm(prefix="poc")
poc_form.hidden = False
if metadata_author is not None:
layer_form.fields['metadata_author'].initial = metadata_author.id
author_form = ProfileForm(prefix="author")
author_form.hidden = True
else:
author_form = ProfileForm(prefix="author")
author_form.hidden = False
viewer = json.dumps(map_obj.viewer_json(
request, * (NON_WMS_BASE_LAYERS + [maplayer])))
metadataxsl = False
if "geonode.contrib.metadataxsl" in settings.INSTALLED_APPS:
metadataxsl = True
metadata_author_groups = []
if request.user.is_superuser or request.user.is_staff:
metadata_author_groups = GroupProfile.objects.all()
else:
try:
all_metadata_author_groups = chain(
request.user.group_list_all().distinct(),
GroupProfile.objects.exclude(
access="private").exclude(access="public-invite"))
except BaseException:
all_metadata_author_groups = GroupProfile.objects.exclude(
access="private").exclude(access="public-invite")
[metadata_author_groups.append(item) for item in all_metadata_author_groups
if item not in metadata_author_groups]
return render(request, template, context={
"resource": layer,
"layer": layer,
"layer_form": layer_form,
"poc_form": poc_form,
"author_form": author_form,
"attribute_form": attribute_form,
"category_form": category_form,
"tkeywords_form": tkeywords_form,
"viewer": viewer,
"preview": getattr(settings, 'GEONODE_CLIENT_LAYER_PREVIEW_LIBRARY', 'geoext'),
"crs": getattr(settings, 'DEFAULT_MAP_CRS', 'EPSG:3857'),
"metadataxsl": metadataxsl,
"freetext_readonly": getattr(
settings,
'FREETEXT_KEYWORDS_READONLY',
False),
"metadata_author_groups": metadata_author_groups,
"GROUP_MANDATORY_RESOURCES":
getattr(settings, 'GROUP_MANDATORY_RESOURCES', False),
})
@login_required
def layer_metadata_advanced(request, layername):
return layer_metadata(
request,
layername,
template='layers/layer_metadata_advanced.html')
@login_required
def layer_change_poc(request, ids, template='layers/layer_change_poc.html'):
layers = Layer.objects.filter(id__in=ids.split('_'))
if settings.MONITORING_ENABLED:
for _l in layers:
if hasattr(_l, 'alternate'):
request.add_resource('layer', _l.alternate)
if request.method == 'POST':
form = PocForm(request.POST)
if form.is_valid():
for layer in layers:
layer.poc = form.cleaned_data['contact']
layer.save()
# Process the data in form.cleaned_data
# ...
# Redirect after POST
return HttpResponseRedirect('/admin/maps/layer')
else:
form = PocForm() # An unbound form
return render(
request, template, context={'layers': layers, 'form': form})
@login_required
def layer_replace(request, layername, template='layers/layer_replace.html'):
layer = _resolve_layer(
request,
layername,
'base.change_resourcebase',
_PERMISSION_MSG_MODIFY)
if request.method == 'GET':
ctx = {
'charsets': CHARSETS,
'resource': layer,
'is_featuretype': layer.is_vector(),
'is_layer': True,
}
return render(request, template, context=ctx)
elif request.method == 'POST':
form = LayerUploadForm(request.POST, request.FILES)
tempdir = None
out = {}
if form.is_valid():
try:
tempdir, base_file = form.write_files()
if layer.is_vector() and is_raster(base_file):
out['success'] = False
out['errors'] = _(
"You are attempting to replace a vector layer with a raster.")
elif (not layer.is_vector()) and is_vector(base_file):
out['success'] = False
out['errors'] = _(
"You are attempting to replace a raster layer with a vector.")
else:
if check_ogc_backend(geoserver.BACKEND_PACKAGE):
# delete geoserver's store before upload
# cascading_delete(gs_catalog, layer.alternate)
out['ogc_backend'] = geoserver.BACKEND_PACKAGE
elif check_ogc_backend(qgis_server.BACKEND_PACKAGE):
try:
qgis_layer = QGISServerLayer.objects.get(
layer=layer)
qgis_layer.delete()
except QGISServerLayer.DoesNotExist:
pass
out['ogc_backend'] = qgis_server.BACKEND_PACKAGE
saved_layer = file_upload(
base_file,
title=layer.title,
abstract=layer.abstract,
is_approved=layer.is_approved,
is_published=layer.is_published,
name=layer.name,
user=layer.owner,
# user=request.user,
license=layer.license.name if layer.license else None,
category=layer.category,
keywords=list(layer.keywords.all()),
regions=list(layer.regions.values_list('name', flat=True)),
# date=layer.date,
overwrite=True,
charset=form.cleaned_data["charset"],
)
out['success'] = True
out['url'] = reverse(
'layer_detail', args=[
saved_layer.service_typename])
except BaseException as e:
logger.exception(e)
tb = traceback.format_exc()
out['success'] = False
out['errors'] = str(tb)
finally:
if tempdir is not None:
shutil.rmtree(tempdir)
else:
errormsgs = []
for e in form.errors.values():
errormsgs.append([escape(v) for v in e])
out['errors'] = form.errors
out['errormsgs'] = errormsgs
if out['success']:
status_code = 200
else:
status_code = 400
return HttpResponse(
json.dumps(out),
content_type='application/json',
status=status_code)
@login_required
def layer_remove(request, layername, template='layers/layer_remove.html'):
layer = _resolve_layer(
request,
layername,
'base.delete_resourcebase',
_PERMISSION_MSG_DELETE)
if (request.method == 'GET'):
return render(request, template, context={
"layer": layer
})
if (request.method == 'POST'):
try:
with transaction.atomic():
# Using Tastypie
# from geonode.api.resourcebase_api import LayerResource
# res = LayerResource()
# request_bundle = res.build_bundle(request=request)
# layer_bundle = res.build_bundle(request=request, obj=layer)
# layer_json = res.serialize(None,
# res.full_dehydrate(layer_bundle),
# "application/json")
# delete_layer.delay(instance=layer_json)
result = delete_layer.delay(layer_id=layer.id)
result.wait(10)
except TimeoutError:
# traceback.print_exc()
pass
except Exception as e:
traceback.print_exc()
message = '{0}: {1}.'.format(
_('Unable to delete layer'), layer.alternate)
if 'referenced by layer group' in getattr(e, 'message', ''):
message = _(
'This layer is a member of a layer group, you must remove the layer from the group '
'before deleting.')
messages.error(request, message)
return render(
request, template, context={"layer": layer})
return HttpResponseRedirect(reverse("layer_browse"))
else:
return HttpResponse("Not allowed", status=403)
@login_required
def layer_granule_remove(
request,
granule_id,
layername,
template='layers/layer_granule_remove.html'):
layer = _resolve_layer(
request,
layername,
'base.delete_resourcebase',
_PERMISSION_MSG_DELETE)
if (request.method == 'GET'):
return render(request, template, context={
"granule_id": granule_id,
"layer": layer
})
if (request.method == 'POST'):
try:
cat = gs_catalog
cat._cache.clear()
store = cat.get_store(layer.name)
coverages = cat.mosaic_coverages(store)
cat.mosaic_delete_granule(
coverages['coverages']['coverage'][0]['name'], store, granule_id)
except Exception as e:
traceback.print_exc()
message = '{0}: {1}.'.format(
_('Unable to delete layer'), layer.alternate)
if 'referenced by layer group' in getattr(e, 'message', ''):
message = _(
'This layer is a member of a layer group, you must remove the layer from the group '
'before deleting.')
messages.error(request, message)
return render(
request, template, context={"layer": layer})
return HttpResponseRedirect(
reverse(
'layer_detail', args=(
layer.service_typename,)))
else:
return HttpResponse("Not allowed", status=403)
def layer_thumbnail(request, layername):
if request.method == 'POST':
layer_obj = _resolve_layer(request, layername)
try:
try:
preview = json.loads(request.body).get('preview', None)
except BaseException:
preview = None
if preview and preview == 'react':
format, image = json.loads(
request.body)['image'].split(';base64,')
image = base64.b64decode(image)
else:
image = _render_thumbnail(request.body)
if not image:
return
filename = "layer-%s-thumb.png" % layer_obj.uuid
layer_obj.save_thumbnail(filename, image)
return HttpResponse('Thumbnail saved')
except BaseException:
return HttpResponse(
content='error saving thumbnail',
status=500,
content_type='text/plain'
)
def get_layer(request, layername):
"""Get Layer object as JSON"""
# Function to treat Decimal in json.dumps.
# http://stackoverflow.com/a/16957370/1198772
def decimal_default(obj):
if isinstance(obj, decimal.Decimal):
return float(obj)
raise TypeError
logger.debug('Call get layer')
if request.method == 'GET':
layer_obj = _resolve_layer(request, layername)
logger.debug(layername)
response = {
'typename': layername,
'name': layer_obj.name,
'title': layer_obj.title,
'url': layer_obj.get_tiles_url(),
'bbox_string': layer_obj.bbox_string,
'bbox_x0': layer_obj.bbox_x0,
'bbox_x1': layer_obj.bbox_x1,
'bbox_y0': layer_obj.bbox_y0,
'bbox_y1': layer_obj.bbox_y1,
}
return HttpResponse(json.dumps(
response,
ensure_ascii=False,
default=decimal_default
),
content_type='application/javascript')
def layer_metadata_detail(
request,
layername,
template='layers/layer_metadata_detail.html'):
layer = _resolve_layer(
request,
layername,
'view_resourcebase',
_PERMISSION_MSG_METADATA)
group = None
if layer.group:
try:
group = GroupProfile.objects.get(slug=layer.group.name)
except GroupProfile.DoesNotExist:
group = None
site_url = settings.SITEURL.rstrip('/') if settings.SITEURL.startswith('http') else settings.SITEURL
return render(request, template, context={
"resource": layer,
"group": group,
'SITEURL': site_url
})
def layer_metadata_upload(
request,
layername,
template='layers/layer_metadata_upload.html'):
layer = _resolve_layer(
request,
layername,
'base.change_resourcebase',
_PERMISSION_MSG_METADATA)
site_url = settings.SITEURL.rstrip('/') if settings.SITEURL.startswith('http') else settings.SITEURL
return render(request, template, context={
"resource": layer,
"layer": layer,
'SITEURL': site_url
})
def layer_sld_upload(
request,
layername,
template='layers/layer_style_upload.html'):
layer = _resolve_layer(
request,
layername,
'base.change_resourcebase',
_PERMISSION_MSG_METADATA)
site_url = settings.SITEURL.rstrip('/') if settings.SITEURL.startswith('http') else settings.SITEURL
return render(request, template, context={
"resource": layer,
"layer": layer,
'SITEURL': site_url
})
@login_required
def layer_batch_metadata(request, ids):
return batch_modify(request, ids, 'Layer')
def layer_view_counter(layer_id, viewer):
_l = Layer.objects.get(id=layer_id)
_u = get_user_model().objects.get(username=viewer)
_l.view_count_up(_u, do_local=True)
| gpl-3.0 | 7,374,107,946,159,023,000 | 37.921281 | 116 | 0.550383 | false |
nboley/grit | grit/simulator/reads_simulator.py | 1 | 21238 | """
Copyright (c) 2011-2015 Nathan Boley
This file is part of GRIT.
GRIT is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
GRIT is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with GRIT. If not, see <http://www.gnu.org/licenses/>.
"""
import sys
import os
import os.path
import numpy
import pickle
import pysam
import math
from random import random
from collections import defaultdict
import tempfile
DEFAULT_QUALITY_SCORE = 'r'
DEFAULT_BASE = 'A'
DEFAULT_FRAG_LENGTH = 150
DEFAULT_READ_LENGTH = 100
DEFAULT_NUM_FRAGS = 100
NUM_NORM_SDS = 4
FREQ_GTF_STRINGS = [ 'freq', 'frac' ]
# add slide dir to sys.path and import frag_len mod
#sys.path.append(os.path.join(os.path.dirname(sys.argv[0]), ".." ))
sys.path.insert(0, "/home/nboley/grit/grit/")
import grit.frag_len as frag_len
from grit.files.gtf import load_gtf
from grit.files.reads import clean_chr_name
def fix_chr_name(x):
return "chr" + clean_chr_name(x)
def get_transcript_sequence(transcript, fasta):
""" get the mRNA sequence of the transcript from the gene seq
"""
trans_seq = []
for start, stop in transcript.exons:
seq = fasta.fetch(fix_chr_name(transcript.chrm), start, stop+1)
trans_seq.append( seq.upper() )
trans_seq = "".join(trans_seq)
return trans_seq
def get_cigar( transcript, start, stop ):
"""loop through introns within the read and add #N to the cigar for each
intron add #M for portions of read which map to exons
"""
def calc_len(interval):
return interval[1]-interval[0]+1
cigar = []
# find the exon index of the start
genome_start = transcript.genome_pos(start)
start_exon = next(i for i, (e_start, e_stop) in enumerate(transcript.exons)
if genome_start >= e_start and genome_start <= e_stop)
genome_stop = transcript.genome_pos(stop-1)
stop_exon = next(i for i, (e_start, e_stop) in enumerate(transcript.exons)
if genome_stop >= e_start and genome_stop <= e_stop)
if start_exon == stop_exon:
return "%iM" % (stop-start)
tl = 0
# add the first overlap match
skipped_bases = sum(calc_len(e) for e in transcript.exons[:start_exon+1])
cigar.append("%iM" % (skipped_bases-start))
tl += skipped_bases-start
# add the first overlap intron
cigar.append("%iN" % calc_len(transcript.introns[start_exon]))
# add the internal exon and intron matches
for i in xrange(start_exon+1, stop_exon):
cigar.append("%iM" % calc_len(transcript.exons[i]))
cigar.append("%iN" % calc_len(transcript.introns[i]))
tl += calc_len(transcript.exons[i])
# add the last overlap match
skipped_bases = sum(e[1]-e[0]+1 for e in transcript.exons[:stop_exon])
cigar.append("%iM" % (stop-skipped_bases))
tl += stop - skipped_bases
assert tl == (stop-start)
return "".join(cigar)
def build_sam_line( transcript, read_len, offset, read_identifier, quality_string ):
"""build a single ended SAM formatted line with given inforamtion
"""
    # set flag to indicate strandedness of read matching that of the transcript
flag = 0
if transcript.strand == '+': flag += 16
# adjust start position to correct genomic position
start = transcript.genome_pos(offset)
# set cigar string corresponding to transcript and read offset
cigar = get_cigar( transcript, offset, (offset + read_len) )
# calculate insert size by difference of genomic offset and genomic offset+read_len
insert_size = transcript.genome_pos(offset+read_len) - transcript.genome_pos(offset)
# get slice of seq from transcript
seq = ( transcript.seq[ offset : (offset + read_len) ]
if transcript.seq != None else '*' )
# initialize sam lines with read identifiers and then add appropriate fields
sam_line = '\t'.join( (
read_identifier, str( flag ), fix_chr_name(transcript.chrm),
str(start+1),
'255', cigar, "*", '0', str( insert_size ), seq, quality_string,
"NM:i:0", "NH:i:1" ) ) + "\n"
return sam_line
def build_sam_lines( transcript, read_len, frag_len, offset,
read_identifier, read_quals ):
"""build paired end SAM formatted lines with given information
"""
# set ordered quals and reverse the qualities for the read on the negative strand
ordered_quals = read_quals
    # determine whether read1 should be the 5' read or vice versa
# and initialize attributes that are specific to a read number
# instead of 5' or 3' attribute
if transcript.strand == '+':
up_strm_read, dn_strm_read = (0, 1)
flag = [ 99, 147 ]
ordered_quals[1] = ordered_quals[1][::-1]
else:
up_strm_read, dn_strm_read = (1, 0)
flag = [ 83, 163 ]
ordered_quals[0] = ordered_quals[0][::-1]
# get slice of seq from transcript
seq = ['*', '*']
if transcript.seq != None:
seq[ up_strm_read ] = transcript.seq[offset:(offset + read_len)]
seq[ dn_strm_read ] = transcript.seq[
(offset + frag_len - read_len):(offset + frag_len)]
# adjust five and three prime read start positions to correct genomic positions
start = [ transcript.start, transcript.start ]
start[ up_strm_read ] = transcript.genome_pos(offset)
start[ dn_strm_read ] = transcript.genome_pos(offset + frag_len - read_len)
# set cigar string for five and three prime reads
cigar = [ None, None ]
cigar[ up_strm_read ] = get_cigar( transcript, offset, (offset+read_len) )
cigar[ dn_strm_read ] = get_cigar(
transcript, (offset+frag_len-read_len), (offset + frag_len))
# calculate insert size by difference of the mapped start and end
insert_size = (
transcript.genome_pos(offset+read_len) - transcript.genome_pos(offset))
insert_size = [ insert_size, insert_size ]
insert_size[ dn_strm_read ] *= -1
# initialize sam lines with read identifiers and then add appropriate fields
sam_lines = [ read_identifier + '\t', read_identifier + '\t' ]
for i in (0,1):
other_i = 0 if i else 1
sam_lines[i] += '\t'.join( (
str( flag[i] ), fix_chr_name(transcript.chrm),
str( start[i]+1 ),"255",
cigar[i], "=", str( start[other_i]+1 ), str( insert_size[i] ),
seq[i], ordered_quals[i], "NM:i:0", "NH:i:1" ) ) + "\n"
return sam_lines
def write_fastq_lines( fp1, fp2, transcript, read_len, frag_len, offset,
read_identifier ):
"""STUB for writing fastq lines to running through alignment pipeline
"""
pass
def simulate_reads( genes, fl_dist, fasta, quals, num_frags, single_end,
full_fragment, read_len, assay='RNAseq'):
"""write a SAM format file with the specified options
"""
# global variable that stores the current read number, we use this to
# generate a unique id for each read.
global curr_read_index
curr_read_index = 1
def sample_fragment_length( fl_dist, transcript ):
"""Choose a random fragment length from fl_dist
"""
if assay == 'CAGE':
return read_len
# if the fl_dist is constant
if isinstance( fl_dist, int ):
assert fl_dist <= transcript.calc_length(), 'Transcript which ' + \
'cannot contain a valid fragment was included in transcripts.'
return fl_dist
# Choose a valid fragment length from the distribution
while True:
fl_index = fl_dist.fl_density_cumsum.searchsorted( random() ) - 1
fl = fl_index + fl_dist.fl_min
# if fragment_length is valid return it
if fl <= transcript.calc_length():
return fl
assert False
def sample_read_offset( transcript, fl ):
# calculate maximum offset
max_offset = transcript.calc_length() - fl
if assay in ('CAGE', 'RAMPAGE'):
if transcript.strand == '+': return 0
else: return max_offset
elif assay == 'RNAseq':
return int( random() * max_offset )
elif assay == 'PASseq':
if transcript.strand == '-': return 0
else: return max_offset
def get_random_qual_score( read_len ):
# if no quality score were provided
if not quals:
return DEFAULT_QUALITY_SCORE * read_len
# else return quality string from input quality file
# scores are concatenated to match read_len if necessary
else:
qual_string = ''
while len( qual_string ) < read_len:
qual_string += str( quals[ int(random() * len(quals) ) ] )
return qual_string[0:read_len]
def get_random_read_pos( transcript ):
while True:
# find a valid fragment length
fl = sample_fragment_length( fl_dist, transcript )
if (fl >= read_len) or full_fragment: break
# find a valid random read start position
offset = sample_read_offset( transcript, fl )
# get a unique string for this fragment
global curr_read_index
read_identifier = 'SIM:%015d:%s' % (curr_read_index, transcript.id)
curr_read_index += 1
return fl, offset, read_identifier
def build_random_sam_line( transcript, read_len ):
"""build a random single ended sam line
"""
fl, offset, read_identifier = get_random_read_pos( transcript )
if full_fragment:
read_len = fl
# get a random quality scores
if transcript.seq == None:
read_qual = '*'
else:
read_qual = get_random_qual_score( read_len )
# build the sam lines
return build_sam_line(
transcript, read_len, offset, read_identifier, read_qual )
def build_random_sam_lines( transcript, read_len ):
"""build random paired end sam lines
"""
fl, offset, read_identifier = get_random_read_pos( transcript )
# adjust read length so that paired end read covers the entire fragment
if full_fragment:
read_len = int( math.ceil( fl / float(2) ) )
# get two random quality scores
if transcript.seq == None:
read_quals = ['*', '*']
else:
read_quals = [ get_random_qual_score( read_len ),
get_random_qual_score( read_len ) ]
sam_lines = build_sam_lines(
transcript, read_len, fl, offset, read_identifier, read_quals )
return sam_lines
def get_fl_min():
if isinstance( fl_dist, int ):
return fl_dist
else:
return fl_dist.fl_min
def calc_scale_factor(t):
if assay in ('RNAseq',):
length = t.calc_length()
if length < fl_dist.fl_min: return 0
fl_min, fl_max = fl_dist.fl_min, min(length, fl_dist.fl_max)
allowed_fl_lens = numpy.arange(fl_min, fl_max+1)
weights = fl_dist.fl_density[
fl_min-fl_dist.fl_min:fl_max-fl_dist.fl_min+1]
mean_fl_len = float((allowed_fl_lens*weights).sum())
return length - mean_fl_len
elif assay in ('CAGE', 'RAMPAGE', 'PASseq'):
return 1.0
# initialize the transcript objects, and calculate their relative weights
transcript_weights = []
transcripts = []
contig_lens = defaultdict(int)
min_transcript_length = get_fl_min()
for gene in genes:
contig_lens[fix_chr_name(gene.chrm)] = max(
gene.stop+1000, contig_lens[fix_chr_name(gene.chrm)])
for transcript in gene.transcripts:
if fasta != None:
transcript.seq = get_transcript_sequence(transcript, fasta)
else:
transcript.seq = None
if transcript.fpkm != None:
weight = transcript.fpkm*calc_scale_factor(transcript)
elif transcript.frac != None:
assert len(genes) == 1
weight = transcript.frac
else:
weight = 1./len(gene.transcripts)
#assert False, "Transcript has neither an FPKM nor a frac"
transcripts.append( transcript )
transcript_weights.append( weight )
#assert False
    assert len( transcripts ) > 0, "No valid transcripts."
# normalize the transcript weights to be on 0,1
transcript_weights = numpy.array(transcript_weights, dtype=float)
transcript_weights = transcript_weights/transcript_weights.sum()
transcript_weights_cumsum = transcript_weights.cumsum()
# update the contig lens from the fasta file, if available
if fasta != None:
for name, length in zip(fasta.references, fasta.lengths):
if fix_chr_name(name) in contig_lens:
contig_lens[fix_chr_name(name)] = max(
length, contig_lens[name])
# create the output directory
bam_prefix = assay + ".sorted"
with tempfile.NamedTemporaryFile( mode='w+' ) as sam_fp:
# write out the header
for contig, contig_len in contig_lens.iteritems():
data = ["@SQ", "SN:%s" % contig, "LN:%i" % contig_len]
sam_fp.write("\t".join(data) + "\n")
while curr_read_index <= num_frags:
# pick a transcript to randomly take a read from. Note that they
# should be chosen in proportion to the *expected number of reads*,
# not their relative frequencies.
transcript_index = \
transcript_weights_cumsum.searchsorted( random(), side='left' )
transcript = transcripts[ transcript_index ]
if single_end:
sam_line_s = build_random_sam_line( transcript, read_len )
else:
sam_line_s = build_random_sam_lines( transcript, read_len )
sam_fp.writelines( sam_line_s )
# create sorted bam file and index it
sam_fp.flush()
#sam_fp.seek(0)
#print sam_fp.read()
call = 'samtools view -bS {} | samtools sort - {}'
os.system( call.format( sam_fp.name, bam_prefix ) )
os.system( 'samtools index {}.bam'.format( bam_prefix ) )
return
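# Note on the sampling weights used in simulate_reads (illustrative numbers):
# with a mean fragment length of about 200, a 1 kb transcript at FPKM 10 is
# weighted roughly 10 * (1000 - 200) = 8000, while a 2 kb transcript at the
# same FPKM is weighted roughly 10 * (2000 - 200) = 18000 -- i.e. transcripts
# are sampled in proportion to their expected fragment yield (FPKM times the
# approximate effective length), not their raw FPKM.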
def build_objs( gtf_fp, fl_dist_const,
fl_dist_norm, full_fragment,
read_len, fasta_fn, qual_fn ):
genes = load_gtf( gtf_fp )
gtf_fp.close()
def build_normal_fl_dist( fl_mean, fl_sd ):
fl_min = max( 0, fl_mean - (fl_sd * NUM_NORM_SDS) )
fl_max = fl_mean + (fl_sd * NUM_NORM_SDS)
fl_dist = frag_len.build_normal_density( fl_min, fl_max, fl_mean, fl_sd )
return fl_dist
if fl_dist_norm:
fl_dist = build_normal_fl_dist( fl_dist_norm[0], fl_dist_norm[1] )
assert fl_dist.fl_max > read_len or full_fragment, \
'Invalid fragment length distribution and read length!!!'
else:
assert read_len < fl_dist_const or full_fragment, \
'Invalid read length and constant fragment length!!!'
fl_dist = fl_dist_const
if fasta_fn:
# create indexed fasta file handle object with pysam
fasta = pysam.Fastafile( fasta_fn )
else:
fasta = None
# if quals_fn is None, quals remains empty and reads will default to
# all base qualities of DEFAULT_BASE_QUALITY_SCORE
quals = []
if qual_fn:
        with open( qual_fn ) as quals_fp:
for line in quals_fp:
quals.append( line.strip() )
quals = numpy.array( quals )
return genes, fl_dist, fasta, quals
def parse_arguments():
import argparse
parser = argparse.ArgumentParser(\
        description='Produce simulated reads in a perfectly aligned BAM file.' )
# gtf is the only required argument
parser.add_argument( 'gtf', type=file, \
help='GTF file from which to produce simulated reads ' + \
             '(Note: Only the first transcript from this file will ' + \
'be simulated)' )
parser.add_argument(
'--assay', choices=['RNAseq', 'RAMPAGE', 'CAGE', 'PASseq'],
default='RNAseq', help='Which assay type to simulate from' )
# fragment length distribution options
parser.add_argument( '--fl-dist-const', type=int, default=DEFAULT_FRAG_LENGTH, \
help='Constant length fragments. (default: ' + \
'%(default)s)' )
parser.add_argument( '--fl-dist-norm', \
help='Mean and standard deviation (format "mn:sd") ' + \
'used to create normally distributed fragment lengths.' )
    # files providing quality and sequence information
parser.add_argument( '--fasta', '-f', \
help='Fasta file from which to create reads ' + \
'(default: all sequences are "' + DEFAULT_BASE + \
'" * length of sequence)' )
parser.add_argument( '--quality', '-q', \
help='Flat file containing one FASTQ quality score ' + \
'per line, created with get_quals.sh. (default: ' + \
'quality strings are "' + str(DEFAULT_QUALITY_SCORE) + \
'" * length of sequence.)' )
# type and number of fragments requested
parser.add_argument(
'--num-frags', '-n', type=int, default=1000,
        help='Total number of fragments to create across all transcripts')
parser.add_argument('--single-end', action='store_true', default=False,
help='Produce single-end reads.' )
parser.add_argument('--paired-end', dest='single_end', action='store_false',
help='Produce paired-end reads. (default)' )
# XXX not sure if this works
#parser.add_argument(
# '--full-fragment', action='store_true', default=False,
# help='Produce reads spanning the entire fragment.')
parser.add_argument( '--read-len', '-r', type=int, default=DEFAULT_READ_LENGTH, \
help='Length of reads to produce in base pairs ' + \
'(default: %(default)s)' )
# output options
parser.add_argument( '--out_prefix', '-o', default='simulated_reads', \
help='Prefix for output FASTQ/BAM file ' + \
'(default: %(default)s)' )
parser.add_argument( '--verbose', '-v', default=False, action='store_true', \
help='Print status information.' )
args = parser.parse_args()
# set to false, but we may want to bring this option back
args.full_fragment = False
global VERBOSE
VERBOSE = args.verbose
if args.assay == 'CAGE':
args.read_len = 28
args.single_end = True
# parse normal distribution argument
if args.fl_dist_norm:
try:
mean, sd = args.fl_dist_norm.split( ':' )
args.fl_dist_norm = [ int( mean ), int( sd ) ]
except ValueError:
args.fl_dist_norm = None
print >> sys.stderr, \
"WARNING: User input mean and sd are not formatted correctly.\n"+\
"\tUsing default values.\n"
return ( args.gtf, args.fl_dist_const, args.fl_dist_norm,
args.fasta, args.quality, args.num_frags,
args.single_end, args.full_fragment,
args.read_len, args.out_prefix, args.assay )
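# Example invocation (illustrative only; the GTF, FASTA and output names are
# placeholders):
#   python reads_simulator.py transcripts.gtf --assay RNAseq \
#       --fl-dist-norm 250:25 --num-frags 100000 --read-len 100 \
#       --fasta genome.fa -o sim_rnaseq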
def main():
( gtf_fp, fl_dist_const, fl_dist_norm, fasta_fn, qual_fn,
num_frags, single_end, full_fragment, read_len, out_prefix, assay )\
= parse_arguments()
try: os.mkdir(out_prefix)
except OSError:
ofname = os.path.join(out_prefix, assay + '.sorted.bam')
if os.path.isfile(ofname):
raise OSError, "File '%s' already exists" % ofname
os.chdir(out_prefix)
genes, fl_dist, fasta, quals = build_objs(
gtf_fp, fl_dist_const,
fl_dist_norm, full_fragment, read_len,
fasta_fn, qual_fn )
"""
for gene in genes:
for t in gene.transcripts:
t.chrm = "chr" + t.chrm
print t.build_gtf_lines(gene.id, {})
assert False
"""
simulate_reads( genes, fl_dist, fasta, quals, num_frags, single_end,
full_fragment, read_len, assay=assay )
if __name__ == "__main__":
main()
| gpl-3.0 | -4,307,114,510,354,580,500 | 37.33574 | 88 | 0.584377 | false |
Netflix-Skunkworks/iep-apps | atlas-slotting/src/scripts/lift-data.py | 1 | 4221 | #!/usr/bin/env python3
# Copyright 2014-2019 Netflix, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import gzip
import json
import pprint
from argparse import Namespace
from datetime import datetime
from typing import Dict, List
import boto3
import requests
import sys
from boto3.dynamodb.types import Binary
from botocore.exceptions import ClientError, ProfileNotFound
def parse_args() -> Namespace:
parser = argparse.ArgumentParser(description='Lift slotting data from Edda into DynamoDB')
parser.add_argument('--profile', type=str, required=True,
help='AWS credentials profile used to write to the Atlas Slotting DynamoDB table')
parser.add_argument('--region', type=str, nargs='+', required=True,
choices=['eu-west-1', 'us-east-1', 'us-west-1', 'us-west-2'],
help='List of AWS regions where data will be lifted from Edda into DynamoDB')
parser.add_argument('--edda_name', type=str, required=True,
help='Edda DNS name, with a region placeholder, where data will be read')
parser.add_argument('--slotting_table', type=str, required=True,
help='Atlas Slotting DynamoDB table name, where data will be written')
parser.add_argument('--app_name', type=str, nargs='+', required=True,
help='List of application names that will be lifted')
parser.add_argument('--dryrun', action='store_true', required=False, default=False,
help='Enable dryrun mode, to preview changes')
return parser.parse_args()
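# Example invocation (illustrative only; the profile, DNS name and table name
# are placeholders -- the Edda name must contain a '{}' region placeholder):
#   ./lift-data.py --profile slotting-rw --region us-east-1 us-west-2 \
#       --edda_name edda.{}.example.com --slotting_table atlas-slotting \
#       --app_name atlas_app atlas_api --dryrun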
def get_edda_data(args: Namespace, region: str) -> List[Dict]:
url = f'http://{args.edda_name.format(region)}/api/v2/group/autoScalingGroups;_expand'
r = requests.get(url)
if not r.ok:
print(f'ERROR: Failed to load Edda data from {url}')
sys.exit(1)
else:
return [asg for asg in r.json() if asg['name'].split('-')[0] in args.app_name]
def get_ddb_table(args: Namespace, region: str):
try:
session = boto3.session.Session(profile_name=args.profile)
except ProfileNotFound:
print(f'ERROR: AWS profile {args.profile} does not exist')
sys.exit(1)
dynamodb = session.resource('dynamodb', region_name=region)
table = dynamodb.Table(args.slotting_table)
try:
table.table_status
except ClientError as e:
code = e.response['Error']['Code']
if code == 'ExpiredTokenException':
print(f'ERROR: Security token in AWS profile {args.profile} has expired')
elif code == 'ResourceNotFoundException':
print(f'ERROR: Table {args.slotting_table} does not exist in {region}')
else:
pprint.pprint(e.response)
sys.exit(1)
return table
def lift_data(args: Namespace, region: str):
asgs = get_edda_data(args, region)
table = get_ddb_table(args, region)
for asg in asgs:
item = {
'name': asg['name'],
'active': True,
'data': Binary(gzip.compress(bytes(json.dumps(asg), encoding='utf-8'))),
'timestamp': int(datetime.utcnow().timestamp() * 1000)
}
if args.dryrun:
print(f'DRYRUN: PUT {asg["name"]}')
else:
print(f'PUT {asg["name"]}')
table.put_item(Item=item)
def main():
args = parse_args()
print('==== config ====')
print(f'AWS Profile: {args.profile}')
print(f'Source Edda: {args.edda_name}')
print(f'Destination Table: {args.slotting_table}')
for region in args.region:
print(f'==== {region} ====')
lift_data(args, region)
if __name__ == "__main__":
main()
| apache-2.0 | -9,102,378,163,587,709,000 | 34.175 | 106 | 0.637764 | false |
nemonik/CoCreateLite | ccl-cookbook/files/default/cocreatelite/cocreate/views/playgrounds.py | 1 | 5229 | from django.http import HttpResponse, HttpResponseRedirect
from django.shortcuts import render, get_object_or_404
from django.core.urlresolvers import reverse
from ..models import VMPlayground
from ..forms import VMPlaygroundForm, VMPlaygroundDescriptionForm, VMPlaygroundUserAccessForm, VMPlaygroundGroupAccessForm
from . import util
from ..util import single_user_mode
"""
View controllers for playground data
"""
@single_user_mode
def index(request):
"""
Show the list of playgrounds for this user.
"""
# determine all of the playgrounds this user has access to
groupids = [group.id for group in request.user.groups.all()]
print ("Group ids: " + str(groupids))
playgrounds = VMPlayground.objects.filter(creator = request.user) | VMPlayground.objects.filter(access_users__id = request.user.id) | VMPlayground.objects.filter(access_groups__id__in = groupids)
# determine all of the demo boxes from a set of playgrounds
demos = []
for playground in playgrounds:
demos = demos + playground.getDemos()
context = {
"playgrounds": playgrounds,
"demos": demos
}
return render(request, "playgrounds.html", util.fillContext(context, request))
@single_user_mode
def add(request):
"""
Add a new playground.
"""
if request.method == 'GET':
form = VMPlaygroundForm()
elif request.method == 'POST':
form = VMPlaygroundForm(request.POST)
if form.is_valid():
# hooray, let's create the playground
playground = VMPlayground.objects.create(
name = form.data['name'],
creator = request.user,
description = form.data['description'],
description_is_markdown = form.data.get('description_is_markdown', False),
environment = form.data['environment'],
)
playground.save()
return HttpResponseRedirect(reverse("playground", args=[playground.id]))
else:
pass
opts = {"form": form}
return render(request, "addPlayground.html", util.fillContext(opts, request))
@single_user_mode
def remove(request, playground_id):
"""
Remove a playground.
"""
playground = get_object_or_404(VMPlayground, pk = playground_id)
for sandbox in playground.sandboxes.all():
        sandbox.delete()
playground.delete()
return HttpResponseRedirect(reverse("playgrounds"))
@single_user_mode
def playground(request, playground_id):
"""
Show the details for this playground.
"""
playground = get_object_or_404(VMPlayground, pk = playground_id)
opts = {"playground": playground}
return render(request, "newPlaygroundDetails.html", util.fillContext(opts, request))
@single_user_mode
def alterUserAccess(request, playground_id):
"""
Alter the access control list for a playground.
"""
playground = get_object_or_404(VMPlayground, pk = playground_id)
if request.method == 'GET':
form = VMPlaygroundUserAccessForm(instance = playground)
elif request.method == 'POST':
form = VMPlaygroundUserAccessForm(request.POST, instance=playground)
if form.is_valid():
form.save()
return HttpResponseRedirect(reverse("playground", args=[playground.id]))
else:
pass
opts = {"form": form, "playground": playground }
return render(request, "alterPlaygroundUserAccess.html", util.fillContext(opts, request))
@single_user_mode
def alterGroupAccess(request, playground_id):
"""
Alter the access control list for a playground.
"""
playground = get_object_or_404(VMPlayground, pk = playground_id)
if request.method == 'GET':
form = VMPlaygroundGroupAccessForm(instance = playground)
elif request.method == 'POST':
form = VMPlaygroundGroupAccessForm(request.POST, instance=playground)
if form.is_valid():
form.save()
return HttpResponseRedirect(reverse("playground", args=[playground.id]))
else:
pass
opts = {"form": form, "playground": playground }
return render(request, "alterPlaygroundGroupAccess.html", util.fillContext(opts, request))
@single_user_mode
def editDesc(request, playground_id):
"""
Alter or edit the description of the playground
"""
playground = get_object_or_404(VMPlayground, pk = playground_id)
if request.method == 'GET':
form = VMPlaygroundDescriptionForm(instance = playground)
elif request.method == 'POST':
form = VMPlaygroundDescriptionForm(request.POST)
if form.is_valid():
playground.description_is_markdown = form.data['description_is_markdown']
playground.description = form.data['description']
playground.save()
return HttpResponseRedirect(reverse("playground", args=[playground.id]))
else:
pass
opts = {"form": form, "playground": playground }
return render(request, "editPlaygroundDesc.html", util.fillContext(opts, request))
| bsd-3-clause | 5,452,664,651,074,782,000 | 32.954545 | 199 | 0.643526 | false |
roscopecoltran/scraper | .staging/meta-engines/xlinkBook/update/spider.py | 1 | 7851 | #!/usr/bin/env python
#author: wowdd1
#mail: [email protected]
#date: 2014.12.09
import requests
import json
from bs4 import BeautifulSoup;
import os,sys
import time
import re
from all_subject import subject_dict, need_update_subject_list
reload(sys)
sys.setdefaultencoding("utf-8")
sys.path.append("..")
from record import Category
class Spider:
google = None
baidu = None
bing = None
yahoo = None
db_dir = None
zh_re = None
shcool = None
subject = None
url = None
count = None
deep_mind = None
category = ''
category_obj = None
proxies = {
"http": "http://127.0.0.1:8087",
"https": "http://127.0.0.1:8087",
}
proxies2 = {
"http": "http://127.0.0.1:8787",
"https": "http://127.0.0.1:8787",
}
def __init__(self):
self.google = "https://www.google.com.hk/?gws_rd=cr,ssl#safe=strict&q="
self.baidu = "http://www.baidu.com/s?word="
self.bing = "http://cn.bing.com/search?q=a+b&go=Submit&qs=n&form=QBLH&pq="
self.yahoo = "https://search.yahoo.com/search;_ylt=Atkyc2y9pQQo09zbTUWM4CWbvZx4?p="
self.db_dir = os.path.abspath('.') + "/../" + "db/"
self.zh_re=re.compile(u"[\u4e00-\u9fa5]+")
self.school = None
self.subject = None
self.url = None
self.count = 0
self.deep_mind = False
self.category_obj = Category()
def doWork(self):
return
def requestWithProxy(self, url):
return requests.get(url, proxies=self.proxies, verify=False)
def requestWithProxy2(self, url):
return requests.get(url, proxies=self.proxies2, verify=False)
def format_subject(self, subject):
match_list = []
for (k, v) in subject_dict.items():
if subject.find('/') != -1 and subject.lower()[0:subject.find('/')].strip().find(k.lower()) != -1:
match_list.append(k)
elif subject.find('/') == -1 and subject.lower().strip().find(k.lower()) != -1:
match_list.append(k)
result = subject
if len(match_list) > 1:
max_len = 0
for key in match_list:
if key.lower() == subject[0: subject.find(' ')].lower().strip():
result = subject_dict[key]
break
if len(key) > max_len:
max_len = len(key)
result = subject_dict[key]
elif len(match_list) == 1:
#print subject_dict[match_list[0]]
result = subject_dict[match_list[0]]
#print subject
if result != subject and subject.find('/') != -1:
last_index = 0
while subject.find('/', last_index + 1) != -1:
last_index = subject.find('/', last_index + 1)
return result + subject[subject.find('/') : last_index + 1]
elif result != subject:
return result + "/"
else:
if subject.strip()[len(subject) - 1 : ] != '/':
return subject + "/"
else:
return subject
def need_update_subject(self, subject):
subject_converted = self.format_subject(subject)
if subject_converted[len(subject_converted) - 1 : ] == '/':
subject_converted = subject_converted[0 : len(subject_converted) - 1]
for item in need_update_subject_list:
if subject_converted.find(item) != -1:
return True
print subject + " not config in all_subject.py, ignore it"
return False
def replace_sp_char(self, text):
while text.find('/') != -1:
text = text[text.find('/') + 1 : ]
return text.replace(",","").replace("&","").replace(":","").replace("-"," ").replace(" "," ").replace(" ","-").lower()
def get_file_name(self, subject, school):
dir_name = self.format_subject(subject)
return self.db_dir + dir_name + self.replace_sp_char(subject) + "-" + school + time.strftime("%Y")
def create_dir_by_file_name(self, file_name):
if os.path.exists(file_name) == False:
index = 0
for i in range(0, len(file_name)):
if file_name[i] == "/":
index = i
if index > 0:
if os.path.exists(file_name[0:index]) == False:
print "creating " + file_name[0:index] + " dir"
os.makedirs(file_name[0:index])
def open_db(self, file_name, append=False):
self.create_dir_by_file_name(file_name)
flag = 'w'
if append:
flag = 'a'
try:
f = open(file_name, flag)
except IOError, err:
print str(err)
return f
def do_upgrade_db(self, file_name):
tmp_file = file_name + ".tmp"
if os.path.exists(file_name) and os.path.exists(tmp_file):
print "upgrading..."
#os.system("diff -y --suppress-common-lines -EbwBi " + file_name + " " + file_name + ".tmp " + "| colordiff")
#print "remove " + file_name[file_name.find("db"):]
os.remove(file_name)
#print "rename " + file_name[file_name.find("db"):] + ".tmp"
os.rename(tmp_file, file_name)
print "upgrade done"
elif os.path.exists(tmp_file):
print "upgrading..."
#print "rename " + file_name[file_name.find("db"):] + ".tmp"
os.rename(tmp_file, file_name)
print "upgrade done"
else:
print "upgrade error"
def cancel_upgrade(self, file_name):
if os.path.exists(file_name + ".tmp"):
os.remove(file_name + ".tmp")
def close_db(self, f):
f.close()
def write_db(self, f, course_num, course_name, url, describe=""):
#if url == "":
# url = self.google + course_num + " " + course_name
if self.category != '' and describe.find('category:') == -1:
describe += ' category:' + self.category
f.write(course_num.strip() + " | " + course_name.replace("|","") + " | " + url + " | " + describe + "\n")
def get_storage_format(self,course_num, course_name, url, describe=""):
if url == "":
url = self.google + course_num + " " + course_name
return course_num.strip() + " | " + course_name.replace("|","") + " | " + url + " | " + describe
def countFileLineNum(self, file_name):
if os.path.exists(file_name):
line_count = len(open(file_name,'rU').readlines())
return line_count
return 0
def truncateUrlData(self, dir_name):
print "truncateUrlData ...."
self.create_dir_by_file_name(get_url_file_name(dir_name))
f = open(get_url_file_name(dir_name), "w+")
f.truncate()
f.close
def delZh(self, text):
if isinstance(text, unicode):
list_u = self.zh_re.findall(text)
if len(list_u) > 0 :
last_ele = list_u[len(list_u) - 1]
last_pos = text.find(last_ele)
first_pos = text.find(list_u[0])
title = ""
if first_pos == 0:
title = text[last_pos + len(last_ele):]
else:
title = text[0:first_pos] + text[last_pos + len(last_ele):].strip()
if title.find("|") != -1:
title = title.replace("|", "").strip()
return title
return text
def getKeyValue(self, option):
value_pos = option.find("value=") + 7
return option[value_pos : option.find('"', value_pos)], option[option.find(">") + 1 : option.find("</", 2)].replace("&", "").replace("\n", "").strip()
| mit | -5,365,539,054,781,821,000 | 34.524887 | 162 | 0.515858 | false |
MuckRock/muckrock | muckrock/organization/tests/test_models.py | 1 | 8505 | """
Tests the models of the organization application
"""
# Django
from django.test import TestCase
# Standard Library
from datetime import date
# Third Party
from nose.tools import assert_false, assert_raises, assert_true, eq_
# MuckRock
from muckrock.core.factories import UserFactory
from muckrock.foia.exceptions import InsufficientRequestsError
from muckrock.organization.factories import (
EntitlementFactory,
FreeEntitlementFactory,
MembershipFactory,
OrganizationEntitlementFactory,
OrganizationFactory,
)
class TestOrganization(TestCase):
"""Tests for Organization methods"""
def test_has_member(self):
"""Test has_member method"""
org = OrganizationFactory()
users = UserFactory.create_batch(2)
MembershipFactory(user=users[0], organization=org)
assert_true(org.has_member(users[0]))
assert_false(org.has_member(users[1]))
def test_has_admin(self):
"""Test has_admin method"""
org = OrganizationFactory()
users = UserFactory.create_batch(2)
MembershipFactory(user=users[0], organization=org, admin=True)
MembershipFactory(user=users[1], organization=org, admin=False)
assert_true(org.has_admin(users[0]))
assert_false(org.has_admin(users[1]))
def test_make_requests(self):
"""Test Org make_requests method"""
org = OrganizationFactory(monthly_requests=10, number_requests=10)
request_count = org.make_requests(5)
org.refresh_from_db()
eq_(request_count, {"monthly": 5, "regular": 0})
eq_(org.monthly_requests, 5)
eq_(org.number_requests, 10)
request_count = org.make_requests(10)
org.refresh_from_db()
eq_(request_count, {"monthly": 5, "regular": 5})
eq_(org.monthly_requests, 0)
eq_(org.number_requests, 5)
request_count = org.make_requests(4)
org.refresh_from_db()
eq_(request_count, {"monthly": 0, "regular": 4})
eq_(org.monthly_requests, 0)
eq_(org.number_requests, 1)
with assert_raises(InsufficientRequestsError):
request_count = org.make_requests(2)
org.refresh_from_db()
eq_(org.monthly_requests, 0)
eq_(org.number_requests, 1)
def ent_json(entitlement, date_update):
"""Helper function for serializing entitlement data"""
return {
"name": entitlement.name,
"slug": entitlement.slug,
"description": entitlement.description,
"resources": entitlement.resources,
"date_update": date_update,
}
class TestSquareletUpdateData(TestCase):
"""Test cases for updating organization data from squarelet"""
def test_create_subscription(self):
"""Create a new subscription"""
ent = OrganizationEntitlementFactory()
organization = OrganizationFactory()
organization.update_data(
{
"name": organization.name,
"slug": organization.slug,
"individual": False,
"private": False,
"entitlements": [ent_json(ent, date(2019, 2, 21))],
"max_users": 5,
"card": "",
}
)
organization.refresh_from_db()
eq_(organization.requests_per_month, 50)
eq_(organization.monthly_requests, 50)
def test_cancel_subscription(self):
"""Cancel a subscription"""
ent = FreeEntitlementFactory()
organization = OrganizationFactory(
entitlement=OrganizationEntitlementFactory(),
date_update=date(2019, 2, 21),
requests_per_month=50,
monthly_requests=33,
)
organization.update_data(
{
"name": organization.name,
"slug": organization.slug,
"individual": False,
"private": False,
"entitlements": [ent_json(ent, None)],
"max_users": 5,
"card": "",
}
)
organization.refresh_from_db()
eq_(organization.requests_per_month, 0)
eq_(organization.monthly_requests, 0)
def test_upgrade_subscription(self):
"""Upgrade a subscription"""
ent = EntitlementFactory(
name="Plus", resources=dict(minimum_users=5, base_requests=100)
)
organization = OrganizationFactory(
entitlement=OrganizationEntitlementFactory(),
date_update=date(2019, 2, 21),
requests_per_month=50,
monthly_requests=33,
)
organization.update_data(
{
"name": organization.name,
"slug": organization.slug,
"individual": False,
"private": False,
"entitlements": [ent_json(ent, date(2019, 2, 21))],
"max_users": 5,
"card": "",
}
)
organization.refresh_from_db()
eq_(organization.requests_per_month, 100)
eq_(organization.monthly_requests, 83)
def test_downgrade_subscription(self):
"""Downgrade a subscription"""
# Downgrades only happen at monthly restore
ent = OrganizationEntitlementFactory()
plus = EntitlementFactory(
name="Plus", resources=dict(minimum_users=5, base_requests=100)
)
organization = OrganizationFactory(
entitlement=plus,
date_update=date(2019, 2, 21),
requests_per_month=100,
monthly_requests=83,
)
organization.update_data(
{
"name": organization.name,
"slug": organization.slug,
"individual": False,
"private": False,
"entitlements": [ent_json(ent, date(2019, 3, 21))],
"max_users": 5,
"card": "",
}
)
organization.refresh_from_db()
eq_(organization.requests_per_month, 50)
eq_(organization.monthly_requests, 50)
def test_increase_max_users(self):
"""Increase max users"""
ent = OrganizationEntitlementFactory()
organization = OrganizationFactory(
entitlement=ent,
date_update=date(2019, 2, 21),
requests_per_month=50,
monthly_requests=33,
)
organization.update_data(
{
"name": organization.name,
"slug": organization.slug,
"individual": False,
"private": False,
"entitlements": [ent_json(ent, date(2019, 2, 21))],
"max_users": 9,
"card": "",
}
)
organization.refresh_from_db()
eq_(organization.requests_per_month, 70)
eq_(organization.monthly_requests, 53)
def test_decrease_max_users(self):
"""Decrease max users"""
ent = OrganizationEntitlementFactory()
organization = OrganizationFactory(
entitlement=ent,
date_update=date(2019, 2, 21),
requests_per_month=75,
monthly_requests=33,
)
organization.update_data(
{
"name": organization.name,
"slug": organization.slug,
"individual": False,
"private": False,
"entitlements": [ent_json(ent, date(2019, 2, 21))],
"max_users": 7,
"card": "",
}
)
organization.refresh_from_db()
eq_(organization.requests_per_month, 60)
eq_(organization.monthly_requests, 33)
def test_monthly_restore(self):
"""Monthly restore"""
ent = OrganizationEntitlementFactory()
organization = OrganizationFactory(
entitlement=ent,
date_update=date(2019, 2, 21),
requests_per_month=50,
monthly_requests=33,
)
organization.update_data(
{
"name": organization.name,
"slug": organization.slug,
"individual": False,
"private": False,
"entitlements": [ent_json(ent, date(2019, 3, 21))],
"max_users": 5,
"card": "",
}
)
organization.refresh_from_db()
eq_(organization.requests_per_month, 50)
eq_(organization.monthly_requests, 50)
| agpl-3.0 | -5,786,665,629,773,698,000 | 31.965116 | 75 | 0.554968 | false |
Grokzen/redisco | redisco/containers.py | 1 | 34087 | # -*- coding: utf-8 -*-
# doctest: +ELLIPSIS
import collections
from . import default_expire_time
def _parse_values(values):
(_values,) = values if len(values) == 1 else (None,)
if _values and type(_values) == type([]):
return _values
return values
class Container(object):
"""
Base class for all containers. This class should not
be used and does not provide anything except the ``db``
member.
:members:
"""
def __init__(self, key, db=None, pipeline=None):
self._db = db
self.key = key
self.pipeline = pipeline
def clear(self):
"""
Remove the container from the redis storage
>>> s = Set('test')
>>> s.add('1')
1
>>> s.clear()
>>> s.members
set()
"""
del self.db[self.key]
def set_expire(self, time=None):
"""
Allow the key to expire after ``time`` seconds.
>>> s = Set("test")
>>> s.add("1")
1
>>> s.set_expire(1)
>>> # from time import sleep
>>> # sleep(1)
>>> # s.members
# set([])
>>> s.clear()
:param time: time expressed in seconds. If time is not specified, then ``default_expire_time`` will be used.
:rtype: None
"""
if time is None:
time = default_expire_time
self.db.expire(self.key, time)
@property
def db(self):
if self.pipeline is not None:
return self.pipeline
if self._db is not None:
return self._db
if hasattr(self, 'db_cache') and self.db_cache:
return self.db_cache
else:
from redisco import connection
self.db_cache = connection
return self.db_cache
class Set(Container):
"""
.. default-domain:: set
This class represent a Set in redis.
"""
def __repr__(self):
return "<%s '%s' %s>" % (self.__class__.__name__, self.key,
self.members)
def sadd(self, *values):
"""
Add the specified members to the Set.
:param values: a list of values or a simple value.
:rtype: integer representing the number of value added to the set.
>>> s = Set("test")
>>> s.clear()
>>> s.add(["1", "2", "3"])
3
>>> s.add(["4"])
1
>>> import testfixtures
>>> testfixtures.compare(s.members, {'4', '3', '2', '1'})
<identity>
>>> s.clear()
"""
return self.db.sadd(self.key, *_parse_values(values))
def srem(self, *values):
"""
Remove the values from the Set if they are present.
:param values: a list of values or a simple value.
:rtype: boolean indicating if the values have been removed.
>>> s = Set("test")
>>> s.add(["1", "2", "3"])
3
>>> s.srem(["1", "3"])
2
>>> s.clear()
"""
return self.db.srem(self.key, *_parse_values(values))
def spop(self):
"""
Remove and return (pop) a random element from the Set.
:rtype: String representing the value poped.
>>> s = Set("test")
>>> s.add("1")
1
>>> s.spop()
'1'
>>> s.members
set()
"""
return self.db.spop(self.key)
#def __repr__(self):
# return "<%s '%s' %s>" % (self.__class__.__name__, self.key,
# self.members)
def isdisjoint(self, other):
"""
Return True if the set has no elements in common with other.
:param other: another ``Set``
:rtype: boolean
>>> s1 = Set("key1")
>>> s2 = Set("key2")
>>> s1.add(['a', 'b', 'c'])
3
>>> s2.add(['c', 'd', 'e'])
3
>>> s1.isdisjoint(s2)
False
>>> s1.clear()
>>> s2.clear()
"""
return not bool(self.db.sinter([self.key, other.key]))
def issubset(self, other_set):
"""
Test whether every element in the set is in other.
:param other_set: another ``Set`` to compare to.
>>> s1 = Set("key1")
>>> s2 = Set("key2")
>>> s1.add(['a', 'b', 'c'])
3
>>> s2.add('b')
1
>>> s2.issubset(s1)
True
>>> s1.clear()
>>> s2.clear()
"""
return self <= other_set
def __le__(self, other_set):
return self.db.sinter([self.key, other_set.key]) == self.all()
def __lt__(self, other_set):
"""Test whether the set is a true subset of other."""
return self <= other_set and self != other_set
def __eq__(self, other_set):
"""
Test equality of:
1. keys
2. members
"""
if other_set.key == self.key:
return True
slen, olen = len(self), len(other_set)
if olen == slen:
return self.members == other_set.members
else:
return False
def __ne__(self, other_set):
return not self.__eq__(other_set)
def issuperset(self, other_set):
"""
Test whether every element in other is in the set.
:param other_set: another ``Set`` to compare to.
>>> s1 = Set("key1")
>>> s2 = Set("key2")
>>> s1.add(['a', 'b', 'c'])
3
>>> s2.add('b')
1
>>> s1.issuperset(s2)
True
>>> s1.clear()
>>> s2.clear()
"""
return self >= other_set
def __ge__(self, other_set):
"""Test whether every element in other is in the set."""
return self.db.sinter([self.key, other_set.key]) == other_set.all()
def __gt__(self, other_set):
"""Test whether the set is a true superset of other."""
return self >= other_set and self != other_set
# SET Operations
def union(self, key, *other_sets):
"""
Return a new ``Set`` representing the union of *n* sets.
:param key: String representing the key where to store the result (the union)
:param other_sets: list of other ``Set``.
:rtype: ``Set``
>>> s1 = Set('key1')
>>> s2 = Set('key2')
>>> s1.add(['a', 'b', 'c'])
3
>>> s2.add(['d', 'e'])
2
>>> s3 = s1.union('key3', s2)
>>> s3.key
'key3'
>>> import testfixtures
>>> testfixtures.compare(s3.members, {'a', 'c', 'b', 'e', 'd'})
<identity>
>>> s1.clear()
>>> s2.clear()
>>> s3.clear()
"""
if not isinstance(key, str):
raise ValueError("Expect a string as key")
key = str(key)
self.db.sunionstore(key, [self.key] + [o.key for o in other_sets])
return Set(key)
def intersection(self, key, *other_sets):
"""
Return a new ``Set`` representing the intersection of *n* sets.
:param key: String representing the key where to store the result (the union)
:param other_sets: list of other ``Set``.
:rtype: Set
>>> s1 = Set('key1')
>>> s2 = Set('key2')
>>> s1.add(['a', 'b', 'c'])
3
>>> s2.add(['c', 'e'])
2
>>> s3 = s1.intersection('key3', s2)
>>> s3.key
'key3'
>>> s3.members
{'c'}
>>> s1.clear()
>>> s2.clear()
>>> s3.clear()
"""
if not isinstance(key, str):
raise ValueError("Expect a string as key")
key = str(key)
self.db.sinterstore(key, [self.key] + [o.key for o in other_sets])
return Set(key)
def difference(self, key, *other_sets):
"""
Return a new ``Set`` representing the difference of *n* sets.
:param key: String representing the key where to store the result (the union)
:param other_sets: list of other ``Set``.
:rtype: Set
>>> s1 = Set('key1')
>>> s2 = Set('key2')
>>> s1.add(['a', 'b', 'c'])
3
>>> s2.add(['c', 'e'])
2
>>> s3 = s1.difference('key3', s2)
>>> s3.key
'key3'
>>> import testfixtures
>>> testfixtures.compare(s3.members, {'a', 'b'})
<identity>
>>> s1.clear()
>>> s2.clear()
>>> s3.clear()
"""
if not isinstance(key, str):
raise ValueError("Expect a string as key")
key = str(key)
self.db.sdiffstore(key, [self.key] + [o.key for o in other_sets])
return Set(key)
def update(self, *other_sets):
"""Update the set, adding elements from all other_sets.
:param other_sets: list of ``Set``
:rtype: None
"""
self.db.sunionstore(self.key, [self.key] + [o.key for o in other_sets])
def __ior__(self, other_set):
self.db.sunionstore(self.key, [self.key, other_set.key])
return self
def intersection_update(self, *other_sets):
"""
Update the set, keeping only elements found in it and all other_sets.
:param other_sets: list of ``Set``
:rtype: None
"""
        self.db.sinterstore(self.key, [self.key] + [o.key for o in other_sets])
def __iand__(self, other_set):
self.db.sinterstore(self.key, [self.key, other_set.key])
return self
def difference_update(self, *other_sets):
"""
Update the set, removing elements found in others.
:param other_sets: list of ``Set``
:rtype: None
"""
        self.db.sdiffstore(self.key, [self.key] + [o.key for o in other_sets])
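    # Illustrative usage of the in-place operations above (assumes a reachable
    # Redis connection; keys and values are placeholders):
    #   s1 = Set('s1'); s1.add(['a', 'b', 'c'])
    #   s2 = Set('s2'); s2.add(['b', 'c', 'd'])
    #   s1.update(s2)                # s1.members == {'a', 'b', 'c', 'd'}
    #   s1.intersection_update(s2)   # s1.members == {'b', 'c', 'd'}
    #   s1.difference_update(s2)     # s1.members == set()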
def __isub__(self, other_set):
self.db.sdiffstore(self.key, [self.key, other_set.key])
return self
def all(self):
return self.db.smembers(self.key)
members = property(all)
"""
return the real content of the Set.
"""
def copy(self, key):
"""
Copy the set to another key and return the new Set.
.. WARNING::
If the new key already contains a value, it will be overwritten.
"""
copy = Set(key=key, db=self.db)
copy.clear()
copy |= self
return copy
def __iter__(self):
return self.members.__iter__()
def sinter(self, *other_sets):
"""
        Performs an intersection between Sets and returns the *RAW* result.
.. NOTE::
          This function returns an actual ``set`` object (from python) and not a ``Set``. See func:``intersection``.
"""
return self.db.sinter([self.key] + [s.key for s in other_sets])
def sunion(self, *other_sets):
"""
Performs a union between two sets and returns the *RAW* result.
.. NOTE::
          This function returns an actual ``set`` object (from python) and not a ``Set``.
"""
return self.db.sunion([self.key] + [s.key for s in other_sets])
def sdiff(self, *other_sets):
"""
Performs a difference between two sets and returns the *RAW* result.
.. NOTE::
          This function returns an actual ``set`` object (from python) and not a ``Set``.
See function difference.
"""
return self.db.sdiff([self.key] + [s.key for s in other_sets])
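    # Note the split in this API (illustrative; keys are placeholders):
    # sinter/sunion/sdiff return plain Python sets, while
    # intersection/union/difference store the result under a new key and
    # return a redisco Set, e.g.:
    #   s1.sdiff(s2)                # -> built-in set, nothing stored in Redis
    #   s1.difference('diff', s2)   # -> Set('diff') persisted in Redis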
def scard(self):
"""
Returns the cardinality of the Set.
:rtype: String containing the cardinality.
"""
return self.db.scard(self.key)
def sismember(self, value):
"""
Return ``True`` if the provided value is in the ``Set``.
"""
return self.db.sismember(self.key, value)
def srandmember(self):
"""
Return a random member of the set.
>>> s = Set("test")
>>> s.add(['a', 'b', 'c'])
3
>>> s.srandmember() # doctest: +ELLIPSIS
'...'
>>> # 'a', 'b' or 'c'
"""
return self.db.srandmember(self.key)
add = sadd
"""see sadd"""
pop = spop
"""see spop"""
remove = srem
"""see srem"""
__contains__ = sismember
__len__ = scard
class List(Container):
"""
This class represent a list object as seen in redis.
"""
def all(self):
"""
Returns all items in the list.
"""
return self.lrange(0, -1)
members = property(all)
"""Return all items in the list."""
def llen(self):
"""
Returns the length of the list.
"""
return self.db.llen(self.key)
__len__ = llen
def __getitem__(self, index):
if isinstance(index, int):
return self.lindex(index)
elif isinstance(index, slice):
indices = index.indices(len(self))
return self.lrange(indices[0], indices[1] - 1)
else:
raise TypeError
def __setitem__(self, index, value):
self.lset(index, value)
def lrange(self, start, stop):
"""
Returns a range of items.
:param start: integer representing the start index of the range
:param stop: integer representing the size of the list.
>>> l = List("test")
>>> l.push(['a', 'b', 'c', 'd'])
4
>>> l.lrange(1, 2)
['b', 'c']
>>> l.clear()
"""
return self.db.lrange(self.key, start, stop)
def lpush(self, *values):
"""
Push the value into the list from the *left* side
:param values: a list of values or single value to push
:rtype: long representing the number of values pushed.
>>> l = List("test")
>>> l.lpush(['a', 'b'])
2
>>> l.clear()
"""
return self.db.lpush(self.key, *_parse_values(values))
def rpush(self, *values):
"""
Push the value into the list from the *right* side
:param values: a list of values or single value to push
:rtype: long representing the size of the list.
>>> l = List("test")
>>> l.lpush(['a', 'b'])
2
>>> l.rpush(['c', 'd'])
4
>>> l.members
['b', 'a', 'c', 'd']
>>> l.clear()
"""
return self.db.rpush(self.key, *_parse_values(values))
def extend(self, iterable):
"""
Extend list by appending elements from the iterable.
:param iterable: an iterable objects.
"""
self.rpush(*[e for e in iterable])
def count(self, value):
"""
Return number of occurrences of value.
        :param value: a value that *may* be contained in the list
"""
return self.members.count(value)
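    # Illustrative example (assumes a reachable Redis connection):
    #   l = List('example')
    #   l.extend(['a', 'b', 'a'])
    #   l.count('a')   # == 2; counting happens client-side on the fetched members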
def lpop(self):
"""
Pop the first object from the left.
:return: the popped value.
"""
return self.db.lpop(self.key)
def rpop(self):
"""
Pop the first object from the right.
:return: the popped value.
"""
return self.db.rpop(self.key)
def rpoplpush(self, key):
"""
Remove an element from the list,
atomically add it to the head of the list indicated by key
:param key: the key of the list receiving the popped value.
:return: the popped (and pushed) value
>>> l = List('list1')
>>> l.push(['a', 'b', 'c'])
3
>>> l.rpoplpush('list2')
'c'
>>> l2 = List('list2')
>>> l2.members
['c']
>>> l.clear()
>>> l2.clear()
"""
return self.db.rpoplpush(self.key, key)
def lrem(self, value, num=1):
"""
Remove first occurrence of value.
:return: 1 if the value has been removed, 0 otherwise
"""
return self.db.lrem(self.key, num, value)
def reverse(self):
"""
Reverse the list in place.
:return: None
"""
r = self[:]
r.reverse()
self.clear()
self.extend(r)
def copy(self, key):
"""Copy the list to a new list.
        .. WARNING::
If destination key already contains a value, it clears it before copying.
"""
copy = List(key, self.db)
copy.clear()
copy.extend(self)
return copy
def ltrim(self, start, end):
"""
Trim the list from start to end.
:return: None
"""
return self.db.ltrim(self.key, start, end)
def lindex(self, idx):
"""
Return the value at the index *idx*
:param idx: the index to fetch the value.
:return: the value or None if out of range.
"""
return self.db.lindex(self.key, idx)
def lset(self, idx, value=0):
"""
Set the value in the list at index *idx*
        :return: True if the operation succeeded.
>>> l = List('test')
>>> l.push(['a', 'b', 'c'])
3
>>> l.lset(0, 'e')
True
>>> l.members
['e', 'b', 'c']
>>> l.clear()
"""
return self.db.lset(self.key, idx, value)
def __iter__(self):
return self.members.__iter__()
def __repr__(self):
return "<%s '%s' %s>" % (self.__class__.__name__, self.key, self.members)
__len__ = llen
remove = lrem
trim = ltrim
shift = lpop
unshift = lpush
pop = rpop
pop_onto = rpoplpush
push = rpush
append = rpush
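# Illustrative usage (added for clarity; not part of the original library):
# note that slicing maps onto LRANGE, so ``l[0:2]`` fetches the first two
# items.  Assumes a reachable Redis instance.
def _example_list_usage():
    l = List("example:list")
    l.push(['a', 'b', 'c', 'd'])      # rpush; returns the new length (4)
    assert l[0] == 'a'                # __getitem__ -> lindex
    assert l[0:2] == ['a', 'b']       # slice -> lrange(0, 1)
    l[0] = 'z'                        # __setitem__ -> lset
    assert l.members == ['z', 'b', 'c', 'd']
    l.clear()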
class TypedList(object):
"""Create a container to store a list of objects in Redis.
Arguments:
key -- the Redis key this container is stored at
target_type -- can be a Python object or a redisco model class.
Optional Arguments:
type_args -- additional args to pass to type constructor (tuple)
type_kwargs -- additional kwargs to pass to type constructor (dict)
If target_type is not a redisco model class, the target_type should
    also be a callable that casts the (string) value of a list element into
target_type. E.g. str, int, float -- using this format:
target_type(string_val_of_list_elem, *type_args, **type_kwargs)
target_type also accepts a string that refers to a redisco model.
"""
def __init__(self, key, target_type, type_args=[], type_kwargs={}, **kwargs):
self.list = List(key, **kwargs)
self.klass = self.value_type(target_type)
self._klass_args = type_args
self._klass_kwargs = type_kwargs
from .models.base import Model
self._redisco_model = issubclass(self.klass, Model)
def value_type(self, target_type):
if isinstance(target_type, str):
t = target_type
from .models.base import get_model_from_key
target_type = get_model_from_key(target_type)
if target_type is None:
raise ValueError("Unknown Redisco class %s" % t)
return target_type
def typecast_item(self, value):
if self._redisco_model:
return self.klass.objects.get_by_id(value)
else:
return self.klass(value, *self._klass_args, **self._klass_kwargs)
def typecast_iter(self, values):
if self._redisco_model:
return [o for o in [self.klass.objects.get_by_id(v) for v in values] if o is not None]
else:
return [self.klass(v, *self._klass_args, **self._klass_kwargs) for v in values]
def all(self):
"""Returns all items in the list."""
return self.typecast_iter(self.list.all())
def __len__(self):
return len(self.list)
def __getitem__(self, index):
val = self.list[index]
if isinstance(index, slice):
return self.typecast_iter(val)
else:
return self.typecast_item(val)
def typecast_stor(self, value):
if self._redisco_model:
return value.id
else:
return value
def append(self, value):
self.list.append(self.typecast_stor(value))
def extend(self, iter):
self.list.extend(map(lambda i: self.typecast_stor(i), iter))
def __setitem__(self, index, value):
self.list[index] = self.typecast_stor(value)
def __iter__(self):
for i in range(len(self.list)):
yield self[i]
def __repr__(self):
return repr(self.typecast_iter(self.list))
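# Illustrative usage of TypedList (added for clarity; not part of the original
# library): values are stored as strings in Redis and cast back through
# ``target_type`` on the way out.  Assumes a reachable Redis instance.
def _example_typed_list_usage():
    counts = TypedList("example:counts", int)
    counts.extend([1, 2, 3])
    assert counts[0] == 1             # cast back to int on read
    assert list(counts) == [1, 2, 3]
    counts.append(4)
    counts.list.clear()               # cleanup via the wrapped List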
class SortedSet(Container):
"""
This class represents a SortedSet in redis.
    Use it when you need the members of your set ordered by a score.
"""
def __getitem__(self, index):
if isinstance(index, slice):
return self.zrange(index.start, index.stop)
else:
return self.zrange(index, index)[0]
def score(self, member):
"""
Returns the score of member.
"""
return self.zscore(member)
def __contains__(self, val):
return self.zscore(val) is not None
@property
def members(self):
"""
Returns the members of the set.
"""
return self.zrange(0, -1)
@property
def revmembers(self):
"""
Returns the members of the set in reverse.
"""
return self.zrevrange(0, -1)
def __iter__(self):
return self.members.__iter__()
def __reversed__(self):
return self.revmembers.__iter__()
# def __repr__(self):
# return "<%s '%s' %s>" % (self.__class__.__name__, self.key,
# self.members)
@property
def _min_score(self):
"""
Returns the minimum score in the SortedSet.
"""
try:
return self.zscore(self.__getitem__(0))
except IndexError:
return None
@property
def _max_score(self):
"""
Returns the maximum score in the SortedSet.
"""
try:
            return self.zscore(self.__getitem__(-1))
except IndexError:
return None
def lt(self, v, limit=None, offset=None):
"""
Returns the list of the members of the set that have scores
less than v.
:param v: the score to compare to.
:param limit: limit the result to *n* elements
:param offset: Skip the first *n* elements
"""
if limit is not None and offset is None:
offset = 0
return self.zrangebyscore("-inf", "(%f" % v,
start=offset, num=limit)
def le(self, v, limit=None, offset=None):
"""
Returns the list of the members of the set that have scores
less than or equal to v.
:param v: the score to compare to.
:param limit: limit the result to *n* elements
:param offset: Skip the first *n* elements
"""
if limit is not None and offset is None:
offset = 0
return self.zrangebyscore("-inf", v,
start=offset, num=limit)
def gt(self, v, limit=None, offset=None, withscores=False):
"""Returns the list of the members of the set that have scores
greater than v.
"""
if limit is not None and offset is None:
offset = 0
return self.zrangebyscore("(%f" % v, "+inf",
start=offset, num=limit, withscores=withscores)
def ge(self, v, limit=None, offset=None, withscores=False):
"""Returns the list of the members of the set that have scores
greater than or equal to v.
:param v: the score to compare to.
:param limit: limit the result to *n* elements
:param offset: Skip the first *n* elements
"""
if limit is not None and offset is None:
offset = 0
return self.zrangebyscore("%f" % v, "+inf",
start=offset, num=limit, withscores=withscores)
def between(self, min, max, limit=None, offset=None):
"""
Returns the list of the members of the set that have scores
between min and max.
.. Note::
The min and max are inclusive when comparing the values.
:param min: the minimum score to compare to.
:param max: the maximum score to compare to.
:param limit: limit the result to *n* elements
:param offset: Skip the first *n* elements
>>> s = SortedSet("foo")
>>> s.add('a', 10)
1
>>> s.add('b', 20)
1
>>> s.add('c', 30)
1
>>> s.between(20, 30)
['b', 'c']
>>> s.clear()
"""
if limit is not None and offset is None:
offset = 0
return self.zrangebyscore(min, max,
start=offset, num=limit)
def zadd(self, members, score=1):
"""
Add members in the set and assign them the score.
:param members: a list of item or a single item
        :param score: the score to assign to the item(s)
>>> s = SortedSet("foo")
>>> s.add('a', 10)
1
>>> s.add('b', 20)
1
>>> s.clear()
"""
_members = []
if not isinstance(members, dict):
_members = [score, members]
else:
for member, score in members.items():
_members += [score, member]
return self.db.zadd(self.key, *_members)
def zrem(self, *values):
"""
Remove the values from the SortedSet
        :return: the number of values removed from the SortedSet
>>> s = SortedSet('foo')
>>> s.add('a', 10)
1
>>> s.zrem('a')
1
>>> s.members
[]
>>> s.clear()
"""
return self.db.zrem(self.key, *_parse_values(values))
def zincrby(self, att, value=1):
"""
Increment the score of the item by ``value``
:param att: the member to increment
:param value: the value to add to the current score
:returns: the new score of the member
>>> s = SortedSet("foo")
>>> s.add('a', 10)
1
>>> s.zincrby("a", 10)
20.0
>>> s.clear()
"""
return self.db.zincrby(self.key, att, value)
def zrevrank(self, member):
"""
Returns the ranking in reverse order for the member
>>> s = SortedSet("foo")
>>> s.add('a', 10)
1
>>> s.add('b', 20)
1
>>> s.revrank('a')
1
>>> s.clear()
"""
return self.db.zrevrank(self.key, member)
def zrange(self, start, stop, withscores=False):
"""
        Returns all the elements whose indices lie between ``start`` and
        ``stop``, both included.
        :param withscores: True if the score of the elements should
also be returned
>>> s = SortedSet("foo")
>>> s.add('a', 10)
1
>>> s.add('b', 20)
1
>>> s.add('c', 30)
1
>>> s.zrange(1, 3)
['b', 'c']
>>> s.zrange(1, 3, withscores=True)
[('b', 20.0), ('c', 30.0)]
>>> s.clear()
"""
return self.db.zrange(self.key, start, stop, withscores=withscores)
def zrevrange(self, start, end, **kwargs):
"""
Returns the range of items included between ``start`` and ``stop``
in reverse order (from high to low)
>>> s = SortedSet("foo")
>>> s.add('a', 10)
1
>>> s.add('b', 20)
1
>>> s.add('c', 30)
1
>>> s.zrevrange(1, 2)
['b', 'a']
>>> s.clear()
"""
return self.db.zrevrange(self.key, start, end, **kwargs)
def zrangebyscore(self, min, max, **kwargs):
"""
Returns the range of elements included between the scores (min and max)
>>> s = SortedSet("foo")
>>> s.add('a', 10)
1
>>> s.add('b', 20)
1
>>> s.add('c', 30)
1
>>> s.zrangebyscore(20, 30)
['b', 'c']
>>> s.clear()
"""
return self.db.zrangebyscore(self.key, min, max, **kwargs)
def zrevrangebyscore(self, max, min, **kwargs):
"""
        Returns the range of elements with scores between ``min`` and ``max``,
        in reverse order (from high to low)
>>> s = SortedSet("foo")
>>> s.add('a', 10)
1
>>> s.add('b', 20)
1
>>> s.add('c', 30)
1
        >>> s.zrevrangebyscore(20, 20)
['b']
>>> s.clear()
"""
return self.db.zrevrangebyscore(self.key, max, min, **kwargs)
def zcard(self):
"""
Returns the cardinality of the SortedSet.
>>> s = SortedSet("foo")
>>> s.add("a", 1)
1
>>> s.add("b", 2)
1
>>> s.add("c", 3)
1
>>> s.zcard()
3
>>> s.clear()
"""
return self.db.zcard(self.key)
def zscore(self, elem):
"""
Return the score of an element
>>> s = SortedSet("foo")
>>> s.add("a", 10)
1
>>> s.score("a")
10.0
>>> s.clear()
"""
return self.db.zscore(self.key, elem)
def zremrangebyrank(self, start, stop):
"""
        Remove a range of elements between rank ``start`` and
        ``stop``, both included.
        :return: the number of items deleted
>>> s = SortedSet("foo")
>>> s.add("a", 10)
1
>>> s.add("b", 20)
1
>>> s.add("c", 30)
1
>>> s.zremrangebyrank(1, 2)
2
>>> s.members
['a']
>>> s.clear()
"""
return self.db.zremrangebyrank(self.key, start, stop)
def zremrangebyscore(self, min_value, max_value):
"""
        Remove a range of elements with scores between ``min_value`` and
        ``max_value``, both included.
:returns: the number of items deleted.
>>> s = SortedSet("foo")
>>> s.add("a", 10)
1
>>> s.add("b", 20)
1
>>> s.add("c", 30)
1
>>> s.zremrangebyscore(10, 20)
2
>>> s.members
['c']
>>> s.clear()
"""
return self.db.zremrangebyscore(self.key, min_value, max_value)
def zrank(self, elem):
"""
Returns the rank of the element.
>>> s = SortedSet("foo")
>>> s.add("a", 10)
1
>>> s.zrank("a")
0
>>> s.clear()
"""
return self.db.zrank(self.key, elem)
def eq(self, value):
"""
Returns the elements that have ``value`` for score.
"""
return self.zrangebyscore(value, value)
__len__ = zcard
revrank = zrevrank
score = zscore
rank = zrank
incr_by = zincrby
add = zadd
remove = zrem
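# Illustrative usage (added for clarity; not part of the original library): the
# comparison helpers (lt/le/gt/ge/between) all reduce to ZRANGEBYSCORE calls.
# Assumes a reachable Redis instance.
def _example_sorted_set_usage():
    s = SortedSet("example:zset")
    s.add('a', 10)                    # zadd
    s.add('b', 20)
    s.add('c', 30)
    assert s.lt(30) == ['a', 'b']     # scores strictly below 30
    assert s.ge(20) == ['b', 'c']     # scores >= 20
    assert s.between(10, 20) == ['a', 'b']
    assert s.rank('c') == 2           # zrank
    s.clear()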
class NonPersistentList(object):
def __init__(self, l):
self._list = l
@property
def members(self):
return self._list
def __iter__(self):
return iter(self.members)
def __len__(self):
return len(self._list)
class Hash(Container, collections.MutableMapping):
def __iter__(self):
return self.hgetall().__iter__()
def __repr__(self):
return "<%s '%s' %s>" % (self.__class__.__name__,
self.key, self.hgetall())
def _set_dict(self, new_dict):
self.clear()
self.update(new_dict)
def hlen(self):
"""
Returns the number of elements in the Hash.
"""
return self.db.hlen(self.key)
def hset(self, member, value):
"""
Set ``member`` in the Hash at ``value``.
:returns: 1 if member is a new field and the value has been
stored, 0 if the field existed and the value has been
updated.
>>> h = Hash("foo")
>>> h.hset("bar", "value")
1
>>> h.clear()
"""
return self.db.hset(self.key, member, value)
def hdel(self, *members):
"""
        Delete one or more hash fields.
        :param members: one or more fields to remove.
:return: the number of fields that were removed
>>> h = Hash("foo")
>>> h.hset("bar", "value")
1
>>> h.hdel("bar")
1
>>> h.clear()
"""
return self.db.hdel(self.key, *_parse_values(members))
def hkeys(self):
"""
        Returns all field names in the Hash
"""
return self.db.hkeys(self.key)
def hgetall(self):
"""
Returns all the fields and values in the Hash.
:rtype: dict
"""
return self.db.hgetall(self.key)
def hvals(self):
"""
Returns all the values in the Hash
:rtype: list
"""
return self.db.hvals(self.key)
def hget(self, field):
"""
Returns the value stored in the field, None if the field doesn't exist.
"""
return self.db.hget(self.key, field)
def hexists(self, field):
"""
Returns ``True`` if the field exists, ``False`` otherwise.
"""
return self.db.hexists(self.key, field)
def hincrby(self, field, increment=1):
"""
Increment the value of the field.
:returns: the value of the field after incrementation
>>> h = Hash("foo")
>>> h.hincrby("bar", 10)
10
>>> h.hincrby("bar", 2)
12
>>> h.clear()
"""
return self.db.hincrby(self.key, field, increment)
def hmget(self, fields):
"""
Returns the values stored in the fields.
"""
return self.db.hmget(self.key, fields)
def hmset(self, mapping):
"""
Sets or updates the fields with their corresponding values.
:param mapping: a dict with keys and values
"""
return self.db.hmset(self.key, mapping)
keys = hkeys
values = hvals
_get_dict = hgetall
__getitem__ = hget
__setitem__ = hset
__delitem__ = hdel
__len__ = hlen
__contains__ = hexists
dict = property(_get_dict, _set_dict)
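# Illustrative usage (added for clarity; not part of the original library):
# thanks to the MutableMapping mixin and the aliases above, a Hash behaves much
# like a Python dict backed by HSET/HGET.  Assumes a reachable Redis instance.
def _example_hash_usage():
    h = Hash("example:hash")
    h['name'] = 'redis'               # __setitem__ -> hset
    h.update({'kind': 'kv-store'})    # MutableMapping.update -> hset per field
    assert h['name'] == 'redis'       # __getitem__ -> hget
    assert 'kind' in h                # __contains__ -> hexists
    assert sorted(h.keys()) == ['kind', 'name']
    h.clear()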
| mit | 7,334,591,816,882,946,000 | 25.160399 | 117 | 0.503476 | false |
FirmlyReality/docklet | web/webViews/cluster.py | 2 | 15450 | from flask import session, redirect, request
from webViews.view import normalView
from webViews.dockletrequest import dockletRequest
from webViews.dashboard import *
from webViews.checkname import checkname
import time, re
class addClusterView(normalView):
template_path = "addCluster.html"
@classmethod
def get(self):
masterips = dockletRequest.post_to_all()
images = dockletRequest.post("/image/list/",{},masterips[0].split("@")[0]).get("images")
desc = dockletRequest.getdesc(masterips[0].split("@")[1])
result = dockletRequest.post("/user/usageQuery/")
quota = result.get("quota")
usage = result.get("usage")
default = result.get("default")
restcpu = int(quota['cpu']) - int(usage['cpu'])
restmemory = int(quota['memory']) - int(usage['memory'])
restdisk = int(quota['disk']) - int(usage['disk'])
if restcpu >= int(default['cpu']):
defaultcpu = default['cpu']
elif restcpu <= 0:
defaultcpu = "0"
else:
defaultcpu = str(restcpu)
if restmemory >= int(default['memory']):
defaultmemory = default['memory']
elif restmemory <= 0:
defaultmemory = "0"
else:
defaultmemory = str(restmemory)
if restdisk >= int(default['disk']):
defaultdisk = default['disk']
elif restdisk <= 0:
defaultdisk = "0"
else:
defaultdisk = str(restdisk)
defaultsetting = {
'cpu': defaultcpu,
'memory': defaultmemory,
'disk': defaultdisk
}
if (result):
return self.render(self.template_path, user = session['username'],masterips = masterips, images = images, quota = quota, usage = usage, defaultsetting = defaultsetting, masterdesc=desc)
else:
self.error()
class createClusterView(normalView):
template_path = "dashboard.html"
error_path = "error.html"
@classmethod
def post(self):
masterip = self.masterip
index1 = self.image.rindex("_")
index2 = self.image[:index1].rindex("_")
checkname(self.clustername)
data = {
"clustername": self.clustername,
'imagename': self.image[:index2],
'imageowner': self.image[index2+1:index1],
'imagetype': self.image[index1+1:],
}
result = dockletRequest.post("/cluster/create/", dict(data, **(request.form)), masterip)
if(result.get('success', None) == "true"):
return redirect("/dashboard/")
#return self.render(self.template_path, user = session['username'])
else:
return self.render(self.error_path, message = result.get('message'))
class descriptionMasterView(normalView):
template_path = "description.html"
@classmethod
def get(self):
return self.render(self.template_path, description=self.desc)
class descriptionImageView(normalView):
template_path = "description.html"
@classmethod
def get(self):
masterip = self.masterip
index1 = self.image.rindex("_")
index2 = self.image[:index1].rindex("_")
data = {
"imagename": self.image[:index2],
"imageowner": self.image[index2+1:index1],
"imagetype": self.image[index1+1:]
}
result = dockletRequest.post("/image/description/", data, masterip)
if(result):
description = result.get("message")
return self.render(self.template_path, description = description)
else:
self.error()
class scaleoutView(normalView):
error_path = "error.html"
@classmethod
def post(self):
masterip = self.masterip
index1 = self.image.rindex("_")
index2 = self.image[:index1].rindex("_")
data = {
"clustername": self.clustername,
'imagename': self.image[:index2],
'imageowner': self.image[index2+1:index1],
'imagetype': self.image[index1+1:]
}
result = dockletRequest.post("/cluster/scaleout/", dict(data, **(request.form)), masterip)
if(result.get('success', None) == "true"):
return redirect("/config/")
else:
return self.render(self.error_path, message = result.get('message'))
class scaleinView(normalView):
error_path = "error.html"
@classmethod
def get(self):
masterip = self.masterip
data = {
"clustername": self.clustername,
"containername":self.containername
}
result = dockletRequest.post("/cluster/scalein/", data, masterip)
if(result.get('success', None) == "true"):
return redirect("/config/")
else:
return self.render(self.error_path, message = result.get('message'))
class listClusterView(normalView):
template_path = "listCluster.html"
@classmethod
def get(self):
masterip = self.masterip
result = dockletRequest.post("/cluster/list/", {}, masterip)
clusters = result.get("clusters")
if(result):
return self.render(self.template_path, user = session['username'], clusters = clusters)
else:
self.error()
class startClusterView(normalView):
template_path = "dashboard.html"
error_path = "error.html"
@classmethod
def get(self):
masterip = self.masterip
data = {
"clustername": self.clustername
}
result = dockletRequest.post("/cluster/start/", data, masterip)
if(result.get('success', None) == "true"):
return redirect("/dashboard/")
#return self.render(self.template_path, user = session['username'])
else:
return self.render(self.error_path, message = result.get('message'))
class stopClusterView(normalView):
template_path = "dashboard.html"
error_path = "error.html"
@classmethod
def get(self):
masterip = self.masterip
data = {
"clustername": self.clustername
}
result = dockletRequest.post("/cluster/stop/", data, masterip)
if(result.get('success', None) == "true"):
return redirect("/dashboard/")
else:
return self.render(self.error_path, message = result.get('message'))
class flushClusterView(normalView):
success_path = "opsuccess.html"
failed_path = "opfailed.html"
@classmethod
def get(self):
data = {
"clustername": self.clustername,
"from_lxc": self.containername
}
result = dockletRequest.post("/cluster/flush/", data)
if(result):
if result.get('success') == "true":
return self.render(self.success_path, user = session['username'])
else:
return self.render(self.failed_path, user = session['username'])
else:
self.error()
class deleteClusterView(normalView):
template_path = "dashboard.html"
error_path = "error.html"
@classmethod
def get(self):
masterip = self.masterip
data = {
"clustername": self.clustername
}
result = dockletRequest.post("/cluster/delete/", data, masterip)
if(result.get('success', None) == "true"):
return redirect("/dashboard/")
else:
return self.render(self.error_path, message = result.get('message'))
class detailClusterView(normalView):
template_path = "listcontainer.html"
@classmethod
def get(self):
masterip = self.masterip
data = {
"clustername": self.clustername
}
result = dockletRequest.post("/cluster/info/", data, masterip)
if(result):
message = result.get('message')
containers = message['containers']
status = message['status']
return self.render(self.template_path, containers = containers, user = session['username'], clustername = self.clustername, status = status)
else:
self.error()
class saveImageView(normalView):
template_path = "saveconfirm.html"
success_path = "opsuccess.html"
error_path = "error.html"
@classmethod
def post(self):
masterip = self.masterip
data = {
"clustername": self.clustername,
"image": self.imagename,
"containername": self.containername,
"description": self.description,
"isforce": self.isforce
}
result = dockletRequest.post("/cluster/save/", data, masterip)
if(result):
if result.get('success') == 'true':
#return self.render(self.success_path, user = session['username'])
return redirect("/config/")
#res = detailClusterView()
#res.clustername = self.clustername
#return res.as_view()
else:
if result.get('reason') == "exists":
return self.render(self.template_path, containername = self.containername, clustername = self.clustername, image = self.imagename, user = session['username'], description = self.description, masterip=masterip)
else:
return self.render(self.error_path, message = result.get('message'))
else:
self.error()
class shareImageView(normalView):
template_path = "dashboard.html"
@classmethod
def get(self):
masterip = self.masterip
data = {
"image": self.image
}
result = dockletRequest.post("/image/share/", data, masterip)
if(result):
return redirect("/config/")
else:
self.error()
class unshareImageView(normalView):
template_path = "dashboard.html"
@classmethod
def get(self):
masterip = self.masterip
data = {
"image": self.image
}
result = dockletRequest.post("/image/unshare/", data, masterip)
if(result):
return redirect("/config/")
else:
self.error()
class copyImageView(normalView):
error_path = "error.html"
@classmethod
def post(self):
masterip = self.masterip
data = {
"image": self.image,
"target": self.target
}
result = dockletRequest.post("/image/copy/", data, masterip)
if result:
if result.get('success') == 'true':
return redirect("/config/")
else:
return self.render(self.error_path,message=result.get('message'))
else:
self.error()
class deleteImageView(normalView):
template_path = "dashboard.html"
@classmethod
def get(self):
masterip = self.masterip
data = {
"image": self.image
}
result = dockletRequest.post("/image/delete/", data, masterip)
if(result):
return redirect("/config/")
else:
self.error()
class addproxyView(normalView):
@classmethod
def post(self):
masterip = self.masterip
data = {
"clustername": self.clustername,
"ip": self.ip,
"port": self.port
}
result = dockletRequest.post("/addproxy/", data, masterip)
if(result):
return redirect("/config/")
else:
self.error()
class deleteproxyView(normalView):
@classmethod
def get(self):
masterip = self.masterip
data = {
"clustername":self.clustername
}
result = dockletRequest.post("/deleteproxy/", data, masterip)
if(result):
return redirect("/config/")
else:
self.error()
@classmethod
def post(self):
return self.get()
class configView(normalView):
@classmethod
def get(self):
masterips = dockletRequest.post_to_all()
allimages = dockletRequest.post_to_all('/image/list/')
for master in allimages:
allimages[master] = allimages[master].get('images')
allclusters = dockletRequest.post_to_all("/cluster/list/")
for master in allclusters:
allclusters[master] = allclusters[master].get('clusters')
allclusters_info = {}
clusters_info = {}
data={}
for master in allclusters:
allclusters_info[master] = {}
for cluster in allclusters[master]:
data["clustername"] = cluster
result = dockletRequest.post("/cluster/info/", data, master.split("@")[0]).get("message")
allclusters_info[master][cluster] = result
result = dockletRequest.post("/user/usageQuery/")
quota = result.get("quota")
usage = result.get("usage")
default = result.get("default")
restcpu = int(quota['cpu']) - int(usage['cpu'])
restmemory = int(quota['memory']) - int(usage['memory'])
restdisk = int(quota['disk']) - int(usage['disk'])
if restcpu >= int(default['cpu']):
defaultcpu = default['cpu']
elif restcpu <= 0:
defaultcpu = "0"
else:
defaultcpu = str(restcpu)
if restmemory >= int(default['memory']):
defaultmemory = default['memory']
elif restmemory <= 0:
defaultmemory = "0"
else:
defaultmemory = str(restmemory)
if restdisk >= int(default['disk']):
defaultdisk = default['disk']
elif restdisk <= 0:
defaultdisk = "0"
else:
defaultdisk = str(restdisk)
defaultsetting = {
'cpu': defaultcpu,
'memory': defaultmemory,
'disk': defaultdisk
}
return self.render("config.html", allimages = allimages, allclusters = allclusters_info, mysession=dict(session), quota = quota, usage = usage, defaultsetting = defaultsetting, masterips = masterips)
@classmethod
def post(self):
return self.get()
class addPortMappingView(normalView):
template_path = "error.html"
@classmethod
def post(self):
data = {"clustername":request.form["clustername"],"node_name":request.form["node_name"],"node_ip":request.form["node_ip"],"node_port":request.form["node_port"]}
result = dockletRequest.post('/port_mapping/add/',data, self.masterip)
success = result.get("success")
if success == "true":
return redirect("/config/")
else:
return self.render(self.template_path, message = result.get("message"))
@classmethod
def get(self):
return self.post()
class delPortMappingView(normalView):
template_path = "error.html"
@classmethod
def post(self):
data = {"clustername":self.clustername,"node_name":self.node_name,"node_port":self.node_port}
result = dockletRequest.post('/port_mapping/delete/',data, self.masterip)
success = result.get("success")
if success == "true":
return redirect("/config/")
else:
return self.render(self.template_path, message = result.get("message"))
@classmethod
def get(self):
return self.post()
| bsd-3-clause | -8,347,585,795,301,260,000 | 32.660131 | 229 | 0.57288 | false |
codeforamerica/westsac-urban-land-locator | farmsList/public/views.py | 1 | 5434 | # -*- coding: utf-8 -*-
'''Public section, including homepage and signup.'''
from flask import (Blueprint, request, render_template, flash, url_for,
redirect, session)
from flask_mail import Message
from flask.ext.login import login_user, login_required, logout_user
from farmsList.extensions import mail, login_manager
from farmsList.user.models import User
from farmsList.public.forms import LoginForm, ContactLandOwnerForm
from farmsList.public.models import Farmland
from farmsList.user.forms import RegisterForm
from farmsList.user.models import Email
from farmsList.utils import flash_errors
from farmsList.database import db
blueprint = Blueprint('public', __name__, static_folder="../static")
@login_manager.user_loader
def load_user(id):
return User.get_by_id(int(id))
@blueprint.route("/", methods=["GET", "POST"])
def home():
form = LoginForm(request.form)
# Handle logging in
if request.method == 'POST':
if form.validate_on_submit():
login_user(form.user)
flash("You are logged in.", 'success')
redirect_url = request.args.get("next") or url_for("user.members")
return redirect(redirect_url)
else:
flash_errors(form)
return render_template("public/home.html", form=form)
@blueprint.route('/logout/')
@login_required
def logout():
logout_user()
flash('You are logged out.', 'info')
return redirect(url_for('public.home'))
@blueprint.route("/register/", methods=['GET', 'POST'])
def register():
form = RegisterForm(request.form, csrf_enabled=False)
if form.validate_on_submit():
new_user = User.create(username=form.username.data,
email=form.email.data,
password=form.password.data,
active=True)
flash("Thank you for registering. You can now log in.", 'success')
return redirect(url_for('public.home'))
else:
flash_errors(form)
return render_template('public/register.html', form=form)
@blueprint.route("/contact-land-owner/<int:farmlandId>", methods=["GET", "POST"])
def contactLandOwner(farmlandId):
form = ContactLandOwnerForm(request.form)
farmland = Farmland.query.filter(Farmland.id == farmlandId).all()[0]
if form.validate_on_submit():
address = "Unknown" if farmland.address is None else farmland.address
mainBodyContent = ("<p style=\"margin-left: 50px;\">"
"<b>Name:</b> " + form.name.data + "<br>"
"<b>Email:</b> " + form.email.data + "<br>"
"<b>Phone:</b> " + form.phone.data + "<br>"
"</p>"
"<p style=\"margin-left: 50px;\">"
"<b>What is your past experience farming?</b><br>"
"" + form.experience.data + "</p>"
"<p><br>Thanks,<br>"
"Acres"
"</p>")
# msg = Message("Inquiry: " + address + " Property", recipients=["[email protected]")
msg = Message("Inquiry: " + address + " Property", recipients=[farmland.email])
msg.html = ("<html>"
"<body>"
"<p>Someone has contacted you about your " + address + " property:</p>"
"" + mainBodyContent + ""
"</body>"
"</html>")
mail.send(msg)
Email.create(sender=msg.sender,
recipients=",".join(msg.recipients),
body=msg.html)
msg = Message("Inquiry: " + address + " Property", recipients=[form.email.data])
msg.html = ("<html>"
"<body>"
"<p>Just a note that we sent your request for more information about the " + address + " property to " + farmland.ownerName + ":</p>"
"" + mainBodyContent + ""
"</body>"
"</html>")
mail.send(msg)
Email.create(sender=msg.sender,
recipients=",".join(msg.recipients),
body=msg.html)
flash("Thanks for your inquiry! We sent your email for more information about the property. " + farmland.ownerName + " will follow up with you shortly.", 'info')
return redirect(url_for('public.home'))
else:
flash_errors(form)
return render_template("public/contact-land-owner.html", form=form, farmland=farmland)
@blueprint.route("/farmland-details/<int:farmlandId>")
def farmlandDetails(farmlandId):
return render_template("public/farmland-details.html")
@blueprint.route("/farmland-approval/<int:farmlandId>")
def farmlandApproval(farmlandId):
return render_template("public/farmland-approval.html")
@blueprint.route("/find-land/")
def find_land():
form = LoginForm(request.form)
# Handle logging in
if request.method == 'POST':
if form.validate_on_submit():
login_user(form.user)
flash("You are logged in.", 'success')
redirect_url = request.args.get("next") or url_for("user.members")
return redirect(redirect_url)
else:
flash_errors(form)
return render_template("public/find_land.html", form=form)
| bsd-3-clause | -4,999,825,131,017,609,000 | 42.822581 | 169 | 0.573979 | false |
basicthinker/Sexain-MemController | gem5-stable/src/mem/SimpleMemory.py | 1 | 3222 | # Copyright (c) 2012-2013 ARM Limited
# All rights reserved.
#
# The license below extends only to copyright in the software and shall
# not be construed as granting a license to any other intellectual
# property including but not limited to intellectual property relating
# to a hardware implementation of the functionality of the software
# licensed hereunder. You may use the software subject to the license
# terms below provided that you ensure that this notice is replicated
# unmodified and in its entirety in all distributions of the software,
# modified or unmodified, in source code or in binary form.
#
# Copyright (c) 2005-2008 The Regents of The University of Michigan
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Nathan Binkert
# Andreas Hansson
from m5.params import *
from AbstractMemory import *
class SimpleMemory(AbstractMemory):
type = 'SimpleMemory'
cxx_header = "mem/simple_mem.hh"
port = SlavePort("Slave ports")
latency = Param.Latency('40ns', "Latency on row buffer hit")
latency_miss = Param.Latency('80ns', "Latency on row buffer miss")
latency_var = Param.Latency('0ns', "Request to response latency variance")
# The memory bandwidth limit default is set to 12.8GB/s which is
# representative of a x64 DDR3-1600 channel.
bandwidth = Param.MemoryBandwidth('12.8GB/s',
"Combined read and write bandwidth")
lat_att_operate = Param.Latency('3ns', "ATT operation latency")
lat_buffer_operate = Param.Latency('3ns',
"Version buffer operation latency")
lat_nvm_read = Param.Latency('128ns', "NVM read latency")
lat_nvm_write = Param.Latency('368ns', "NVM write latency")
disable_timing = Param.Bool(True, "If THNVM is not timed")
| apache-2.0 | 5,142,405,133,285,989,000 | 50.967742 | 78 | 0.753569 | false |
ubc/compair | alembic/versions/316f3b73962c_modified_criteria_tables.py | 1 | 2136 | """modified criteria tables
Revision ID: 316f3b73962c
Revises: 2fe3d8183c34
Create Date: 2014-09-10 15:42:55.963855
"""
# revision identifiers, used by Alembic.
revision = '316f3b73962c'
down_revision = '2fe3d8183c34'
import logging
from alembic import op
import sqlalchemy as sa
from sqlalchemy import UniqueConstraint, exc
from sqlalchemy.sql import text
from compair.models import convention
def upgrade():
try:
with op.batch_alter_table('Criteria', naming_convention=convention,
table_args=(UniqueConstraint('name'))) as batch_op:
batch_op.drop_constraint('uq_Criteria_name', type_='unique')
except exc.InternalError:
with op.batch_alter_table('Criteria', naming_convention=convention,
table_args=(UniqueConstraint('name'))) as batch_op:
batch_op.drop_constraint('name', type_='unique')
except ValueError:
        logging.warning('Dropping a unique constraint is not supported for SQLite, dropping uq_Criteria_name ignored!')
# set existing criteria's active attribute to True using server_default
with op.batch_alter_table('CriteriaAndCourses', naming_convention=convention) as batch_op:
batch_op.add_column(sa.Column('active', sa.Boolean(), default=True, server_default='1', nullable=False))
with op.batch_alter_table('Criteria', naming_convention=convention) as batch_op:
batch_op.add_column(sa.Column('public', sa.Boolean(), default=False, server_default='0', nullable=False))
# set the first criteria as public
t = {"name": "Which is better?", "public": True}
op.get_bind().execute(text("Update Criteria set public=:public where name=:name"), **t)
def downgrade():
with op.batch_alter_table('Criteria', naming_convention=convention,
table_args=(UniqueConstraint('name'))) as batch_op:
batch_op.create_unique_constraint('uq_Criteria_name', ['name'])
batch_op.drop_column('public')
with op.batch_alter_table('CriteriaAndCourses', naming_convention=convention) as batch_op:
batch_op.drop_column('active')
| gpl-3.0 | -3,058,094,704,712,623,600 | 40.882353 | 113 | 0.684925 | false |
BT-jmichaud/l10n-switzerland | l10n_ch_payment_slip/tests/test_payment_slip.py | 1 | 9506 | # -*- coding: utf-8 -*-
# © 2014-2016 Camptocamp SA
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
import time
import re
import odoo.tests.common as test_common
from odoo.report import render_report
class TestPaymentSlip(test_common.TransactionCase):
_compile_get_ref = re.compile(r'[^0-9]')
def make_bank(self):
company = self.env.ref('base.main_company')
self.assertTrue(company)
partner = self.env.ref('base.main_partner')
self.assertTrue(partner)
bank = self.env['res.bank'].create(
{
'name': 'BCV',
'ccp': '01-1234-1',
'bic': '23452345',
'clearing': '234234',
}
)
bank_account = self.env['res.partner.bank'].create(
{
'partner_id': partner.id,
'bank_id': bank.id,
'bank_bic': bank.bic,
'acc_number': '01-1234-1',
'bvr_adherent_num': '1234567',
'print_bank': True,
'print_account': True,
'print_partner': True,
}
)
bank_account.onchange_acc_number_set_swiss_bank()
self.assertEqual(bank_account.ccp, '01-1234-1')
return bank_account
def make_invoice(self):
if not hasattr(self, 'bank_account'):
self.bank_account = self.make_bank()
account_model = self.env['account.account']
account_debtor = account_model.search([('code', '=', '1100')])
if not account_debtor:
account_debtor = account_model.create({
'code': 1100,
'name': 'Debitors',
'user_type_id':
self.env.ref('account.data_account_type_receivable').id,
'reconcile': True,
})
account_sale = account_model.search([('code', '=', '3200')])
if not account_sale:
account_sale = account_model.create({
'code': 3200,
'name': 'Goods sales',
'user_type_id':
self.env.ref('account.data_account_type_revenue').id,
'reconcile': False,
})
invoice = self.env['account.invoice'].create({
'partner_id': self.env.ref('base.res_partner_12').id,
'reference_type': 'none',
'name': 'A customer invoice',
'account_id': account_debtor.id,
'type': 'out_invoice',
'partner_bank_id': self.bank_account.id
})
self.env['account.invoice.line'].create({
'account_id': account_sale.id,
'product_id': False,
'quantity': 1,
'price_unit': 862.50,
'invoice_id': invoice.id,
'name': 'product that cost 862.50 all tax included',
})
invoice.action_invoice_open()
# waiting for the cache to refresh
attempt = 0
while not invoice.move_id:
invoice.refresh()
time.sleep(0.1)
attempt += 1
if attempt > 20:
break
return invoice
def test_invoice_confirmation(self):
"""Test that confirming an invoice generate slips correctly"""
invoice = self.make_invoice()
self.assertTrue(invoice.move_id)
for line in invoice.move_id.line_ids:
if line.account_id.user_type_id.type in ('payable', 'receivable'):
self.assertTrue(line.transaction_ref)
else:
self.assertFalse(line.transaction_ref)
for line in invoice.move_id.line_ids:
slip = self.env['l10n_ch.payment_slip'].search(
[('move_line_id', '=', line.id)]
)
if line.account_id.user_type_id.type in ('payable', 'receivable'):
self.assertTrue(slip)
self.assertEqual(slip.amount_total, 862.50)
self.assertEqual(slip.invoice_id.id, invoice.id)
else:
self.assertFalse(slip)
def test_slip_validity(self):
"""Test that confirming slip are valid"""
invoice = self.make_invoice()
self.assertTrue(invoice.move_id)
for line in invoice.move_id.line_ids:
slip = self.env['l10n_ch.payment_slip'].search(
[('move_line_id', '=', line.id)]
)
if line.account_id.user_type_id.type in ('payable', 'receivable'):
self.assertTrue(slip.reference)
self.assertTrue(slip.scan_line)
self.assertTrue(slip.slip_image)
self.assertTrue(slip.a4_pdf)
inv_num = line.invoice_id.number
line_ident = self._compile_get_ref.sub(
'', "%s%s" % (inv_num, line.id)
)
self.assertIn(line_ident, slip.reference.replace(' ', ''))
def test_print_report(self):
invoice = self.make_invoice()
data, format = render_report(
self.env.cr,
self.env.uid,
[invoice.id],
'l10n_ch_payment_slip.one_slip_per_page_from_invoice',
{},
context={'force_pdf': True},
)
self.assertTrue(data)
self.assertEqual(format, 'pdf')
def test_print_multi_report_merge_in_memory(self):
# default value as in memory
self.assertEqual(self.env.user.company_id.merge_mode, 'in_memory')
invoice1 = self.make_invoice()
invoice2 = self.make_invoice()
data, format = render_report(
self.env.cr,
self.env.uid,
[invoice1.id, invoice2.id],
'l10n_ch_payment_slip.one_slip_per_page_from_invoice',
{},
context={'force_pdf': True},
)
self.assertTrue(data)
self.assertEqual(format, 'pdf')
def test_print_multi_report_merge_on_disk(self):
self.env.user.company_id.merge_mode = 'on_disk'
invoice1 = self.make_invoice()
invoice2 = self.make_invoice()
data, format = render_report(
self.env.cr,
self.env.uid,
[invoice1.id, invoice2.id],
'l10n_ch_payment_slip.one_slip_per_page_from_invoice',
{},
context={'force_pdf': True},
)
self.assertTrue(data)
self.assertEqual(format, 'pdf')
def test_address_format(self):
invoice = self.make_invoice()
self.assertTrue(invoice.move_id)
line = invoice.move_id.line_ids[0]
slip = self.env['l10n_ch.payment_slip'].search(
[('move_line_id', '=', line.id)]
)
com_partner = slip.get_comm_partner()
address_lines = slip._get_address_lines(com_partner)
self.assertEqual(
address_lines,
[u'93, Press Avenue', u'', u'73377 Le Bourget du Lac']
)
def test_address_format_no_country(self):
invoice = self.make_invoice()
self.assertTrue(invoice.move_id)
line = invoice.move_id.line_ids[0]
slip = self.env['l10n_ch.payment_slip'].search(
[('move_line_id', '=', line.id)]
)
com_partner = slip.get_comm_partner()
com_partner.country_id = False
address_lines = slip._get_address_lines(com_partner)
self.assertEqual(
address_lines,
[u'93, Press Avenue', u'', u'73377 Le Bourget du Lac']
)
def test_address_format_special_format(self):
""" Test special formating without street2 """
ICP = self.env['ir.config_parameter']
ICP.set_param(
'bvr.address.format',
"%(street)s\n%(zip)s %(city)s"
)
invoice = self.make_invoice()
self.assertTrue(invoice.move_id)
line = invoice.move_id.line_ids[0]
slip = self.env['l10n_ch.payment_slip'].search(
[('move_line_id', '=', line.id)]
)
com_partner = slip.get_comm_partner()
com_partner.country_id = False
address_lines = slip._get_address_lines(com_partner)
self.assertEqual(
address_lines,
[u'93, Press Avenue', u'73377 Le Bourget du Lac']
)
def test_address_length(self):
invoice = self.make_invoice()
self.assertTrue(invoice.move_id)
line = invoice.move_id.line_ids[0]
slip = self.env['l10n_ch.payment_slip'].search(
[('move_line_id', '=', line.id)]
)
com_partner = slip.get_comm_partner()
address_lines = slip._get_address_lines(com_partner)
f_size = 11
len_tests = [
(15, (11, None)),
(23, (11, None)),
(26, (10, None)),
(27, (10, None)),
(30, (9, None)),
(32, (8, 34)),
(34, (8, 34)),
(40, (8, 34))]
for text_len, result in len_tests:
com_partner.name = 'x' * text_len
res = slip._get_address_font_size(
f_size, address_lines, com_partner)
self.assertEqual(res, result, "Wrong result for len %s" % text_len)
def test_print_bvr(self):
invoice = self.make_invoice()
bvr = invoice.print_bvr()
self.assertEqual(bvr['report_name'],
'l10n_ch_payment_slip.one_slip_per_page_from_invoice')
self.assertEqual(bvr['report_file'],
'l10n_ch_payment_slip.one_slip_per_page')
| agpl-3.0 | 7,121,606,143,357,714,000 | 35.417625 | 79 | 0.523935 | false |
ncphillips/django_rpg | rpg_base/models/encounter.py | 1 | 1907 | from django.db import models
class EncounterManager(models.Manager):
def enemy_npcs(self):
pass
def friendly_npcs(self):
pass
def players(self):
return super(EncounterManager, self).get_queryset().filter(character__player_owned=True)
class Encounter(models.Model):
name = models.CharField(max_length=75)
campaign = models.ForeignKey("Campaign")
is_running = models.BooleanField(default=False)
round = models.PositiveIntegerField(default=0)
objects = EncounterManager()
class Meta:
app_label = "rpg_base"
def __unicode__(self):
return self.name
def start(self):
"""
Sets `is_running` to True, and initiative and NPCs.
"""
for row in self.charactertemplateinencounter_set.all():
num = row.num
template = row.character_template
encounter = row.encounter
characters = template.create_characters(encounter.campaign, num=num)
for character in characters:
CharacterInEncounter.objects.create(character=character,
encounter=encounter,
hp_current=character.hp,
initiative=0)
# TODO Roll everyone's initiative.
self.is_running = True
self.save()
def end(self):
# Sum experience from enemy NPCs
# Split experience amongst players
self.is_running = False
self.save()
class CharacterInEncounter(models.Model):
"""
Characters have a rolled Initiative specific to an encounter, as well as
Hit Points.
"""
character = models.ForeignKey("Character")
encounter = models.ForeignKey(Encounter)
hp_current = models.IntegerField()
    initiative = models.PositiveIntegerField() | mit | -1,455,354,487,229,714,200 | 27.477612 | 96 | 0.598846 | false |
dbbhattacharya/kitsune | kitsune/search/tests/test_json.py | 1 | 2106 | from nose.tools import eq_
from kitsune.search.tests.test_es import ElasticTestCase
from kitsune.sumo.urlresolvers import reverse
from kitsune.sumo.tests import LocalizingClient
class JSONTest(ElasticTestCase):
client_class = LocalizingClient
def test_json_format(self):
"""JSON without callback should return application/json"""
response = self.client.get(reverse('search'), {
'q': 'bookmarks',
'format': 'json',
})
eq_(response['Content-Type'], 'application/json')
def test_json_callback_validation(self):
"""Various json callbacks -- validation"""
q = 'bookmarks'
format = 'json'
callbacks = (
('callback', 200),
('validCallback', 200),
('obj.method', 200),
('obj.someMethod', 200),
('arr[1]', 200),
('arr[12]', 200),
("alert('xss');foo", 400),
("eval('nastycode')", 400),
("someFunc()", 400),
('x', 200),
('x123', 200),
('$', 200),
('_func', 200),
('"></script><script>alert(\'xss\')</script>', 400),
('">', 400),
('var x=something;foo', 400),
('var x=', 400),
)
for callback, status in callbacks:
response = self.client.get(reverse('search'), {
'q': q,
'format': format,
'callback': callback,
})
eq_(response['Content-Type'], 'application/x-javascript')
eq_(response.status_code, status,
'callback "{0}": expected {1} got {2}'
.format(callback, status, response.status_code))
def test_json_empty_query(self):
"""Empty query returns JSON format"""
# Test with flags for advanced search or not
a_types = (0, 1, 2)
for a in a_types:
response = self.client.get(reverse('search'), {
'format': 'json', 'a': a,
})
eq_(response['Content-Type'], 'application/json')
| bsd-3-clause | 2,101,940,854,229,355,800 | 32.428571 | 69 | 0.501899 | false |
SymbiFlow/edalize | edalize/trellis.py | 1 | 3499 | # Copyright edalize contributors
# Licensed under the 2-Clause BSD License, see LICENSE for details.
# SPDX-License-Identifier: BSD-2-Clause
import os.path
from edalize.edatool import Edatool
from edalize.yosys import Yosys
from importlib import import_module
class Trellis(Edatool):
argtypes = ['vlogdefine', 'vlogparam']
@classmethod
def get_doc(cls, api_ver):
if api_ver == 0:
yosys_help = Yosys.get_doc(api_ver)
trellis_help = {
'lists' : [
{'name' : 'nextpnr_options',
'type' : 'String',
'desc' : 'Additional options for nextpnr'},
{'name' : 'yosys_synth_options',
'type' : 'String',
'desc' : 'Additional options for the synth_ecp5 command'},
]}
combined_members = []
combined_lists = trellis_help['lists']
yosys_members = yosys_help['members']
yosys_lists = yosys_help['lists']
combined_members.extend(m for m in yosys_members if m['name'] not in [i['name'] for i in combined_members])
combined_lists.extend(l for l in yosys_lists if l['name'] not in [i['name'] for i in combined_lists])
return {'description' : "Project Trellis enables a fully open-source flow for ECP5 FPGAs using Yosys for Verilog synthesis and nextpnr for place and route",
'members' : combined_members,
'lists' : combined_lists}
def configure_main(self):
# Write yosys script file
(src_files, incdirs) = self._get_fileset_files()
yosys_synth_options = self.tool_options.get('yosys_synth_options', [])
yosys_synth_options = ["-nomux"] + yosys_synth_options
yosys_edam = {
'files' : self.files,
'name' : self.name,
'toplevel' : self.toplevel,
'parameters' : self.parameters,
'tool_options' : {'yosys' : {
'arch' : 'ecp5',
'yosys_synth_options' : yosys_synth_options,
'yosys_as_subtool' : True,
}
}
}
yosys = getattr(import_module("edalize.yosys"), 'Yosys')(yosys_edam, self.work_root)
yosys.configure()
lpf_files = []
for f in src_files:
if f.file_type == 'LPF':
lpf_files.append(f.name)
elif f.file_type == 'user':
pass
if not lpf_files:
lpf_files = ['empty.lpf']
with open(os.path.join(self.work_root, lpf_files[0]), 'a'):
os.utime(os.path.join(self.work_root, lpf_files[0]), None)
elif len(lpf_files) > 1:
raise RuntimeError("trellis backend supports only one LPF file. Found {}".format(', '.join(lpf_files)))
# Write Makefile
nextpnr_options = self.tool_options.get('nextpnr_options', [])
template_vars = {
'name' : self.name,
'lpf_file' : lpf_files[0],
'nextpnr_options' : nextpnr_options,
}
self.render_template('trellis-makefile.j2',
'Makefile',
template_vars)
| bsd-2-clause | 4,870,784,944,533,456,000 | 40.164706 | 168 | 0.497285 | false |
markiskander/biomaj | biomaj/user.py | 1 | 7346 | from builtins import str
from builtins import object
import bcrypt
import logging
from biomaj.mongo_connector import MongoConnector
from biomaj.config import BiomajConfig
class BmajUser(object):
'''
Biomaj User
'''
def __init__(self, user):
if MongoConnector.db is None:
MongoConnector(BiomajConfig.global_config.get('GENERAL','db.url'),
BiomajConfig.global_config.get('GENERAL','db.name'))
self.users = MongoConnector.users
self.id = user
self.user = self.users.find_one({'id': user})
ldap_server = None
con = None
if not self.user and BiomajConfig.global_config.get('GENERAL','use_ldap') == '1':
# Check if in ldap
#import ldap
from ldap3 import Server, Connection, AUTH_SIMPLE, STRATEGY_SYNC, STRATEGY_ASYNC_THREADED, SEARCH_SCOPE_WHOLE_SUBTREE, GET_ALL_INFO
try:
ldap_host = BiomajConfig.global_config.get('GENERAL','ldap.host')
ldap_port = BiomajConfig.global_config.get('GENERAL','ldap.port')
#con = ldap.initialize('ldap://' + ldap_host + ':' + str(ldap_port))
ldap_server = Server(ldap_host, port = int(ldap_port), get_info = GET_ALL_INFO)
con = Connection(ldap_server, auto_bind = True, client_strategy = STRATEGY_SYNC, check_names=True)
except Exception as err:
logging.error(str(err))
self.user = None
ldap_dn = BiomajConfig.global_config.get('GENERAL','ldap.dn')
base_dn = 'ou=People,' + ldap_dn
filter = "(&(|(uid=" + user + ")(mail=" + user + ")))"
try:
#con.simple_bind_s()
attrs = ['mail']
#results = con.search_s(base_dn, ldap.SCOPE_SUBTREE, filter, attrs)
con.search(base_dn,filter, SEARCH_SCOPE_WHOLE_SUBTREE, attributes=attrs)
if con.response:
ldapMail = None
#for dn, entry in results:
for r in con.response:
user_dn = str(r['dn'])
#if 'mail' not in entry:
if 'mail' not in r['attributes']:
logging.error('Mail not set for user '+user)
else:
#ldapMail = entry['mail'][0]
ldapMail = r['attributes']['mail'][0]
self.user = {
'id' : user,
'email': ldapMail,
'is_ldap': True
}
self.user['_id'] = self.users.insert(self.user)
else:
self.user = None
except Exception as err:
logging.error(str(err))
if con:
con.unbind()
@staticmethod
def user_remove(user_name):
'''
Remove a user from db
:param user_name: user name
:type user_name: str
'''
MongoConnector.users.remove({'id': user_name})
@staticmethod
def user_banks(user_name):
'''
Get user banks name
:param user_name: user identifier
:type user_name: str
:return: list of bank name
'''
banks = MongoConnector.banks.find({'properties.owner': user_name}, {'name':1})
return banks
@staticmethod
def list():
'''
Get users
'''
return MongoConnector.users.find()
def check_password(self, password):
if self.user is None:
return False
if self.user['is_ldap']:
#import ldap
con = None
ldap_server = None
#try:
# ldap_host = BiomajConfig.global_config.get('GENERAL','ldap.host')
# ldap_port = BiomajConfig.global_config.get('GENERAL','ldap.port')
# con = ldap.initialize('ldap://' + ldap_host + ':' + str(ldap_port))
from ldap3 import Server, Connection, AUTH_SIMPLE, STRATEGY_SYNC, STRATEGY_ASYNC_THREADED, SEARCH_SCOPE_WHOLE_SUBTREE, GET_ALL_INFO
from ldap3.core.exceptions import LDAPBindError
try:
ldap_host = BiomajConfig.global_config.get('GENERAL','ldap.host')
ldap_port = BiomajConfig.global_config.get('GENERAL','ldap.port')
#con = ldap.initialize('ldap://' + ldap_host + ':' + str(ldap_port))
ldap_server = Server(ldap_host, port = int(ldap_port), get_info = GET_ALL_INFO)
con = Connection(ldap_server, auto_bind = True, client_strategy = STRATEGY_SYNC, check_names=True)
except Exception as err:
logging.error(str(err))
return False
ldap_dn = BiomajConfig.global_config.get('GENERAL','ldap.dn')
base_dn = 'ou=People,' + ldap_dn
filter = "(&(|(uid=" + self.user['id'] + ")(mail=" + self.user['id'] + ")))"
#try:
# con.simple_bind_s()
#except Exception as err:
# logging.error(str(err))
# return False
try:
attrs = ['mail']
con.search(base_dn,filter, SEARCH_SCOPE_WHOLE_SUBTREE, attributes = attrs)
#results = con.search_s(base_dn, ldap.SCOPE_SUBTREE, filter, attrs)
user_dn = None
ldapMail = None
ldapHomeDirectory = None
for r in con.response:
user_dn = str(r['dn'])
ldapMail = r['attributes']['mail'][0]
#for dn, entry in results:
# user_dn = str(dn)
# ldapMail = entry['mail'][0]
con.unbind()
con = Connection(ldap_server, auto_bind = True, read_only=True, client_strategy = STRATEGY_SYNC, user=user_dn, password=password, authentication=AUTH_SIMPLE, check_names=True)
con.unbind()
#con.simple_bind_s(user_dn, password)
#con.unbind_s()
if user_dn:
return True
except LDAPBindError as err:
logging.error('Bind error: '+str(err))
return False
except Exception as err:
logging.error('Bind error: '+str(err))
return False
else:
hashed = bcrypt.hashpw(password, self.user['hashed_password'])
if hashed == self.user['hashed_password']:
return True
else:
return False
def remove(self):
if self.user is None:
return False
self.users.remove({'_id': self.user['_id']})
return True
def create(self, password, email=''):
'''
Create a new user
'''
hashed = bcrypt.hashpw(password, bcrypt.gensalt())
if self.user is None:
self.user = {
'id' : self.id,
'hashed_password': hashed,
'email': email,
'is_ldap': False
}
self.user['_id'] = self.users.insert(self.user)
| agpl-3.0 | 3,449,686,806,786,648,600 | 39.142077 | 191 | 0.498366 | false |
dietrichc/streamline-ppc-reports | examples/dfp/v201405/creative_service/get_creatives_by_statement.py | 1 | 2307 | #!/usr/bin/python
#
# Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This code example gets all image creatives.
To create an image creative, run create_creatives.py.
The LoadFromStorage method is pulling credentials and properties from a
"googleads.yaml" file. By default, it looks for this file in your home
directory. For more information, see the "Caching authentication information"
section of our README.
Tags: CreativeService.getCreativesByStatement
"""
__author__ = ('Nicholas Chen',
'Joseph DiLallo')
# Import appropriate modules from the client library.
from googleads import dfp
def main(client):
# Initialize appropriate service.
creative_service = client.GetService('CreativeService', version='v201405')
# Create statement object to only select image creatives.
values = [{
'key': 'creativeType',
'value': {
'xsi_type': 'TextValue',
'value': 'ImageCreative'
}
}]
query = 'WHERE creativeType = :creativeType'
statement = dfp.FilterStatement(query, values)
# Get creatives by statement.
while True:
response = creative_service.getCreativesByStatement(
statement.ToStatement())
creatives = response['results']
if creatives:
# Display results.
for creative in creatives:
print ('Creative with id \'%s\', name \'%s\', and type \'%s\' was '
'found.' % (creative['id'], creative['name'],
creative['Creative.Type']))
statement.offset += dfp.SUGGESTED_PAGE_LIMIT
else:
break
print '\nNumber of results found: %s' % response['totalResultSetSize']
if __name__ == '__main__':
# Initialize client object.
dfp_client = dfp.DfpClient.LoadFromStorage()
main(dfp_client)
| apache-2.0 | 8,719,420,815,159,008,000 | 31.041667 | 77 | 0.691374 | false |
sssundar/Drone | rotation/viz.py | 1 | 5332 | # Python script to visualize rotation about a non-body axis.
# Let the lab frame be the inertial frame S.
# Let the origin of the rigid body be O, in the inertial frame S'.
# Let r_ss' be the vector from S to S'.
# Let the body frame relative to O be S''.
# Consider a fixed point on the body, r_s' in S', and r_s'' in S''.
# Assume the body is subject to zero external torques.
# It must be rotating about a fixed axis, n, by Euler's rotation theorem.
# It must have a constant angular velocity about that axis by d/dt L = sum(T_external) = 0 and L = Jw about the rotation axis.
# Let R be the rotation matrix mapping a vector in S'' to S', with inverse R^T
# We know r_s' = R r_s''
# We know d/dt r_s' = (dR/dt R^T) * (R r_s'') = (dR/dt R^T) r_s'
# Therefore we expect (dR/dt R^T) to be the operator (w x) in the S' frame.
# The goal of this script is to visualize this.
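# Sanity check (illustrative note, not from the original derivation): for a pure
# yaw at rate dphi (rotation about the lab z axis), dR/dt R^T reduces to
#   [[ 0,    -dphi, 0 ],
#    [ dphi,  0,    0 ],
#    [ 0,     0,    0 ]]
# which is exactly the cross-product operator (w x) with w = (0, 0, dphi).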
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
import sys
import numpy as np
from numpy import pi as pi
from numpy import cos as c
from numpy import sin as s
from numpy import dot as dot
from numpy import transpose as transpose
# The axis phi is a rotation about the z axis in the body frame (yaw)
# The axis theta is a rotation about the y axis in the phi-rotated body frame (pitch)
# The axis psi is a rotation about the x axis in the phi, theta-rotated body frame (roll)
def R(phi, theta, psi):
R = np.zeros((3,3))
R[0,0] = c(phi)*c(theta)
R[1,0] = s(phi)*c(theta)
R[2,0] = -s(theta)
R[0,1] = -s(phi)*c(psi) + c(phi)*s(theta)*s(psi)
R[1,1] = c(phi)*c(psi) + s(phi)*s(theta)*s(psi)
R[2,1] = c(theta)*s(psi)
R[0,2] = s(phi)*s(psi) + c(phi)*s(theta)*c(psi)
R[1,2] = -c(phi)*s(psi) + s(phi)*s(theta)*c(psi)
R[2,2] = c(theta)*c(psi)
return R
# Rotate z-axis (0,0,1) by pi radians about x-axis. Should end up at (0,0,-1) cutting across y.
# Rotate (0,0,-1) by pi radians about y-axis. Should end up at (0,0,1) again, cutting across x.
# Try both at the same time. Should still end up at (0,0,1).
def test_R():
e3_spp = np.array((0,0,1))
vectors = []
for k in np.linspace(0,pi,100):
vectors.append(dot(R(0,0,k), e3_spp))
e3_spp = vectors[-1]
for k in np.linspace(0,pi,100):
vectors.append(dot(R(0,k,0), e3_spp))
e3_spp = vectors[-1]
for k in np.linspace(0,pi,100):
vectors.append(dot(R(0,k,k), e3_spp))
xs = [k[0] for k in vectors]
ys = [k[1] for k in vectors]
zs = [k[2] for k in vectors]
fig = plt.figure()
ax = fig.add_subplot(111, projection='3d')
ax.plot(xs=xs,ys=ys,zs=zs)
ax.set_xlabel("x")
ax.set_ylabel("y")
ax.set_zlabel("z")
plt.show()
# Sets values lower than epsilon to zero.
# Prints the result with precision 0.3f.
def sanitize_matrix(A):
print ""
epsilon = 0.001
for r in xrange(3):
text = ""
for c in xrange(3):
if abs(A[r, c]) < epsilon:
A[r,c] = 0
text += "%6.2f,\t" % A[r,c]
print text[:-2]
print ""
def sanitize_vector(a):
print ""
epsilon = 0.001
text = ""
for r in xrange(3):
if abs(a[r]) < epsilon:
a[r] = 0
text += "%6.2f,\t" % a[r]
print text[:-2]
print ""
def vectorize(W):
v = np.zeros(3)
v[0] = W[1,0]
v[1] = W[0,2]
v[2] = W[2,1]
return v
# This is the (w x) operator, W, with respect to changing body yaw, pitch, and roll.
# It is dR/dt R^T. The arguments are the current Euler angles and their time derivatives.
def W(phi, theta, psi, dphi, dtheta, dpsi):
Rp = np.zeros((3,3))
Rp[0,0] = (-s(phi)*dphi)*c(theta)
Rp[0,0] += c(phi)*(-s(theta)*dtheta)
Rp[1,0] = (c(phi)*dphi)*c(theta)
Rp[1,0] += s(phi)*(-s(theta)*dtheta)
Rp[2,0] = -c(theta)*dtheta
Rp[0,1] = (-c(phi)*dphi)*c(psi)
Rp[0,1] += -s(phi)*(-s(psi)*dpsi)
Rp[0,1] += (-s(phi)*dphi)*s(theta)*s(psi)
Rp[0,1] += c(phi)*(c(theta)*dtheta)*s(psi)
Rp[0,1] += c(phi)*s(theta)*(c(psi)*dpsi)
Rp[1,1] = (-s(phi)*dphi)*c(psi)
Rp[1,1] += c(phi)*(-s(psi)*dpsi)
Rp[1,1] += (c(phi)*dphi)*s(theta)*s(psi)
Rp[1,1] += s(phi)*(c(theta)*dtheta)*s(psi)
Rp[1,1] += s(phi)*s(theta)*(c(psi)*dpsi)
Rp[2,1] = (-s(theta)*dtheta)*s(psi)
Rp[2,1] += c(theta)*(c(psi)*dpsi)
Rp[0,2] = (c(phi)*dphi)*s(psi)
Rp[0,2] += s(phi)*(c(psi)*dpsi)
Rp[0,2] += (-s(phi)*dphi)*s(theta)*c(psi)
Rp[0,2] += c(phi)*(c(theta)*dtheta)*c(psi)
Rp[0,2] += c(phi)*s(theta)*(-s(psi)*dpsi)
Rp[1,2] = (s(phi)*dphi)*s(psi)
Rp[1,2] += -c(phi)*(c(psi)*dpsi)
Rp[1,2] += (c(phi)*dphi)*s(theta)*c(psi)
Rp[1,2] += s(phi)*(c(theta)*dtheta)*c(psi)
Rp[1,2] += s(phi)*s(theta)*(-s(psi)*dpsi)
Rp[2,2] = (-s(theta)*dtheta)*c(psi)
Rp[2,2] += c(theta)*(-s(psi)*dpsi)
w_i = vectorize(dot(Rp, transpose(R(phi,theta,psi))))
w_b = dot(transpose(R(phi,theta,psi)), w_i)
return (w_i, w_b)
def test_W():
    # Is the effective w for a rotation of x rad/s about the axis ek simply ek*x,
    # regardless of the current angle about ek? We expect W = -W^T as well.
# sanitize_matrix(W(3*pi/12,0,0,2*pi,0,0)[0])
# sanitize_matrix(W(0,3*pi/12,0,0,2*pi,0)[0])
# sanitize_matrix(W(0,0,3*pi/12,0,0,2*pi)[0])
# Let's see what it looks like once we've rotated a bit.
    # It's still skew-symmetric with zero trace! This looks like the operation (w x)!!!!
phi, theta, psi = (pi/4, 3*pi/12, -pi)
w_i, w_b = W(phi, theta, psi, pi, 2*pi, 3*pi)
def Main():
test_W()
if __name__ == "__main__":
Main()
| gpl-3.0 | 8,605,246,386,922,294,000 | 29.295455 | 126 | 0.597524 | false |
mozilla/normandy | normandy/recipes/tests/test_checks.py | 1 | 4355 | from datetime import timedelta
from django.db.utils import ProgrammingError
import pytest
import requests.exceptions
from normandy.recipes import checks, signing
from normandy.recipes.tests import ActionFactory, RecipeFactory, SignatureFactory, UserFactory
@pytest.mark.django_db
class TestSignaturesUseGoodCertificates(object):
def test_it_works(self):
assert checks.signatures_use_good_certificates(None) == []
def test_it_fails_if_a_signature_does_not_verify(self, mocker, settings):
settings.CERTIFICATES_EXPIRE_EARLY_DAYS = None
recipe = RecipeFactory(approver=UserFactory(), signed=True)
mock_verify_x5u = mocker.patch("normandy.recipes.checks.signing.verify_x5u")
mock_verify_x5u.side_effect = signing.BadCertificate("testing exception")
errors = checks.signatures_use_good_certificates(None)
mock_verify_x5u.assert_called_once_with(recipe.signature.x5u, None)
assert len(errors) == 1
assert errors[0].id == checks.ERROR_BAD_SIGNING_CERTIFICATE
assert recipe.approved_revision.name in errors[0].msg
def test_it_ignores_signatures_without_x5u(self):
recipe = RecipeFactory(approver=UserFactory(), signed=True)
recipe.signature.x5u = None
recipe.signature.save()
actions = ActionFactory(signed=True)
actions.signature.x5u = None
actions.signature.save()
assert checks.signatures_use_good_certificates(None) == []
def test_it_ignores_signatures_not_in_use(self, mocker, settings):
settings.CERTIFICATES_EXPIRE_EARLY_DAYS = None
recipe = RecipeFactory(approver=UserFactory(), signed=True)
SignatureFactory(x5u="https://example.com/bad_x5u") # unused signature
mock_verify_x5u = mocker.patch("normandy.recipes.checks.signing.verify_x5u")
def side_effect(x5u, *args):
if "bad" in x5u:
raise signing.BadCertificate("testing exception")
return True
mock_verify_x5u.side_effect = side_effect
errors = checks.signatures_use_good_certificates(None)
mock_verify_x5u.assert_called_once_with(recipe.signature.x5u, None)
assert errors == []
def test_it_passes_expire_early_setting(self, mocker, settings):
settings.CERTIFICATES_EXPIRE_EARLY_DAYS = 7
recipe = RecipeFactory(approver=UserFactory(), signed=True)
mock_verify_x5u = mocker.patch("normandy.recipes.checks.signing.verify_x5u")
errors = checks.signatures_use_good_certificates(None)
mock_verify_x5u.assert_called_once_with(recipe.signature.x5u, timedelta(7))
assert errors == []
def test_it_reports_x5u_network_errors(self, mocker):
RecipeFactory(approver=UserFactory(), signed=True)
mock_verify_x5u = mocker.patch("normandy.recipes.checks.signing.verify_x5u")
mock_verify_x5u.side_effect = requests.exceptions.ConnectionError
errors = checks.signatures_use_good_certificates(None)
mock_verify_x5u.assert_called_once()
assert len(errors) == 1
assert errors[0].id == checks.ERROR_COULD_NOT_VERIFY_CERTIFICATE
@pytest.mark.django_db
class TestRecipeSignatureAreCorrect:
    def test_it_warns_if_a_field_isnt_available(self, mocker):
        """This is to allow unapplied migrations to not break running migrations."""
RecipeFactory(approver=UserFactory(), signed=True)
mock_canonical_json = mocker.patch("normandy.recipes.models.Recipe.canonical_json")
mock_canonical_json.side_effect = ProgrammingError("error for testing")
errors = checks.recipe_signatures_are_correct(None)
assert len(errors) == 1
assert errors[0].id == checks.WARNING_COULD_NOT_CHECK_SIGNATURES
@pytest.mark.django_db
class TestActionSignatureAreCorrect:
    def test_it_warns_if_a_field_isnt_available(self, mocker):
        """This is to allow unapplied migrations to not break running migrations."""
ActionFactory(signed=True)
mock_canonical_json = mocker.patch("normandy.recipes.models.Action.canonical_json")
mock_canonical_json.side_effect = ProgrammingError("error for testing")
errors = checks.action_signatures_are_correct(None)
assert len(errors) == 1
assert errors[0].id == checks.WARNING_COULD_NOT_CHECK_SIGNATURES
| mpl-2.0 | -1,509,006,960,226,237,200 | 44.842105 | 94 | 0.7031 | false |
Dwii/Master-Thesis | implementation/Palabos/cavity_benchmark/plot_benchmark.py | 1 | 1854 | # Display a list of *.dat files in a bar chart.
# Based on an example from https://chrisalbon.com/python/matplotlib_grouped_bar_plot.html
import sys
import os
import matplotlib.pyplot as plt
import numpy as np
if len(sys.argv) < 5 or (len(sys.argv)-3) % 2:
print("usage: python3 {0} <benchmark> <image path> (<dat1> <legend1> [<dat2> <legend2>] .. [<datN> <legendN>] ) ".format(os.path.basename(sys.argv[0])))
exit(1)
benchmark = sys.argv[1]
image_path = sys.argv[2]
groups = (len(sys.argv)-3)/2
# Load benchmark
domains = ()
nb_setups = 0
for line in open(benchmark,'r'):
n, snx, sny, snz = line.split()
domains += ( r"{0}$^3$".format(snx), ) #+= ( "{0}x{1}x{2}".format(snx, sny, snz), )
nb_setups += 1
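# Illustrative input format (inferred from the parsing above): the benchmark file
# has one setup per line, "n nx ny nz", e.g.
#   1 64 64 64
#   2 128 128 128
# and each .dat file holds one performance sample per line (site updates per
# second, converted to MLUPS below).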
# Setting the positions and width for the bars
pos = list(range(nb_setups))
width = 1 / (groups+2)
# Plotting the bars
fig, ax = plt.subplots(figsize=(10,5))
prop_iter = iter(plt.rcParams['axes.prop_cycle'])
legends = ()
maxLups = 0
for i, argi in enumerate(range(3, len(sys.argv), 2)):
mlups = np.array(list(map(float, open(sys.argv[argi])))) / 1E6
legends += ( sys.argv[argi+1], )
maxLups = max(maxLups, max(mlups))
plt.bar([p + width*i for p in pos],
mlups,
width,
alpha=0.5,
color=next(prop_iter)['color'])
# Set the y axis label
ax.set_ylabel('MLUPS')
ax.set_xlabel('Taille du sous-domaine')
# Set the chart's title
#ax.set_title(title)
# Set the position of the x ticks
ax.set_xticks([p + 1.5 * width for p in pos])
# Set the labels for the x ticks
ax.set_xticklabels(domains)
# Setting the x-axis and y-axis limits
plt.xlim(min(pos)-width, max(pos)+width*4)
#plt.ylim([0, maxLups] )
# Adding the legend and showing the plot
plt.legend(legends, loc='upper center')
ax.yaxis.grid()
plt.savefig(image_path)
plt.tight_layout()
plt.show() | mit | -7,959,568,732,201,215,000 | 26.279412 | 156 | 0.641855 | false |
AxelTLarsson/robot-localisation | robot_localisation/main.py | 1 | 6009 | """
This module contains the logic to run the simulation.
"""
import sys
import os
import argparse
import numpy as np
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
from robot_localisation.grid import Grid, build_transition_matrix
from robot_localisation.robot import Robot, Sensor
from robot_localisation.hmm_filter import FilterState
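# Typical invocation (illustrative, based on the argparse setup in main()):
#   python main.py --rows 8 --columns 8
# then press ENTER repeatedly to step the simulation (see help_text() below).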
def help_text():
"""
Return a helpful text explaining usage of the program.
"""
return """
------------------------------- HMM Filtering ---------------------------------
Type a command to get started. Type 'quit' or 'q' to quit.
Valid commands (all commands are case insensitive):
ENTER move the robot one step further in the simulation,
will also output current pose and estimated
position of the robot
help show this help text
show T show the transition matrix T
show f show the filter column vector
show O show the observation matrix
quit | q quit the program
-------------------------------------------------------------------------------
"""
def main():
parser = argparse.ArgumentParser(description='Robot localisation with HMM')
parser.add_argument(
'-r', '--rows',
type=int,
help='the number of rows on the grid, default is 4',
default=4)
parser.add_argument(
'-c', '--columns',
type=int,
help='the number of columns on the grid, default is 4',
default=4)
args = parser.parse_args()
# Initialise the program
size = (args.rows, args.columns)
the_T_matrix = build_transition_matrix(*size)
the_filter = FilterState(transition=the_T_matrix)
the_sensor = Sensor()
the_grid = Grid(*size)
the_robot = Robot(the_grid, the_T_matrix)
sensor_value = None
obs = None
print(help_text())
print("Grid size is {} x {}".format(size[0], size[1]))
print(the_robot)
print("The sensor says: {}".format(sensor_value))
filter_est = the_grid.index_to_pose(the_filter.belief_state)
pos_est = (filter_est[0], filter_est[1])
print("The HMM filter thinks the robot is at {}".format(filter_est))
print("The Manhattan distance is: {}".format(
manhattan(the_robot.get_position(), pos_est)))
np.set_printoptions(linewidth=1000)
# Main loop
while True:
user_command = str(input('> '))
if user_command.upper() == 'QUIT' or user_command.upper() == 'Q':
break
elif user_command.upper() == 'HELP':
print(help_text())
elif user_command.upper() == 'SHOW T':
print(the_T_matrix)
elif user_command.upper() == 'SHOW F':
print(the_filter.belief_matrix)
elif user_command.upper() == 'SHOW O':
print(obs)
elif not user_command:
# take a step then approximate etc.
the_robot.step()
sensor_value = the_sensor.get_position(the_robot)
obs = the_sensor.get_obs_matrix(sensor_value, size)
the_filter.forward(obs)
print(the_robot)
print("The sensor says: {}".format(sensor_value))
filter_est = the_grid.index_to_pose(the_filter.belief_state)
pos_est = (filter_est[0], filter_est[1])
print("The HMM filter thinks the robot is at {}".format(filter_est))
print("The Manhattan distance is: {}".format(
manhattan(the_robot.get_position(), pos_est)))
else:
print("Unknown command!")
def manhattan(pos1, pos2):
"""
Calculate the Manhattan distance between pos1 and pos2.
"""
x1, y1 = pos1
x2, y2 = pos2
return abs(x1-x2) + abs(y1-y2)
def automated_run():
import matplotlib.pyplot as plt
fig = plt.figure(figsize=(10, 7))
navg = 20
nsteps = 10
for size in (2, 2), (3, 3), (4, 4), (5, 5), (10, 10):
avg_distances = np.zeros(shape=(nsteps+1,))
for n in range(navg):
distances = list()
none_values = list()
the_T_matrix = build_transition_matrix(*size)
the_filter = FilterState(transition=the_T_matrix)
the_sensor = Sensor()
the_grid = Grid(*size)
the_robot = Robot(the_grid, the_T_matrix)
# get the manhattan distance at the start
filter_est = the_grid.index_to_pose(the_filter.belief_state)
pos_est = (filter_est[0], filter_est[1])
distances.append(manhattan(the_robot.get_position(), pos_est))
for i in range(nsteps):
# take a step then approximate etc.
the_robot.step()
sensor_value = the_sensor.get_position(the_robot)
if sensor_value is None:
none_values.append(i) # keep track of where None was returned
obs = the_sensor.get_obs_matrix(sensor_value, size)
the_filter.forward(obs)
filter_est = the_grid.index_to_pose(the_filter.belief_state)
pos_est = (filter_est[0], filter_est[1])
distances.append(manhattan(the_robot.get_position(), pos_est))
avg_distances += np.array(distances)
avg_distances /= navg
base_line, = plt.plot(avg_distances, label="Grid size {}".format(size))
# for point in none_values:
# plt.scatter(point, distances[point], marker='o',
# color=base_line.get_color(), s=40)
plt.legend()
plt.xlim(0, nsteps)
plt.ylim(0,)
plt.ylabel("Manhattan distance")
plt.xlabel("Steps")
plt.title("Manhattan distance from true position and inferred position \n"
"from the hidden Markov model (average over %s runs)" % navg)
fig.savefig("automated_run.png")
plt.show()
if __name__ == '__main__':
main()
# automated_run()
| mit | 1,122,709,431,503,210,400 | 33.337143 | 82 | 0.564487 | false |
confpack/confpacker | libconfpacker/packagers/base/__init__.py | 1 | 4696 | from __future__ import absolute_import
from datetime import datetime
import logging
import os
import os.path
import subprocess
import yaml
from cpcommon import cd
from .task import Task
class Package(object):
def src_path(self, *path):
return os.path.join(self.src_directory, *path)
def __init__(self, name, src_directory, build_version):
self.logger = logging.getLogger("confpacker")
self.name = name
self.src_directory = src_directory
self.build_version = build_version
self.meta = self.load_meta()
self.main_tasks = self.load_tasks()
self.main_handlers = self.load_handlers(ignore_error=True)
self.vars = self.load_vars(ignore_error=True)
self.secrets = self.load_secrets(ignore_error=True)
self.files = self.scan_files()
self.templates = self.scan_templates()
def _load_yml_file(self, filepath, expected_type, ignore_error=False):
if not os.path.exists(filepath):
if ignore_error:
return expected_type()
raise LookupError("cannot find {}".format(filepath))
with open(filepath) as f:
thing = yaml.load(f.read())
if thing is None and ignore_error:
return expected_type()
if not isinstance(thing, expected_type):
raise TypeError("expected a {} but got a {} in {}".format(expected_type, type(thing), filepath))
return thing
def load_meta(self):
meta_path = self.src_path("meta.yml")
return self._load_yml_file(meta_path, dict, ignore_error=True)
def load_tasks(self, filename="main.yml", ignore_error=False):
tasks_path = self.src_path("tasks", filename)
return [Task(rt) for rt in self._load_yml_file(tasks_path, list, ignore_error=ignore_error)]
def load_handlers(self, filename="main.yml", ignore_error=False):
handlers_path = self.src_path("handlers", filename)
return self._load_yml_file(handlers_path, list, ignore_error=ignore_error)
def load_vars(self, filename="main.yml", directory="vars", ignore_error=False):
vars_path = self.src_path(directory, filename)
return self._load_yml_file(vars_path, dict, ignore_error=ignore_error)
def load_secrets(self, filename="main.yml", ignore_error=False):
# TODO: this is not yet implemented
return {}
def scan_directory_for_files(self, directory):
base_path = self.src_path(directory)
if not os.path.isdir(base_path):
return []
files = []
for root, dirs, files_in_dir in os.walk(base_path):
for filename in files_in_dir:
path = os.path.join(root, filename)
if path.startswith(base_path):
target_path = path[len(base_path):]
else:
# TODO: This may happen for a symlink. Need to be investigated
raise RuntimeError("file path {} does not start with src directory path {}?".format(path, self.src_directory))
files.append((path, target_path))
return files
def scan_files(self):
return self.scan_directory_for_files("files")
def scan_templates(self):
return self.scan_directory_for_files("templates")
class BasePackager(object):
def __init__(self, build_config, output_dir):
self.logger = logging.getLogger("confpacker")
self.build_config = build_config
self.output_dir = os.path.abspath(output_dir)
if not os.path.exists(self.output_dir):
os.mkdir(self.output_dir)
def get_source_git_sha(self):
with cd(self.build_config.src_directory):
if os.path.isdir(".git"):
sha = subprocess.check_output(["git", "rev-parse", "--short", "HEAD"]).strip()
else:
sha = ""
return sha
def get_timestamp(self):
return datetime.now().strftime("%Y%m%d%H%M%S")
def get_build_version(self):
timestamp = self.get_timestamp()
git_sha = self.get_source_git_sha()
build_version = timestamp
if git_sha:
build_version = build_version + "-" + git_sha
return build_version
def build(self):
build_version = self.get_build_version()
this_out_dir = os.path.join(self.output_dir, build_version)
if os.path.exists(this_out_dir):
raise RuntimeError("{} already exists? this should not happen".format(this_out_dir))
os.mkdir(this_out_dir)
for pkg_name, pkg_src_path in self.build_config.package_paths.items():
package = Package(pkg_name, pkg_src_path, build_version)
this_package_out_dir = os.path.join(this_out_dir, pkg_name)
os.mkdir(this_package_out_dir)
self.build_one(package, build_version, this_package_out_dir)
def build_one(self, package, build_version, out_dir):
"""Builds one package
        out_dir is specific to this package. Subclasses should emit a file at <out_dir>/package.<typename>
"""
raise NotImplementedError
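# Illustrative only: a minimal concrete packager showing how build_one() is meant
# to be overridden. "DirectoryPackager" is a hypothetical name and not part of the
# original project; it simply copies the package's files and templates into
# out_dir, without rendering templates or emitting task metadata.
class DirectoryPackager(BasePackager):
    def build_one(self, package, build_version, out_dir):
        import shutil
        for src, target in package.files + package.templates:
            dest = os.path.join(out_dir, target.lstrip("/"))
            dest_dir = os.path.dirname(dest)
            if dest_dir and not os.path.isdir(dest_dir):
                os.makedirs(dest_dir)
            # A real packager would render templates with package.vars here.
            shutil.copy(src, dest)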
| apache-2.0 | -5,239,246,716,933,589,000 | 30.945578 | 120 | 0.672487 | false |
nonbeing/simplemonitor | Alerters/slack.py | 1 | 5883 | try:
import requests
requests_available = True
except ImportError:
requests_available = False
from alerter import Alerter
from pytz import timezone
import pytz
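# Illustrative config for this alerter: the "url" and "channel" keys are read in
# SlackAlerter.__init__ below; the surrounding section/type syntax is an
# assumption about simplemonitor's ini format, not taken from this file.
#   [slack]
#   type=slack
#   url=https://hooks.slack.com/services/XXX/YYY/ZZZ
#   channel=#alerts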
class SlackAlerter(Alerter):
"""Send alerts to a Slack webhook."""
def __init__(self, config_options):
if not requests_available:
print "Requests package is not available, cannot use SlackAlerter."
return
Alerter.__init__(self, config_options)
try:
url = config_options['url']
        except KeyError:
raise RuntimeError("Required configuration fields missing")
if 'channel' in config_options:
channel = config_options['channel']
else:
channel = None
if url == "":
raise RuntimeError("missing url")
self.url = url
self.channel = channel
def send_alert(self, name, monitor):
"""Send the message."""
type = self.should_alert(monitor)
(days, hours, minutes, seconds) = self.get_downtime(monitor)
host = "on host %s" % self.hostname
if self.channel is not None:
message_json = {'channel': self.channel}
else:
message_json = {}
message_json['attachments'] = [{}]
if type == "":
return
elif type == "failure":
firstFailureTime = monitor.first_failure_time()
localized_firstFailureTime = pytz.utc.localize(firstFailureTime)
ist_converted_time = self.format_datetime(localized_firstFailureTime.astimezone(timezone('Asia/Kolkata')))
message_json['text'] = "API test run completed."
message_json['username'] = "V18SimpleMonitor"
message_json["icon_url"] = "http://i.imgur.com/XdABB5Z.png"
message_json['attachments'][0]['color'] = 'danger'
message_json['attachments'][0]['title'] = "Monitor {} failed!".format(name)
message_json['attachments'][0]['title_link'] = "https://rpm.newrelic.com/accounts/1259793/applications/16687249/transactions#id=5b225765625472616e73616374696f6e2f416374696f6e2f73657276696365732f696e6465782f6164756c742d736561726368222c22225d"
fields = [
{
'title': 'Failed at',
'value': ist_converted_time,
'short': True
},
{
'title': 'Downtime',
'value': "{}+{:02d}:{:02d}:{:02d}".format(days, hours, minutes, seconds),
'short': True
},
{
'title': 'Virtual failure count',
'value': monitor.virtual_fail_count(),
'short': True
},
{
'title': 'Host',
'value': self.hostname,
'short': True
},
{
'title': 'Additional info',
'value': monitor.get_result()
},
{
'title': 'Description',
'value': monitor.describe()
}
]
try:
if monitor.recover_info != "":
fields.append({
'title': 'Recovery info',
'value': "Recovery info: %s" % monitor.recover_info
})
message_json['attachments'][0]['color'] = 'warning'
except AttributeError:
pass
message_json['attachments'][0]['fields'] = fields
elif type == "success":
firstFailureTime = monitor.first_failure_time()
localized_firstFailureTime = pytz.utc.localize(firstFailureTime)
ist_converted_time = self.format_datetime(localized_firstFailureTime.astimezone(timezone('Asia/Kolkata')))
message_json['text'] = "API test run completed."
message_json['username'] = "V18SimpleMonitor"
message_json["icon_url"] = "http://i.imgur.com/XdABB5Z.png"
fields = [
{
'title': 'Failed at',
'value': ist_converted_time,
'short': True
},
{
'title': 'Downtime',
'value': "{}+{:02d}:{:02d}:{:02d}".format(days, hours, minutes, seconds),
'short': True
},
{
'title': 'Host',
'value': self.hostname,
'short': True
},
{
'title': 'Description',
'value': monitor.describe()
}
]
message_json['attachments'][0]['color'] = 'good'
message_json['attachments'][0]['fields'] = fields
message_json['attachments'][0]['title'] = "Monitor {} succeeded.".format(name)
message_json['attachments'][0]['title_link'] = "https://rpm.newrelic.com/accounts/1259793/applications/16687249/transactions#id=5b225765625472616e73616374696f6e2f416374696f6e2f73657276696365732f696e6465782f6164756c742d736561726368222c22225d"
else:
print "Unknown alert type %s" % type
return
if not self.dry_run:
try:
r = requests.post(self.url, json=message_json)
if not r.status_code == 200:
print "POST to slack webhook failed"
print r
except Exception, e:
print "Failed to post to slack webhook"
print e
print message_json
self.available = False
else:
print "dry_run: would send slack: %s" % message_json.__repr__()
| bsd-3-clause | 622,996,994,956,758,800 | 35.76875 | 253 | 0.496345 | false |
chromium/chromium | third_party/android_deps/libs/com_google_errorprone_error_prone_annotation/3pp/fetch.py | 5 | 1396 | #!/usr/bin/env python
# Copyright 2021 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# This is generated, do not edit. Update BuildConfigGenerator.groovy and
# 3ppFetch.template instead.
from __future__ import print_function
import argparse
import json
import os
_FILE_URL = 'https://repo.maven.apache.org/maven2/com/google/errorprone/error_prone_annotation/2.7.1/error_prone_annotation-2.7.1.jar'
_FILE_NAME = 'error_prone_annotation-2.7.1.jar'
_FILE_VERSION = '2.7.1'
def do_latest():
print(_FILE_VERSION)
def get_download_url(version):
if _FILE_URL.endswith('.jar'):
ext = '.jar'
elif _FILE_URL.endswith('.aar'):
ext = '.aar'
else:
raise Exception('Unsupported extension for %s' % _FILE_URL)
partial_manifest = {
'url': [_FILE_URL],
'name': [_FILE_NAME],
'ext': ext,
}
print(json.dumps(partial_manifest))
def main():
ap = argparse.ArgumentParser()
sub = ap.add_subparsers()
latest = sub.add_parser("latest")
latest.set_defaults(func=lambda _opts: do_latest())
download = sub.add_parser("get_url")
download.set_defaults(
func=lambda _opts: get_download_url(os.environ['_3PP_VERSION']))
opts = ap.parse_args()
opts.func(opts)
if __name__ == '__main__':
main()
| bsd-3-clause | 6,280,813,716,697,671,000 | 23.928571 | 134 | 0.648997 | false |
TheCoSMoCompany/biopredyn | Prototype/src/libsbml-5.10.0/src/bindings/python/test/sbml/TestSBMLDocument.py | 1 | 6600 | #
# @file TestSBMLDocument.py
# @brief SBMLDocument unit tests
#
# @author Akiya Jouraku (Python conversion)
# @author Ben Bornstein
#
# ====== WARNING ===== WARNING ===== WARNING ===== WARNING ===== WARNING ======
#
# DO NOT EDIT THIS FILE.
#
# This file was generated automatically by converting the file located at
# src/sbml/test/TestSBMLDocument.c
# using the conversion program dev/utilities/translateTests/translateTests.pl.
# Any changes made here will be lost the next time the file is regenerated.
#
# -----------------------------------------------------------------------------
# This file is part of libSBML. Please visit http://sbml.org for more
# information about SBML, and the latest version of libSBML.
#
# Copyright 2005-2010 California Institute of Technology.
# Copyright 2002-2005 California Institute of Technology and
# Japan Science and Technology Corporation.
#
# This library is free software; you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation. A copy of the license agreement is provided
# in the file named "LICENSE.txt" included with this software distribution
# and also available online as http://sbml.org/software/libsbml/license.html
# -----------------------------------------------------------------------------
import sys
import unittest
import libsbml
class TestSBMLDocument(unittest.TestCase):
def test_SBMLDocument_create(self):
d = libsbml.SBMLDocument()
self.assert_( d.getTypeCode() == libsbml.SBML_DOCUMENT )
self.assert_( d.getNotes() == None )
self.assert_( d.getAnnotation() == None )
self.assert_( d.getLevel() == 3 )
self.assert_( d.getVersion() == 1 )
self.assert_( d.getNumErrors() == 0 )
_dummyList = [ d ]; _dummyList[:] = []; del _dummyList
pass
def test_SBMLDocument_createWith(self):
d = libsbml.SBMLDocument(1,2)
self.assert_( d.getTypeCode() == libsbml.SBML_DOCUMENT )
self.assert_( d.getNotes() == None )
self.assert_( d.getAnnotation() == None )
self.assert_( d.getLevel() == 1 )
self.assert_( d.getVersion() == 2 )
self.assert_( d.getNumErrors() == 0 )
_dummyList = [ d ]; _dummyList[:] = []; del _dummyList
pass
def test_SBMLDocument_free_NULL(self):
_dummyList = [ None ]; _dummyList[:] = []; del _dummyList
pass
def test_SBMLDocument_setLevelAndVersion(self):
d = libsbml.SBMLDocument(2,2)
m1 = libsbml.Model(2,2)
d.setModel(m1)
self.assert_( d.setLevelAndVersion(2,3,False) == True )
self.assert_( d.setLevelAndVersion(2,1,False) == True )
self.assert_( d.setLevelAndVersion(1,2,False) == True )
self.assert_( d.setLevelAndVersion(1,1,False) == False )
_dummyList = [ d ]; _dummyList[:] = []; del _dummyList
pass
def test_SBMLDocument_setLevelAndVersion_Error(self):
d = libsbml.SBMLDocument()
d.setLevelAndVersion(2,1,True)
m1 = libsbml.Model(2,1)
u = libsbml.Unit(2,1)
u.setKind(libsbml.UnitKind_forName("mole"))
u.setOffset(3.2)
ud = libsbml.UnitDefinition(2,1)
ud.setId( "ud")
ud.addUnit(u)
m1.addUnitDefinition(ud)
d.setModel(m1)
self.assert_( d.setLevelAndVersion(2,2,True) == False )
self.assert_( d.setLevelAndVersion(2,3,True) == False )
self.assert_( d.setLevelAndVersion(1,2,True) == False )
self.assert_( d.setLevelAndVersion(1,1,True) == False )
_dummyList = [ d ]; _dummyList[:] = []; del _dummyList
pass
def test_SBMLDocument_setLevelAndVersion_UnitsError(self):
d = libsbml.SBMLDocument()
d.setLevelAndVersion(2,4,False)
m1 = d.createModel()
c = m1.createCompartment()
c.setId( "c")
p = m1.createParameter()
p.setId( "p")
p.setUnits( "mole")
r = m1.createAssignmentRule()
r.setVariable( "c")
r.setFormula( "p*p")
self.assert_( d.setLevelAndVersion(2,2,False) == True )
self.assert_( d.setLevelAndVersion(2,3,False) == True )
self.assert_( d.setLevelAndVersion(1,2,False) == True )
self.assert_( d.setLevelAndVersion(1,1,False) == False )
_dummyList = [ d ]; _dummyList[:] = []; del _dummyList
pass
def test_SBMLDocument_setLevelAndVersion_Warning(self):
d = libsbml.SBMLDocument(2,2)
m1 = libsbml.Model(2,2)
(m1).setSBOTerm(2)
d.setModel(m1)
self.assert_( d.setLevelAndVersion(2,3,False) == True )
self.assert_( d.setLevelAndVersion(2,1,False) == True )
self.assert_( d.setLevelAndVersion(1,2,False) == True )
self.assert_( d.setLevelAndVersion(1,1,False) == False )
_dummyList = [ d ]; _dummyList[:] = []; del _dummyList
pass
def test_SBMLDocument_setModel(self):
d = libsbml.SBMLDocument(2,4)
m1 = libsbml.Model(2,4)
m2 = libsbml.Model(2,4)
self.assert_( d.getModel() == None )
i = d.setModel(m1)
self.assert_( i == libsbml.LIBSBML_OPERATION_SUCCESS )
mout = d.getModel()
self.assert_( mout != None )
self.assert_( mout != m1 )
i = d.setModel(d.getModel())
self.assert_( i == libsbml.LIBSBML_OPERATION_SUCCESS )
mout = d.getModel()
self.assert_( mout != None )
self.assert_( mout != m1 )
i = d.setModel(m2)
self.assert_( i == libsbml.LIBSBML_OPERATION_SUCCESS )
mout = d.getModel()
self.assert_( mout != None )
self.assert_( mout != m2 )
_dummyList = [ d ]; _dummyList[:] = []; del _dummyList
pass
def test_SBMLDocument_setModel1(self):
d = libsbml.SBMLDocument(2,2)
m1 = libsbml.Model(2,1)
i = d.setModel(m1)
self.assert_( i == libsbml.LIBSBML_VERSION_MISMATCH )
self.assert_( d.getModel() == None )
_dummyList = [ d ]; _dummyList[:] = []; del _dummyList
pass
def test_SBMLDocument_setModel2(self):
d = libsbml.SBMLDocument(2,2)
m1 = libsbml.Model(1,2)
m1.createCompartment()
i = d.setModel(m1)
self.assert_( i == libsbml.LIBSBML_LEVEL_MISMATCH )
self.assert_( d.getModel() == None )
_dummyList = [ d ]; _dummyList[:] = []; del _dummyList
pass
def test_SBMLDocument_setModel3(self):
d = libsbml.SBMLDocument(2,2)
m1 = libsbml.Model(2,2)
i = d.setModel(m1)
self.assert_( i == libsbml.LIBSBML_OPERATION_SUCCESS )
self.assert_( d.getModel() != None )
_dummyList = [ d ]; _dummyList[:] = []; del _dummyList
pass
def suite():
suite = unittest.TestSuite()
suite.addTest(unittest.makeSuite(TestSBMLDocument))
return suite
if __name__ == "__main__":
if unittest.TextTestRunner(verbosity=1).run(suite()).wasSuccessful() :
sys.exit(0)
else:
sys.exit(1)
| bsd-3-clause | 4,618,646,915,356,372,000 | 34.106383 | 79 | 0.630909 | false |
mclaughlin6464/pearce | bin/optimization/sloppy_joes_optimization_indiv_bins.py | 1 | 1573 | from pearce.emulator import OriginalRecipe, ExtraCrispy, SpicyBuffalo, LemonPepperWet
from pearce.mocks import cat_dict
import numpy as np
from os import path
from SloppyJoes import lazy_wrapper
training_file = '/scratch/users/swmclau2/xi_zheng07_cosmo_lowmsat/PearceRedMagicXiCosmoFixedNd.hdf5'
em_method = 'gp'
fixed_params = {'z':0.0, 'r': 0.19118072}
#emu = SpicyBuffalo(training_file, method = em_method, fixed_params=fixed_params,
# custom_mean_function = 'linear', downsample_factor = 0.01)
emu = OriginalRecipe(training_file, method = em_method, fixed_params=fixed_params,
custom_mean_function = 'linear', downsample_factor = 0.01)
def resids_bins(p, gps, xs, ys, yerrs):
res = []
p_np = np.array(p).reshape((len(gps), -1))
for gp, x, y,yerr, dy, p in zip(gps, xs, ys,yerrs, emu.downsample_y, p_np):
gp.set_parameter_vector(p)
gp.recompute()
r = (gp.predict(dy, x, return_cov=False)-y)/(yerr+1e-5)
res.append(r)
#print res[0].shape
return np.hstack(res)
def resids(p, gp, x, y, yerr):
p = np.array(p)
gp.set_parameter_vector(p)
gp.recompute()
res = (gp.predict(emu.downsample_y, x, return_cov=False)-y)/(yerr+1e-5)
#print res[0].shape
return res
n_hps = len(emu._emulator.get_parameter_vector())
#vals = np.ones((n_hps*emu.n_bins))
vals = np.ones((n_hps,))
args = (emu._emulator, emu.x, emu.y, emu.yerr)
result = lazy_wrapper(resids, vals, func_args = args, print_level = 3)
print result
np.savetxt('sloppy_joes_result_indiv_bins.npy', result)
| mit | 905,506,763,249,726,100 | 33.195652 | 100 | 0.664336 | false |
Alexanderkorn/Automatisation | oude scripts/Self/IFScraper.py | 1 | 2199 | __author__ = 'alexander'
import urllib2
import os
from lib import pyperclip
def PageScrape(pageurl):
hdr= {'Accept':'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
'User-Agent':'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1453.110 Safari/537.36'}
req = urllib2.Request(pageurl, "", hdr)
response = urllib2.urlopen(req)
html = response.read()
search = 'Gallery:'
for i in range(len(html)-len(search)):
if search == html[i:i+len(search)]:
foldername = html[i+len(search)-1:]
foldername = foldername.split('<')[3].split('>')[1]
while foldername[-1]=='.' or foldername[-1]==' ':
foldername = foldername[:-1]
search = 'original=\"'
imgnum = 1
imgcount = 0
for i in range(len(html)-len(search)):
if search == html[i:i+len(search)]:
imgcount += 1
print "\n\nThere are "+str(imgcount)+" pics in the gallery: "+foldername+"."
contnum = 2
contnum = raw_input("Would you like to download them all? 1=yes 2=no: ")
foldername = 'Downloads/'+foldername
if contnum == '1':
print '\n'
try:
os.makedirs(foldername)
except:
            print "Error: could not create the download directory (it may already exist)"
return 0
for i in range(len(html)-len(search)):
if search == html[i:i+len(search)]:
imgurl = html[i+len(search):]
imgurl = imgurl.split('"')[0]
if imgurl[-4] == '.':
imgname = foldername+'/'+str(imgnum)+imgurl[-4:]
else:
imgname = foldername+'/'+str(imgnum)+imgurl[-5:]
f = open(imgname, 'wb')
f.write(urllib2.urlopen(imgurl).read())
f.close()
print '\t'+str(imgnum)+'/'+str(imgcount)+ ' completed\n'
imgnum += 1
return 0
urltest = pyperclip.paste()
print "URL in clipboard: "+ urltest
use = raw_input("\nWould you like to use the above url? 1=yes 2=input other: ")
if use == '1':
url = urltest
else:
url = raw_input("\nEnter the url: ")
PageScrape(url)
| gpl-3.0 | -8,366,557,063,171,999,000 | 36.271186 | 135 | 0.554343 | false |
appleseedhq/gaffer | python/GafferSceneUI/__init__.py | 1 | 4899 | ##########################################################################
#
# Copyright (c) 2012, John Haddon. All rights reserved.
# Copyright (c) 2012-2016, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above
# copyright notice, this list of conditions and the following
# disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with
# the distribution.
#
# * Neither the name of John Haddon nor the names of
# any other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
__import__( "GafferImageUI" )
__import__( "GafferScene" )
from _GafferSceneUI import *
from HierarchyView import HierarchyView
from SceneInspector import SceneInspector
from PrimitiveInspector import PrimitiveInspector
from UVInspector import UVInspector
from FilterPlugValueWidget import FilterPlugValueWidget
from ScenePathPlugValueWidget import ScenePathPlugValueWidget
from TweakPlugValueWidget import TweakPlugValueWidget
import SceneHistoryUI
import SceneNodeUI
import SceneReaderUI
import SceneProcessorUI
import FilteredSceneProcessorUI
import PruneUI
import SubTreeUI
import OutputsUI
import OptionsUI
import OpenGLAttributesUI
import SceneWriterUI
import StandardOptionsUI
import StandardAttributesUI
import ShaderUI
import OpenGLShaderUI
import ObjectSourceUI
import TransformUI
import AttributesUI
import LightUI
import InteractiveRenderUI
import SphereUI
import MapProjectionUI
import MapOffsetUI
import CustomAttributesUI
import CustomOptionsUI
import SceneViewUI
import ShaderViewUI
import ShaderAssignmentUI
import ParentConstraintUI
import ParentUI
import PrimitiveVariablesUI
import DuplicateUI
import GridUI
import SetFilterUI
import DeleteGlobalsUI
import DeleteOptionsUI
import CopyOptionsUI
import DeleteSetsUI
import ExternalProceduralUI
import IsolateUI
import SelectionToolUI
import CropWindowToolUI
import CameraUI
import SetUI
import ClippingPlaneUI
import FilterUI
import PointsTypeUI
import ParametersUI
import TextUI
import AimConstraintUI
import CoordinateSystemUI
import DeleteAttributesUI
import SeedsUI
import UnionFilterUI
import PathFilterUI
import GroupUI
import OpenGLRenderUI
import PrimitiveVariableProcessorUI
import DeletePrimitiveVariablesUI
import MeshTypeUI
import DeleteOutputsUI
import InstancerUI
import ObjectToSceneUI
import FreezeTransformUI
import SceneElementProcessorUI
import PointConstraintUI
import BranchCreatorUI
import ConstraintUI
import PlaneUI
import CubeUI
import AttributeVisualiserUI
import FilterProcessorUI
import MeshToPointsUI
import RenderUI
import ShaderBallUI
import ShaderTweaksUI
import CameraTweaksUI
import LightToCameraUI
import FilterResultsUI
import TransformToolUI
import TranslateToolUI
import ScaleToolUI
import RotateToolUI
import MeshTangentsUI
import ResamplePrimitiveVariablesUI
import DeleteFacesUI
import DeleteCurvesUI
import DeletePointsUI
import CollectScenesUI
import EncapsulateUI
import GlobalShaderUI
import CameraToolUI
import ReverseWindingUI
import MeshDistortionUI
import DeleteObjectUI
import CopyAttributesUI
import CollectPrimitiveVariablesUI
import PrimitiveVariableExistsUI
import CollectTransformsUI
import UDIMQueryUI
import WireframeUI
import SetVisualiserUI
import LightFilterUI
import OrientationUI
import DeformerUI
import CopyPrimitiveVariablesUI
import MergeScenesUI
# then all the PathPreviewWidgets. note that the order
# of import controls the order of display.
from SceneReaderPathPreview import SceneReaderPathPreview
__import__( "IECore" ).loadConfig( "GAFFER_STARTUP_PATHS", subdirectory = "GafferSceneUI" )
| bsd-3-clause | -1,929,994,207,123,996,000 | 28.871951 | 91 | 0.812207 | false |
jdumas/autobib | pdftitle.py | 1 | 14035 | #!/usr/bin/env python2.7
# https://gist.github.com/nevesnunes/84b2eb7a2cf63cdecd170c139327f0d6
"""
Extract title from PDF file.
Dependencies:
pip install --user unidecode pyPDF PDFMiner
Usage:
find . -name "*.pdf" | xargs -I{} pdftitle -d tmp --rename {}
Limitations:
- No processing of CID keyed fonts. PDFMiner seems to decode them
in some methods (e.g. PDFTextDevice.render_string()).
- Some `LTTextLine` elements report incorrect height, leading to some
  blocks of text being considered bigger than the title text.
- Heuristics are used to judge invalid titles, implying the possibility of
false positives.
"""
import getopt
import os
import re
import string
import subprocess
import sys
import unidecode
from pyPdf import PdfFileReader
from pdfminer.pdfdocument import PDFDocument
from pdfminer.pdfpage import PDFPage
from pdfminer.pdfparser import PDFParser
from pdfminer.pdfinterp import PDFResourceManager, PDFPageInterpreter
from pdfminer.converter import PDFPageAggregator
from pdfminer.layout import LAParams, LTChar, LTFigure, LTTextBox, LTTextLine
__all__ = ['pdf_title']
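# Programmatic use (illustrative): the module exports pdf_title() via __all__
# above, e.g. `from pdftitle import pdf_title; print(pdf_title("paper.pdf"))`.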
def make_parsing_state(*sequential, **named):
enums = dict(zip(sequential, range(len(sequential))), **named)
return type('ParsingState', (), enums)
CHAR_PARSING_STATE = make_parsing_state('INIT_X', 'INIT_D', 'INSIDE_WORD')
def log(text):
if IS_LOG_ON:
print('--- ' + text)
IS_LOG_ON = False
MIN_CHARS = 6
MAX_WORDS = 20
MAX_CHARS = MAX_WORDS * 10
TOLERANCE = 1e-06
def sanitize(filename):
"""Turn string into a valid file name.
"""
# If the title was picked up from text, it may be too large.
# Preserve a certain number of words and characters
words = filename.split(' ')
filename = ' '.join(words[0:MAX_WORDS])
if len(filename) > MAX_CHARS:
filename = filename[0:MAX_CHARS]
# Preserve letters with diacritics
try:
filename = unidecode.unidecode(filename.encode('utf-8').decode('utf-8'))
except UnicodeDecodeError:
print("*** Skipping invalid title decoding for file %s! ***" % filename)
# Preserve subtitle and itemization separators
filename = re.sub(r',', ' ', filename)
filename = re.sub(r': ', ' - ', filename)
# Strip repetitions
filename = re.sub(r'\.pdf(\.pdf)*$', '', filename)
filename = re.sub(r'[ \t][ \t]*', ' ', filename)
valid_chars = "-_.() %s%s" % (string.ascii_letters, string.digits)
return ''.join([c for c in filename if c in valid_chars])
def meta_title(filename):
"""Title from pdf metadata.
"""
docinfo = PdfFileReader(file(filename, 'rb')).getDocumentInfo()
if docinfo is None:
return ''
return docinfo.title if docinfo.title else ''
def junk_line(line):
"""Judge if a line is not appropriate for a title.
"""
too_small = len(line.strip()) < MIN_CHARS
is_placeholder_text = bool(re.search(r'^[0-9 \t-]+(abstract|introduction)?\s+$|^(abstract|unknown|title|untitled):?$', line.strip().lower()))
is_copyright_info = bool(re.search(r'paper\s+title|technical\s+report|proceedings|preprint|to\s+appear|submission|(integrated|international).*conference|transactions\s+on|symposium\s+on|downloaded\s+from\s+http', line.lower()))
# NOTE: Titles which only contain a number will be discarded
stripped_to_ascii = ''.join([c for c in line.strip() if c in string.ascii_letters])
ascii_length = len(stripped_to_ascii)
stripped_to_chars = re.sub(r'[ \t\n]', '', line.strip())
chars_length = len(stripped_to_chars)
is_serial_number = ascii_length < chars_length / 2
return too_small or is_placeholder_text or is_copyright_info or is_serial_number
def empty_str(s):
return len(s.strip()) == 0
def is_close(a, b, relative_tolerance=TOLERANCE):
return abs(a-b) <= relative_tolerance * max(abs(a), abs(b))
def update_largest_text(line, y0, size, largest_text):
log('update size: ' + str(size))
log('largest_text size: ' + str(largest_text['size']))
# Sometimes font size is not correctly read, so we
    # fall back to the text y0 (the height may not even be calculated).
# In this case, we consider the first line of text to be a title.
if ((size == largest_text['size'] == 0) and (y0 - largest_text['y0'] < -TOLERANCE)):
return largest_text
# If it is a split line, it may contain a new line at the end
line = re.sub(r'\n$', ' ', line)
if (size - largest_text['size'] > TOLERANCE):
largest_text = {
'contents': line,
'y0': y0,
'size': size
}
# Title spans multiple lines
elif is_close(size, largest_text['size']):
largest_text['contents'] = largest_text['contents'] + line
largest_text['y0'] = y0
return largest_text
def extract_largest_text(obj, largest_text):
# Skip first letter of line when calculating size, as articles
# may enlarge it enough to be bigger then the title size.
# Also skip other elements such as `LTAnno`.
for i, child in enumerate(obj):
if isinstance(child, LTTextLine):
log('lt_obj child line: ' + str(child))
for j, child2 in enumerate(child):
if j > 1 and isinstance(child2, LTChar):
largest_text = update_largest_text(child.get_text(), child2.y0, child2.size, largest_text)
# Only need to parse size of one char
break
elif i > 1 and isinstance(child, LTChar):
log('lt_obj child char: ' + str(child))
largest_text = update_largest_text(obj.get_text(), child.y0, child.size, largest_text)
# Only need to parse size of one char
break
return largest_text
def extract_figure_text(lt_obj, largest_text):
"""
Extract text contained in a `LTFigure`.
Since text is encoded in `LTChar` elements, we detect separate lines
by keeping track of changes in font size.
"""
text = ''
line = ''
y0 = 0
size = 0
char_distance = 0
char_previous_x1 = 0
state = CHAR_PARSING_STATE.INIT_X
for child in lt_obj:
log('child: ' + str(child))
# Ignore other elements
if not isinstance (child, LTChar):
continue
char_y0 = child.y0
char_size = child.size
char_text = child.get_text()
decoded_char_text = unidecode.unidecode(char_text.encode('utf-8').decode('utf-8'))
log('char: ' + str(char_size) + ' ' + str(decoded_char_text))
# A new line was detected
if char_size != size:
log('new line')
largest_text = update_largest_text(line, y0, size, largest_text)
text += line + '\n'
line = char_text
y0 = char_y0
size = char_size
char_previous_x1 = child.x1
state = CHAR_PARSING_STATE.INIT_D
else:
# Spaces may not be present as `LTChar` elements,
# so we manually add them.
# NOTE: A word starting with lowercase can't be
# distinguished from the current word.
char_current_distance = abs(child.x0 - char_previous_x1)
log('char_current_distance: ' + str(char_current_distance))
log('char_distance: ' + str(char_distance))
log('state: ' + str(state))
# Initialization
if state == CHAR_PARSING_STATE.INIT_X:
char_previous_x1 = child.x1
state = CHAR_PARSING_STATE.INIT_D
elif state == CHAR_PARSING_STATE.INIT_D:
# Update distance only if no space is detected
if (char_distance > 0) and (char_current_distance < char_distance * 2.5):
char_distance = char_current_distance
if (char_distance < 0.1):
char_distance = 0.1
state = CHAR_PARSING_STATE.INSIDE_WORD
# If the x-position decreased, then it's a new line
if (state == CHAR_PARSING_STATE.INSIDE_WORD) and (child.x1 < char_previous_x1):
log('x-position decreased')
line += ' '
char_previous_x1 = child.x1
state = CHAR_PARSING_STATE.INIT_D
# Large enough distance: it's a space
elif (state == CHAR_PARSING_STATE.INSIDE_WORD) and (char_current_distance > char_distance * 8.5):
log('space detected')
log('char_current_distance: ' + str(char_current_distance))
log('char_distance: ' + str(char_distance))
line += ' '
char_previous_x1 = child.x1
# When larger distance is detected between chars, use it to
# improve our heuristic
elif (state == CHAR_PARSING_STATE.INSIDE_WORD) and (char_current_distance > char_distance) and (char_current_distance < char_distance * 2.5):
char_distance = char_current_distance
char_previous_x1 = child.x1
# Chars are sequential
else:
char_previous_x1 = child.x1
child_text = child.get_text()
if not empty_str(child_text):
line += child_text
return (largest_text, text)
def pdf_text(filename):
fp = open(filename, 'rb')
parser = PDFParser(fp)
doc = PDFDocument(parser, '')
parser.set_document(doc)
rsrcmgr = PDFResourceManager()
laparams = LAParams()
device = PDFPageAggregator(rsrcmgr, laparams=laparams)
interpreter = PDFPageInterpreter(rsrcmgr, device)
text = ''
largest_text = {
'contents': '',
'y0': 0,
'size': 0
}
for page in PDFPage.create_pages(doc):
interpreter.process_page(page)
layout = device.get_result()
for lt_obj in layout:
log('lt_obj: ' + str(lt_obj))
if isinstance(lt_obj, LTFigure):
(largest_text, figure_text) = extract_figure_text(lt_obj, largest_text)
text += figure_text
elif isinstance(lt_obj, (LTTextBox, LTTextLine)):
# Ignore body text blocks
stripped_to_chars = re.sub(r'[ \t\n]', '', lt_obj.get_text().strip())
if (len(stripped_to_chars) > MAX_CHARS * 2):
continue
largest_text = extract_largest_text(lt_obj, largest_text)
text += lt_obj.get_text() + '\n'
# Remove unprocessed CID text
largest_text['contents'] = re.sub(r'(\(cid:[0-9 \t-]*\))*', '', largest_text['contents'])
# Only parse the first page
return (largest_text, text)
def title_start(lines):
for i, line in enumerate(lines):
if not empty_str(line) and not junk_line(line):
return i
return 0
def title_end(lines, start, max_lines=2):
for i, line in enumerate(lines[start+1:start+max_lines+1], start+1):
if empty_str(line):
return i
return start + 1
def text_title(filename):
"""Extract title from PDF's text.
"""
(largest_text, lines_joined) = pdf_text(filename)
if empty_str(largest_text['contents']):
lines = lines_joined.strip().split('\n')
i = title_start(lines)
j = title_end(lines, i)
text = ' '.join(line.strip() for line in lines[i:j])
else:
text = largest_text['contents'].strip()
# Strip dots, which conflict with os.path's splittext()
text = re.sub(r'\.', '', text)
# Strip extra whitespace
text = re.sub(r'[\t\n]', '', text)
return text
def pdftotext_title(filename):
"""Extract title using `pdftotext`
"""
command = 'pdftotext {} -'.format(re.sub(' ', '\\ ', filename))
process = subprocess.Popen([command], \
shell=True, \
stdout=subprocess.PIPE, \
stderr=subprocess.PIPE)
out, err = process.communicate()
lines = out.strip().split('\n')
i = title_start(lines)
j = title_end(lines, i)
text = ' '.join(line.strip() for line in lines[i:j])
# Strip dots, which conflict with os.path's splittext()
text = re.sub(r'\.', '', text)
# Strip extra whitespace
text = re.sub(r'[\t\n]', '', text)
return text
def valid_title(title):
return not empty_str(title) and not junk_line(title) and empty_str(os.path.splitext(title)[1])
def pdf_title(filename):
"""Extract title using one of multiple strategies.
"""
try:
title = meta_title(filename)
if valid_title(title):
return title
except Exception as e:
print("*** Skipping invalid metadata for file %s! ***" % filename)
print(e)
try:
title = text_title(filename)
if valid_title(title):
return title
except Exception as e:
print("*** Skipping invalid parsing for file %s! ***" % filename)
print(e)
title = pdftotext_title(filename)
if valid_title(title):
return title
return os.path.basename(os.path.splitext(filename)[0])
if __name__ == "__main__":
opts, args = getopt.getopt(sys.argv[1:], 'nd:', ['dry-run', 'rename'])
dry_run = False
rename = False
target_dir = "."
for opt, arg in opts:
if opt in ['-n', '--dry-run']:
dry_run = True
elif opt in ['--rename']:
rename = True
elif opt in ['-d']:
target_dir = arg
if len(args) == 0:
print("Usage: %s [-d output] [--dry-run] [--rename] filenames" % sys.argv[0])
sys.exit(1)
for filename in args:
title = pdf_title(filename)
title = sanitize(' '.join(title.split()))
if rename:
new_name = os.path.join(target_dir, title + ".pdf")
print("%s => %s" % (filename, new_name))
if not dry_run:
if os.path.exists(new_name):
print("*** Target %s already exists! ***" % new_name)
else:
os.rename(filename, new_name)
else:
print(title)
| gpl-3.0 | 4,369,930,326,702,331,000 | 34.441919 | 231 | 0.591307 | false |
yaybu/touchdown | touchdown/provisioner/fuselage.py | 1 | 4827 | # Copyright 2015 Isotoma Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
import re
from touchdown.core import argument, errors, resource, serializers
from . import provisioner
try:
import fuselage
from fuselage import argument as f_args, builder, bundle, resources
except ImportError:
raise errors.Error(
"You need the fuselage package to use the fuselage_bundle resource"
)
def underscore(title):
return re.sub(r"(?<=[a-z])(?=[A-Z])", u"_", title).lower()
arguments = {
f_args.Boolean: lambda resource_type, klass, arg: argument.Boolean(field=arg),
f_args.String: lambda resource_type, klass, arg: argument.String(field=arg),
f_args.FullPath: lambda resource_type, klass, arg: argument.String(field=arg),
f_args.File: lambda resource_type, klass, arg: argument.String(field=arg),
f_args.Integer: lambda resource_type, klass, arg: argument.Integer(field=arg),
f_args.Octal: lambda resource_type, klass, arg: argument.Integer(field=arg),
f_args.Dict: lambda resource_type, klass, arg: argument.Dict(field=arg),
f_args.List: lambda resource_type, klass, arg: argument.List(field=arg),
f_args.SubscriptionArgument: lambda resource_type, klass, arg: argument.List(
field=arg
),
f_args.PolicyArgument: lambda resource_type, klass, arg: argument.String(
field=arg, choices=resource_type.policies.keys()
),
}
class FuselageResource(resource.Resource):
@classmethod
def adapt(base_klass, resource_type):
args = {
"resource_name": underscore(resource_type.__resource_name__),
"fuselage_class": resource_type,
"root": argument.Resource(Bundle),
}
for arg, klass in resource_type.__args__.items():
args[arg] = arguments[klass.__class__](resource_type, klass, arg)
cls = type(resource_type.__resource_name__, (base_klass,), args)
def _(self, **kwargs):
arguments = {"parent": self}
arguments.update(kwargs)
resource = cls(**arguments)
if not self.resources:
self.resources = []
self.resources.append(resource)
self.add_dependency(resource)
return resource
setattr(Bundle, "add_%s" % cls.resource_name, _)
return cls
class BundleSerializer(serializers.Serializer):
def render(self, runner, value):
b = bundle.ResourceBundle()
for res in value:
b.add(res.fuselage_class(**serializers.Resource().render(runner, res)))
return builder.build(b)
def pending(self, runner, value):
for res in value:
if serializers.Resource().pending(runner, res):
return True
return False
class Bundle(provisioner.Provisioner):
resource_name = "fuselage_bundle"
always_apply = argument.Boolean()
resources = argument.List(
argument.Resource(FuselageResource),
field="script",
serializer=BundleSerializer(),
)
sudo = argument.Boolean(field="sudo", default=True)
class Describe(provisioner.Describe):
name = "describe"
resource = Bundle
def describe_object(self):
if self.resource.always_apply:
            return {"Result": "Pending"}
if not self.resource.target:
# If target is not set we are probably dealing with an AMI... YUCK
# Bail out
return {"Result": "Pending"}
serializer = serializers.Resource()
if serializer.pending(self.runner, self.resource):
return {"Result": "Pending"}
kwargs = serializer.render(self.runner, self.resource)
try:
client = self.runner.get_plan(self.resource.target).get_client()
except errors.ServiceNotReady:
return {"Result": "Pending"}
try:
client.run_script(kwargs["script"], ["-s"])
except errors.CommandFailed as e:
if e.exit_code == 254:
return {"Result": "Success"}
return {"Result": "Pending"}
class Apply(provisioner.Apply):
resource = Bundle
for attr, value in vars(resources).items():
if type(value) == fuselage.resource.ResourceType:
locals()[attr] = FuselageResource.adapt(value)
| apache-2.0 | -1,469,942,013,499,236,900 | 30.966887 | 83 | 0.645743 | false |
yukisakurai/hhana | mva/plotting/utils.py | 1 | 4190 | import ROOT
from itertools import izip
from matplotlib import cm
from rootpy.plotting.style.atlas.labels import ATLAS_label
from rootpy.memory.keepalive import keepalive
from .. import ATLAS_LABEL
def set_colors(hists, colors='jet'):
if isinstance(colors, basestring):
colors = cm.get_cmap(colors, len(hists))
if hasattr(colors, '__call__'):
for i, h in enumerate(hists):
color = colors((i + 1) / float(len(hists) + 1))
h.SetColor(color)
else:
for h, color in izip(hists, colors):
h.SetColor(color)
def category_lumi_atlas(pad, category_label=None,
data_info=None, atlas_label=None,
textsize=20):
left, right, bottom, top = pad.margin_pixels
height = float(pad.height_pixels)
# draw the category label
if category_label:
label = ROOT.TLatex(
1. - pad.GetRightMargin(),
1. - (textsize - 2) / height,
category_label)
label.SetNDC()
label.SetTextFont(43)
label.SetTextSize(textsize)
label.SetTextAlign(31)
with pad:
label.Draw()
keepalive(pad, label)
# draw the luminosity label
if data_info is not None:
plabel = ROOT.TLatex(
1. - pad.GetLeftMargin() - 0.25,
1. - (top + textsize + 60) / height,
str(data_info))
plabel.SetNDC()
plabel.SetTextFont(43)
plabel.SetTextSize(textsize)
plabel.SetTextAlign(31)
with pad:
plabel.Draw()
keepalive(pad, plabel)
# draw the ATLAS label
if atlas_label is not False:
label = atlas_label or ATLAS_LABEL
ATLAS_label(pad.GetLeftMargin() + 0.03,
1. - (top + textsize + 15) / height,
sep=0.132, pad=pad, sqrts=None,
text=label,
textsize=textsize)
pad.Update()
pad.Modified()
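# Usage sketch for category_lumi_atlas (kept as a comment since it needs a live
# ROOT pad): given a rootpy pad that has already been drawn on,
#
#     category_lumi_atlas(pad, category_label='VBF', data_info=data_info,
#                         atlas_label='Internal')
#
# draws the category text top-right, the luminosity string just below it, and
# the ATLAS label top-left, following the layout computed above. The label
# strings here are illustrative.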
def label_plot(pad, template, xaxis, yaxis,
ylabel='Events', xlabel=None,
units=None, data_info=None,
category_label=None,
atlas_label=None,
extra_label=None,
extra_label_position='left',
textsize=22):
# set the axis labels
binw = list(template.xwidth())
binwidths = list(set(['%.2g' % w for w in binw]))
if units is not None:
if xlabel is not None:
xlabel = '%s [%s]' % (xlabel, units)
if ylabel and len(binwidths) == 1 and binwidths[0] != '1':
# constant width bins
ylabel = '%s / %s %s' % (ylabel, binwidths[0], units)
elif ylabel and len(binwidths) == 1 and binwidths[0] != '1':
ylabel = '%s / %s' % (ylabel, binwidths[0])
if ylabel:
yaxis.SetTitle(ylabel)
if xlabel:
xaxis.SetTitle(xlabel)
left, right, bottom, top = pad.margin_pixels
height = float(pad.height_pixels)
category_lumi_atlas(pad, category_label, data_info, atlas_label)
# draw the extra label
if extra_label is not None:
if extra_label_position == 'left':
label = ROOT.TLatex(pad.GetLeftMargin() + 0.03,
1. - (top + 2 * (textsize + 40)) / height,
extra_label)
else: # right
label = ROOT.TLatex(1. - pad.GetRightMargin() - 0.03,
1. - (top + 2 * (textsize + 40)) / height,
extra_label)
label.SetTextAlign(31)
label.SetNDC()
label.SetTextFont(43)
label.SetTextSize(textsize)
with pad:
label.Draw()
keepalive(pad, label)
pad.Update()
pad.Modified()
# class rootpy.plotting.Legend(
# entries, pad=None,
# leftmargin=0.5, topmargin=0.05, rightmargin=0.05,
# entryheight=0.06, entrysep=0.02, margin=0.3,
# textfont=None, textsize=None, header=None)
def legend_params(position, textsize):
return dict(
leftmargin=0.48, topmargin=0.03, rightmargin=0.05,
entryheight=0.05,
entrysep=0.01,
margin=0.25,
textsize=textsize)
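# Illustrative sketch: feeding legend_params into a rootpy Legend, using the
# constructor signature quoted in the comment above. The entries and pad are
# assumed to come from the calling plotting code; note that `position` is
# currently unused inside legend_params.
def _legend_example(entries, pad, textsize=20):
    from rootpy.plotting import Legend
    return Legend(entries, pad=pad, **legend_params('right', textsize))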
| gpl-3.0 | 7,927,911,205,312,527,000 | 30.742424 | 74 | 0.548449 | false |
O-T-L/PyOptimization | parameters/optimizer/epsilon_moea.py | 1 | 4120 | """
Copyright (C) 2014, 申瑞珉 (Ruimin Shen)
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
def epsilon(config, problem):
if type(problem).__name__ == 'DTLZ1':
table = {
3: 0.033,
4: 0.052,
5: 0.059,
6: 0.0554,
8: 0.0549,
10: 0.0565,
}
_epsilon = table[problem.GetNumberOfObjectives()]
epsilon = [_epsilon] * problem.GetNumberOfObjectives()
return [epsilon]
elif type(problem).__name__ == 'DTLZ2':
table = {
2: 0.006,
3: 0.06,
4: 0.1312,
5: 0.1927,
6: 0.234,
8: 0.29,
10: 0.308,
}
_epsilon = table[problem.GetNumberOfObjectives()]
epsilon = [_epsilon] * problem.GetNumberOfObjectives()
return [epsilon]
elif type(problem).__name__ == 'DTLZ3':
table = {
3: 0.06,
4: 0.1385,
5: 0.2,
6: 0.227,
8: 0.1567,
10: 0.85,
}
_epsilon = table[problem.GetNumberOfObjectives()]
epsilon = [_epsilon] * problem.GetNumberOfObjectives()
return [epsilon]
elif type(problem).__name__ == 'DTLZ4':
table = {
3: 0.06,
4: 0.1312,
5: 0.1927,
6: 0.234,
8: 0.29,
10: 0.308,
}
_epsilon = table[problem.GetNumberOfObjectives()]
epsilon = [_epsilon] * problem.GetNumberOfObjectives()
return [epsilon]
elif type(problem).__name__ == 'DTLZ5':
table = {
3: 0.0052,
4: 0.042,
5: 0.0785,
6: 0.11,
8: 0.1272,
10: 0.1288,
}
_epsilon = table[problem.GetNumberOfObjectives()]
epsilon = [_epsilon] * problem.GetNumberOfObjectives()
return [epsilon]
elif type(problem).__name__ == 'DTLZ6':
table = {
3: 0.0227,
4: 0.12,
5: 0.3552,
6: 0.75,
8: 1.15,
10: 1.45,
}
_epsilon = table[problem.GetNumberOfObjectives()]
epsilon = [_epsilon] * problem.GetNumberOfObjectives()
return [epsilon]
elif type(problem).__name__ == 'DTLZ7':
table = {
2: 0.005,
3: 0.048,
4: 0.105,
5: 0.158,
6: 0.15,
8: 0.225,
10: 0.46,
}
_epsilon = table[problem.GetNumberOfObjectives()]
epsilon = [_epsilon] * problem.GetNumberOfObjectives()
return [epsilon]
elif type(problem).__name__ == 'ConvexDTLZ2':
table = {
2: 0.0075,
3: 0.035,
4: 0.039,
5: 0.034,
6: 0.0273,
8: 0.0184,
10: 0.0153,
}
_epsilon = table[problem.GetNumberOfObjectives()]
epsilon = [_epsilon] * problem.GetNumberOfObjectives()
return [epsilon]
elif type(problem).__name__ == 'DTLZ5I':
if problem.GetNumberOfObjectives() == 10:
table = {
3: 0.06,
4: 0.12,
5: 0.16,
6: 0.2,
7: 0.24,
8: 0.25,
9: 0.26,
}
_epsilon = table[problem.GetManifold() + 1]
epsilon = [_epsilon] * problem.GetNumberOfObjectives()
return [epsilon]
raise Exception(type(problem).__name__, problem.GetNumberOfObjectives())
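# Usage sketch (comment only -- it needs a PyOptimization problem object that
# implements the GetNumberOfObjectives/GetManifold interface used above):
#
#     [eps] = epsilon(config, problem)
#
# For a 3-objective DTLZ2 problem this returns [[0.06, 0.06, 0.06]]: one
# epsilon per objective, taken from the lookup tables above and wrapped in a
# one-item list as the surrounding parameter modules expect.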
| lgpl-3.0 | -8,347,849,438,186,841,000 | 29.932331 | 76 | 0.495139 | false |
tulsawebdevs/django-multi-gtfs | multigtfs/models/trip.py | 1 | 4025 | #
# Copyright 2012-2014 John Whitlock
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import unicode_literals
from django.contrib.gis.geos import LineString
from django.utils.encoding import python_2_unicode_compatible
from jsonfield import JSONField
from multigtfs.models.base import models, Base
@python_2_unicode_compatible
class Trip(Base):
"""A trip along a route
This implements trips.txt in the GTFS feed
"""
route = models.ForeignKey('Route', on_delete=models.CASCADE)
service = models.ForeignKey(
'Service', null=True, blank=True, on_delete=models.SET_NULL)
trip_id = models.CharField(
max_length=255, db_index=True,
help_text="Unique identifier for a trip.")
headsign = models.CharField(
max_length=255, blank=True,
help_text="Destination identification for passengers.")
short_name = models.CharField(
max_length=63, blank=True,
help_text="Short name used in schedules and signboards.")
direction = models.CharField(
max_length=1, blank=True,
choices=(('0', '0'), ('1', '1')),
help_text="Direction for bi-directional routes.")
block = models.ForeignKey(
'Block', null=True, blank=True, on_delete=models.SET_NULL,
help_text="Block of sequential trips that this trip belongs to.")
shape = models.ForeignKey(
'Shape', null=True, blank=True, on_delete=models.SET_NULL,
help_text="Shape used for this trip")
geometry = models.LineStringField(
null=True, blank=True,
help_text='Geometry cache of Shape or Stops')
wheelchair_accessible = models.CharField(
max_length=1, blank=True,
choices=(
('0', 'No information'),
('1', 'Some wheelchair accommodation'),
('2', 'No wheelchair accommodation')),
        help_text='Are there accommodations for riders with wheelchairs?')
bikes_allowed = models.CharField(
max_length=1, blank=True,
choices=(
('0', 'No information'),
('1', 'Some bicycle accommodation'),
('2', 'No bicycles allowed')),
help_text='Are bicycles allowed?')
extra_data = JSONField(default={}, blank=True, null=True)
def update_geometry(self, update_parent=True):
"""Update the geometry from the Shape or Stops"""
original = self.geometry
if self.shape:
self.geometry = self.shape.geometry
else:
stoptimes = self.stoptime_set.order_by('stop_sequence')
if stoptimes.count() > 1:
self.geometry = LineString(
[st.stop.point.coords for st in stoptimes])
if self.geometry != original:
self.save()
if update_parent:
self.route.update_geometry()
def __str__(self):
return "%s-%s" % (self.route, self.trip_id)
class Meta:
db_table = 'trip'
app_label = 'multigtfs'
_column_map = (
('route_id', 'route__route_id'),
('service_id', 'service__service_id'),
('trip_id', 'trip_id'),
('trip_headsign', 'headsign'),
('trip_short_name', 'short_name'),
('direction_id', 'direction'),
('block_id', 'block__block_id'),
('shape_id', 'shape__shape_id'),
('wheelchair_accessible', 'wheelchair_accessible'),
('bikes_allowed', 'bikes_allowed'),
)
_filename = 'trips.txt'
_rel_to_feed = 'route__feed'
_unique_fields = ('trip_id',)
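# Usage sketch (comment only -- it assumes a GTFS feed already imported through
# django-multi-gtfs and a configured spatial database; the lookup values are
# illustrative):
#
#     trip = Trip.objects.get(trip_id='T1', route__feed=feed)
#     trip.update_geometry()   # rebuild from the Shape, or from ordered stops
#     str(trip)                # "<route>-T1"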
| apache-2.0 | -85,679,634,461,076,020 | 36.268519 | 74 | 0.623354 | false |
napalm-automation/napalm-yang | napalm_yang/models/openconfig/system/ntp/ntp_keys/ntp_key/state/__init__.py | 1 | 14103 | # -*- coding: utf-8 -*-
from operator import attrgetter
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType
from pyangbind.lib.yangtypes import RestrictedClassType
from pyangbind.lib.yangtypes import TypedListType
from pyangbind.lib.yangtypes import YANGBool
from pyangbind.lib.yangtypes import YANGListType
from pyangbind.lib.yangtypes import YANGDynClass
from pyangbind.lib.yangtypes import ReferenceType
from pyangbind.lib.base import PybindBase
from collections import OrderedDict
from decimal import Decimal
from bitarray import bitarray
import six
# PY3 support of some PY2 keywords (needs improvement)
if six.PY3:
import builtins as __builtin__
long = int
elif six.PY2:
import __builtin__
class state(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-system - based on the path /system/ntp/ntp-keys/ntp-key/state. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: Operational state data for NTP auth keys
"""
__slots__ = ("_path_helper", "_extmethods", "__key_id", "__key_type", "__key_value")
_yang_name = "state"
_pybind_generated_by = "container"
def __init__(self, *args, **kwargs):
self._path_helper = False
self._extmethods = False
self.__key_id = YANGDynClass(
base=RestrictedClassType(
base_type=int, restriction_dict={"range": ["0..65535"]}, int_size=16
),
is_leaf=True,
yang_name="key-id",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/system",
defining_module="openconfig-system",
yang_type="uint16",
is_config=False,
)
self.__key_type = YANGDynClass(
base=RestrictedClassType(
base_type=six.text_type,
restriction_type="dict_key",
restriction_arg={
"NTP_AUTH_MD5": {
"@module": "openconfig-system",
"@namespace": "http://openconfig.net/yang/system",
},
"oc-sys:NTP_AUTH_MD5": {
"@module": "openconfig-system",
"@namespace": "http://openconfig.net/yang/system",
},
},
),
is_leaf=True,
yang_name="key-type",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/system",
defining_module="openconfig-system",
yang_type="identityref",
is_config=False,
)
self.__key_value = YANGDynClass(
base=six.text_type,
is_leaf=True,
yang_name="key-value",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/system",
defining_module="openconfig-system",
yang_type="string",
is_config=False,
)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path() + [self._yang_name]
else:
return ["system", "ntp", "ntp-keys", "ntp-key", "state"]
def _get_key_id(self):
"""
Getter method for key_id, mapped from YANG variable /system/ntp/ntp_keys/ntp_key/state/key_id (uint16)
YANG Description: Integer identifier used by the client and server to
designate a secret key. The client and server must use
the same key id.
"""
return self.__key_id
def _set_key_id(self, v, load=False):
"""
Setter method for key_id, mapped from YANG variable /system/ntp/ntp_keys/ntp_key/state/key_id (uint16)
If this variable is read-only (config: false) in the
source YANG file, then _set_key_id is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_key_id() directly.
YANG Description: Integer identifier used by the client and server to
designate a secret key. The client and server must use
the same key id.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=RestrictedClassType(
base_type=int, restriction_dict={"range": ["0..65535"]}, int_size=16
),
is_leaf=True,
yang_name="key-id",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/system",
defining_module="openconfig-system",
yang_type="uint16",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """key_id must be of a type compatible with uint16""",
"defined-type": "uint16",
"generated-type": """YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), is_leaf=True, yang_name="key-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/system', defining_module='openconfig-system', yang_type='uint16', is_config=False)""",
}
)
self.__key_id = t
if hasattr(self, "_set"):
self._set()
def _unset_key_id(self):
self.__key_id = YANGDynClass(
base=RestrictedClassType(
base_type=int, restriction_dict={"range": ["0..65535"]}, int_size=16
),
is_leaf=True,
yang_name="key-id",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/system",
defining_module="openconfig-system",
yang_type="uint16",
is_config=False,
)
def _get_key_type(self):
"""
Getter method for key_type, mapped from YANG variable /system/ntp/ntp_keys/ntp_key/state/key_type (identityref)
YANG Description: Encryption type used for the NTP authentication key
"""
return self.__key_type
def _set_key_type(self, v, load=False):
"""
Setter method for key_type, mapped from YANG variable /system/ntp/ntp_keys/ntp_key/state/key_type (identityref)
If this variable is read-only (config: false) in the
source YANG file, then _set_key_type is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_key_type() directly.
YANG Description: Encryption type used for the NTP authentication key
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=RestrictedClassType(
base_type=six.text_type,
restriction_type="dict_key",
restriction_arg={
"NTP_AUTH_MD5": {
"@module": "openconfig-system",
"@namespace": "http://openconfig.net/yang/system",
},
"oc-sys:NTP_AUTH_MD5": {
"@module": "openconfig-system",
"@namespace": "http://openconfig.net/yang/system",
},
},
),
is_leaf=True,
yang_name="key-type",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/system",
defining_module="openconfig-system",
yang_type="identityref",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """key_type must be of a type compatible with identityref""",
"defined-type": "openconfig-system:identityref",
"generated-type": """YANGDynClass(base=RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'NTP_AUTH_MD5': {'@module': 'openconfig-system', '@namespace': 'http://openconfig.net/yang/system'}, 'oc-sys:NTP_AUTH_MD5': {'@module': 'openconfig-system', '@namespace': 'http://openconfig.net/yang/system'}},), is_leaf=True, yang_name="key-type", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/system', defining_module='openconfig-system', yang_type='identityref', is_config=False)""",
}
)
self.__key_type = t
if hasattr(self, "_set"):
self._set()
def _unset_key_type(self):
self.__key_type = YANGDynClass(
base=RestrictedClassType(
base_type=six.text_type,
restriction_type="dict_key",
restriction_arg={
"NTP_AUTH_MD5": {
"@module": "openconfig-system",
"@namespace": "http://openconfig.net/yang/system",
},
"oc-sys:NTP_AUTH_MD5": {
"@module": "openconfig-system",
"@namespace": "http://openconfig.net/yang/system",
},
},
),
is_leaf=True,
yang_name="key-type",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/system",
defining_module="openconfig-system",
yang_type="identityref",
is_config=False,
)
def _get_key_value(self):
"""
Getter method for key_value, mapped from YANG variable /system/ntp/ntp_keys/ntp_key/state/key_value (string)
YANG Description: NTP authentication key value
"""
return self.__key_value
def _set_key_value(self, v, load=False):
"""
Setter method for key_value, mapped from YANG variable /system/ntp/ntp_keys/ntp_key/state/key_value (string)
If this variable is read-only (config: false) in the
source YANG file, then _set_key_value is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_key_value() directly.
YANG Description: NTP authentication key value
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=six.text_type,
is_leaf=True,
yang_name="key-value",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/system",
defining_module="openconfig-system",
yang_type="string",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """key_value must be of a type compatible with string""",
"defined-type": "string",
"generated-type": """YANGDynClass(base=six.text_type, is_leaf=True, yang_name="key-value", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/system', defining_module='openconfig-system', yang_type='string', is_config=False)""",
}
)
self.__key_value = t
if hasattr(self, "_set"):
self._set()
def _unset_key_value(self):
self.__key_value = YANGDynClass(
base=six.text_type,
is_leaf=True,
yang_name="key-value",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/system",
defining_module="openconfig-system",
yang_type="string",
is_config=False,
)
key_id = __builtin__.property(_get_key_id)
key_type = __builtin__.property(_get_key_type)
key_value = __builtin__.property(_get_key_value)
_pyangbind_elements = OrderedDict(
[("key_id", key_id), ("key_type", key_type), ("key_value", key_value)]
)
| apache-2.0 | 1,886,888,537,890,216,700 | 38.615169 | 621 | 0.546054 | false |
taotaocoule/stock | spider/data/bond.py | 1 | 1159 | # 国债指数:id=0000121;http://pdfm2.eastmoney.com/EM_UBG_PDTI_Fast/api/js?id=0000121&TYPE=k&js=(x)&rtntype=5&isCR=false&fsData1518154947301=fsData1518154947301
# 沪市企业: id=0000131;http://pdfm2.eastmoney.com/EM_UBG_PDTI_Fast/api/js?id=0000131&TYPE=k&js=(x)&rtntype=5&isCR=false&fsData1518156740923=fsData1518156740923
# 深圳企业:id=3994812;http://pdfm2.eastmoney.com/EM_UBG_PDTI_Fast/api/js?id=3994812&TYPE=k&js=(x)&rtntype=5&isCR=false&fsData1518156947700=fsData1518156947700
import urllib.request
import pandas as pd
import json
class Bond(object):
"""docstring for Bond"""
def __init__(self):
self.index = {
'国债指数':'0000121',
'沪市企业债':'0000131',
'深圳企业债':'3994812'
}
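        # Key meanings: 国债指数 = treasury bond index, 沪市企业债 = Shanghai
        # corporate bonds, 深圳企业债 = Shenzhen corporate bonds. Usage sketch
        # (comment only, since it needs network access to the EastMoney API):
        #
        #     b = Bond()
        #     df = b.bond_index(b.index['国债指数'])
        #
        # The returned columns 日期/开盘/收盘/最高/最低/成交量/成交金额/振幅 map to
        # date, open, close, high, low, volume, turnover and amplitude.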
def bond_index(self,id):
url = r'http://pdfm2.eastmoney.com/EM_UBG_PDTI_Fast/api/js?id={}&TYPE=k&js=(x)&rtntype=5&isCR=false&fsData1518154947301=fsData1518154947301'.format(id)
raw = json.loads(urllib.request.urlopen(url).read())
head = ['日期','开盘','收盘','最高','最低','成交量','成交金额','振幅']
return pd.DataFrame(list(map(lambda x:x.split(','),raw['data'])),columns=head) | mit | -7,352,327,051,511,593,000 | 46.5 | 155 | 0.707981 | false |
googleapis/googleapis-gen | grafeas/v1/grafeas-v1-py/tests/unit/gapic/grafeas_v1/test_grafeas.py | 1 | 180629 | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
import mock
import packaging.version
import grpc
from grpc.experimental import aio
import math
import pytest
from proto.marshal.rules.dates import DurationRule, TimestampRule
from google.api_core import client_options
from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1
from google.api_core import grpc_helpers
from google.api_core import grpc_helpers_async
from google.auth import credentials as ga_credentials
from google.auth.exceptions import MutualTLSChannelError
from google.oauth2 import service_account
from google.protobuf import any_pb2 # type: ignore
from google.protobuf import field_mask_pb2 # type: ignore
from google.protobuf import timestamp_pb2 # type: ignore
from google.rpc import status_pb2 # type: ignore
from grafeas.grafeas_v1.services.grafeas import GrafeasAsyncClient
from grafeas.grafeas_v1.services.grafeas import GrafeasClient
from grafeas.grafeas_v1.services.grafeas import pagers
from grafeas.grafeas_v1.services.grafeas import transports
from grafeas.grafeas_v1.services.grafeas.transports.base import _GOOGLE_AUTH_VERSION
from grafeas.grafeas_v1.types import attestation
from grafeas.grafeas_v1.types import build
from grafeas.grafeas_v1.types import common
from grafeas.grafeas_v1.types import cvss
from grafeas.grafeas_v1.types import deployment
from grafeas.grafeas_v1.types import discovery
from grafeas.grafeas_v1.types import grafeas
from grafeas.grafeas_v1.types import image
from grafeas.grafeas_v1.types import package
from grafeas.grafeas_v1.types import provenance
from grafeas.grafeas_v1.types import upgrade
from grafeas.grafeas_v1.types import vulnerability
import google.auth
# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively
# through google-api-core:
# - Delete the auth "less than" test cases
# - Delete these pytest markers (Make the "greater than or equal to" tests the default).
requires_google_auth_lt_1_25_0 = pytest.mark.skipif(
packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"),
reason="This test requires google-auth < 1.25.0",
)
requires_google_auth_gte_1_25_0 = pytest.mark.skipif(
packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"),
reason="This test requires google-auth >= 1.25.0",
)
def client_cert_source_callback():
return b"cert bytes", b"key bytes"
# If default endpoint is localhost, then default mtls endpoint will be the same.
# This method modifies the default endpoint so the client can produce a different
# mtls endpoint for endpoint testing purposes.
def modify_default_endpoint(client):
return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT
def test__get_default_mtls_endpoint():
api_endpoint = "example.googleapis.com"
api_mtls_endpoint = "example.mtls.googleapis.com"
sandbox_endpoint = "example.sandbox.googleapis.com"
sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com"
non_googleapi = "api.example.com"
assert GrafeasClient._get_default_mtls_endpoint(None) is None
assert GrafeasClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint
assert GrafeasClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint
assert GrafeasClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint
assert GrafeasClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint
assert GrafeasClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi
@pytest.mark.parametrize("client_class", [
GrafeasClient,
GrafeasAsyncClient,
])
def test_grafeas_client_from_service_account_info(client_class):
creds = ga_credentials.AnonymousCredentials()
with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory:
factory.return_value = creds
info = {"valid": True}
client = client_class.from_service_account_info(info)
assert client.transport._credentials == creds
assert isinstance(client, client_class)
assert client.transport._host == 'containeranalysis.googleapis.com:443'
@pytest.mark.parametrize("client_class", [
GrafeasClient,
GrafeasAsyncClient,
])
def test_grafeas_client_service_account_always_use_jwt(client_class):
with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt:
creds = service_account.Credentials(None, None, None)
client = client_class(credentials=creds)
use_jwt.assert_not_called()
@pytest.mark.parametrize("transport_class,transport_name", [
(transports.GrafeasGrpcTransport, "grpc"),
(transports.GrafeasGrpcAsyncIOTransport, "grpc_asyncio"),
])
def test_grafeas_client_service_account_always_use_jwt_true(transport_class, transport_name):
with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt:
creds = service_account.Credentials(None, None, None)
transport = transport_class(credentials=creds, always_use_jwt_access=True)
use_jwt.assert_called_once_with(True)
@pytest.mark.parametrize("client_class", [
GrafeasClient,
GrafeasAsyncClient,
])
def test_grafeas_client_from_service_account_file(client_class):
creds = ga_credentials.AnonymousCredentials()
with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory:
factory.return_value = creds
client = client_class.from_service_account_file("dummy/file/path.json")
assert client.transport._credentials == creds
assert isinstance(client, client_class)
client = client_class.from_service_account_json("dummy/file/path.json")
assert client.transport._credentials == creds
assert isinstance(client, client_class)
assert client.transport._host == 'containeranalysis.googleapis.com:443'
def test_grafeas_client_get_transport_class():
transport = GrafeasClient.get_transport_class()
available_transports = [
transports.GrafeasGrpcTransport,
]
assert transport in available_transports
transport = GrafeasClient.get_transport_class("grpc")
assert transport == transports.GrafeasGrpcTransport
@pytest.mark.parametrize("client_class,transport_class,transport_name", [
(GrafeasClient, transports.GrafeasGrpcTransport, "grpc"),
(GrafeasAsyncClient, transports.GrafeasGrpcAsyncIOTransport, "grpc_asyncio"),
])
@mock.patch.object(GrafeasClient, "DEFAULT_ENDPOINT", modify_default_endpoint(GrafeasClient))
@mock.patch.object(GrafeasAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(GrafeasAsyncClient))
def test_grafeas_client_client_options(client_class, transport_class, transport_name):
# Check that if channel is provided we won't create a new one.
with mock.patch.object(GrafeasClient, 'get_transport_class') as gtc:
transport = transport_class(
credentials=ga_credentials.AnonymousCredentials()
)
client = client_class(transport=transport)
gtc.assert_not_called()
# Check that if channel is provided via str we will create a new one.
with mock.patch.object(GrafeasClient, 'get_transport_class') as gtc:
client = client_class(transport=transport_name)
gtc.assert_called()
# Check the case api_endpoint is provided.
options = client_options.ClientOptions(api_endpoint="squid.clam.whelk")
with mock.patch.object(transport_class, '__init__') as patched:
patched.return_value = None
client = client_class(client_options=options)
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host="squid.clam.whelk",
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
# Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
# "never".
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
with mock.patch.object(transport_class, '__init__') as patched:
patched.return_value = None
client = client_class()
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
# Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
# "always".
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
with mock.patch.object(transport_class, '__init__') as patched:
patched.return_value = None
client = client_class()
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=client.DEFAULT_MTLS_ENDPOINT,
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
# Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
# unsupported value.
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
with pytest.raises(MutualTLSChannelError):
client = client_class()
# Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}):
with pytest.raises(ValueError):
client = client_class()
# Check the case quota_project_id is provided
options = client_options.ClientOptions(quota_project_id="octopus")
with mock.patch.object(transport_class, '__init__') as patched:
patched.return_value = None
client = client_class(client_options=options)
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id="octopus",
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [
(GrafeasClient, transports.GrafeasGrpcTransport, "grpc", "true"),
(GrafeasAsyncClient, transports.GrafeasGrpcAsyncIOTransport, "grpc_asyncio", "true"),
(GrafeasClient, transports.GrafeasGrpcTransport, "grpc", "false"),
(GrafeasAsyncClient, transports.GrafeasGrpcAsyncIOTransport, "grpc_asyncio", "false"),
])
@mock.patch.object(GrafeasClient, "DEFAULT_ENDPOINT", modify_default_endpoint(GrafeasClient))
@mock.patch.object(GrafeasAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(GrafeasAsyncClient))
@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
def test_grafeas_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env):
# This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default
# mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists.
# Check the case client_cert_source is provided. Whether client cert is used depends on
# GOOGLE_API_USE_CLIENT_CERTIFICATE value.
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}):
options = client_options.ClientOptions(client_cert_source=client_cert_source_callback)
with mock.patch.object(transport_class, '__init__') as patched:
patched.return_value = None
client = client_class(client_options=options)
if use_client_cert_env == "false":
expected_client_cert_source = None
expected_host = client.DEFAULT_ENDPOINT
else:
expected_client_cert_source = client_cert_source_callback
expected_host = client.DEFAULT_MTLS_ENDPOINT
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=expected_host,
scopes=None,
client_cert_source_for_mtls=expected_client_cert_source,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
# Check the case ADC client cert is provided. Whether client cert is used depends on
# GOOGLE_API_USE_CLIENT_CERTIFICATE value.
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}):
with mock.patch.object(transport_class, '__init__') as patched:
with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True):
with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback):
if use_client_cert_env == "false":
expected_host = client.DEFAULT_ENDPOINT
expected_client_cert_source = None
else:
expected_host = client.DEFAULT_MTLS_ENDPOINT
expected_client_cert_source = client_cert_source_callback
patched.return_value = None
client = client_class()
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=expected_host,
scopes=None,
client_cert_source_for_mtls=expected_client_cert_source,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
# Check the case client_cert_source and ADC client cert are not provided.
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}):
with mock.patch.object(transport_class, '__init__') as patched:
with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False):
patched.return_value = None
client = client_class()
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@pytest.mark.parametrize("client_class,transport_class,transport_name", [
(GrafeasClient, transports.GrafeasGrpcTransport, "grpc"),
(GrafeasAsyncClient, transports.GrafeasGrpcAsyncIOTransport, "grpc_asyncio"),
])
def test_grafeas_client_client_options_scopes(client_class, transport_class, transport_name):
# Check the case scopes are provided.
options = client_options.ClientOptions(
scopes=["1", "2"],
)
with mock.patch.object(transport_class, '__init__') as patched:
patched.return_value = None
client = client_class(client_options=options)
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=["1", "2"],
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@pytest.mark.parametrize("client_class,transport_class,transport_name", [
(GrafeasClient, transports.GrafeasGrpcTransport, "grpc"),
(GrafeasAsyncClient, transports.GrafeasGrpcAsyncIOTransport, "grpc_asyncio"),
])
def test_grafeas_client_client_options_credentials_file(client_class, transport_class, transport_name):
# Check the case credentials file is provided.
options = client_options.ClientOptions(
credentials_file="credentials.json"
)
with mock.patch.object(transport_class, '__init__') as patched:
patched.return_value = None
client = client_class(client_options=options)
patched.assert_called_once_with(
credentials=None,
credentials_file="credentials.json",
host=client.DEFAULT_ENDPOINT,
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
def test_grafeas_client_client_options_from_dict():
with mock.patch('grafeas.grafeas_v1.services.grafeas.transports.GrafeasGrpcTransport.__init__') as grpc_transport:
grpc_transport.return_value = None
client = GrafeasClient(
client_options={'api_endpoint': 'squid.clam.whelk'}
)
grpc_transport.assert_called_once_with(
credentials=None,
credentials_file=None,
host="squid.clam.whelk",
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
def test_get_occurrence(transport: str = 'grpc', request_type=grafeas.GetOccurrenceRequest):
client = GrafeasClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.get_occurrence),
'__call__') as call:
# Designate an appropriate return value for the call.
call.return_value = grafeas.Occurrence(
name='name_value',
resource_uri='resource_uri_value',
note_name='note_name_value',
kind=common.NoteKind.VULNERABILITY,
remediation='remediation_value',
vulnerability=vulnerability.VulnerabilityOccurrence(type_='type__value'),
)
response = client.get_occurrence(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == grafeas.GetOccurrenceRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, grafeas.Occurrence)
assert response.name == 'name_value'
assert response.resource_uri == 'resource_uri_value'
assert response.note_name == 'note_name_value'
assert response.kind == common.NoteKind.VULNERABILITY
assert response.remediation == 'remediation_value'
def test_get_occurrence_from_dict():
test_get_occurrence(request_type=dict)
def test_get_occurrence_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = GrafeasClient(
credentials=ga_credentials.AnonymousCredentials(),
transport='grpc',
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.get_occurrence),
'__call__') as call:
client.get_occurrence()
call.assert_called()
_, args, _ = call.mock_calls[0]
assert args[0] == grafeas.GetOccurrenceRequest()
@pytest.mark.asyncio
async def test_get_occurrence_async(transport: str = 'grpc_asyncio', request_type=grafeas.GetOccurrenceRequest):
client = GrafeasAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.get_occurrence),
'__call__') as call:
# Designate an appropriate return value for the call.
call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(grafeas.Occurrence(
name='name_value',
resource_uri='resource_uri_value',
note_name='note_name_value',
kind=common.NoteKind.VULNERABILITY,
remediation='remediation_value',
))
response = await client.get_occurrence(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == grafeas.GetOccurrenceRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, grafeas.Occurrence)
assert response.name == 'name_value'
assert response.resource_uri == 'resource_uri_value'
assert response.note_name == 'note_name_value'
assert response.kind == common.NoteKind.VULNERABILITY
assert response.remediation == 'remediation_value'
@pytest.mark.asyncio
async def test_get_occurrence_async_from_dict():
await test_get_occurrence_async(request_type=dict)
def test_get_occurrence_field_headers():
client = GrafeasClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = grafeas.GetOccurrenceRequest()
request.name = 'name/value'
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.get_occurrence),
'__call__') as call:
call.return_value = grafeas.Occurrence()
client.get_occurrence(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
'x-goog-request-params',
'name=name/value',
) in kw['metadata']
@pytest.mark.asyncio
async def test_get_occurrence_field_headers_async():
client = GrafeasAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = grafeas.GetOccurrenceRequest()
request.name = 'name/value'
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.get_occurrence),
'__call__') as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(grafeas.Occurrence())
await client.get_occurrence(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
'x-goog-request-params',
'name=name/value',
) in kw['metadata']
def test_get_occurrence_flattened():
client = GrafeasClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.get_occurrence),
'__call__') as call:
# Designate an appropriate return value for the call.
call.return_value = grafeas.Occurrence()
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.get_occurrence(
name='name_value',
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0].name == 'name_value'
def test_get_occurrence_flattened_error():
client = GrafeasClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.get_occurrence(
grafeas.GetOccurrenceRequest(),
name='name_value',
)
@pytest.mark.asyncio
async def test_get_occurrence_flattened_async():
client = GrafeasAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.get_occurrence),
'__call__') as call:
# Designate an appropriate return value for the call.
call.return_value = grafeas.Occurrence()
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(grafeas.Occurrence())
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.get_occurrence(
name='name_value',
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0].name == 'name_value'
@pytest.mark.asyncio
async def test_get_occurrence_flattened_error_async():
client = GrafeasAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.get_occurrence(
grafeas.GetOccurrenceRequest(),
name='name_value',
)
def test_list_occurrences(transport: str = 'grpc', request_type=grafeas.ListOccurrencesRequest):
client = GrafeasClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_occurrences),
'__call__') as call:
# Designate an appropriate return value for the call.
call.return_value = grafeas.ListOccurrencesResponse(
next_page_token='next_page_token_value',
)
response = client.list_occurrences(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == grafeas.ListOccurrencesRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, pagers.ListOccurrencesPager)
assert response.next_page_token == 'next_page_token_value'
def test_list_occurrences_from_dict():
test_list_occurrences(request_type=dict)
def test_list_occurrences_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = GrafeasClient(
credentials=ga_credentials.AnonymousCredentials(),
transport='grpc',
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_occurrences),
'__call__') as call:
client.list_occurrences()
call.assert_called()
_, args, _ = call.mock_calls[0]
assert args[0] == grafeas.ListOccurrencesRequest()
@pytest.mark.asyncio
async def test_list_occurrences_async(transport: str = 'grpc_asyncio', request_type=grafeas.ListOccurrencesRequest):
client = GrafeasAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_occurrences),
'__call__') as call:
# Designate an appropriate return value for the call.
call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(grafeas.ListOccurrencesResponse(
next_page_token='next_page_token_value',
))
response = await client.list_occurrences(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == grafeas.ListOccurrencesRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, pagers.ListOccurrencesAsyncPager)
assert response.next_page_token == 'next_page_token_value'
@pytest.mark.asyncio
async def test_list_occurrences_async_from_dict():
await test_list_occurrences_async(request_type=dict)
def test_list_occurrences_field_headers():
client = GrafeasClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = grafeas.ListOccurrencesRequest()
request.parent = 'parent/value'
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_occurrences),
'__call__') as call:
call.return_value = grafeas.ListOccurrencesResponse()
client.list_occurrences(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
'x-goog-request-params',
'parent=parent/value',
) in kw['metadata']
@pytest.mark.asyncio
async def test_list_occurrences_field_headers_async():
client = GrafeasAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = grafeas.ListOccurrencesRequest()
request.parent = 'parent/value'
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_occurrences),
'__call__') as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(grafeas.ListOccurrencesResponse())
await client.list_occurrences(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
'x-goog-request-params',
'parent=parent/value',
) in kw['metadata']
def test_list_occurrences_flattened():
client = GrafeasClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_occurrences),
'__call__') as call:
# Designate an appropriate return value for the call.
call.return_value = grafeas.ListOccurrencesResponse()
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.list_occurrences(
parent='parent_value',
filter='filter_value',
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0].parent == 'parent_value'
assert args[0].filter == 'filter_value'
def test_list_occurrences_flattened_error():
client = GrafeasClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.list_occurrences(
grafeas.ListOccurrencesRequest(),
parent='parent_value',
filter='filter_value',
)
@pytest.mark.asyncio
async def test_list_occurrences_flattened_async():
client = GrafeasAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_occurrences),
'__call__') as call:
# Designate an appropriate return value for the call.
call.return_value = grafeas.ListOccurrencesResponse()
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(grafeas.ListOccurrencesResponse())
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.list_occurrences(
parent='parent_value',
filter='filter_value',
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0].parent == 'parent_value'
assert args[0].filter == 'filter_value'
@pytest.mark.asyncio
async def test_list_occurrences_flattened_error_async():
client = GrafeasAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.list_occurrences(
grafeas.ListOccurrencesRequest(),
parent='parent_value',
filter='filter_value',
)
def test_list_occurrences_pager():
client = GrafeasClient(
credentials=ga_credentials.AnonymousCredentials,
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_occurrences),
'__call__') as call:
# Set the response to a series of pages.
call.side_effect = (
grafeas.ListOccurrencesResponse(
occurrences=[
grafeas.Occurrence(),
grafeas.Occurrence(),
grafeas.Occurrence(),
],
next_page_token='abc',
),
grafeas.ListOccurrencesResponse(
occurrences=[],
next_page_token='def',
),
grafeas.ListOccurrencesResponse(
occurrences=[
grafeas.Occurrence(),
],
next_page_token='ghi',
),
grafeas.ListOccurrencesResponse(
occurrences=[
grafeas.Occurrence(),
grafeas.Occurrence(),
],
),
RuntimeError,
)
metadata = ()
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((
('parent', ''),
)),
)
pager = client.list_occurrences(request={})
assert pager._metadata == metadata
results = [i for i in pager]
assert len(results) == 6
assert all(isinstance(i, grafeas.Occurrence)
for i in results)
def test_list_occurrences_pages():
client = GrafeasClient(
credentials=ga_credentials.AnonymousCredentials,
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_occurrences),
'__call__') as call:
# Set the response to a series of pages.
call.side_effect = (
grafeas.ListOccurrencesResponse(
occurrences=[
grafeas.Occurrence(),
grafeas.Occurrence(),
grafeas.Occurrence(),
],
next_page_token='abc',
),
grafeas.ListOccurrencesResponse(
occurrences=[],
next_page_token='def',
),
grafeas.ListOccurrencesResponse(
occurrences=[
grafeas.Occurrence(),
],
next_page_token='ghi',
),
grafeas.ListOccurrencesResponse(
occurrences=[
grafeas.Occurrence(),
grafeas.Occurrence(),
],
),
RuntimeError,
)
pages = list(client.list_occurrences(request={}).pages)
for page_, token in zip(pages, ['abc','def','ghi', '']):
assert page_.raw_page.next_page_token == token
@pytest.mark.asyncio
async def test_list_occurrences_async_pager():
client = GrafeasAsyncClient(
credentials=ga_credentials.AnonymousCredentials,
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_occurrences),
'__call__', new_callable=mock.AsyncMock) as call:
# Set the response to a series of pages.
call.side_effect = (
grafeas.ListOccurrencesResponse(
occurrences=[
grafeas.Occurrence(),
grafeas.Occurrence(),
grafeas.Occurrence(),
],
next_page_token='abc',
),
grafeas.ListOccurrencesResponse(
occurrences=[],
next_page_token='def',
),
grafeas.ListOccurrencesResponse(
occurrences=[
grafeas.Occurrence(),
],
next_page_token='ghi',
),
grafeas.ListOccurrencesResponse(
occurrences=[
grafeas.Occurrence(),
grafeas.Occurrence(),
],
),
RuntimeError,
)
async_pager = await client.list_occurrences(request={},)
assert async_pager.next_page_token == 'abc'
responses = []
async for response in async_pager:
responses.append(response)
assert len(responses) == 6
assert all(isinstance(i, grafeas.Occurrence)
for i in responses)
@pytest.mark.asyncio
async def test_list_occurrences_async_pages():
client = GrafeasAsyncClient(
credentials=ga_credentials.AnonymousCredentials,
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_occurrences),
'__call__', new_callable=mock.AsyncMock) as call:
# Set the response to a series of pages.
call.side_effect = (
grafeas.ListOccurrencesResponse(
occurrences=[
grafeas.Occurrence(),
grafeas.Occurrence(),
grafeas.Occurrence(),
],
next_page_token='abc',
),
grafeas.ListOccurrencesResponse(
occurrences=[],
next_page_token='def',
),
grafeas.ListOccurrencesResponse(
occurrences=[
grafeas.Occurrence(),
],
next_page_token='ghi',
),
grafeas.ListOccurrencesResponse(
occurrences=[
grafeas.Occurrence(),
grafeas.Occurrence(),
],
),
RuntimeError,
)
pages = []
async for page_ in (await client.list_occurrences(request={})).pages:
pages.append(page_)
for page_, token in zip(pages, ['abc','def','ghi', '']):
assert page_.raw_page.next_page_token == token
def test_delete_occurrence(transport: str = 'grpc', request_type=grafeas.DeleteOccurrenceRequest):
client = GrafeasClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.delete_occurrence),
'__call__') as call:
# Designate an appropriate return value for the call.
call.return_value = None
response = client.delete_occurrence(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == grafeas.DeleteOccurrenceRequest()
# Establish that the response is the type that we expect.
assert response is None
def test_delete_occurrence_from_dict():
test_delete_occurrence(request_type=dict)
def test_delete_occurrence_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = GrafeasClient(
credentials=ga_credentials.AnonymousCredentials(),
transport='grpc',
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.delete_occurrence),
'__call__') as call:
client.delete_occurrence()
call.assert_called()
_, args, _ = call.mock_calls[0]
assert args[0] == grafeas.DeleteOccurrenceRequest()
@pytest.mark.asyncio
async def test_delete_occurrence_async(transport: str = 'grpc_asyncio', request_type=grafeas.DeleteOccurrenceRequest):
client = GrafeasAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.delete_occurrence),
'__call__') as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
response = await client.delete_occurrence(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == grafeas.DeleteOccurrenceRequest()
# Establish that the response is the type that we expect.
assert response is None
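
# Note on the async mocking pattern used throughout this module: the mocked
# async stubs must return something awaitable, so each designated response is
# wrapped in ``grpc_helpers_async.FakeUnaryUnaryCall`` and
# ``await client.<method>(request)`` then resolves to the wrapped value
# (``None`` here). The synchronous tests assign the bare response instead.
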
@pytest.mark.asyncio
async def test_delete_occurrence_async_from_dict():
await test_delete_occurrence_async(request_type=dict)
def test_delete_occurrence_field_headers():
client = GrafeasClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = grafeas.DeleteOccurrenceRequest()
request.name = 'name/value'
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.delete_occurrence),
'__call__') as call:
call.return_value = None
client.delete_occurrence(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
'x-goog-request-params',
'name=name/value',
) in kw['metadata']
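
# The field-header tests assert that URI-bound request fields are mirrored
# into an ``x-goog-request-params`` metadata entry. A minimal sketch of
# building such an entry with the helper already used by the pager tests in
# this module (``gapic_v1.routing_header``); the resource name below is an
# invented placeholder.
def _example_routing_metadata():
    # Returns the ('x-goog-request-params', 'name=...') pair that the
    # transport appends to the call metadata.
    return gapic_v1.routing_header.to_grpc_metadata((
        ('name', 'projects/example-project/occurrences/example-occurrence'),
    ))
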
@pytest.mark.asyncio
async def test_delete_occurrence_field_headers_async():
client = GrafeasAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = grafeas.DeleteOccurrenceRequest()
request.name = 'name/value'
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.delete_occurrence),
'__call__') as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
await client.delete_occurrence(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
'x-goog-request-params',
'name=name/value',
) in kw['metadata']
def test_delete_occurrence_flattened():
client = GrafeasClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.delete_occurrence),
'__call__') as call:
# Designate an appropriate return value for the call.
call.return_value = None
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.delete_occurrence(
name='name_value',
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0].name == 'name_value'
def test_delete_occurrence_flattened_error():
client = GrafeasClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.delete_occurrence(
grafeas.DeleteOccurrenceRequest(),
name='name_value',
)
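
# The flattened tests above document the two supported calling conventions:
# pass a fully formed request object, or pass individual fields as keyword
# arguments -- mixing both raises ``ValueError``. Illustrative sketch only;
# the occurrence name is an invented placeholder.
def _example_delete_occurrence(client):
    # Request-object form.
    client.delete_occurrence(
        request=grafeas.DeleteOccurrenceRequest(
            name='projects/example-project/occurrences/example-occurrence'),
    )
    # Flattened form; the client builds the request object internally.
    client.delete_occurrence(
        name='projects/example-project/occurrences/example-occurrence')
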
@pytest.mark.asyncio
async def test_delete_occurrence_flattened_async():
client = GrafeasAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.delete_occurrence),
'__call__') as call:
# Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.delete_occurrence(
name='name_value',
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0].name == 'name_value'
@pytest.mark.asyncio
async def test_delete_occurrence_flattened_error_async():
client = GrafeasAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.delete_occurrence(
grafeas.DeleteOccurrenceRequest(),
name='name_value',
)
def test_create_occurrence(transport: str = 'grpc', request_type=grafeas.CreateOccurrenceRequest):
client = GrafeasClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.create_occurrence),
'__call__') as call:
# Designate an appropriate return value for the call.
call.return_value = grafeas.Occurrence(
name='name_value',
resource_uri='resource_uri_value',
note_name='note_name_value',
kind=common.NoteKind.VULNERABILITY,
remediation='remediation_value',
vulnerability=vulnerability.VulnerabilityOccurrence(type_='type__value'),
)
response = client.create_occurrence(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == grafeas.CreateOccurrenceRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, grafeas.Occurrence)
assert response.name == 'name_value'
assert response.resource_uri == 'resource_uri_value'
assert response.note_name == 'note_name_value'
assert response.kind == common.NoteKind.VULNERABILITY
assert response.remediation == 'remediation_value'
def test_create_occurrence_from_dict():
test_create_occurrence(request_type=dict)
def test_create_occurrence_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = GrafeasClient(
credentials=ga_credentials.AnonymousCredentials(),
transport='grpc',
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.create_occurrence),
'__call__') as call:
client.create_occurrence()
call.assert_called()
_, args, _ = call.mock_calls[0]
assert args[0] == grafeas.CreateOccurrenceRequest()
@pytest.mark.asyncio
async def test_create_occurrence_async(transport: str = 'grpc_asyncio', request_type=grafeas.CreateOccurrenceRequest):
client = GrafeasAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.create_occurrence),
'__call__') as call:
# Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(grafeas.Occurrence(
name='name_value',
resource_uri='resource_uri_value',
note_name='note_name_value',
kind=common.NoteKind.VULNERABILITY,
remediation='remediation_value',
))
response = await client.create_occurrence(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == grafeas.CreateOccurrenceRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, grafeas.Occurrence)
assert response.name == 'name_value'
assert response.resource_uri == 'resource_uri_value'
assert response.note_name == 'note_name_value'
assert response.kind == common.NoteKind.VULNERABILITY
assert response.remediation == 'remediation_value'
@pytest.mark.asyncio
async def test_create_occurrence_async_from_dict():
await test_create_occurrence_async(request_type=dict)
def test_create_occurrence_field_headers():
client = GrafeasClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = grafeas.CreateOccurrenceRequest()
request.parent = 'parent/value'
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.create_occurrence),
'__call__') as call:
call.return_value = grafeas.Occurrence()
client.create_occurrence(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
'x-goog-request-params',
'parent=parent/value',
) in kw['metadata']
@pytest.mark.asyncio
async def test_create_occurrence_field_headers_async():
client = GrafeasAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = grafeas.CreateOccurrenceRequest()
request.parent = 'parent/value'
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.create_occurrence),
'__call__') as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(grafeas.Occurrence())
await client.create_occurrence(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
'x-goog-request-params',
'parent=parent/value',
) in kw['metadata']
def test_create_occurrence_flattened():
client = GrafeasClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.create_occurrence),
'__call__') as call:
# Designate an appropriate return value for the call.
call.return_value = grafeas.Occurrence()
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.create_occurrence(
parent='parent_value',
occurrence=grafeas.Occurrence(name='name_value'),
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0].parent == 'parent_value'
assert args[0].occurrence == grafeas.Occurrence(name='name_value')
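
# A hedged sketch of what a flattened create call might look like in
# application code, using only field names already exercised by the tests in
# this module; the resource names and URI are invented placeholders.
def _example_create_occurrence(client):
    occurrence = grafeas.Occurrence(
        resource_uri='https://example.com/image@sha256:deadbeef',
        note_name='projects/example-project/notes/example-note',
        vulnerability=vulnerability.VulnerabilityOccurrence(type_='OS'),
    )
    return client.create_occurrence(
        parent='projects/example-project',
        occurrence=occurrence,
    )
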
def test_create_occurrence_flattened_error():
client = GrafeasClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.create_occurrence(
grafeas.CreateOccurrenceRequest(),
parent='parent_value',
occurrence=grafeas.Occurrence(name='name_value'),
)
@pytest.mark.asyncio
async def test_create_occurrence_flattened_async():
client = GrafeasAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.create_occurrence),
'__call__') as call:
# Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(grafeas.Occurrence())
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.create_occurrence(
parent='parent_value',
occurrence=grafeas.Occurrence(name='name_value'),
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0].parent == 'parent_value'
assert args[0].occurrence == grafeas.Occurrence(name='name_value')
@pytest.mark.asyncio
async def test_create_occurrence_flattened_error_async():
client = GrafeasAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.create_occurrence(
grafeas.CreateOccurrenceRequest(),
parent='parent_value',
occurrence=grafeas.Occurrence(name='name_value'),
)
def test_batch_create_occurrences(transport: str = 'grpc', request_type=grafeas.BatchCreateOccurrencesRequest):
client = GrafeasClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.batch_create_occurrences),
'__call__') as call:
# Designate an appropriate return value for the call.
call.return_value = grafeas.BatchCreateOccurrencesResponse(
)
response = client.batch_create_occurrences(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == grafeas.BatchCreateOccurrencesRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, grafeas.BatchCreateOccurrencesResponse)
def test_batch_create_occurrences_from_dict():
test_batch_create_occurrences(request_type=dict)
def test_batch_create_occurrences_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = GrafeasClient(
credentials=ga_credentials.AnonymousCredentials(),
transport='grpc',
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.batch_create_occurrences),
'__call__') as call:
client.batch_create_occurrences()
call.assert_called()
_, args, _ = call.mock_calls[0]
assert args[0] == grafeas.BatchCreateOccurrencesRequest()
@pytest.mark.asyncio
async def test_batch_create_occurrences_async(transport: str = 'grpc_asyncio', request_type=grafeas.BatchCreateOccurrencesRequest):
client = GrafeasAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.batch_create_occurrences),
'__call__') as call:
# Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(grafeas.BatchCreateOccurrencesResponse(
))
response = await client.batch_create_occurrences(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == grafeas.BatchCreateOccurrencesRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, grafeas.BatchCreateOccurrencesResponse)
@pytest.mark.asyncio
async def test_batch_create_occurrences_async_from_dict():
await test_batch_create_occurrences_async(request_type=dict)
def test_batch_create_occurrences_field_headers():
client = GrafeasClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = grafeas.BatchCreateOccurrencesRequest()
request.parent = 'parent/value'
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.batch_create_occurrences),
'__call__') as call:
call.return_value = grafeas.BatchCreateOccurrencesResponse()
client.batch_create_occurrences(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
'x-goog-request-params',
'parent=parent/value',
) in kw['metadata']
@pytest.mark.asyncio
async def test_batch_create_occurrences_field_headers_async():
client = GrafeasAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = grafeas.BatchCreateOccurrencesRequest()
request.parent = 'parent/value'
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.batch_create_occurrences),
'__call__') as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(grafeas.BatchCreateOccurrencesResponse())
await client.batch_create_occurrences(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
'x-goog-request-params',
'parent=parent/value',
) in kw['metadata']
def test_batch_create_occurrences_flattened():
client = GrafeasClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.batch_create_occurrences),
'__call__') as call:
# Designate an appropriate return value for the call.
call.return_value = grafeas.BatchCreateOccurrencesResponse()
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.batch_create_occurrences(
parent='parent_value',
occurrences=[grafeas.Occurrence(name='name_value')],
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0].parent == 'parent_value'
assert args[0].occurrences == [grafeas.Occurrence(name='name_value')]
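
# Sketch of a flattened batch-create call, mirroring the flattened test above.
# Illustrative only; the parent and note names are invented placeholders.
def _example_batch_create_occurrences(client):
    return client.batch_create_occurrences(
        parent='projects/example-project',
        occurrences=[
            grafeas.Occurrence(note_name='projects/example-project/notes/note-a'),
            grafeas.Occurrence(note_name='projects/example-project/notes/note-b'),
        ],
    )
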
def test_batch_create_occurrences_flattened_error():
client = GrafeasClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.batch_create_occurrences(
grafeas.BatchCreateOccurrencesRequest(),
parent='parent_value',
occurrences=[grafeas.Occurrence(name='name_value')],
)
@pytest.mark.asyncio
async def test_batch_create_occurrences_flattened_async():
client = GrafeasAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.batch_create_occurrences),
'__call__') as call:
# Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(grafeas.BatchCreateOccurrencesResponse())
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.batch_create_occurrences(
parent='parent_value',
occurrences=[grafeas.Occurrence(name='name_value')],
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0].parent == 'parent_value'
assert args[0].occurrences == [grafeas.Occurrence(name='name_value')]
@pytest.mark.asyncio
async def test_batch_create_occurrences_flattened_error_async():
client = GrafeasAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.batch_create_occurrences(
grafeas.BatchCreateOccurrencesRequest(),
parent='parent_value',
occurrences=[grafeas.Occurrence(name='name_value')],
)
def test_update_occurrence(transport: str = 'grpc', request_type=grafeas.UpdateOccurrenceRequest):
client = GrafeasClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.update_occurrence),
'__call__') as call:
# Designate an appropriate return value for the call.
call.return_value = grafeas.Occurrence(
name='name_value',
resource_uri='resource_uri_value',
note_name='note_name_value',
kind=common.NoteKind.VULNERABILITY,
remediation='remediation_value',
vulnerability=vulnerability.VulnerabilityOccurrence(type_='type__value'),
)
response = client.update_occurrence(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == grafeas.UpdateOccurrenceRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, grafeas.Occurrence)
assert response.name == 'name_value'
assert response.resource_uri == 'resource_uri_value'
assert response.note_name == 'note_name_value'
assert response.kind == common.NoteKind.VULNERABILITY
assert response.remediation == 'remediation_value'
def test_update_occurrence_from_dict():
test_update_occurrence(request_type=dict)
def test_update_occurrence_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = GrafeasClient(
credentials=ga_credentials.AnonymousCredentials(),
transport='grpc',
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.update_occurrence),
'__call__') as call:
client.update_occurrence()
call.assert_called()
_, args, _ = call.mock_calls[0]
assert args[0] == grafeas.UpdateOccurrenceRequest()
@pytest.mark.asyncio
async def test_update_occurrence_async(transport: str = 'grpc_asyncio', request_type=grafeas.UpdateOccurrenceRequest):
client = GrafeasAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.update_occurrence),
'__call__') as call:
# Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(grafeas.Occurrence(
name='name_value',
resource_uri='resource_uri_value',
note_name='note_name_value',
kind=common.NoteKind.VULNERABILITY,
remediation='remediation_value',
))
response = await client.update_occurrence(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == grafeas.UpdateOccurrenceRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, grafeas.Occurrence)
assert response.name == 'name_value'
assert response.resource_uri == 'resource_uri_value'
assert response.note_name == 'note_name_value'
assert response.kind == common.NoteKind.VULNERABILITY
assert response.remediation == 'remediation_value'
@pytest.mark.asyncio
async def test_update_occurrence_async_from_dict():
await test_update_occurrence_async(request_type=dict)
def test_update_occurrence_field_headers():
client = GrafeasClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = grafeas.UpdateOccurrenceRequest()
request.name = 'name/value'
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.update_occurrence),
'__call__') as call:
call.return_value = grafeas.Occurrence()
client.update_occurrence(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
'x-goog-request-params',
'name=name/value',
) in kw['metadata']
@pytest.mark.asyncio
async def test_update_occurrence_field_headers_async():
client = GrafeasAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = grafeas.UpdateOccurrenceRequest()
request.name = 'name/value'
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.update_occurrence),
'__call__') as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(grafeas.Occurrence())
await client.update_occurrence(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
'x-goog-request-params',
'name=name/value',
) in kw['metadata']
def test_update_occurrence_flattened():
client = GrafeasClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.update_occurrence),
'__call__') as call:
# Designate an appropriate return value for the call.
call.return_value = grafeas.Occurrence()
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.update_occurrence(
name='name_value',
occurrence=grafeas.Occurrence(name='name_value'),
update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0].name == 'name_value'
assert args[0].occurrence == grafeas.Occurrence(name='name_value')
assert args[0].update_mask == field_mask_pb2.FieldMask(paths=['paths_value'])
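
# The flattened update test above passes an explicit ``update_mask``; the mask
# selects which occurrence fields the server should apply. A hedged sketch of
# a partial update (illustrative only; the name and field path are
# placeholders drawn from field names already used in this module).
def _example_update_occurrence_remediation(client):
    return client.update_occurrence(
        name='projects/example-project/occurrences/example-occurrence',
        occurrence=grafeas.Occurrence(remediation='upgrade to version 2.0'),
        update_mask=field_mask_pb2.FieldMask(paths=['remediation']),
    )
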
def test_update_occurrence_flattened_error():
client = GrafeasClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.update_occurrence(
grafeas.UpdateOccurrenceRequest(),
name='name_value',
occurrence=grafeas.Occurrence(name='name_value'),
update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
)
@pytest.mark.asyncio
async def test_update_occurrence_flattened_async():
client = GrafeasAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.update_occurrence),
'__call__') as call:
# Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(grafeas.Occurrence())
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.update_occurrence(
name='name_value',
occurrence=grafeas.Occurrence(name='name_value'),
update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0].name == 'name_value'
assert args[0].occurrence == grafeas.Occurrence(name='name_value')
assert args[0].update_mask == field_mask_pb2.FieldMask(paths=['paths_value'])
@pytest.mark.asyncio
async def test_update_occurrence_flattened_error_async():
client = GrafeasAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.update_occurrence(
grafeas.UpdateOccurrenceRequest(),
name='name_value',
occurrence=grafeas.Occurrence(name='name_value'),
update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
)
def test_get_occurrence_note(transport: str = 'grpc', request_type=grafeas.GetOccurrenceNoteRequest):
client = GrafeasClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.get_occurrence_note),
'__call__') as call:
# Designate an appropriate return value for the call.
call.return_value = grafeas.Note(
name='name_value',
short_description='short_description_value',
long_description='long_description_value',
kind=common.NoteKind.VULNERABILITY,
related_note_names=['related_note_names_value'],
vulnerability=vulnerability.VulnerabilityNote(cvss_score=0.1082),
)
response = client.get_occurrence_note(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == grafeas.GetOccurrenceNoteRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, grafeas.Note)
assert response.name == 'name_value'
assert response.short_description == 'short_description_value'
assert response.long_description == 'long_description_value'
assert response.kind == common.NoteKind.VULNERABILITY
assert response.related_note_names == ['related_note_names_value']
def test_get_occurrence_note_from_dict():
test_get_occurrence_note(request_type=dict)
def test_get_occurrence_note_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = GrafeasClient(
credentials=ga_credentials.AnonymousCredentials(),
transport='grpc',
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.get_occurrence_note),
'__call__') as call:
client.get_occurrence_note()
call.assert_called()
_, args, _ = call.mock_calls[0]
assert args[0] == grafeas.GetOccurrenceNoteRequest()
@pytest.mark.asyncio
async def test_get_occurrence_note_async(transport: str = 'grpc_asyncio', request_type=grafeas.GetOccurrenceNoteRequest):
client = GrafeasAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.get_occurrence_note),
'__call__') as call:
# Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(grafeas.Note(
name='name_value',
short_description='short_description_value',
long_description='long_description_value',
kind=common.NoteKind.VULNERABILITY,
related_note_names=['related_note_names_value'],
))
response = await client.get_occurrence_note(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == grafeas.GetOccurrenceNoteRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, grafeas.Note)
assert response.name == 'name_value'
assert response.short_description == 'short_description_value'
assert response.long_description == 'long_description_value'
assert response.kind == common.NoteKind.VULNERABILITY
assert response.related_note_names == ['related_note_names_value']
@pytest.mark.asyncio
async def test_get_occurrence_note_async_from_dict():
await test_get_occurrence_note_async(request_type=dict)
def test_get_occurrence_note_field_headers():
client = GrafeasClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = grafeas.GetOccurrenceNoteRequest()
request.name = 'name/value'
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.get_occurrence_note),
'__call__') as call:
call.return_value = grafeas.Note()
client.get_occurrence_note(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
'x-goog-request-params',
'name=name/value',
) in kw['metadata']
@pytest.mark.asyncio
async def test_get_occurrence_note_field_headers_async():
client = GrafeasAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = grafeas.GetOccurrenceNoteRequest()
request.name = 'name/value'
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.get_occurrence_note),
'__call__') as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(grafeas.Note())
await client.get_occurrence_note(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
'x-goog-request-params',
'name=name/value',
) in kw['metadata']
def test_get_occurrence_note_flattened():
client = GrafeasClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.get_occurrence_note),
'__call__') as call:
# Designate an appropriate return value for the call.
call.return_value = grafeas.Note()
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.get_occurrence_note(
name='name_value',
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0].name == 'name_value'
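
# ``get_occurrence_note`` takes an occurrence name but returns the Note that
# the occurrence was created against, which is why the assertions above check
# a ``grafeas.Note`` response. Illustrative sketch with an invented name.
def _example_get_occurrence_note(client):
    note = client.get_occurrence_note(
        name='projects/example-project/occurrences/example-occurrence')
    return note.kind
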
def test_get_occurrence_note_flattened_error():
client = GrafeasClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.get_occurrence_note(
grafeas.GetOccurrenceNoteRequest(),
name='name_value',
)
@pytest.mark.asyncio
async def test_get_occurrence_note_flattened_async():
client = GrafeasAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.get_occurrence_note),
'__call__') as call:
# Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(grafeas.Note())
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.get_occurrence_note(
name='name_value',
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0].name == 'name_value'
@pytest.mark.asyncio
async def test_get_occurrence_note_flattened_error_async():
client = GrafeasAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.get_occurrence_note(
grafeas.GetOccurrenceNoteRequest(),
name='name_value',
)
def test_get_note(transport: str = 'grpc', request_type=grafeas.GetNoteRequest):
client = GrafeasClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.get_note),
'__call__') as call:
# Designate an appropriate return value for the call.
call.return_value = grafeas.Note(
name='name_value',
short_description='short_description_value',
long_description='long_description_value',
kind=common.NoteKind.VULNERABILITY,
related_note_names=['related_note_names_value'],
vulnerability=vulnerability.VulnerabilityNote(cvss_score=0.1082),
)
response = client.get_note(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == grafeas.GetNoteRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, grafeas.Note)
assert response.name == 'name_value'
assert response.short_description == 'short_description_value'
assert response.long_description == 'long_description_value'
assert response.kind == common.NoteKind.VULNERABILITY
assert response.related_note_names == ['related_note_names_value']
def test_get_note_from_dict():
test_get_note(request_type=dict)
def test_get_note_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = GrafeasClient(
credentials=ga_credentials.AnonymousCredentials(),
transport='grpc',
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.get_note),
'__call__') as call:
client.get_note()
call.assert_called()
_, args, _ = call.mock_calls[0]
assert args[0] == grafeas.GetNoteRequest()
@pytest.mark.asyncio
async def test_get_note_async(transport: str = 'grpc_asyncio', request_type=grafeas.GetNoteRequest):
client = GrafeasAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.get_note),
'__call__') as call:
# Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(grafeas.Note(
name='name_value',
short_description='short_description_value',
long_description='long_description_value',
kind=common.NoteKind.VULNERABILITY,
related_note_names=['related_note_names_value'],
))
response = await client.get_note(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == grafeas.GetNoteRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, grafeas.Note)
assert response.name == 'name_value'
assert response.short_description == 'short_description_value'
assert response.long_description == 'long_description_value'
assert response.kind == common.NoteKind.VULNERABILITY
assert response.related_note_names == ['related_note_names_value']
@pytest.mark.asyncio
async def test_get_note_async_from_dict():
await test_get_note_async(request_type=dict)
def test_get_note_field_headers():
client = GrafeasClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = grafeas.GetNoteRequest()
request.name = 'name/value'
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.get_note),
'__call__') as call:
call.return_value = grafeas.Note()
client.get_note(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
'x-goog-request-params',
'name=name/value',
) in kw['metadata']
@pytest.mark.asyncio
async def test_get_note_field_headers_async():
client = GrafeasAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = grafeas.GetNoteRequest()
request.name = 'name/value'
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.get_note),
'__call__') as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(grafeas.Note())
await client.get_note(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
'x-goog-request-params',
'name=name/value',
) in kw['metadata']
def test_get_note_flattened():
client = GrafeasClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.get_note),
'__call__') as call:
# Designate an appropriate return value for the call.
call.return_value = grafeas.Note()
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.get_note(
name='name_value',
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0].name == 'name_value'
def test_get_note_flattened_error():
client = GrafeasClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.get_note(
grafeas.GetNoteRequest(),
name='name_value',
)
@pytest.mark.asyncio
async def test_get_note_flattened_async():
client = GrafeasAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.get_note),
'__call__') as call:
# Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(grafeas.Note())
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.get_note(
name='name_value',
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0].name == 'name_value'
@pytest.mark.asyncio
async def test_get_note_flattened_error_async():
client = GrafeasAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.get_note(
grafeas.GetNoteRequest(),
name='name_value',
)
def test_list_notes(transport: str = 'grpc', request_type=grafeas.ListNotesRequest):
client = GrafeasClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_notes),
'__call__') as call:
# Designate an appropriate return value for the call.
call.return_value = grafeas.ListNotesResponse(
next_page_token='next_page_token_value',
)
response = client.list_notes(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == grafeas.ListNotesRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, pagers.ListNotesPager)
assert response.next_page_token == 'next_page_token_value'
def test_list_notes_from_dict():
test_list_notes(request_type=dict)
def test_list_notes_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = GrafeasClient(
credentials=ga_credentials.AnonymousCredentials(),
transport='grpc',
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_notes),
'__call__') as call:
client.list_notes()
call.assert_called()
_, args, _ = call.mock_calls[0]
assert args[0] == grafeas.ListNotesRequest()
@pytest.mark.asyncio
async def test_list_notes_async(transport: str = 'grpc_asyncio', request_type=grafeas.ListNotesRequest):
client = GrafeasAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_notes),
'__call__') as call:
# Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(grafeas.ListNotesResponse(
next_page_token='next_page_token_value',
))
response = await client.list_notes(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == grafeas.ListNotesRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, pagers.ListNotesAsyncPager)
assert response.next_page_token == 'next_page_token_value'
@pytest.mark.asyncio
async def test_list_notes_async_from_dict():
await test_list_notes_async(request_type=dict)
def test_list_notes_field_headers():
client = GrafeasClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = grafeas.ListNotesRequest()
request.parent = 'parent/value'
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_notes),
'__call__') as call:
call.return_value = grafeas.ListNotesResponse()
client.list_notes(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
'x-goog-request-params',
'parent=parent/value',
) in kw['metadata']
@pytest.mark.asyncio
async def test_list_notes_field_headers_async():
client = GrafeasAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = grafeas.ListNotesRequest()
request.parent = 'parent/value'
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_notes),
'__call__') as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(grafeas.ListNotesResponse())
await client.list_notes(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
'x-goog-request-params',
'parent=parent/value',
) in kw['metadata']
def test_list_notes_flattened():
client = GrafeasClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_notes),
'__call__') as call:
# Designate an appropriate return value for the call.
call.return_value = grafeas.ListNotesResponse()
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.list_notes(
parent='parent_value',
filter='filter_value',
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0].parent == 'parent_value'
assert args[0].filter == 'filter_value'
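
# The flattened list call above accepts a ``filter`` string alongside the
# parent. Sketch of a filtered listing in application code (illustrative only;
# the parent and filter expression are invented placeholders and are not
# validated against the Grafeas filter grammar).
def _example_list_notes_filtered(client):
    return [
        note.name
        for note in client.list_notes(
            parent='projects/example-project',
            filter='kind="VULNERABILITY"',
        )
    ]
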
def test_list_notes_flattened_error():
client = GrafeasClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.list_notes(
grafeas.ListNotesRequest(),
parent='parent_value',
filter='filter_value',
)
@pytest.mark.asyncio
async def test_list_notes_flattened_async():
client = GrafeasAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_notes),
'__call__') as call:
# Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(grafeas.ListNotesResponse())
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.list_notes(
parent='parent_value',
filter='filter_value',
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0].parent == 'parent_value'
assert args[0].filter == 'filter_value'
@pytest.mark.asyncio
async def test_list_notes_flattened_error_async():
client = GrafeasAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.list_notes(
grafeas.ListNotesRequest(),
parent='parent_value',
filter='filter_value',
)
def test_list_notes_pager():
client = GrafeasClient(
        credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_notes),
'__call__') as call:
# Set the response to a series of pages.
call.side_effect = (
grafeas.ListNotesResponse(
notes=[
grafeas.Note(),
grafeas.Note(),
grafeas.Note(),
],
next_page_token='abc',
),
grafeas.ListNotesResponse(
notes=[],
next_page_token='def',
),
grafeas.ListNotesResponse(
notes=[
grafeas.Note(),
],
next_page_token='ghi',
),
grafeas.ListNotesResponse(
notes=[
grafeas.Note(),
grafeas.Note(),
],
),
RuntimeError,
)
        metadata = (
            gapic_v1.routing_header.to_grpc_metadata((
                ('parent', ''),
            )),
        )
pager = client.list_notes(request={})
assert pager._metadata == metadata
results = [i for i in pager]
assert len(results) == 6
assert all(isinstance(i, grafeas.Note)
for i in results)
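
# ``test_list_notes_pager`` also checks the routing metadata attached to the
# pager: with an empty request, the client still emits a ``('parent', '')``
# routing pair. The helper below only shows plain item iteration over the
# synchronous pager (illustrative; the parent is an invented placeholder).
def _example_iterate_notes(client):
    names = []
    for note in client.list_notes(
            request=grafeas.ListNotesRequest(parent='projects/example-project')):
        names.append(note.name)
    return names
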
def test_list_notes_pages():
client = GrafeasClient(
        credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_notes),
'__call__') as call:
# Set the response to a series of pages.
call.side_effect = (
grafeas.ListNotesResponse(
notes=[
grafeas.Note(),
grafeas.Note(),
grafeas.Note(),
],
next_page_token='abc',
),
grafeas.ListNotesResponse(
notes=[],
next_page_token='def',
),
grafeas.ListNotesResponse(
notes=[
grafeas.Note(),
],
next_page_token='ghi',
),
grafeas.ListNotesResponse(
notes=[
grafeas.Note(),
grafeas.Note(),
],
),
RuntimeError,
)
pages = list(client.list_notes(request={}).pages)
        for page_, token in zip(pages, ['abc', 'def', 'ghi', '']):
assert page_.raw_page.next_page_token == token
@pytest.mark.asyncio
async def test_list_notes_async_pager():
client = GrafeasAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_notes),
'__call__', new_callable=mock.AsyncMock) as call:
# Set the response to a series of pages.
call.side_effect = (
grafeas.ListNotesResponse(
notes=[
grafeas.Note(),
grafeas.Note(),
grafeas.Note(),
],
next_page_token='abc',
),
grafeas.ListNotesResponse(
notes=[],
next_page_token='def',
),
grafeas.ListNotesResponse(
notes=[
grafeas.Note(),
],
next_page_token='ghi',
),
grafeas.ListNotesResponse(
notes=[
grafeas.Note(),
grafeas.Note(),
],
),
RuntimeError,
)
async_pager = await client.list_notes(request={},)
assert async_pager.next_page_token == 'abc'
responses = []
async for response in async_pager:
responses.append(response)
assert len(responses) == 6
assert all(isinstance(i, grafeas.Note)
for i in responses)
@pytest.mark.asyncio
async def test_list_notes_async_pages():
client = GrafeasAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_notes),
'__call__', new_callable=mock.AsyncMock) as call:
# Set the response to a series of pages.
call.side_effect = (
grafeas.ListNotesResponse(
notes=[
grafeas.Note(),
grafeas.Note(),
grafeas.Note(),
],
next_page_token='abc',
),
grafeas.ListNotesResponse(
notes=[],
next_page_token='def',
),
grafeas.ListNotesResponse(
notes=[
grafeas.Note(),
],
next_page_token='ghi',
),
grafeas.ListNotesResponse(
notes=[
grafeas.Note(),
grafeas.Note(),
],
),
RuntimeError,
)
pages = []
async for page_ in (await client.list_notes(request={})).pages:
pages.append(page_)
        for page_, token in zip(pages, ['abc', 'def', 'ghi', '']):
assert page_.raw_page.next_page_token == token
def test_delete_note(transport: str = 'grpc', request_type=grafeas.DeleteNoteRequest):
client = GrafeasClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.delete_note),
'__call__') as call:
# Designate an appropriate return value for the call.
call.return_value = None
response = client.delete_note(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == grafeas.DeleteNoteRequest()
# Establish that the response is the type that we expect.
assert response is None
def test_delete_note_from_dict():
test_delete_note(request_type=dict)
def test_delete_note_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = GrafeasClient(
credentials=ga_credentials.AnonymousCredentials(),
transport='grpc',
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.delete_note),
'__call__') as call:
client.delete_note()
call.assert_called()
_, args, _ = call.mock_calls[0]
assert args[0] == grafeas.DeleteNoteRequest()
@pytest.mark.asyncio
async def test_delete_note_async(transport: str = 'grpc_asyncio', request_type=grafeas.DeleteNoteRequest):
client = GrafeasAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.delete_note),
'__call__') as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
response = await client.delete_note(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == grafeas.DeleteNoteRequest()
# Establish that the response is the type that we expect.
assert response is None
@pytest.mark.asyncio
async def test_delete_note_async_from_dict():
await test_delete_note_async(request_type=dict)
def test_delete_note_field_headers():
client = GrafeasClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = grafeas.DeleteNoteRequest()
request.name = 'name/value'
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.delete_note),
'__call__') as call:
call.return_value = None
client.delete_note(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
'x-goog-request-params',
'name=name/value',
) in kw['metadata']
@pytest.mark.asyncio
async def test_delete_note_field_headers_async():
client = GrafeasAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = grafeas.DeleteNoteRequest()
request.name = 'name/value'
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.delete_note),
'__call__') as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
await client.delete_note(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
'x-goog-request-params',
'name=name/value',
) in kw['metadata']
def test_delete_note_flattened():
client = GrafeasClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.delete_note),
'__call__') as call:
# Designate an appropriate return value for the call.
call.return_value = None
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.delete_note(
name='name_value',
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0].name == 'name_value'
def test_delete_note_flattened_error():
client = GrafeasClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.delete_note(
grafeas.DeleteNoteRequest(),
name='name_value',
)
@pytest.mark.asyncio
async def test_delete_note_flattened_async():
client = GrafeasAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.delete_note),
'__call__') as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.delete_note(
name='name_value',
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0].name == 'name_value'
@pytest.mark.asyncio
async def test_delete_note_flattened_error_async():
client = GrafeasAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.delete_note(
grafeas.DeleteNoteRequest(),
name='name_value',
)
def test_create_note(transport: str = 'grpc', request_type=grafeas.CreateNoteRequest):
client = GrafeasClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.create_note),
'__call__') as call:
# Designate an appropriate return value for the call.
call.return_value = grafeas.Note(
name='name_value',
short_description='short_description_value',
long_description='long_description_value',
kind=common.NoteKind.VULNERABILITY,
related_note_names=['related_note_names_value'],
vulnerability=vulnerability.VulnerabilityNote(cvss_score=0.1082),
)
response = client.create_note(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == grafeas.CreateNoteRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, grafeas.Note)
assert response.name == 'name_value'
assert response.short_description == 'short_description_value'
assert response.long_description == 'long_description_value'
assert response.kind == common.NoteKind.VULNERABILITY
assert response.related_note_names == ['related_note_names_value']
def test_create_note_from_dict():
test_create_note(request_type=dict)
def test_create_note_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = GrafeasClient(
credentials=ga_credentials.AnonymousCredentials(),
transport='grpc',
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.create_note),
'__call__') as call:
client.create_note()
call.assert_called()
_, args, _ = call.mock_calls[0]
assert args[0] == grafeas.CreateNoteRequest()
@pytest.mark.asyncio
async def test_create_note_async(transport: str = 'grpc_asyncio', request_type=grafeas.CreateNoteRequest):
client = GrafeasAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.create_note),
'__call__') as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(grafeas.Note(
name='name_value',
short_description='short_description_value',
long_description='long_description_value',
kind=common.NoteKind.VULNERABILITY,
related_note_names=['related_note_names_value'],
))
response = await client.create_note(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == grafeas.CreateNoteRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, grafeas.Note)
assert response.name == 'name_value'
assert response.short_description == 'short_description_value'
assert response.long_description == 'long_description_value'
assert response.kind == common.NoteKind.VULNERABILITY
assert response.related_note_names == ['related_note_names_value']
@pytest.mark.asyncio
async def test_create_note_async_from_dict():
await test_create_note_async(request_type=dict)
def test_create_note_field_headers():
client = GrafeasClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = grafeas.CreateNoteRequest()
request.parent = 'parent/value'
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.create_note),
'__call__') as call:
call.return_value = grafeas.Note()
client.create_note(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
'x-goog-request-params',
'parent=parent/value',
) in kw['metadata']
@pytest.mark.asyncio
async def test_create_note_field_headers_async():
client = GrafeasAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = grafeas.CreateNoteRequest()
request.parent = 'parent/value'
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.create_note),
'__call__') as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(grafeas.Note())
await client.create_note(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
'x-goog-request-params',
'parent=parent/value',
) in kw['metadata']
def test_create_note_flattened():
client = GrafeasClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.create_note),
'__call__') as call:
# Designate an appropriate return value for the call.
call.return_value = grafeas.Note()
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.create_note(
parent='parent_value',
note_id='note_id_value',
note=grafeas.Note(name='name_value'),
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0].parent == 'parent_value'
assert args[0].note_id == 'note_id_value'
assert args[0].note == grafeas.Note(name='name_value')
def test_create_note_flattened_error():
client = GrafeasClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.create_note(
grafeas.CreateNoteRequest(),
parent='parent_value',
note_id='note_id_value',
note=grafeas.Note(name='name_value'),
)
@pytest.mark.asyncio
async def test_create_note_flattened_async():
client = GrafeasAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.create_note),
'__call__') as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(grafeas.Note())
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.create_note(
parent='parent_value',
note_id='note_id_value',
note=grafeas.Note(name='name_value'),
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0].parent == 'parent_value'
assert args[0].note_id == 'note_id_value'
assert args[0].note == grafeas.Note(name='name_value')
@pytest.mark.asyncio
async def test_create_note_flattened_error_async():
client = GrafeasAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.create_note(
grafeas.CreateNoteRequest(),
parent='parent_value',
note_id='note_id_value',
note=grafeas.Note(name='name_value'),
)
def test_batch_create_notes(transport: str = 'grpc', request_type=grafeas.BatchCreateNotesRequest):
client = GrafeasClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.batch_create_notes),
'__call__') as call:
# Designate an appropriate return value for the call.
call.return_value = grafeas.BatchCreateNotesResponse(
)
response = client.batch_create_notes(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == grafeas.BatchCreateNotesRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, grafeas.BatchCreateNotesResponse)
def test_batch_create_notes_from_dict():
test_batch_create_notes(request_type=dict)
def test_batch_create_notes_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = GrafeasClient(
credentials=ga_credentials.AnonymousCredentials(),
transport='grpc',
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.batch_create_notes),
'__call__') as call:
client.batch_create_notes()
call.assert_called()
_, args, _ = call.mock_calls[0]
assert args[0] == grafeas.BatchCreateNotesRequest()
@pytest.mark.asyncio
async def test_batch_create_notes_async(transport: str = 'grpc_asyncio', request_type=grafeas.BatchCreateNotesRequest):
client = GrafeasAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.batch_create_notes),
'__call__') as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(grafeas.BatchCreateNotesResponse(
))
response = await client.batch_create_notes(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == grafeas.BatchCreateNotesRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, grafeas.BatchCreateNotesResponse)
@pytest.mark.asyncio
async def test_batch_create_notes_async_from_dict():
await test_batch_create_notes_async(request_type=dict)
def test_batch_create_notes_field_headers():
client = GrafeasClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = grafeas.BatchCreateNotesRequest()
request.parent = 'parent/value'
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.batch_create_notes),
'__call__') as call:
call.return_value = grafeas.BatchCreateNotesResponse()
client.batch_create_notes(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
'x-goog-request-params',
'parent=parent/value',
) in kw['metadata']
@pytest.mark.asyncio
async def test_batch_create_notes_field_headers_async():
client = GrafeasAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = grafeas.BatchCreateNotesRequest()
request.parent = 'parent/value'
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.batch_create_notes),
'__call__') as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(grafeas.BatchCreateNotesResponse())
await client.batch_create_notes(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
'x-goog-request-params',
'parent=parent/value',
) in kw['metadata']
def test_batch_create_notes_flattened():
client = GrafeasClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.batch_create_notes),
'__call__') as call:
# Designate an appropriate return value for the call.
call.return_value = grafeas.BatchCreateNotesResponse()
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.batch_create_notes(
parent='parent_value',
notes={'key_value': grafeas.Note(name='name_value')},
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0].parent == 'parent_value'
assert args[0].notes == {'key_value': grafeas.Note(name='name_value')}
def test_batch_create_notes_flattened_error():
client = GrafeasClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.batch_create_notes(
grafeas.BatchCreateNotesRequest(),
parent='parent_value',
notes={'key_value': grafeas.Note(name='name_value')},
)
@pytest.mark.asyncio
async def test_batch_create_notes_flattened_async():
client = GrafeasAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.batch_create_notes),
'__call__') as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(grafeas.BatchCreateNotesResponse())
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.batch_create_notes(
parent='parent_value',
notes={'key_value': grafeas.Note(name='name_value')},
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0].parent == 'parent_value'
assert args[0].notes == {'key_value': grafeas.Note(name='name_value')}
@pytest.mark.asyncio
async def test_batch_create_notes_flattened_error_async():
client = GrafeasAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.batch_create_notes(
grafeas.BatchCreateNotesRequest(),
parent='parent_value',
notes={'key_value': grafeas.Note(name='name_value')},
)
def test_update_note(transport: str = 'grpc', request_type=grafeas.UpdateNoteRequest):
client = GrafeasClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.update_note),
'__call__') as call:
# Designate an appropriate return value for the call.
call.return_value = grafeas.Note(
name='name_value',
short_description='short_description_value',
long_description='long_description_value',
kind=common.NoteKind.VULNERABILITY,
related_note_names=['related_note_names_value'],
vulnerability=vulnerability.VulnerabilityNote(cvss_score=0.1082),
)
response = client.update_note(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == grafeas.UpdateNoteRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, grafeas.Note)
assert response.name == 'name_value'
assert response.short_description == 'short_description_value'
assert response.long_description == 'long_description_value'
assert response.kind == common.NoteKind.VULNERABILITY
assert response.related_note_names == ['related_note_names_value']
def test_update_note_from_dict():
test_update_note(request_type=dict)
def test_update_note_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = GrafeasClient(
credentials=ga_credentials.AnonymousCredentials(),
transport='grpc',
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.update_note),
'__call__') as call:
client.update_note()
call.assert_called()
_, args, _ = call.mock_calls[0]
assert args[0] == grafeas.UpdateNoteRequest()
@pytest.mark.asyncio
async def test_update_note_async(transport: str = 'grpc_asyncio', request_type=grafeas.UpdateNoteRequest):
client = GrafeasAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.update_note),
'__call__') as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(grafeas.Note(
name='name_value',
short_description='short_description_value',
long_description='long_description_value',
kind=common.NoteKind.VULNERABILITY,
related_note_names=['related_note_names_value'],
))
response = await client.update_note(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == grafeas.UpdateNoteRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, grafeas.Note)
assert response.name == 'name_value'
assert response.short_description == 'short_description_value'
assert response.long_description == 'long_description_value'
assert response.kind == common.NoteKind.VULNERABILITY
assert response.related_note_names == ['related_note_names_value']
@pytest.mark.asyncio
async def test_update_note_async_from_dict():
await test_update_note_async(request_type=dict)
def test_update_note_field_headers():
client = GrafeasClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = grafeas.UpdateNoteRequest()
request.name = 'name/value'
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.update_note),
'__call__') as call:
call.return_value = grafeas.Note()
client.update_note(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
'x-goog-request-params',
'name=name/value',
) in kw['metadata']
@pytest.mark.asyncio
async def test_update_note_field_headers_async():
client = GrafeasAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = grafeas.UpdateNoteRequest()
request.name = 'name/value'
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.update_note),
'__call__') as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(grafeas.Note())
await client.update_note(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
'x-goog-request-params',
'name=name/value',
) in kw['metadata']
def test_update_note_flattened():
client = GrafeasClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.update_note),
'__call__') as call:
# Designate an appropriate return value for the call.
call.return_value = grafeas.Note()
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.update_note(
name='name_value',
note=grafeas.Note(name='name_value'),
update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0].name == 'name_value'
assert args[0].note == grafeas.Note(name='name_value')
assert args[0].update_mask == field_mask_pb2.FieldMask(paths=['paths_value'])
def test_update_note_flattened_error():
client = GrafeasClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.update_note(
grafeas.UpdateNoteRequest(),
name='name_value',
note=grafeas.Note(name='name_value'),
update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
)
@pytest.mark.asyncio
async def test_update_note_flattened_async():
client = GrafeasAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.update_note),
'__call__') as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(grafeas.Note())
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.update_note(
name='name_value',
note=grafeas.Note(name='name_value'),
update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0].name == 'name_value'
assert args[0].note == grafeas.Note(name='name_value')
assert args[0].update_mask == field_mask_pb2.FieldMask(paths=['paths_value'])
@pytest.mark.asyncio
async def test_update_note_flattened_error_async():
client = GrafeasAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.update_note(
grafeas.UpdateNoteRequest(),
name='name_value',
note=grafeas.Note(name='name_value'),
update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
)
def test_list_note_occurrences(transport: str = 'grpc', request_type=grafeas.ListNoteOccurrencesRequest):
client = GrafeasClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_note_occurrences),
'__call__') as call:
# Designate an appropriate return value for the call.
call.return_value = grafeas.ListNoteOccurrencesResponse(
next_page_token='next_page_token_value',
)
response = client.list_note_occurrences(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == grafeas.ListNoteOccurrencesRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, pagers.ListNoteOccurrencesPager)
assert response.next_page_token == 'next_page_token_value'
def test_list_note_occurrences_from_dict():
test_list_note_occurrences(request_type=dict)
def test_list_note_occurrences_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = GrafeasClient(
credentials=ga_credentials.AnonymousCredentials(),
transport='grpc',
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_note_occurrences),
'__call__') as call:
client.list_note_occurrences()
call.assert_called()
_, args, _ = call.mock_calls[0]
assert args[0] == grafeas.ListNoteOccurrencesRequest()
@pytest.mark.asyncio
async def test_list_note_occurrences_async(transport: str = 'grpc_asyncio', request_type=grafeas.ListNoteOccurrencesRequest):
client = GrafeasAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_note_occurrences),
'__call__') as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(grafeas.ListNoteOccurrencesResponse(
next_page_token='next_page_token_value',
))
response = await client.list_note_occurrences(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == grafeas.ListNoteOccurrencesRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, pagers.ListNoteOccurrencesAsyncPager)
assert response.next_page_token == 'next_page_token_value'
@pytest.mark.asyncio
async def test_list_note_occurrences_async_from_dict():
await test_list_note_occurrences_async(request_type=dict)
def test_list_note_occurrences_field_headers():
client = GrafeasClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = grafeas.ListNoteOccurrencesRequest()
request.name = 'name/value'
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_note_occurrences),
'__call__') as call:
call.return_value = grafeas.ListNoteOccurrencesResponse()
client.list_note_occurrences(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
'x-goog-request-params',
'name=name/value',
) in kw['metadata']
@pytest.mark.asyncio
async def test_list_note_occurrences_field_headers_async():
client = GrafeasAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = grafeas.ListNoteOccurrencesRequest()
request.name = 'name/value'
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_note_occurrences),
'__call__') as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(grafeas.ListNoteOccurrencesResponse())
await client.list_note_occurrences(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
'x-goog-request-params',
'name=name/value',
) in kw['metadata']
def test_list_note_occurrences_flattened():
client = GrafeasClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_note_occurrences),
'__call__') as call:
# Designate an appropriate return value for the call.
call.return_value = grafeas.ListNoteOccurrencesResponse()
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.list_note_occurrences(
name='name_value',
filter='filter_value',
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0].name == 'name_value'
assert args[0].filter == 'filter_value'
def test_list_note_occurrences_flattened_error():
client = GrafeasClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.list_note_occurrences(
grafeas.ListNoteOccurrencesRequest(),
name='name_value',
filter='filter_value',
)
@pytest.mark.asyncio
async def test_list_note_occurrences_flattened_async():
client = GrafeasAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_note_occurrences),
'__call__') as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(grafeas.ListNoteOccurrencesResponse())
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.list_note_occurrences(
name='name_value',
filter='filter_value',
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0].name == 'name_value'
assert args[0].filter == 'filter_value'
@pytest.mark.asyncio
async def test_list_note_occurrences_flattened_error_async():
client = GrafeasAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.list_note_occurrences(
grafeas.ListNoteOccurrencesRequest(),
name='name_value',
filter='filter_value',
)
def test_list_note_occurrences_pager():
client = GrafeasClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_note_occurrences),
'__call__') as call:
# Set the response to a series of pages.
call.side_effect = (
grafeas.ListNoteOccurrencesResponse(
occurrences=[
grafeas.Occurrence(),
grafeas.Occurrence(),
grafeas.Occurrence(),
],
next_page_token='abc',
),
grafeas.ListNoteOccurrencesResponse(
occurrences=[],
next_page_token='def',
),
grafeas.ListNoteOccurrencesResponse(
occurrences=[
grafeas.Occurrence(),
],
next_page_token='ghi',
),
grafeas.ListNoteOccurrencesResponse(
occurrences=[
grafeas.Occurrence(),
grafeas.Occurrence(),
],
),
RuntimeError,
)
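# The pager should carry forward the routing-header metadata built from the request's 'name' field on every page fetch.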
metadata = ()
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((
('name', ''),
)),
)
pager = client.list_note_occurrences(request={})
assert pager._metadata == metadata
results = [i for i in pager]
assert len(results) == 6
assert all(isinstance(i, grafeas.Occurrence)
for i in results)
def test_list_note_occurrences_pages():
client = GrafeasClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_note_occurrences),
'__call__') as call:
# Set the response to a series of pages.
call.side_effect = (
grafeas.ListNoteOccurrencesResponse(
occurrences=[
grafeas.Occurrence(),
grafeas.Occurrence(),
grafeas.Occurrence(),
],
next_page_token='abc',
),
grafeas.ListNoteOccurrencesResponse(
occurrences=[],
next_page_token='def',
),
grafeas.ListNoteOccurrencesResponse(
occurrences=[
grafeas.Occurrence(),
],
next_page_token='ghi',
),
grafeas.ListNoteOccurrencesResponse(
occurrences=[
grafeas.Occurrence(),
grafeas.Occurrence(),
],
),
RuntimeError,
)
pages = list(client.list_note_occurrences(request={}).pages)
for page_, token in zip(pages, ['abc','def','ghi', '']):
assert page_.raw_page.next_page_token == token
@pytest.mark.asyncio
async def test_list_note_occurrences_async_pager():
client = GrafeasAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_note_occurrences),
'__call__', new_callable=mock.AsyncMock) as call:
# Set the response to a series of pages.
call.side_effect = (
grafeas.ListNoteOccurrencesResponse(
occurrences=[
grafeas.Occurrence(),
grafeas.Occurrence(),
grafeas.Occurrence(),
],
next_page_token='abc',
),
grafeas.ListNoteOccurrencesResponse(
occurrences=[],
next_page_token='def',
),
grafeas.ListNoteOccurrencesResponse(
occurrences=[
grafeas.Occurrence(),
],
next_page_token='ghi',
),
grafeas.ListNoteOccurrencesResponse(
occurrences=[
grafeas.Occurrence(),
grafeas.Occurrence(),
],
),
RuntimeError,
)
async_pager = await client.list_note_occurrences(request={},)
assert async_pager.next_page_token == 'abc'
responses = []
async for response in async_pager:
responses.append(response)
assert len(responses) == 6
assert all(isinstance(i, grafeas.Occurrence)
for i in responses)
@pytest.mark.asyncio
async def test_list_note_occurrences_async_pages():
client = GrafeasAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_note_occurrences),
'__call__', new_callable=mock.AsyncMock) as call:
# Set the response to a series of pages.
call.side_effect = (
grafeas.ListNoteOccurrencesResponse(
occurrences=[
grafeas.Occurrence(),
grafeas.Occurrence(),
grafeas.Occurrence(),
],
next_page_token='abc',
),
grafeas.ListNoteOccurrencesResponse(
occurrences=[],
next_page_token='def',
),
grafeas.ListNoteOccurrencesResponse(
occurrences=[
grafeas.Occurrence(),
],
next_page_token='ghi',
),
grafeas.ListNoteOccurrencesResponse(
occurrences=[
grafeas.Occurrence(),
grafeas.Occurrence(),
],
),
RuntimeError,
)
pages = []
async for page_ in (await client.list_note_occurrences(request={})).pages:
pages.append(page_)
for page_, token in zip(pages, ['abc','def','ghi', '']):
assert page_.raw_page.next_page_token == token
def test_credentials_transport_error():
# It is an error to provide credentials and a transport instance.
transport = transports.GrafeasGrpcTransport(
credentials=ga_credentials.AnonymousCredentials(),
)
with pytest.raises(ValueError):
client = GrafeasClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# It is an error to provide a credentials file and a transport instance.
transport = transports.GrafeasGrpcTransport(
credentials=ga_credentials.AnonymousCredentials(),
)
with pytest.raises(ValueError):
client = GrafeasClient(
client_options={"credentials_file": "credentials.json"},
transport=transport,
)
# It is an error to provide scopes and a transport instance.
transport = transports.GrafeasGrpcTransport(
credentials=ga_credentials.AnonymousCredentials(),
)
with pytest.raises(ValueError):
client = GrafeasClient(
client_options={"scopes": ["1", "2"]},
transport=transport,
)
def test_transport_instance():
# A client may be instantiated with a custom transport instance.
transport = transports.GrafeasGrpcTransport(
credentials=ga_credentials.AnonymousCredentials(),
)
client = GrafeasClient(transport=transport)
assert client.transport is transport
def test_transport_get_channel():
# A client may be instantiated with a custom transport instance.
transport = transports.GrafeasGrpcTransport(
credentials=ga_credentials.AnonymousCredentials(),
)
channel = transport.grpc_channel
assert channel
transport = transports.GrafeasGrpcAsyncIOTransport(
credentials=ga_credentials.AnonymousCredentials(),
)
channel = transport.grpc_channel
assert channel
@pytest.mark.parametrize("transport_class", [
transports.GrafeasGrpcTransport,
transports.GrafeasGrpcAsyncIOTransport,
])
def test_transport_adc(transport_class):
# Test default credentials are used if not provided.
with mock.patch.object(google.auth, 'default') as adc:
adc.return_value = (ga_credentials.AnonymousCredentials(), None)
transport_class()
adc.assert_called_once()
def test_transport_grpc_default():
# A client should use the gRPC transport by default.
client = GrafeasClient(
credentials=ga_credentials.AnonymousCredentials(),
)
assert isinstance(
client.transport,
transports.GrafeasGrpcTransport,
)
def test_grafeas_base_transport_error():
# Passing both a credentials object and credentials_file should raise an error
with pytest.raises(core_exceptions.DuplicateCredentialArgs):
transport = transports.GrafeasTransport(
credentials=ga_credentials.AnonymousCredentials(),
credentials_file="credentials.json"
)
def test_grafeas_base_transport():
# Instantiate the base transport.
with mock.patch('grafeas.grafeas_v1.services.grafeas.transports.GrafeasTransport.__init__') as Transport:
Transport.return_value = None
transport = transports.GrafeasTransport(
credentials=ga_credentials.AnonymousCredentials(),
)
# Every method on the transport should just blindly
# raise NotImplementedError.
methods = (
'get_occurrence',
'list_occurrences',
'delete_occurrence',
'create_occurrence',
'batch_create_occurrences',
'update_occurrence',
'get_occurrence_note',
'get_note',
'list_notes',
'delete_note',
'create_note',
'batch_create_notes',
'update_note',
'list_note_occurrences',
)
for method in methods:
with pytest.raises(NotImplementedError):
getattr(transport, method)(request=object())
@requires_google_auth_gte_1_25_0
def test_grafeas_base_transport_with_credentials_file():
# Instantiate the base transport with a credentials file
with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('grafeas.grafeas_v1.services.grafeas.transports.GrafeasTransport._prep_wrapped_messages') as Transport:
Transport.return_value = None
load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
transport = transports.GrafeasTransport(
credentials_file="credentials.json",
quota_project_id="octopus",
)
load_creds.assert_called_once_with("credentials.json",
scopes=None,
default_scopes=(
),
quota_project_id="octopus",
)
@requires_google_auth_lt_1_25_0
def test_grafeas_base_transport_with_credentials_file_old_google_auth():
# Instantiate the base transport with a credentials file
with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('grafeas.grafeas_v1.services.grafeas.transports.GrafeasTransport._prep_wrapped_messages') as Transport:
Transport.return_value = None
load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
transport = transports.GrafeasTransport(
credentials_file="credentials.json",
quota_project_id="octopus",
)
load_creds.assert_called_once_with("credentials.json", scopes=(
),
quota_project_id="octopus",
)
def test_grafeas_base_transport_with_adc():
# Test the default credentials are used if credentials and credentials_file are None.
with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('grafeas.grafeas_v1.services.grafeas.transports.GrafeasTransport._prep_wrapped_messages') as Transport:
Transport.return_value = None
adc.return_value = (ga_credentials.AnonymousCredentials(), None)
transport = transports.GrafeasTransport()
adc.assert_called_once()
@requires_google_auth_gte_1_25_0
def test_grafeas_auth_adc():
# If no credentials are provided, we should use ADC credentials.
with mock.patch.object(google.auth, 'default', autospec=True) as adc:
adc.return_value = (ga_credentials.AnonymousCredentials(), None)
GrafeasClient()
adc.assert_called_once_with(
scopes=None,
default_scopes=(
),
quota_project_id=None,
)
@requires_google_auth_lt_1_25_0
def test_grafeas_auth_adc_old_google_auth():
# If no credentials are provided, we should use ADC credentials.
with mock.patch.object(google.auth, 'default', autospec=True) as adc:
adc.return_value = (ga_credentials.AnonymousCredentials(), None)
GrafeasClient()
adc.assert_called_once_with(
scopes=(),
quota_project_id=None,
)
@pytest.mark.parametrize(
"transport_class",
[
transports.GrafeasGrpcTransport,
transports.GrafeasGrpcAsyncIOTransport,
],
)
@requires_google_auth_gte_1_25_0
def test_grafeas_transport_auth_adc(transport_class):
# If credentials and host are not provided, the transport class should use
# ADC credentials.
with mock.patch.object(google.auth, 'default', autospec=True) as adc:
adc.return_value = (ga_credentials.AnonymousCredentials(), None)
transport_class(quota_project_id="octopus", scopes=["1", "2"])
adc.assert_called_once_with(
scopes=["1", "2"],
default_scopes=(),
quota_project_id="octopus",
)
@pytest.mark.parametrize(
"transport_class",
[
transports.GrafeasGrpcTransport,
transports.GrafeasGrpcAsyncIOTransport,
],
)
@requires_google_auth_lt_1_25_0
def test_grafeas_transport_auth_adc_old_google_auth(transport_class):
# If credentials and host are not provided, the transport class should use
# ADC credentials.
with mock.patch.object(google.auth, "default", autospec=True) as adc:
adc.return_value = (ga_credentials.AnonymousCredentials(), None)
transport_class(quota_project_id="octopus")
adc.assert_called_once_with(scopes=(
),
quota_project_id="octopus",
)
@pytest.mark.parametrize(
"transport_class,grpc_helpers",
[
(transports.GrafeasGrpcTransport, grpc_helpers),
(transports.GrafeasGrpcAsyncIOTransport, grpc_helpers_async)
],
)
def test_grafeas_transport_create_channel(transport_class, grpc_helpers):
# If credentials and host are not provided, the transport class should use
# ADC credentials.
with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object(
grpc_helpers, "create_channel", autospec=True
) as create_channel:
creds = ga_credentials.AnonymousCredentials()
adc.return_value = (creds, None)
transport_class(
quota_project_id="octopus",
scopes=["1", "2"]
)
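# The transport is expected to pass the ADC credentials, quota project and requested scopes straight through to create_channel for the default Grafeas host.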
create_channel.assert_called_with(
"containeranalysis.googleapis.com:443",
credentials=creds,
credentials_file=None,
quota_project_id="octopus",
default_scopes=(
),
scopes=["1", "2"],
default_host="containeranalysis.googleapis.com",
ssl_credentials=None,
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
],
)
@pytest.mark.parametrize("transport_class", [transports.GrafeasGrpcTransport, transports.GrafeasGrpcAsyncIOTransport])
def test_grafeas_grpc_transport_client_cert_source_for_mtls(
transport_class
):
cred = ga_credentials.AnonymousCredentials()
# Check ssl_channel_credentials is used if provided.
with mock.patch.object(transport_class, "create_channel") as mock_create_channel:
mock_ssl_channel_creds = mock.Mock()
transport_class(
host="squid.clam.whelk",
credentials=cred,
ssl_channel_credentials=mock_ssl_channel_creds
)
mock_create_channel.assert_called_once_with(
"squid.clam.whelk:443",
credentials=cred,
credentials_file=None,
scopes=None,
ssl_credentials=mock_ssl_channel_creds,
quota_project_id=None,
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
],
)
# Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls
# is used.
with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
transport_class(
credentials=cred,
client_cert_source_for_mtls=client_cert_source_callback
)
expected_cert, expected_key = client_cert_source_callback()
mock_ssl_cred.assert_called_once_with(
certificate_chain=expected_cert,
private_key=expected_key
)
def test_grafeas_host_no_port():
client = GrafeasClient(
credentials=ga_credentials.AnonymousCredentials(),
client_options=client_options.ClientOptions(api_endpoint='containeranalysis.googleapis.com'),
)
assert client.transport._host == 'containeranalysis.googleapis.com:443'
def test_grafeas_host_with_port():
client = GrafeasClient(
credentials=ga_credentials.AnonymousCredentials(),
client_options=client_options.ClientOptions(api_endpoint='containeranalysis.googleapis.com:8000'),
)
assert client.transport._host == 'containeranalysis.googleapis.com:8000'
def test_grafeas_grpc_transport_channel():
channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials())
# Check that channel is used if provided.
transport = transports.GrafeasGrpcTransport(
host="squid.clam.whelk",
channel=channel,
)
assert transport.grpc_channel == channel
assert transport._host == "squid.clam.whelk:443"
assert transport._ssl_channel_credentials is None
def test_grafeas_grpc_asyncio_transport_channel():
channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials())
# Check that channel is used if provided.
transport = transports.GrafeasGrpcAsyncIOTransport(
host="squid.clam.whelk",
channel=channel,
)
assert transport.grpc_channel == channel
assert transport._host == "squid.clam.whelk:443"
assert transport._ssl_channel_credentials is None
# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
# removed from grpc/grpc_asyncio transport constructor.
@pytest.mark.parametrize("transport_class", [transports.GrafeasGrpcTransport, transports.GrafeasGrpcAsyncIOTransport])
def test_grafeas_transport_channel_mtls_with_client_cert_source(
transport_class
):
with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred:
with mock.patch.object(transport_class, "create_channel") as grpc_create_channel:
mock_ssl_cred = mock.Mock()
grpc_ssl_channel_cred.return_value = mock_ssl_cred
mock_grpc_channel = mock.Mock()
grpc_create_channel.return_value = mock_grpc_channel
cred = ga_credentials.AnonymousCredentials()
with pytest.warns(DeprecationWarning):
with mock.patch.object(google.auth, 'default') as adc:
adc.return_value = (cred, None)
transport = transport_class(
host="squid.clam.whelk",
api_mtls_endpoint="mtls.squid.clam.whelk",
client_cert_source=client_cert_source_callback,
)
adc.assert_called_once()
grpc_ssl_channel_cred.assert_called_once_with(
certificate_chain=b"cert bytes", private_key=b"key bytes"
)
grpc_create_channel.assert_called_once_with(
"mtls.squid.clam.whelk:443",
credentials=cred,
credentials_file=None,
scopes=None,
ssl_credentials=mock_ssl_cred,
quota_project_id=None,
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
],
)
assert transport.grpc_channel == mock_grpc_channel
assert transport._ssl_channel_credentials == mock_ssl_cred
# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
# removed from grpc/grpc_asyncio transport constructor.
@pytest.mark.parametrize("transport_class", [transports.GrafeasGrpcTransport, transports.GrafeasGrpcAsyncIOTransport])
def test_grafeas_transport_channel_mtls_with_adc(
transport_class
):
mock_ssl_cred = mock.Mock()
with mock.patch.multiple(
"google.auth.transport.grpc.SslCredentials",
__init__=mock.Mock(return_value=None),
ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
):
with mock.patch.object(transport_class, "create_channel") as grpc_create_channel:
mock_grpc_channel = mock.Mock()
grpc_create_channel.return_value = mock_grpc_channel
mock_cred = mock.Mock()
with pytest.warns(DeprecationWarning):
transport = transport_class(
host="squid.clam.whelk",
credentials=mock_cred,
api_mtls_endpoint="mtls.squid.clam.whelk",
client_cert_source=None,
)
grpc_create_channel.assert_called_once_with(
"mtls.squid.clam.whelk:443",
credentials=mock_cred,
credentials_file=None,
scopes=None,
ssl_credentials=mock_ssl_cred,
quota_project_id=None,
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
],
)
assert transport.grpc_channel == mock_grpc_channel
def test_note_path():
project = "squid"
note = "clam"
expected = "projects/{project}/notes/{note}".format(project=project, note=note, )
actual = GrafeasClient.note_path(project, note)
assert expected == actual
def test_parse_note_path():
expected = {
"project": "whelk",
"note": "octopus",
}
path = GrafeasClient.note_path(**expected)
# Check that the path construction is reversible.
actual = GrafeasClient.parse_note_path(path)
assert expected == actual
def test_occurrence_path():
project = "oyster"
occurrence = "nudibranch"
expected = "projects/{project}/occurrences/{occurrence}".format(project=project, occurrence=occurrence, )
actual = GrafeasClient.occurrence_path(project, occurrence)
assert expected == actual
def test_parse_occurrence_path():
expected = {
"project": "cuttlefish",
"occurrence": "mussel",
}
path = GrafeasClient.occurrence_path(**expected)
# Check that the path construction is reversible.
actual = GrafeasClient.parse_occurrence_path(path)
assert expected == actual
def test_project_path():
project = "winkle"
expected = "projects/{project}".format(project=project, )
actual = GrafeasClient.project_path(project)
assert expected == actual
def test_parse_project_path():
expected = {
"project": "nautilus",
}
path = GrafeasClient.project_path(**expected)
# Check that the path construction is reversible.
actual = GrafeasClient.parse_project_path(path)
assert expected == actual
def test_common_billing_account_path():
billing_account = "scallop"
expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, )
actual = GrafeasClient.common_billing_account_path(billing_account)
assert expected == actual
def test_parse_common_billing_account_path():
expected = {
"billing_account": "abalone",
}
path = GrafeasClient.common_billing_account_path(**expected)
# Check that the path construction is reversible.
actual = GrafeasClient.parse_common_billing_account_path(path)
assert expected == actual
def test_common_folder_path():
folder = "squid"
expected = "folders/{folder}".format(folder=folder, )
actual = GrafeasClient.common_folder_path(folder)
assert expected == actual
def test_parse_common_folder_path():
expected = {
"folder": "clam",
}
path = GrafeasClient.common_folder_path(**expected)
# Check that the path construction is reversible.
actual = GrafeasClient.parse_common_folder_path(path)
assert expected == actual
def test_common_organization_path():
organization = "whelk"
expected = "organizations/{organization}".format(organization=organization, )
actual = GrafeasClient.common_organization_path(organization)
assert expected == actual
def test_parse_common_organization_path():
expected = {
"organization": "octopus",
}
path = GrafeasClient.common_organization_path(**expected)
# Check that the path construction is reversible.
actual = GrafeasClient.parse_common_organization_path(path)
assert expected == actual
def test_common_project_path():
project = "oyster"
expected = "projects/{project}".format(project=project, )
actual = GrafeasClient.common_project_path(project)
assert expected == actual
def test_parse_common_project_path():
expected = {
"project": "nudibranch",
}
path = GrafeasClient.common_project_path(**expected)
# Check that the path construction is reversible.
actual = GrafeasClient.parse_common_project_path(path)
assert expected == actual
def test_common_location_path():
project = "cuttlefish"
location = "mussel"
expected = "projects/{project}/locations/{location}".format(project=project, location=location, )
actual = GrafeasClient.common_location_path(project, location)
assert expected == actual
def test_parse_common_location_path():
expected = {
"project": "winkle",
"location": "nautilus",
}
path = GrafeasClient.common_location_path(**expected)
# Check that the path construction is reversible.
actual = GrafeasClient.parse_common_location_path(path)
assert expected == actual
def test_client_withDEFAULT_CLIENT_INFO():
client_info = gapic_v1.client_info.ClientInfo()
with mock.patch.object(transports.GrafeasTransport, '_prep_wrapped_messages') as prep:
client = GrafeasClient(
credentials=ga_credentials.AnonymousCredentials(),
client_info=client_info,
)
prep.assert_called_once_with(client_info)
with mock.patch.object(transports.GrafeasTransport, '_prep_wrapped_messages') as prep:
transport_class = GrafeasClient.get_transport_class()
transport = transport_class(
credentials=ga_credentials.AnonymousCredentials(),
client_info=client_info,
)
prep.assert_called_once_with(client_info)
| apache-2.0 | -8,733,633,512,157,647,000 | 35.468605 | 214 | 0.640257 | false |
skatsuta/aerospike-training | book/exercise/Key-valueOperations/Python/Program.py | 1 | 8944 | #!/usr/bin/env python
#
# * Copyright 2012-2014 by Aerospike.
# *
# * Permission is hereby granted, free of charge, to any person obtaining a copy
# * of this software and associated documentation files (the "Software"), to
# * deal in the Software without restriction, including without limitation the
# * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# * sell copies of the Software, and to permit persons to whom the Software is
# * furnished to do so, subject to the following conditions:
# *
# * The above copyright notice and this permission notice shall be included in
# * all copies or substantial portions of the Software.
# *
# * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# * IN THE SOFTWARE.
#
from __future__ import print_function
import aerospike
import sys
from optparse import OptionParser
from UserService import UserService
from TweetService import TweetService
#
# * @author Raghavendra Kumar
#
class Program(object):
client=None
seedHost = str()
port = int()
namespace = str()
set = str()
writePolicy = {}
policy = {}
def __init__(self, host, port, namespace, set):
# TODO: Establish a connection to Aerospike cluster
# Exercise 1
print("\nTODO: Establish a connection to Aerospike cluster");
self.client = aerospike.client({ 'hosts': [ (host, port) ] }).connect()
self.seedHost = host
self.port = port
self.namespace = namespace
self.set = set
self.writePolicy = {}
self.policy = {}
@classmethod
def main(cls, args):
usage = "usage: %prog [options] "
optparser = OptionParser(usage=usage, add_help_option=False)
optparser.add_option( "--help", dest="help", action="store_true", help="Displays this message.")
optparser.add_option( "-h", "--host", dest="host", type="string", default="127.0.0.1", metavar="<ADDRESS>", help="Address of Aerospike server (default: 127.0.0.1)")
optparser.add_option( "-p", "--port", dest="port", type="int", default=3000, metavar="<PORT>", help="Port of the Aerospike server (default: 3000)")
optparser.add_option( "-n", "--namespace", dest="namespace", type="string", metavar="<NAMESPACE>", help="Namespace (default: test)")
optparser.add_option( "-s", "--set", dest="set", type="string",metavar="<SET>", help="Set (default: demo)")
(options, args) = optparser.parse_args()
if options.help:
optparser.print_help()
print()
sys.exit(1)
aero=Program(options.host,options.port,options.namespace,options.set)
aero.work()
def work(self):
print("***** Welcome to Aerospike Developer Training *****\n")
print("INFO: Connecting to Aerospike cluster...")
# Establish connection to Aerospike server
# TODO: Check to see if the cluster connection succeeded
# Exercise 1
if not True:
print("\nERROR: Connection to Aerospike cluster failed! Please check the server settings and try again!")
else:
print("\nINFO: Connection to Aerospike cluster succeeded!\n")
# Create instance of UserService
us = UserService(self.client)
# Create instance of TweetService
ts = TweetService(self.client)
# Present options
print("\nWhat would you like to do:\n")
print("1> Create A User And A Tweet\n")
print("2> Read A User Record\n")
print("3> Batch Read Tweets For A User\n")
print("4> Scan All Tweets For All Users\n")
print("5> Record UDF -- Update User Password\n")
print("6> Query Tweets By Username And Users By Tweet Count Range\n")
print("7> Stream UDF -- Aggregation Based on Tweet Count By Region\n")
print("0> Exit\n")
print("\nSelect 0-7 and hit enter:\n")
try:
feature=int(raw_input('Input:'))
except ValueError:
print("Input a valid feature number")
sys.exit(0)
if feature != 0:
if feature==1:
print("\n********** Your Selection: Create User And A Tweet **********\n")
us.createUser()
ts.createTweet()
elif feature==2:
print("\n********** Your Selection: Read A User Record **********\n")
us.getUser()
elif feature==3:
print("\n********** Your Selection: Batch Read Tweets For A User **********\n")
us.batchGetUserTweets()
elif feature==4:
print("\n********** Your Selection: Scan All Tweets For All Users **********\n")
ts.scanAllTweetsForAllUsers()
elif feature==5:
print("\n********** Your Selection: Update User Password using CAS **********\n")
us.updatePasswordUsingCAS()
elif feature==6:
print("\n********** Your Selection: Query Tweets By Username And Users By Tweet Count Range **********\n")
ts.queryTweetsByUsername()
ts.queryUsersByTweetCount()
elif feature==7:
print("\n********** Your Selection: Stream UDF -- Aggregation Based on Tweet Count By Region **********\n")
us.aggregateUsersByTweetCountByRegion()
elif feature==12:
print("\n********** Create Users **********\n")
us.createUsers()
elif feature==23:
print("\n********** Create Tweets **********\n")
ts.createTweets()
else:
print ("Enter a Valid number from above menue !!")
# TODO: Close Aerospike cluster connection
# Exercise 1
print("\nTODO: Close Aerospike cluster connection");
#
# * example method calls
#
def readPartial(self, userName):
""" Python read specific bins """
(key, metadata, record) = self.client.get(("test", "users", userName), ("username", "password", "gender", "region") )
return record
def readMeta(self, userName):
""" not supported in Python Client """
def write(self, username, password):
""" Python read-modify-write """
meta = None
wr_policy = {
AS_POLICY_W_GEN: AS_POLICY_GEN_EQ
}
key = ("test", "users", username)
self.client.put(key,{"username": username,"password": password},meta,wr_policy)
def delete(self, username):
""" Delete Record """
key = ("test", "users", username)
self.client.remove(key)
    def exists(self, username):
""" Python key exists """
key = ("test", "users", username)
(key,itsHere) = self.client.exists(key)
# itsHere should not be Null
return itsHere
def add(self, username):
""" Add """
key = ("test", "users", username)
self.client.put(key, {"tweetcount":1})
def touch(self, username):
""" Not supported in Python Client """
def append(self, username):
""" Not supported in Python Client """
def connectWithClientPolicy(self):
""" Connect with Client configs """
config = { 'hosts': [ ( '127.0.0.1', 3000 )
],
'policies': { 'timeout': 1000 # milliseconds
} }
client = aerospike.client(config)
def deleteBin(self, username):
key = ("test", "users", username)
# Set bin value to null to drop bin.
self.client.put(key, {"interests": None} )
AS_POLICY_W_GEN = "generation"
AS_POLICY_GEN_UNDEF = 0 # Use default value
AS_POLICY_GEN_IGNORE = 1 # Write a record, regardless of generation.
AS_POLICY_GEN_EQ = 2 # Write a record, ONLY if generations are equal
AS_POLICY_GEN_GT = 3 # Write a record, ONLY if local generation is
# greater-than remote generation.
AS_POLICY_GEN_DUP = 4 # Write a record creating a duplicate, ONLY if
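    # Illustrative sketch (an assumption, not part of the training exercises): the
    # generation constants above are meant to be combined into a write policy for a
    # read-modify-write, e.g.
    #
    #   wr_policy = {AS_POLICY_W_GEN: AS_POLICY_GEN_EQ}
    #   client.put(("test", "users", "user1234"), {"password": "p"}, None, wr_policy)
    #
    # so the put succeeds only if the record generation is unchanged since the read
    # (compare the write() method above).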
if __name__ == '__main__':
import sys
Program.main(sys.argv)
| mit | -1,456,257,365,734,570,800 | 42.629268 | 172 | 0.55814 | false |
xianian/qt-creator | share/qtcreator/debugger/gdbbridge.py | 1 | 64687 |
try:
import __builtin__
except:
import builtins
try:
import gdb
except:
pass
import os
import os.path
import sys
import struct
import types
def warn(message):
print("XXX: %s\n" % message.encode("latin1"))
from dumper import *
#######################################################################
#
# Infrastructure
#
#######################################################################
def safePrint(output):
try:
print(output)
except:
out = ""
for c in output:
cc = ord(c)
if cc > 127:
out += "\\\\%d" % cc
elif cc < 0:
out += "\\\\%d" % (cc + 256)
else:
out += c
print(out)
def registerCommand(name, func):
class Command(gdb.Command):
def __init__(self):
super(Command, self).__init__(name, gdb.COMMAND_OBSCURE)
def invoke(self, args, from_tty):
safePrint(func(args))
Command()
#######################################################################
#
# Types
#
#######################################################################
PointerCode = gdb.TYPE_CODE_PTR
ArrayCode = gdb.TYPE_CODE_ARRAY
StructCode = gdb.TYPE_CODE_STRUCT
UnionCode = gdb.TYPE_CODE_UNION
EnumCode = gdb.TYPE_CODE_ENUM
FlagsCode = gdb.TYPE_CODE_FLAGS
FunctionCode = gdb.TYPE_CODE_FUNC
IntCode = gdb.TYPE_CODE_INT
FloatCode = gdb.TYPE_CODE_FLT # Parts of GDB assume that this means complex.
VoidCode = gdb.TYPE_CODE_VOID
#SetCode = gdb.TYPE_CODE_SET
RangeCode = gdb.TYPE_CODE_RANGE
StringCode = gdb.TYPE_CODE_STRING
#BitStringCode = gdb.TYPE_CODE_BITSTRING
#ErrorTypeCode = gdb.TYPE_CODE_ERROR
MethodCode = gdb.TYPE_CODE_METHOD
MethodPointerCode = gdb.TYPE_CODE_METHODPTR
MemberPointerCode = gdb.TYPE_CODE_MEMBERPTR
ReferenceCode = gdb.TYPE_CODE_REF
CharCode = gdb.TYPE_CODE_CHAR
BoolCode = gdb.TYPE_CODE_BOOL
ComplexCode = gdb.TYPE_CODE_COMPLEX
TypedefCode = gdb.TYPE_CODE_TYPEDEF
NamespaceCode = gdb.TYPE_CODE_NAMESPACE
#Code = gdb.TYPE_CODE_DECFLOAT # Decimal floating point.
#Code = gdb.TYPE_CODE_MODULE # Fortran
#Code = gdb.TYPE_CODE_INTERNAL_FUNCTION
#######################################################################
#
# Convenience
#
#######################################################################
# Just convenience for 'python print ...'
class PPCommand(gdb.Command):
def __init__(self):
super(PPCommand, self).__init__("pp", gdb.COMMAND_OBSCURE)
def invoke(self, args, from_tty):
print(eval(args))
PPCommand()
# Just convenience for 'python print gdb.parse_and_eval(...)'
class PPPCommand(gdb.Command):
def __init__(self):
super(PPPCommand, self).__init__("ppp", gdb.COMMAND_OBSCURE)
def invoke(self, args, from_tty):
print(gdb.parse_and_eval(args))
PPPCommand()
def scanStack(p, n):
p = int(p)
r = []
for i in xrange(n):
f = gdb.parse_and_eval("{void*}%s" % p)
m = gdb.execute("info symbol %s" % f, to_string=True)
if not m.startswith("No symbol matches"):
r.append(m)
p += f.type.sizeof
return r
class ScanStackCommand(gdb.Command):
def __init__(self):
super(ScanStackCommand, self).__init__("scanStack", gdb.COMMAND_OBSCURE)
def invoke(self, args, from_tty):
if len(args) == 0:
args = 20
safePrint(scanStack(gdb.parse_and_eval("$sp"), int(args)))
ScanStackCommand()
#######################################################################
#
# Import plain gdb pretty printers
#
#######################################################################
class PlainDumper:
def __init__(self, printer):
self.printer = printer
self.typeCache = {}
def __call__(self, d, value):
printer = self.printer.invoke(value)
lister = getattr(printer, "children", None)
children = [] if lister is None else list(lister())
d.putType(self.printer.name)
val = printer.to_string()
if isinstance(val, str):
d.putValue(val)
else: # Assuming LazyString
d.putStdStringHelper(val.address, val.length, val.type.sizeof)
d.putNumChild(len(children))
if d.isExpanded():
with Children(d):
for child in children:
d.putSubItem(child[0], child[1])
def importPlainDumpers(args):
if args == "off":
gdb.execute("disable pretty-printer .* .*")
else:
theDumper.importPlainDumpers()
registerCommand("importPlainDumpers", importPlainDumpers)
class OutputSafer:
def __init__(self, d):
self.d = d
def __enter__(self):
self.savedOutput = self.d.output
self.d.output = []
def __exit__(self, exType, exValue, exTraceBack):
if self.d.passExceptions and not exType is None:
showException("OUTPUTSAFER", exType, exValue, exTraceBack)
self.d.output = self.savedOutput
else:
self.savedOutput.extend(self.d.output)
self.d.output = self.savedOutput
return False
#def couldBePointer(p, align):
# typeobj = lookupType("unsigned int")
# ptr = gdb.Value(p).cast(typeobj)
# d = int(str(ptr))
# warn("CHECKING : %s %d " % (p, ((d & 3) == 0 and (d > 1000 or d == 0))))
# return (d & (align - 1)) and (d > 1000 or d == 0)
Value = gdb.Value
def stripTypedefs(typeobj):
typeobj = typeobj.unqualified()
while typeobj.code == TypedefCode:
typeobj = typeobj.strip_typedefs().unqualified()
return typeobj
#######################################################################
#
# The Dumper Class
#
#######################################################################
class Dumper(DumperBase):
def __init__(self):
DumperBase.__init__(self)
# These values will be kept between calls to 'showData'.
self.isGdb = True
self.childEventAddress = None
self.typeCache = {}
self.typesReported = {}
self.typesToReport = {}
self.qtNamespaceToReport = None
self.qmlEngines = []
self.qmlBreakpoints = []
def prepare(self, args):
self.output = []
self.currentIName = ""
self.currentPrintsAddress = True
self.currentChildType = ""
self.currentChildNumChild = -1
self.currentMaxNumChild = -1
self.currentNumChild = -1
self.currentValue = ReportItem()
self.currentType = ReportItem()
self.currentAddress = None
# The guess does not need to be updated during a showData()
# as the result is fixed during that time (ignoring "active"
# dumpers causing loading of shared objects etc).
self.currentQtNamespaceGuess = None
self.resultVarName = args.get("resultvarname", "")
self.expandedINames = set(args.get("expanded", []))
self.stringCutOff = int(args.get("stringcutoff", 10000))
self.displayStringLimit = int(args.get("displaystringlimit", 100))
self.typeformats = args.get("typeformats", {})
self.formats = args.get("formats", {})
self.watchers = args.get("watchers", {})
self.qmlcontext = int(args.get("qmlcontext", "0"), 0)
self.useDynamicType = int(args.get("dyntype", "0"))
self.useFancy = int(args.get("fancy", "0"))
self.forceQtNamespace = int(args.get("forcens", "0"))
self.passExceptions = int(args.get("passExceptions", "0"))
self.nativeMixed = int(args.get("nativemixed", "0"))
self.autoDerefPointers = int(args.get("autoderef", "0"))
self.partialUpdate = int(args.get("partial", "0"))
self.fallbackQtVersion = 0x50200
self.sortStructMembers = bool(args.get("sortStructMembers", True))
#warn("NAMESPACE: '%s'" % self.qtNamespace())
#warn("EXPANDED INAMES: %s" % self.expandedINames)
#warn("WATCHERS: %s" % self.watchers)
def listOfLocals(self):
frame = gdb.selected_frame()
try:
block = frame.block()
#warn("BLOCK: %s " % block)
except RuntimeError as error:
#warn("BLOCK IN FRAME NOT ACCESSIBLE: %s" % error)
return []
except:
warn("BLOCK NOT ACCESSIBLE FOR UNKNOWN REASONS")
return []
items = []
shadowed = {}
while True:
if block is None:
warn("UNEXPECTED 'None' BLOCK")
break
for symbol in block:
name = symbol.print_name
if name == "__in_chrg" or name == "__PRETTY_FUNCTION__":
continue
# "NotImplementedError: Symbol type not yet supported in
# Python scripts."
#warn("SYMBOL %s (%s): " % (symbol, name))
if name in shadowed:
level = shadowed[name]
name1 = "%s@%s" % (name, level)
shadowed[name] = level + 1
else:
name1 = name
shadowed[name] = 1
#warn("SYMBOL %s (%s, %s)): " % (symbol, name, symbol.name))
item = self.LocalItem()
item.iname = "local." + name1
item.name = name1
try:
item.value = frame.read_var(name, block)
#warn("READ 1: %s" % item.value)
items.append(item)
continue
except:
pass
try:
#warn("READ 2: %s" % item.value)
item.value = frame.read_var(name)
items.append(item)
continue
except:
# RuntimeError: happens for
# void foo() { std::string s; std::wstring w; }
# ValueError: happens for (as of 2010/11/4)
# a local struct as found e.g. in
# gcc sources in gcc.c, int execute()
pass
try:
#warn("READ 3: %s %s" % (name, item.value))
item.value = gdb.parse_and_eval(name)
#warn("ITEM 3: %s" % item.value)
items.append(item)
except:
# Can happen in inlined code (see last line of
# RowPainter::paintChars(): "RuntimeError:
# No symbol \"__val\" in current context.\n"
pass
# The outermost block in a function has the function member
# FIXME: check whether this is guaranteed.
if not block.function is None:
break
block = block.superblock
return items
# Hack to avoid QDate* dumper timeouts with GDB 7.4 on 32 bit
# due to misaligned %ebx in SSE calls (qstring.cpp:findChar)
# This seems to be fixed in 7.9 (or earlier)
def canCallLocale(self):
return False if self.is32bit() else True
def showData(self, args):
self.prepare(args)
partialVariable = args.get("partialVariable", "")
isPartial = len(partialVariable) > 0
#
# Locals
#
self.output.append('data=[')
if self.qmlcontext:
locals = self.extractQmlVariables(self.qmlcontext)
elif isPartial:
parts = partialVariable.split('.')
name = parts[1]
item = self.LocalItem()
item.iname = parts[0] + '.' + name
item.name = name
try:
if parts[0] == 'local':
frame = gdb.selected_frame()
item.value = frame.read_var(name)
else:
item.name = self.hexdecode(name)
item.value = gdb.parse_and_eval(item.name)
except RuntimeError as error:
item.value = error
except:
item.value = "<no value>"
locals = [item]
else:
locals = self.listOfLocals()
# Take care of the return value of the last function call.
if len(self.resultVarName) > 0:
try:
item = self.LocalItem()
item.name = self.resultVarName
item.iname = "return." + self.resultVarName
item.value = self.parseAndEvaluate(self.resultVarName)
locals.append(item)
except:
# Don't bother. It's only supplementary information anyway.
pass
locals.sort(key = lambda item: item.name)
for item in locals:
value = self.downcast(item.value) if self.useDynamicType else item.value
with OutputSafer(self):
self.anonNumber = -1
if item.iname == "local.argv" and str(value.type) == "char **":
self.putSpecialArgv(value)
else:
# A "normal" local variable or parameter.
with TopLevelItem(self, item.iname):
self.put('iname="%s",' % item.iname)
self.put('name="%s",' % item.name)
self.putItem(value)
with OutputSafer(self):
self.handleWatches(args)
self.output.append('],typeinfo=[')
for name in self.typesToReport.keys():
typeobj = self.typesToReport[name]
# Happens e.g. for '(anonymous namespace)::InsertDefOperation'
if not typeobj is None:
self.output.append('{name="%s",size="%s"}'
% (self.hexencode(name), typeobj.sizeof))
self.output.append(']')
self.typesToReport = {}
if self.forceQtNamespace:
            self.qtNamespaceToReport = self.qtNamespace()
if self.qtNamespaceToReport:
self.output.append(',qtnamespace="%s"' % self.qtNamespaceToReport)
self.qtNamespaceToReport = None
self.output.append(',partial="%d"' % isPartial)
safePrint(''.join(self.output))
def enterSubItem(self, item):
if not item.iname:
item.iname = "%s.%s" % (self.currentIName, item.name)
#warn("INAME %s" % item.iname)
self.put('{')
#if not item.name is None:
if isinstance(item.name, str):
self.put('name="%s",' % item.name)
item.savedIName = self.currentIName
item.savedValue = self.currentValue
item.savedType = self.currentType
item.savedCurrentAddress = self.currentAddress
self.currentIName = item.iname
self.currentValue = ReportItem();
self.currentType = ReportItem();
self.currentAddress = None
def exitSubItem(self, item, exType, exValue, exTraceBack):
#warn("CURRENT VALUE: %s: %s %s" % (self.currentIName, self.currentValue, self.currentType))
if not exType is None:
if self.passExceptions:
showException("SUBITEM", exType, exValue, exTraceBack)
self.putNumChild(0)
self.putSpecialValue(SpecialNotAccessibleValue)
try:
if self.currentType.value:
typeName = self.stripClassTag(self.currentType.value)
if len(typeName) > 0 and typeName != self.currentChildType:
self.put('type="%s",' % typeName) # str(type.unqualified()) ?
if self.currentValue.value is None:
self.put('value="",encoding="%d","numchild="0",'
% SpecialNotAccessibleValue)
else:
if not self.currentValue.encoding is None:
self.put('valueencoded="%d",' % self.currentValue.encoding)
if self.currentValue.elided:
self.put('valueelided="%d",' % self.currentValue.elided)
self.put('value="%s",' % self.currentValue.value)
except:
pass
if not self.currentAddress is None:
self.put(self.currentAddress)
self.put('},')
self.currentIName = item.savedIName
self.currentValue = item.savedValue
self.currentType = item.savedType
self.currentAddress = item.savedCurrentAddress
return True
def parseAndEvaluate(self, exp):
return gdb.parse_and_eval(exp)
def callHelper(self, value, func, args):
# args is a tuple.
arg = ""
for i in range(len(args)):
if i:
arg += ','
a = args[i]
if (':' in a) and not ("'" in a):
arg = "'%s'" % a
else:
arg += a
#warn("CALL: %s -> %s(%s)" % (value, func, arg))
typeName = self.stripClassTag(str(value.type))
if typeName.find(":") >= 0:
typeName = "'" + typeName + "'"
# 'class' is needed, see http://sourceware.org/bugzilla/show_bug.cgi?id=11912
#exp = "((class %s*)%s)->%s(%s)" % (typeName, value.address, func, arg)
ptr = value.address if value.address else self.pokeValue(value)
exp = "((%s*)%s)->%s(%s)" % (typeName, ptr, func, arg)
#warn("CALL: %s" % exp)
result = gdb.parse_and_eval(exp)
#warn(" -> %s" % result)
if not value.address:
gdb.parse_and_eval("free(0x%x)" % ptr)
return result
def childWithName(self, value, name):
try:
return value[name]
except:
return None
def isBadPointer(self, value):
try:
target = value.dereference()
target.is_optimized_out # Access test.
return False
except:
return True
def makeValue(self, typeobj, init):
typename = "::" + self.stripClassTag(str(typeobj));
# Avoid malloc symbol clash with QVector.
gdb.execute("set $d = (%s*)calloc(sizeof(%s), 1)" % (typename, typename))
gdb.execute("set *$d = {%s}" % init)
value = gdb.parse_and_eval("$d").dereference()
#warn(" TYPE: %s" % value.type)
#warn(" ADDR: %s" % value.address)
#warn(" VALUE: %s" % value)
return value
def makeExpression(self, value):
typename = "::" + self.stripClassTag(str(value.type))
#warn(" TYPE: %s" % typename)
#exp = "(*(%s*)(&%s))" % (typename, value.address)
exp = "(*(%s*)(%s))" % (typename, value.address)
#warn(" EXP: %s" % exp)
return exp
    def makeStdString(self, init):
# Works only for small allocators, but they are usually empty.
gdb.execute("set $d=(std::string*)calloc(sizeof(std::string), 2)");
gdb.execute("call($d->basic_string(\"" + init +
"\",*(std::allocator<char>*)(1+$d)))")
value = gdb.parse_and_eval("$d").dereference()
#warn(" TYPE: %s" % value.type)
#warn(" ADDR: %s" % value.address)
#warn(" VALUE: %s" % value)
return value
def childAt(self, value, index):
field = value.type.fields()[index]
try:
# Official access in GDB 7.6 or later.
return value[field]
except:
pass
try:
            # Won't work with anon entities, traditionally with empty
# field name, but starting with GDB 7.7 commit b5b08fb4
# with None field name.
return value[field.name]
except:
pass
# FIXME: Cheat. There seems to be no official way to access
# the real item, so we pass back the value. That at least
# enables later ...["name"] style accesses as gdb handles
# them transparently.
return value
def fieldAt(self, typeobj, index):
return typeobj.fields()[index]
def simpleValue(self, value):
return str(value)
def directBaseClass(self, typeobj, index = 0):
for f in typeobj.fields():
if f.is_base_class:
if index == 0:
return f.type
index -= 1;
return None
def directBaseObject(self, value, index = 0):
for f in value.type.fields():
if f.is_base_class:
if index == 0:
return value.cast(f.type)
index -= 1;
return None
def checkPointer(self, p, align = 1):
if not self.isNull(p):
p.dereference()
def pointerValue(self, p):
return toInteger(p)
def isNull(self, p):
# The following can cause evaluation to abort with "UnicodeEncodeError"
# for invalid char *, as their "contents" is being examined
#s = str(p)
#return s == "0x0" or s.startswith("0x0 ")
#try:
# # Can fail with: "RuntimeError: Cannot access memory at address 0x5"
# return p.cast(self.lookupType("void").pointer()) == 0
#except:
# return False
try:
# Can fail with: "RuntimeError: Cannot access memory at address 0x5"
return toInteger(p) == 0
except:
return False
def templateArgument(self, typeobj, position):
try:
# This fails on stock 7.2 with
# "RuntimeError: No type named myns::QObject.\n"
return typeobj.template_argument(position)
except:
# That's something like "myns::QList<...>"
return self.lookupType(self.extractTemplateArgument(str(typeobj.strip_typedefs()), position))
def numericTemplateArgument(self, typeobj, position):
# Workaround for gdb < 7.1
try:
return int(typeobj.template_argument(position))
except RuntimeError as error:
# ": No type named 30."
msg = str(error)
msg = msg[14:-1]
# gdb at least until 7.4 produces for std::array<int, 4u>
# for template_argument(1): RuntimeError: No type named 4u.
if msg[-1] == 'u':
msg = msg[0:-1]
return int(msg)
def intType(self):
self.cachedIntType = self.lookupType('int')
self.intType = lambda: self.cachedIntType
return self.cachedIntType
def charType(self):
return self.lookupType('char')
def sizetType(self):
return self.lookupType('size_t')
def charPtrType(self):
return self.lookupType('char*')
def voidPtrType(self):
return self.lookupType('void*')
def addressOf(self, value):
return toInteger(value.address)
def createPointerValue(self, address, pointeeType):
# This might not always work:
# a Python 3 based GDB due to the bug addressed in
# https://sourceware.org/ml/gdb-patches/2013-09/msg00571.html
try:
return gdb.Value(address).cast(pointeeType.pointer())
except:
# Try _some_ fallback (good enough for the std::complex dumper)
return gdb.parse_and_eval("(%s*)%s" % (pointeeType, address))
def intSize(self):
return 4
def ptrSize(self):
self.cachedPtrSize = self.lookupType('void*').sizeof
self.ptrSize = lambda: self.cachedPtrSize
return self.cachedPtrSize
def pokeValue(self, value):
"""
Allocates inferior memory and copies the contents of value.
Returns a pointer to the copy.
"""
# Avoid malloc symbol clash with QVector
size = value.type.sizeof
data = value.cast(gdb.lookup_type("unsigned char").array(0, int(size - 1)))
string = ''.join("\\x%02x" % int(data[i]) for i in range(size))
exp = '(%s*)memcpy(calloc(%s, 1), "%s", %s)' % (value.type, size, string, size)
#warn("EXP: %s" % exp)
return toInteger(gdb.parse_and_eval(exp))
def createValue(self, address, referencedType):
try:
return gdb.Value(address).cast(referencedType.pointer()).dereference()
except:
# Try _some_ fallback (good enough for the std::complex dumper)
return gdb.parse_and_eval("{%s}%s" % (referencedType, address))
def setValue(self, address, typename, value):
cmd = "set {%s}%s=%s" % (typename, address, value)
gdb.execute(cmd)
def setValues(self, address, typename, values):
cmd = "set {%s[%s]}%s={%s}" \
% (typename, len(values), address, ','.join(map(str, values)))
gdb.execute(cmd)
def selectedInferior(self):
try:
# gdb.Inferior is new in gdb 7.2
self.cachedInferior = gdb.selected_inferior()
except:
# Pre gdb 7.4. Right now we don't have more than one inferior anyway.
self.cachedInferior = gdb.inferiors()[0]
# Memoize result.
self.selectedInferior = lambda: self.cachedInferior
return self.cachedInferior
def readRawMemory(self, addr, size):
mem = self.selectedInferior().read_memory(addr, size)
if sys.version_info[0] >= 3:
mem.tobytes()
return mem
def extractInt64(self, addr):
return struct.unpack("q", self.readRawMemory(addr, 8))[0]
def extractUInt64(self, addr):
return struct.unpack("Q", self.readRawMemory(addr, 8))[0]
def extractInt(self, addr):
return struct.unpack("i", self.readRawMemory(addr, 4))[0]
def extractUInt(self, addr):
return struct.unpack("I", self.readRawMemory(addr, 4))[0]
def extractShort(self, addr):
return struct.unpack("h", self.readRawMemory(addr, 2))[0]
def extractUShort(self, addr):
return struct.unpack("H", self.readRawMemory(addr, 2))[0]
def extractByte(self, addr):
return struct.unpack("b", self.readRawMemory(addr, 1))[0]
def findStaticMetaObject(self, typename):
return self.findSymbol(typename + "::staticMetaObject")
def findSymbol(self, symbolName):
try:
result = gdb.lookup_global_symbol(symbolName)
return result.value() if result else 0
except:
pass
# Older GDB ~7.4
try:
address = gdb.parse_and_eval("&'%s'" % symbolName)
typeobj = gdb.lookup_type(self.qtNamespace() + "QMetaObject")
return self.createPointerValue(address, typeobj)
except:
return 0
def put(self, value):
self.output.append(value)
def childRange(self):
if self.currentMaxNumChild is None:
return xrange(0, toInteger(self.currentNumChild))
return xrange(min(toInteger(self.currentMaxNumChild), toInteger(self.currentNumChild)))
def isArmArchitecture(self):
return 'arm' in gdb.TARGET_CONFIG.lower()
def isQnxTarget(self):
return 'qnx' in gdb.TARGET_CONFIG.lower()
def isWindowsTarget(self):
# We get i686-w64-mingw32
return 'mingw' in gdb.TARGET_CONFIG.lower()
def qtVersionString(self):
try:
return str(gdb.lookup_symbol("qVersion")[0].value()())
except:
pass
try:
ns = self.qtNamespace()
return str(gdb.parse_and_eval("((const char*(*)())'%sqVersion')()" % ns))
except:
pass
return None
def qtVersion(self):
try:
version = self.qtVersionString()
(major, minor, patch) = version[version.find('"')+1:version.rfind('"')].split('.')
qtversion = 0x10000 * int(major) + 0x100 * int(minor) + int(patch)
self.qtVersion = lambda: qtversion
return qtversion
except:
# Use fallback until we have a better answer.
return self.fallbackQtVersion
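    # For example: a qVersion() string of "5.3.1" is packed by qtVersion() above as
    # 0x10000*5 + 0x100*3 + 1 = 0x050301, so version checks elsewhere read naturally
    # as self.qtVersion() >= 0x050000.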
def isQt3Support(self):
if self.qtVersion() >= 0x050000:
return False
else:
try:
# This will fail on Qt 4 without Qt 3 support
gdb.execute("ptype QChar::null", to_string=True)
                self.cachedIsQt3Support = True
            except:
                self.cachedIsQt3Support = False
        # Memoize good results.
        self.isQt3Support = lambda: self.cachedIsQt3Support
        return self.cachedIsQt3Support
def putAddress(self, addr):
if self.currentPrintsAddress and not self.isCli:
try:
# addr can be "None", int(None) fails.
#self.put('addr="0x%x",' % int(addr))
self.currentAddress = 'addr="0x%x",' % toInteger(addr)
except:
pass
def putSimpleValue(self, value, encoding = None, priority = 0):
self.putValue(value, encoding, priority)
def putPointerValue(self, value):
# Use a lower priority
if value is None:
self.putEmptyValue(-1)
else:
self.putValue("0x%x" % value.cast(
self.lookupType("unsigned long")), None, -1)
def stripNamespaceFromType(self, typeName):
typename = self.stripClassTag(typeName)
ns = self.qtNamespace()
if len(ns) > 0 and typename.startswith(ns):
typename = typename[len(ns):]
pos = typename.find("<")
# FIXME: make it recognize foo<A>::bar<B>::iterator?
while pos != -1:
pos1 = typename.rfind(">", pos)
typename = typename[0:pos] + typename[pos1+1:]
pos = typename.find("<")
return typename
def isMovableType(self, typeobj):
if typeobj.code == PointerCode:
return True
if self.isSimpleType(typeobj):
return True
return self.isKnownMovableType(self.stripNamespaceFromType(str(typeobj)))
def putSubItem(self, component, value, tryDynamic=True):
with SubItem(self, component):
self.putItem(value, tryDynamic)
def isSimpleType(self, typeobj):
code = typeobj.code
return code == BoolCode \
or code == CharCode \
or code == IntCode \
or code == FloatCode \
or code == EnumCode
def simpleEncoding(self, typeobj):
code = typeobj.code
if code == BoolCode or code == CharCode:
return Hex2EncodedInt1
if code == IntCode:
if str(typeobj).find("unsigned") >= 0:
if typeobj.sizeof == 1:
return Hex2EncodedUInt1
if typeobj.sizeof == 2:
return Hex2EncodedUInt2
if typeobj.sizeof == 4:
return Hex2EncodedUInt4
if typeobj.sizeof == 8:
return Hex2EncodedUInt8
else:
if typeobj.sizeof == 1:
return Hex2EncodedInt1
if typeobj.sizeof == 2:
return Hex2EncodedInt2
if typeobj.sizeof == 4:
return Hex2EncodedInt4
if typeobj.sizeof == 8:
return Hex2EncodedInt8
if code == FloatCode:
if typeobj.sizeof == 4:
return Hex2EncodedFloat4
if typeobj.sizeof == 8:
return Hex2EncodedFloat8
return None
def isReferenceType(self, typeobj):
return typeobj.code == gdb.TYPE_CODE_REF
def isStructType(self, typeobj):
return typeobj.code == gdb.TYPE_CODE_STRUCT
def isFunctionType(self, typeobj):
return typeobj.code == MethodCode or typeobj.code == FunctionCode
def putItem(self, value, tryDynamic=True):
if value is None:
# Happens for non-available watchers in gdb versions that
# need to use gdb.execute instead of gdb.parse_and_eval
self.putSpecialValue(SpecialNotAvailableValue)
self.putType("<unknown>")
self.putNumChild(0)
return
typeobj = value.type.unqualified()
typeName = str(typeobj)
if value.is_optimized_out:
self.putSpecialValue(SpecialOptimizedOutValue)
self.putType(typeName)
self.putNumChild(0)
return
tryDynamic &= self.useDynamicType
self.addToCache(typeobj) # Fill type cache
if tryDynamic:
self.putAddress(value.address)
# FIXME: Gui shows references stripped?
#warn(" ")
#warn("REAL INAME: %s" % self.currentIName)
#warn("REAL TYPE: %s" % value.type)
#warn("REAL CODE: %s" % value.type.code)
#warn("REAL VALUE: %s" % value)
if typeobj.code == ReferenceCode:
try:
# Try to recognize null references explicitly.
if toInteger(value.address) == 0:
self.putSpecialValue(SpecialNullReferenceValue)
self.putType(typeName)
self.putNumChild(0)
return
except:
pass
if tryDynamic:
try:
# Dynamic references are not supported by gdb, see
# http://sourceware.org/bugzilla/show_bug.cgi?id=14077.
# Find the dynamic type manually using referenced_type.
value = value.referenced_value()
value = value.cast(value.dynamic_type)
self.putItem(value)
self.putBetterType("%s &" % value.type)
return
except:
pass
try:
# FIXME: This throws "RuntimeError: Attempt to dereference a
# generic pointer." with MinGW's gcc 4.5 when it "identifies"
# a "QWidget &" as "void &" and with optimized out code.
self.putItem(value.cast(typeobj.target().unqualified()))
self.putBetterType("%s &" % self.currentType.value)
return
except RuntimeError:
self.putSpecialValue(SpecialOptimizedOutValue)
self.putType(typeName)
self.putNumChild(0)
return
if typeobj.code == IntCode or typeobj.code == CharCode:
self.putType(typeName)
if typeobj.sizeof == 1:
# Force unadorned value transport for char and Co.
self.putValue(int(value) & 0xff)
else:
self.putValue(value)
self.putNumChild(0)
return
if typeobj.code == FloatCode or typeobj.code == BoolCode:
self.putType(typeName)
self.putValue(value)
self.putNumChild(0)
return
if typeobj.code == EnumCode:
self.putType(typeName)
self.putValue("%s (%d)" % (value, value))
self.putNumChild(0)
return
if typeobj.code == ComplexCode:
self.putType(typeName)
self.putValue("%s" % value)
self.putNumChild(0)
return
if typeobj.code == TypedefCode:
if typeName in self.qqDumpers:
self.putType(typeName)
self.qqDumpers[typeName](self, value)
return
typeobj = stripTypedefs(typeobj)
# The cast can destroy the address?
#self.putAddress(value.address)
# Workaround for http://sourceware.org/bugzilla/show_bug.cgi?id=13380
if typeobj.code == ArrayCode:
value = self.parseAndEvaluate("{%s}%s" % (typeobj, value.address))
else:
try:
value = value.cast(typeobj)
except:
self.putValue("<optimized out typedef>")
self.putType(typeName)
self.putNumChild(0)
return
self.putItem(value)
self.putBetterType(typeName)
return
if typeobj.code == ArrayCode:
self.putCStyleArray(value)
return
if typeobj.code == PointerCode:
# This could still be stored in a register and
# potentially dereferencable.
self.putFormattedPointer(value)
return
if typeobj.code == MethodPointerCode \
or typeobj.code == MethodCode \
or typeobj.code == FunctionCode \
or typeobj.code == MemberPointerCode:
self.putType(typeName)
self.putValue(value)
self.putNumChild(0)
return
if typeName.startswith("<anon"):
# Anonymous union. We need a dummy name to distinguish
# multiple anonymous unions in the struct.
self.putType(typeobj)
self.putSpecialValue(SpecialEmptyStructureValue)
self.anonNumber += 1
with Children(self, 1):
self.listAnonymous(value, "#%d" % self.anonNumber, typeobj)
return
if typeobj.code == StringCode:
# FORTRAN strings
size = typeobj.sizeof
data = self.readMemory(value.address, size)
self.putValue(data, Hex2EncodedLatin1, 1)
self.putType(typeobj)
if typeobj.code != StructCode and typeobj.code != UnionCode:
warn("WRONG ASSUMPTION HERE: %s " % typeobj.code)
self.check(False)
if tryDynamic:
self.putItem(self.expensiveDowncast(value), False)
return
if self.tryPutPrettyItem(typeName, value):
return
# D arrays, gdc compiled.
if typeName.endswith("[]"):
n = value["length"]
base = value["ptr"]
self.putType(typeName)
self.putItemCount(n)
if self.isExpanded():
self.putArrayData(base.type.target(), base, n)
return
#warn("GENERIC STRUCT: %s" % typeobj)
#warn("INAME: %s " % self.currentIName)
#warn("INAMES: %s " % self.expandedINames)
#warn("EXPANDED: %s " % (self.currentIName in self.expandedINames))
staticMetaObject = self.extractStaticMetaObject(value.type)
if staticMetaObject:
self.putQObjectNameValue(value)
self.putType(typeName)
self.putEmptyValue()
self.putNumChild(len(typeobj.fields()))
if self.currentIName in self.expandedINames:
innerType = None
with Children(self, 1, childType=innerType):
self.putFields(value)
if staticMetaObject:
self.putQObjectGuts(value, staticMetaObject)
def toBlob(self, value):
size = toInteger(value.type.sizeof)
if value.address:
return self.extractBlob(value.address, size)
# No address. Possibly the result of an inferior call.
y = value.cast(gdb.lookup_type("unsigned char").array(0, int(size - 1)))
buf = bytearray(struct.pack('x' * size))
for i in range(size):
buf[i] = int(y[i])
return Blob(bytes(buf))
def extractBlob(self, base, size):
inferior = self.selectedInferior()
return Blob(inferior.read_memory(base, size))
def readCString(self, base):
inferior = self.selectedInferior()
mem = ""
while True:
char = inferior.read_memory(base, 1)[0]
if not char:
break
mem += char
base += 1
#if sys.version_info[0] >= 3:
# return mem.tobytes()
return mem
def putFields(self, value, dumpBase = True):
fields = value.type.fields()
if self.sortStructMembers:
def sortOrder(field):
if field.is_base_class:
return 0
if field.name and field.name.startswith("_vptr."):
return 1
return 2
fields.sort(key = lambda field: "%d%s" % (sortOrder(field), field.name))
#warn("TYPE: %s" % value.type)
#warn("FIELDS: %s" % fields)
baseNumber = 0
for field in fields:
#warn("FIELD: %s" % field)
#warn(" BITSIZE: %s" % field.bitsize)
#warn(" ARTIFICIAL: %s" % field.artificial)
# Since GDB commit b5b08fb4 anonymous structs get also reported
# with a 'None' name.
if field.name is None:
if value.type.code == ArrayCode:
# An array.
typeobj = stripTypedefs(value.type)
innerType = typeobj.target()
p = value.cast(innerType.pointer())
for i in xrange(int(typeobj.sizeof / innerType.sizeof)):
with SubItem(self, i):
self.putItem(p.dereference())
p = p + 1
else:
# Something without a name.
self.anonNumber += 1
with SubItem(self, str(self.anonNumber)):
self.putItem(value[field])
continue
# Ignore vtable pointers for virtual inheritance.
if field.name.startswith("_vptr."):
with SubItem(self, "[vptr]"):
# int (**)(void)
n = 100
self.putType(" ")
self.putValue(value[field.name])
self.putNumChild(n)
if self.isExpanded():
with Children(self):
p = value[field.name]
for i in xrange(n):
if toInteger(p.dereference()) != 0:
with SubItem(self, i):
self.putItem(p.dereference())
self.putType(" ")
p = p + 1
continue
#warn("FIELD NAME: %s" % field.name)
#warn("FIELD TYPE: %s" % field.type)
if field.is_base_class:
# Field is base type. We cannot use field.name as part
# of the iname as it might contain spaces and other
# strange characters.
if dumpBase:
baseNumber += 1
with UnnamedSubItem(self, "@%d" % baseNumber):
baseValue = value.cast(field.type)
self.putBaseClassName(field.name)
self.putAddress(baseValue.address)
self.putItem(baseValue, False)
elif len(field.name) == 0:
# Anonymous union. We need a dummy name to distinguish
# multiple anonymous unions in the struct.
self.anonNumber += 1
self.listAnonymous(value, "#%d" % self.anonNumber,
field.type)
else:
# Named field.
with SubItem(self, field.name):
#bitsize = getattr(field, "bitsize", None)
#if not bitsize is None:
# self.put("bitsize=\"%s\"" % bitsize)
self.putItem(self.downcast(value[field.name]))
def putBaseClassName(self, name):
self.put('iname="%s",' % self.currentIName)
self.put('name="[%s]",' % name)
def listAnonymous(self, value, name, typeobj):
for field in typeobj.fields():
#warn("FIELD NAME: %s" % field.name)
if field.name:
with SubItem(self, field.name):
self.putItem(value[field.name])
else:
# Further nested.
self.anonNumber += 1
name = "#%d" % self.anonNumber
#iname = "%s.%s" % (selitem.iname, name)
#child = SameItem(item.value, iname)
with SubItem(self, name):
self.put('name="%s",' % name)
self.putEmptyValue()
fieldTypeName = str(field.type)
if fieldTypeName.endswith("<anonymous union>"):
self.putType("<anonymous union>")
elif fieldTypeName.endswith("<anonymous struct>"):
self.putType("<anonymous struct>")
else:
self.putType(fieldTypeName)
with Children(self, 1):
self.listAnonymous(value, name, field.type)
#def threadname(self, maximalStackDepth, objectPrivateType):
# e = gdb.selected_frame()
# out = ""
# ns = self.qtNamespace()
# while True:
# maximalStackDepth -= 1
# if maximalStackDepth < 0:
# break
# e = e.older()
# if e == None or e.name() == None:
# break
# if e.name() == ns + "QThreadPrivate::start" \
# or e.name() == "_ZN14QThreadPrivate5startEPv@4":
# try:
# thrptr = e.read_var("thr").dereference()
# d_ptr = thrptr["d_ptr"]["d"].cast(objectPrivateType).dereference()
# try:
# objectName = d_ptr["objectName"]
# except: # Qt 5
# p = d_ptr["extraData"]
# if not self.isNull(p):
# objectName = p.dereference()["objectName"]
# if not objectName is None:
# data, size, alloc = self.stringData(objectName)
# if size > 0:
# s = self.readMemory(data, 2 * size)
#
# thread = gdb.selected_thread()
# inner = '{valueencoded="';
# inner += str(Hex4EncodedLittleEndianWithoutQuotes)+'",id="'
# inner += str(thread.num) + '",value="'
# inner += s
# #inner += self.encodeString(objectName)
# inner += '"},'
#
# out += inner
# except:
# pass
# return out
def threadnames(self, maximalStackDepth):
# FIXME: This needs a proper implementation for MinGW, and only there.
# Linux, Mac and QNX mirror the objectName() to the underlying threads,
# so we get the names already as part of the -thread-info output.
return '[]'
#out = '['
#oldthread = gdb.selected_thread()
#if oldthread:
# try:
# objectPrivateType = gdb.lookup_type(ns + "QObjectPrivate").pointer()
# inferior = self.selectedInferior()
# for thread in inferior.threads():
# thread.switch()
# out += self.threadname(maximalStackDepth, objectPrivateType)
# except:
# pass
# oldthread.switch()
#return out + ']'
def importPlainDumper(self, printer):
name = printer.name.replace("::", "__")
self.qqDumpers[name] = PlainDumper(printer)
self.qqFormats[name] = ""
def importPlainDumpers(self):
for obj in gdb.objfiles():
for printers in obj.pretty_printers + gdb.pretty_printers:
for printer in printers.subprinters:
self.importPlainDumper(printer)
def qtNamespace(self):
if not self.currentQtNamespaceGuess is None:
return self.currentQtNamespaceGuess
# This only works when called from a valid frame.
try:
cand = "QArrayData::shared_null"
symbol = gdb.lookup_symbol(cand)[0]
if symbol:
ns = symbol.name[:-len(cand)]
self.qtNamespaceToReport = ns
self.qtNamespace = lambda: ns
return ns
except:
pass
try:
# This is Qt, but not 5.x.
cand = "QByteArray::shared_null"
symbol = gdb.lookup_symbol(cand)[0]
if symbol:
ns = symbol.name[:-len(cand)]
self.qtNamespaceToReport = ns
self.qtNamespace = lambda: ns
self.fallbackQtVersion = 0x40800
return ns
except:
pass
try:
# Last fall backs.
s = gdb.execute("ptype QByteArray", to_string=True)
if s.find("QMemArray") >= 0:
# Qt 3.
self.qtNamespaceToReport = ""
self.qtNamespace = lambda: ""
self.qtVersion = lambda: 0x30308
self.fallbackQtVersion = 0x30308
return ""
# Seemingly needed with Debian's GDB 7.4.1
ns = s[s.find("class")+6:s.find("QByteArray")]
if len(ns):
self.qtNamespaceToReport = ns
self.qtNamespace = lambda: ns
return ns
except:
pass
self.currentQtNamespaceGuess = ""
return ""
def assignValue(self, args):
typeName = self.hexdecode(args['type'])
expr = self.hexdecode(args['expr'])
value = self.hexdecode(args['value'])
simpleType = int(args['simpleType'])
ns = self.qtNamespace()
if typeName.startswith(ns):
typeName = typeName[len(ns):]
typeName = typeName.replace("::", "__")
pos = typeName.find('<')
if pos != -1:
typeName = typeName[0:pos]
if typeName in self.qqEditable and not simpleType:
#self.qqEditable[typeName](self, expr, value)
expr = gdb.parse_and_eval(expr)
self.qqEditable[typeName](self, expr, value)
else:
cmd = "set variable (%s)=%s" % (expr, value)
gdb.execute(cmd)
def hasVTable(self, typeobj):
fields = typeobj.fields()
if len(fields) == 0:
return False
if fields[0].is_base_class:
            return self.hasVTable(fields[0].type)
return str(fields[0].type) == "int (**)(void)"
def dynamicTypeName(self, value):
if self.hasVTable(value.type):
#vtbl = str(gdb.parse_and_eval("{int(*)(int)}%s" % int(value.address)))
try:
# Fails on 7.1 due to the missing to_string.
vtbl = gdb.execute("info symbol {int*}%s" % int(value.address),
to_string = True)
pos1 = vtbl.find("vtable ")
if pos1 != -1:
pos1 += 11
pos2 = vtbl.find(" +", pos1)
if pos2 != -1:
return vtbl[pos1 : pos2]
except:
pass
return str(value.type)
def downcast(self, value):
try:
return value.cast(value.dynamic_type)
except:
pass
#try:
# return value.cast(self.lookupType(self.dynamicTypeName(value)))
#except:
# pass
return value
def expensiveDowncast(self, value):
try:
return value.cast(value.dynamic_type)
except:
pass
try:
return value.cast(self.lookupType(self.dynamicTypeName(value)))
except:
pass
return value
def addToCache(self, typeobj):
typename = str(typeobj)
if typename in self.typesReported:
return
self.typesReported[typename] = True
self.typesToReport[typename] = typeobj
def enumExpression(self, enumType, enumValue):
return self.qtNamespace() + "Qt::" + enumValue
def lookupType(self, typestring):
typeobj = self.typeCache.get(typestring)
#warn("LOOKUP 1: %s -> %s" % (typestring, typeobj))
if not typeobj is None:
return typeobj
if typestring == "void":
typeobj = gdb.lookup_type(typestring)
self.typeCache[typestring] = typeobj
self.typesToReport[typestring] = typeobj
return typeobj
#try:
# typeobj = gdb.parse_and_eval("{%s}&main" % typestring).typeobj
# if not typeobj is None:
# self.typeCache[typestring] = typeobj
# self.typesToReport[typestring] = typeobj
# return typeobj
#except:
# pass
# See http://sourceware.org/bugzilla/show_bug.cgi?id=13269
# gcc produces "{anonymous}", gdb "(anonymous namespace)"
# "<unnamed>" has been seen too. The only thing gdb
# understands when reading things back is "(anonymous namespace)"
if typestring.find("{anonymous}") != -1:
ts = typestring
ts = ts.replace("{anonymous}", "(anonymous namespace)")
typeobj = self.lookupType(ts)
if not typeobj is None:
self.typeCache[typestring] = typeobj
self.typesToReport[typestring] = typeobj
return typeobj
#warn(" RESULT FOR 7.2: '%s': %s" % (typestring, typeobj))
# This part should only trigger for
# gdb 7.1 for types with namespace separators.
# And anonymous namespaces.
ts = typestring
while True:
#warn("TS: '%s'" % ts)
if ts.startswith("class "):
ts = ts[6:]
elif ts.startswith("struct "):
ts = ts[7:]
elif ts.startswith("const "):
ts = ts[6:]
elif ts.startswith("volatile "):
ts = ts[9:]
elif ts.startswith("enum "):
ts = ts[5:]
elif ts.endswith(" const"):
ts = ts[:-6]
elif ts.endswith(" volatile"):
ts = ts[:-9]
elif ts.endswith("*const"):
ts = ts[:-5]
elif ts.endswith("*volatile"):
ts = ts[:-8]
else:
break
if ts.endswith('*'):
typeobj = self.lookupType(ts[0:-1])
if not typeobj is None:
typeobj = typeobj.pointer()
self.typeCache[typestring] = typeobj
self.typesToReport[typestring] = typeobj
return typeobj
try:
#warn("LOOKING UP '%s'" % ts)
typeobj = gdb.lookup_type(ts)
except RuntimeError as error:
#warn("LOOKING UP '%s': %s" % (ts, error))
# See http://sourceware.org/bugzilla/show_bug.cgi?id=11912
exp = "(class '%s'*)0" % ts
try:
typeobj = self.parseAndEvaluate(exp).type.target()
except:
# Can throw "RuntimeError: No type named class Foo."
pass
except:
#warn("LOOKING UP '%s' FAILED" % ts)
pass
if not typeobj is None:
self.typeCache[typestring] = typeobj
self.typesToReport[typestring] = typeobj
return typeobj
# This could still be None as gdb.lookup_type("char[3]") generates
# "RuntimeError: No type named char[3]"
self.typeCache[typestring] = typeobj
self.typesToReport[typestring] = typeobj
return typeobj
def stackListFrames(self, args):
def fromNativePath(str):
return str.replace('\\', '/')
limit = int(args['limit'])
if limit <= 0:
limit = 10000
options = args['options']
opts = {}
if options == "nativemixed":
opts["nativemixed"] = 1
self.prepare(opts)
self.output = []
frame = gdb.newest_frame()
i = 0
self.currentCallContext = None
while i < limit and frame:
with OutputSafer(self):
name = frame.name()
functionName = "??" if name is None else name
fileName = ""
objfile = ""
fullName = ""
pc = frame.pc()
sal = frame.find_sal()
line = -1
if sal:
line = sal.line
symtab = sal.symtab
if not symtab is None:
objfile = fromNativePath(symtab.objfile.filename)
fileName = fromNativePath(symtab.filename)
fullName = symtab.fullname()
if fullName is None:
fullName = ""
else:
fullName = fromNativePath(fullName)
if self.nativeMixed:
if self.isReportableQmlFrame(functionName):
engine = frame.read_var("engine")
h = self.extractQmlLocation(engine)
self.put(('frame={level="%s",func="%s",file="%s",'
'fullname="%s",line="%s",language="js",addr="0x%x"}')
% (i, h['functionName'], h['fileName'], h['fileName'],
h['lineNumber'], h['context']))
i += 1
frame = frame.older()
continue
if self.isInternalQmlFrame(functionName):
frame = frame.older()
self.put(('frame={level="%s",addr="0x%x",func="%s",'
'file="%s",fullname="%s",line="%s",'
'from="%s",language="c",usable="0"}') %
(i, pc, functionName, fileName, fullName, line, objfile))
i += 1
frame = frame.older()
continue
self.put(('frame={level="%s",addr="0x%x",func="%s",'
'file="%s",fullname="%s",line="%s",'
'from="%s",language="c"}') %
(i, pc, functionName, fileName, fullName, line, objfile))
frame = frame.older()
i += 1
safePrint(''.join(self.output))
def createResolvePendingBreakpointsHookBreakpoint(self, args):
class Resolver(gdb.Breakpoint):
def __init__(self, dumper, args):
self.dumper = dumper
self.args = args
spec = "qt_v4ResolvePendingBreakpointsHook"
print("Preparing hook to resolve pending QML breakpoint at %s" % args)
super(Resolver, self).\
__init__(spec, gdb.BP_BREAKPOINT, internal=True, temporary=False)
def stop(self):
bp = self.dumper.doInsertQmlBreakpoint(args)
print("Resolving QML breakpoint %s -> %s" % (args, bp))
self.enabled = False
return False
self.qmlBreakpoints.append(Resolver(self, args))
def exitGdb(self, _):
gdb.execute("quit")
def loadDumpers(self, args):
self.setupDumpers()
def reportDumpers(self, msg):
print(msg)
def profile1(self, args):
"""Internal profiling"""
import tempfile
import cProfile
tempDir = tempfile.gettempdir() + "/bbprof"
cProfile.run('theDumper.showData(%s)' % args, tempDir)
import pstats
pstats.Stats(tempDir).sort_stats('time').print_stats()
def profile2(self, args):
import timeit
print(timeit.repeat('theDumper.showData(%s)' % args,
'from __main__ import theDumper', number=10))
class CliDumper(Dumper):
def __init__(self):
Dumper.__init__(self)
self.childrenPrefix = '['
        self.childrenSuffix = '] '
self.indent = 0
self.isCli = True
def reportDumpers(self, msg):
return msg
def enterSubItem(self, item):
if not item.iname:
item.iname = "%s.%s" % (self.currentIName, item.name)
self.indent += 1
self.putNewline()
if isinstance(item.name, str):
self.output += item.name + ' = '
item.savedIName = self.currentIName
item.savedValue = self.currentValue
item.savedType = self.currentType
item.savedCurrentAddress = self.currentAddress
self.currentIName = item.iname
self.currentValue = ReportItem();
self.currentType = ReportItem();
self.currentAddress = None
def exitSubItem(self, item, exType, exValue, exTraceBack):
self.indent -= 1
#warn("CURRENT VALUE: %s: %s %s" %
# (self.currentIName, self.currentValue, self.currentType))
if not exType is None:
if self.passExceptions:
showException("SUBITEM", exType, exValue, exTraceBack)
self.putNumChild(0)
self.putSpecialValue(SpecialNotAccessibleValue)
try:
if self.currentType.value:
typeName = self.stripClassTag(self.currentType.value)
self.put('<%s> = {' % typeName)
if self.currentValue.value is None:
self.put('<not accessible>')
else:
value = self.currentValue.value
if self.currentValue.encoding is Hex2EncodedLatin1:
value = self.hexdecode(value)
elif self.currentValue.encoding is Hex2EncodedUtf8:
value = self.hexdecode(value)
elif self.currentValue.encoding is Hex4EncodedLittleEndian:
b = bytes.fromhex(value)
value = codecs.decode(b, 'utf-16')
self.put('"%s"' % value)
if self.currentValue.elided:
self.put('...')
if self.currentType.value:
self.put('}')
except:
pass
if not self.currentAddress is None:
self.put(self.currentAddress)
self.currentIName = item.savedIName
self.currentValue = item.savedValue
self.currentType = item.savedType
self.currentAddress = item.savedCurrentAddress
return True
def putNewline(self):
self.output += '\n' + ' ' * self.indent
def put(self, line):
if self.output.endswith('\n'):
self.output = self.output[0:-1]
self.output += line
def putNumChild(self, numchild):
pass
def putBaseClassName(self, name):
pass
def putOriginalAddress(self, value):
pass
def putAddressRange(self, base, step):
return True
def showData(self, args):
args['fancy'] = 1
args['passException'] = 1
args['autoderef'] = 1
name = args['varlist']
self.prepare(args)
self.output = name + ' = '
frame = gdb.selected_frame()
value = frame.read_var(name)
with TopLevelItem(self, name):
self.putItem(value)
return self.output
# Global instance.
if gdb.parameter('height') is None:
theDumper = Dumper()
else:
import codecs
theDumper = CliDumper()
######################################################################
#
# ThreadNames Command
#
#######################################################################
def threadnames(arg):
return theDumper.threadnames(int(arg))
registerCommand("threadnames", threadnames)
#######################################################################
#
# Native Mixed
#
#######################################################################
#class QmlEngineCreationTracker(gdb.Breakpoint):
# def __init__(self):
# spec = "QQmlEnginePrivate::init"
# super(QmlEngineCreationTracker, self).\
# __init__(spec, gdb.BP_BREAKPOINT, internal=True)
#
# def stop(self):
# engine = gdb.parse_and_eval("q_ptr")
# print("QML engine created: %s" % engine)
# theDumper.qmlEngines.append(engine)
# return False
#
#QmlEngineCreationTracker()
class TriggeredBreakpointHookBreakpoint(gdb.Breakpoint):
def __init__(self):
spec = "qt_v4TriggeredBreakpointHook"
super(TriggeredBreakpointHookBreakpoint, self).\
__init__(spec, gdb.BP_BREAKPOINT, internal=True)
def stop(self):
print("QML engine stopped.")
return True
TriggeredBreakpointHookBreakpoint()
| lgpl-2.1 | 4,238,433,531,481,384,400 | 34.328782 | 105 | 0.519471 | false |
ClearCorp/server-tools | external_file_location/models/task.py | 1 | 8567 | # coding: utf-8
# @ 2015 Valentin CHEMIERE @ Akretion
# © @author Mourad EL HADJ MIMOUNE <[email protected]>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from openerp import models, fields, api
import openerp
from openerp import tools
from base64 import b64encode
import os
import datetime
import logging
_logger = logging.getLogger(__name__)
try:
# We use a jinja2 sandboxed environment to render mako templates.
# Note that the rendering does not cover all the mako syntax, in particular
# arbitrary Python statements are not accepted, and not all expressions are
# allowed: only "public" attributes (not starting with '_') of objects may
# be accessed.
# This is done on purpose: it prevents incidental or malicious execution of
# Python code that may break the security of the server.
from jinja2.sandbox import SandboxedEnvironment
mako_template_env = SandboxedEnvironment(
variable_start_string="${",
variable_end_string="}",
line_statement_prefix="%",
trim_blocks=True, # do not output newline after blocks
)
mako_template_env.globals.update({
'str': str,
'datetime': datetime,
'len': len,
'abs': abs,
'min': min,
'max': max,
'sum': sum,
'filter': filter,
'reduce': reduce,
'map': map,
'round': round,
})
except ImportError:
_logger.warning("jinja2 not available, templating features will not work!")
class Task(models.Model):
_name = 'external.file.task'
_description = 'External file task'
name = fields.Char(required=True)
method_type = fields.Selection(
[('import', 'Import'), ('export', 'Export')],
required=True)
    filename = fields.Char(help='File name which is imported. '
                                'You can use a file pattern like *.txt '
                                'to import all txt files')
filepath = fields.Char(help='Path to imported/exported file')
location_id = fields.Many2one('external.file.location', string='Location',
required=True)
attachment_ids = fields.One2many('ir.attachment.metadata', 'task_id',
string='Attachment')
move_path = fields.Char(string='Move Path',
help='Imported File will be moved to this path')
new_name = fields.Char(string='New Name',
                           help='Imported File will be renamed to this name. '
                                'The name can use a mako template where obj is an '
                                'ir.attachment.metadata record. Template example: '
                                '${obj.name}-${obj.create_date}.csv')
    md5_check = fields.Boolean(help='Control file integrity after import with'
                                    ' an md5 file')
after_import = fields.Selection(selection='_get_action',
help='Action after import a file')
company_id = fields.Many2one(
'res.company', 'Company',
default=lambda self: self.env['res.company']._company_default_get(
'external.file.task'))
file_type = fields.Selection(
selection=[],
string="File Type",
help="The file type determines an import method to be used "
"to parse and transform data before their import in ERP")
active = fields.Boolean(default=True)
def _get_action(self):
return [('rename', 'Rename'),
('move', 'Move'),
('move_rename', 'Move & Rename'),
('delete', 'Delete'),
]
@api.multi
def _prepare_attachment_vals(self, datas, filename, md5_datas):
self.ensure_one()
vals = {
'name': filename,
'datas': b64encode(datas),
'datas_fname': filename,
'task_id': self.id,
'external_hash': md5_datas,
'file_type': self.file_type or False,
}
return vals
@api.model
def _template_render(self, template, record):
try:
template = mako_template_env.from_string(tools.ustr(template))
except Exception:
_logger.exception("Failed to load template %r", template)
variables = {'obj': record}
try:
render_result = template.render(variables)
except Exception:
_logger.exception(
"Failed to render template %r using values %r" %
(template, variables))
render_result = u""
if render_result == u"False":
render_result = u""
return render_result
@api.model
def run_task_scheduler(self, domain=None):
if domain is None:
domain = []
tasks = self.env['external.file.task'].search(domain)
for task in tasks:
if task.method_type == 'import':
task.run_import()
elif task.method_type == 'export':
task.run_export()
@api.multi
def run_import(self):
self.ensure_one()
protocols = self.env['external.file.location']._get_classes()
cls = protocols.get(self.location_id.protocol)[1]
attach_obj = self.env['ir.attachment.metadata']
with cls.connect(self.location_id) as conn:
md5_datas = ''
for file_name in conn.listdir(path=self.filepath,
wildcard=self.filename or '',
files_only=True):
with api.Environment.manage():
with openerp.registry(
self.env.cr.dbname).cursor() as new_cr:
new_env = api.Environment(new_cr, self.env.uid,
self.env.context)
try:
full_path = os.path.join(self.filepath, file_name)
file_data = conn.open(full_path, 'rb')
datas = file_data.read()
if self.md5_check:
md5_file = conn.open(full_path + '.md5', 'rb')
md5_datas = md5_file.read().rstrip('\r\n')
attach_vals = self._prepare_attachment_vals(
datas, file_name, md5_datas)
attachment = attach_obj.with_env(new_env).create(
attach_vals)
new_full_path = False
if self.after_import == 'rename':
new_name = self._template_render(
self.new_name, attachment)
new_full_path = os.path.join(
self.filepath, new_name)
elif self.after_import == 'move':
new_full_path = os.path.join(
self.move_path, file_name)
elif self.after_import == 'move_rename':
new_name = self._template_render(
self.new_name, attachment)
new_full_path = os.path.join(
self.move_path, new_name)
if new_full_path:
conn.rename(full_path, new_full_path)
if self.md5_check:
conn.rename(
full_path + '.md5',
                                    new_full_path + '.md5')
if self.after_import == 'delete':
conn.remove(full_path)
if self.md5_check:
conn.remove(full_path + '.md5')
except Exception, e:
new_env.cr.rollback()
raise e
else:
new_env.cr.commit()
@api.multi
def run_export(self):
self.ensure_one()
attachment_obj = self.env['ir.attachment.metadata']
attachments = attachment_obj.search(
[('task_id', '=', self.id), ('state', '!=', 'done')])
for attachment in attachments:
attachment.run()
| agpl-3.0 | 3,746,848,047,542,242,000 | 39.40566 | 79 | 0.494747 | false |
maruina/kanedias | aws/load_config.py | 1 | 2747 | import os
import sys
from boto.pyami.config import Config
from fabric.colors import red
# Load the configuration file
if os.path.exists('config.ini'):
boto_config = Config()
boto_config.load_credential_file('config.ini')
if boto_config.items('Credentials'):
AWS_ID = boto_config.get('Credentials', 'aws_access_key_id')
AWS_KEY = boto_config.get('Credentials', 'aws_secret_access_key')
REGION = boto_config.get('Credentials', 'region')
else:
print(red('Error: credentials section is missing, abort!'))
sys.exit(1)
if boto_config.items('Config'):
DEFAULT_OS = boto_config.get('Config', 'default_os')
DEFAULT_SSH_DIR = os.path.expanduser(boto_config.get('Config', 'default_ssh_dir'))
DEFAULT_FILE_DIR = os.path.expanduser(boto_config.get('Config', 'default_file_dir'))
DEFAULT_INTERNAL_DOMAIN = boto_config.get('Config', 'default_internal_domain')
DEFAULT_PUBLIC_DOMAIN = boto_config.get('Config', 'default_public_domain')
else:
print(red('Error: config section is missing, abort!'))
sys.exit(1)
else:
print(red('Error: configuration file missing, abort!'))
sys.exit(1)
AWS_REGIONS = {
'ap-northeast-1': 'Asia Pacific (Tokyo)',
'ap-southeast-1': 'Asia Pacific (Singapore)',
'ap-southeast-2': 'Asia Pacific (Sydney)',
'eu-central-1': 'EU (Frankfurt)',
'eu-west-1': 'EU (Ireland)',
'sa-east-1': 'South America (Sao Paulo)',
'us-east-1': 'US East (N. Virginia)',
'us-west-1': 'US West (N. California)',
'us-west-2': 'US West (Oregon)'
}
AMI_LIST = {
'CentOS': {
'version': 'CentOS-6 x86_64 with updates',
'type': 'HVM',
'regions': {
'us-east-1': 'ami-c2a818aa',
'us-west-1': 'ami-57cfc412',
'us-west-2': 'ami-81d092b1',
'eu-west-1': 'ami-30ff5c47'
}
},
'Debian': {
'version': 'Debian x86_64 7.7',
'type': 'HVM',
'regions': {
'us-east-1': 'ami-5ae66932',
'us-west-1': 'ami-b12e39f4',
'us-west-2': 'ami-87367eb7',
'eu-west-1': 'ami-46cc6631'
}
},
'Ubuntu': {
'version': 'Ubuntu x86_64 12.04 LTS',
'type': 'HVM with EBS-SSD',
'regions': {
'us-east-1': 'ami-34cc7a5c',
'us-west-1': 'ami-b7515af2',
'us-west-2': 'ami-0f47053f',
'eu-west-1': 'ami-6ca1011b',
'eu-central-1': 'ami-643c0a79'
}
}
}
AMI_USER = {
'CentOS': 'root',
'Debian': 'admin',
'Ubuntu': 'ubuntu',
'Amazon': 'ec2-user'
}
ENVIRONMENTS = {
'dev': 'development',
'tst': 'test',
'sta': 'staging',
'prd': 'production'
}
| mit | -3,950,142,547,903,683,600 | 30.215909 | 92 | 0.552967 | false |
chrisxue815/leetcode_python | problems/test_0407.py | 1 | 2180 | import heapq
import unittest
class Solution:
def trapRainWater(self, heightMap):
"""
:type heightMap: List[List[int]]
:rtype: int
"""
if not heightMap:
return 0
res = 0
height = len(heightMap)
width = len(heightMap[0])
visited = [[False] * width for _ in range(height)]
q = []
for row in range(height):
visited[row][0] = True
visited[row][width - 1] = True
heapq.heappush(q, (heightMap[row][0], row, 0))
heapq.heappush(q, (heightMap[row][width - 1], row, width - 1))
for col in range(width):
visited[0][col] = True
visited[height - 1][col] = True
heapq.heappush(q, (heightMap[0][col], 0, col))
heapq.heappush(q, (heightMap[height - 1][col], height - 1, col))
dirs = ((-1, 0), (1, 0), (0, -1), (0, 1))
while q:
prev_height, prev_row, prev_col = heapq.heappop(q)
for delta_row, delta_col in dirs:
row = prev_row + delta_row
col = prev_col + delta_col
if 0 <= row < height and 0 <= col < width and not visited[row][col]:
visited[row][col] = True
h = heightMap[row][col]
water_level = max(prev_height, h)
res += water_level - h
heapq.heappush(q, (water_level, row, col))
return res
class Test(unittest.TestCase):
def test(self):
self._test([
[1, 4, 3, 1, 3, 2],
[3, 2, 1, 3, 2, 4],
[2, 3, 3, 2, 3, 1],
], 4)
self._test([
[12, 13, 1, 12],
[13, 4, 13, 12],
[13, 8, 10, 12],
[12, 13, 12, 12],
[13, 13, 13, 13],
], 14)
self._test([
[5, 5, 5, 1],
[5, 1, 1, 5],
[5, 1, 5, 5],
[5, 2, 5, 8],
], 3)
def _test(self, heights, expected):
actual = Solution().trapRainWater(heights)
self.assertEqual(expected, actual)
if __name__ == '__main__':
unittest.main()
| unlicense | 5,613,371,172,959,575,000 | 27.684211 | 84 | 0.441743 | false |
eldstal/cardcinogen | card.py | 1 | 2572 | #!/bin/env python3
import unittest
import os
import sys
import util
import log
from PIL import Image
from layout import SimpleLayout, ComplexLayout
class CardTemplate:
""" Parsed version of a JSON card template """
def __init__(self, json, rootdir="."):
self.front_name = util.get_default(json, "front-image", "front.png")
self.hidden_name = util.get_default(json, "hidden-image", "hidden.png")
self.layouts = []
for j in util.get_default(json, "layouts", []):
self.type = util.get_default(j, "type", "simple")
if (self.type == "complex"):
self.layouts.append(ComplexLayout(j, rootdir))
else:
self.layouts.append(SimpleLayout(j, rootdir))
front_path = os.path.join(rootdir, self.front_name)
hidden_path = os.path.join(rootdir, self.hidden_name)
# Make sure we have valid images and they all have matching sizes
self.front = util.default_image(front_path, (372, 520))
self.hidden = util.default_image(hidden_path, self.front.size, self.front.size)
def make_card(self, textgen):
""" Generate a single card """
if (len(self.layouts) == 0):
log.log.write("Warning: No layouts specified.")
return None
face = self.front.copy()
for l in self.layouts:
overlay = l.render(face.size, textgen)
if (overlay is None):
# This layout is done generating cards.
# This happens when, eventually, textgen runs out of card texts for a given layout.
continue
# We have a card! Return it and that's that.
face.paste(overlay, mask=overlay)
return face
# None of the layouts can generate any cards. We're done.
return None
#
# Unit tests
#
class TestCardStuff(unittest.TestCase):
def test_default(self):
tmpl_default = CardTemplate({})
self.assertEqual(tmpl_default.front_name, "front.png")
self.assertEqual(tmpl_default.hidden_name, "hidden.png")
        self.assertEqual(tmpl_default.layouts, [])
# Override all settings
dic = {
"front-image": "card-front.jpeg",
"hidden-image": "card-hidden.jpeg",
"layout": [
{
"x": 10
},
{
"y": 20
}
]
}
tmpl = CardTemplate(dic)
self.assertEqual(tmpl.front_name, dic["front-image"])
self.assertEqual(tmpl.hidden_name, dic["hidden-image"])
        self.assertEqual(len(tmpl.layouts), 2)
        self.assertEqual(tmpl.layouts[0].x, dic["layouts"][0]["x"])
        self.assertEqual(tmpl.layouts[1].y, dic["layouts"][1]["y"])
if __name__ == '__main__':
unittest.main()
| mit | 445,096,269,937,110,460 | 26.073684 | 91 | 0.630638 | false |
dpaiton/OpenPV | pv-core/python/pvtools/writepvpfile.py | 1 | 10534 | import numpy as np
import scipy.sparse as sp
import pdb
from readpvpheader import headerPattern, extendedHeaderPattern
def checkData(data):
#Check if dictionary
if not isinstance(data, dict):
raise ValueError("Input data structure must be a dictionary with the keys \"values\" and \"time\"")
#Check for fields values and time
if not 'values' in data.keys():
raise ValueError("Input data structure missing \"values\" key");
if not 'time' in data.keys():
raise ValueError("Input data structure missing \"time\" key");
values = data["values"]
time = data["time"]
#Make sure the 2 arrays are numpy arrays or sparse matrices
if not sp.issparse(values) and not type(values).__module__ == np.__name__:
raise ValueError("Values field must be either a sparse matrix or a numpy array")
#If time is a list, convert to numpy array
if type(time) == list:
data["time"] = np.array(data["time"])
time = data["time"]
if not type(time).__module__ == np.__name__:
raise ValueError("Time field must be either a numpy array or a list")
#Check dimensions of values and time
if sp.issparse(values):
if not values.ndim == 2:
raise ValueError("Sparse values must have 2 dimensions")
else:
if not values.ndim == 4 and not values.ndim == 6:
raise ValueError("Dense values must have either 4 or 6 dimensions")
#Check that sizes of values and time matches
valuesShape = values.shape
timeShape = time.shape
if not valuesShape[0] == timeShape[0]:
raise ValueError("Values must have the same number of frames as time (" + str(valuesShape[0]) + " vs " + str(timeShape[0]) + ")")
#Values should be single floats, time should be double floats
data["values"] = data["values"].astype(np.float32)
data["time"] = data["time"].astype(np.float64)
#Dense values must be c-contiguous
if(not sp.issparse(data["values"]) and not data["values"].flags["C_CONTIGUOUS"]):
data["values"] = data["values"].copy(order='C')
def generateHeader(data, inShape):
#data["values"] can be one of 3 shapes: dense 4d mat for activity, dense 6d mat for weights
#scipy coo_sparse matrix for sparse activity
header = {}
values = data["values"]
#If sparse matrix, write as sparse format
if(sp.issparse(values)):
if(inShape == None):
raise ValueError("Sparse values must have shape input when generating header")
if len(inShape) != 3:
raise ValueError("Shape parameter must be a 3 tuple of (ny, nx, nf)")
(ny, nx, nf) = inShape
(numFrames, numFeat) = values.shape
if(not numFeat == ny*nx*nf):
raise ValueError("Shape provided does not match the data shape (" + str(ny) + "*" + str(nx) + "*" + str(nf) + " vs " + str(numFeat) + ")")
header["headersize"] = np.uint32(80)
header["numparams"] = np.uint32(20)
header["filetype"] = np.uint32(6)
header["nx"] = np.uint32(nx)
header["ny"] = np.uint32(ny)
header["nf"] = np.uint32(nf)
header["numrecords"] = np.uint32(1)
header["recordsize"] = np.uint32(0) #Not used in sparse activity
header["datasize"] = np.uint32(8) #Int/float are 4 bytes each
header["datatype"] = np.uint32(4) #Type is location-value pair
header["nxprocs"] = np.uint32(1) #No longer used
header["nyprocs"] = np.uint32(1)
header["nxGlobal"] = np.uint32(nx)
header["nyGlobal"] = np.uint32(ny)
header["kx0"] = np.uint32(0)
header["ky0"] = np.uint32(0)
header["nbatch"] = np.uint32(1)
header["nbands"] = np.uint32(numFrames)
header["time"] = np.float64(data["time"][0])
#If 4d dense matrix, write as dense format
elif(values.ndim == 4):
(numFrames, ny, nx, nf) = values.shape
header["headersize"] = np.uint32(80)
header["numparams"] = np.uint32(20)
header["filetype"] = np.uint32(4)
header["nx"] = np.uint32(nx)
header["ny"] = np.uint32(ny)
header["nf"] = np.uint32(nf)
header["numrecords"] = np.uint32(1)
header["recordsize"] = np.uint32(nx*ny*nf) #Not used in sparse activity
header["datasize"] = np.uint32(4) #floats are 4 bytes
header["datatype"] = np.uint32(3) #Type is float
header["nxprocs"] = np.uint32(1) #No longer used
header["nyprocs"] = np.uint32(1)
header["nxGlobal"] = np.uint32(nx)
header["nyGlobal"] = np.uint32(ny)
header["kx0"] = np.uint32(0)
header["ky0"] = np.uint32(0)
header["nbatch"] = np.uint32(1)
header["nbands"] = np.uint32(numFrames)
header["time"] = np.float64(data["time"][0])
#If 6d dense matrix, write as weights format
elif(values.ndim == 6):
(numFrames, numArbors, numKernels, nyp, nxp, nfp) = values.shape
header["headersize"] = np.uint32(104)
header["numparams"] = np.uint32(26)
header["filetype"] = np.uint32(5)
header["nx"] = np.uint32(1) #size not used by weights
header["ny"] = np.uint32(1)
header["nf"] = np.uint32(numKernels) #Pre nf
header["numrecords"] = np.uint32(numArbors)
#Each data for arbor is preceded by nxp(2 bytes), ny (2 bytes) and offset (4 bytes)
header["recordsize"] = np.uint32(numKernels * (8+4*nxp*nyp*nfp))
header["datasize"] = np.uint32(4) #floats are 4 bytes
header["datatype"] = np.uint32(3) #float type
header["nxprocs"] = np.uint32(1)
header["nyprocs"] = np.uint32(1)
header["nxGlobal"] = np.uint32(1)
header["nyGlobal"] = np.uint32(1)
header["kx0"] = np.uint32(0)
header["ky0"] = np.uint32(0)
header["nbatch"] = np.uint32(1)
header["nbands"] = np.uint32(numArbors) #For weights, numArbors is stored in nbands, no field for numFrames
#This field will be updated on write
header["time"] = np.float64(data["time"][0])
#Weights have extended header
header["nxp"] = np.uint32(nxp)
header["nyp"] = np.uint32(nyp)
header["nfp"] = np.uint32(nfp)
header["wMax"] = np.uint32(1) #This field will be updated on write
header["wMin"] = np.uint32(1) #This field will be updated on write
header["numpatches"] = np.uint32(numKernels)
return header
def writepvpfile(filename, data, shape=None, useExistingHeader=False):
#Check data structure
checkData(data)
if not 'header' in data.keys():
if useExistingHeader:
raise ValueError("Must specify a \"header\" field if using existing header")
#Data can either have a header field or not
#Generate header if no header field
if not useExistingHeader:
#If it doesn't exist, generate header
data["header"] = generateHeader(data, shape)
# To get ordered list of header params
if data["header"]['numparams'] == 26:
hPattern = extendedHeaderPattern
else:
hPattern = headerPattern
with open(filename, 'wb') as stream:
if data["header"]['filetype'] == 1:
print('Filetype 1 not yet supported for write pvp')
elif data["header"]['filetype'] == 2:
print('Filetype 2 not yet supported for write pvp')
elif data["header"]['filetype'] == 3:
print('Filetype 3 not yet supported for write pvp')
elif data["header"]['filetype'] == 4:
(numFrames, ny, nx, nf) = data["values"].shape
#Write out header
for headerEntry in hPattern:
stream.write(headerEntry[1](data["header"][headerEntry[0]]))
for dataFrame in range(numFrames):
stream.write(data["time"][dataFrame])
stream.write(data["values"][dataFrame, :, :, :])
elif data["header"]['filetype'] == 5:
(numFrames, numArbors, numKernels, nyp, nxp, nfp) = data["values"].shape
# Type 5's have a header in each frame
#Make a copy of header dictionary to avoid changing
#the header field
tmpHeader = data["header"].copy()
for dataFrame in range(numFrames):
#Set header fields that change from frame to frame
tmpHeader["time"] = np.float64(data["time"][dataFrame])
##wMax and wMin are int32's, whereas the max and min might not be an int
#tmpHeader["wMax"] = np.uint32(np.max(data["values"][dataFrame, :, :, :, :, :]))
#tmpHeader["wMin"] = np.uint32(np.min(data["values"][dataFrame, :, :, :, :, :]))
for headerEntry in hPattern:
stream.write(headerEntry[1](tmpHeader[headerEntry[0]]))
#Within each patch, we write out each nxp, nyp, and offset
for dataArbor in range(numArbors):
for dataKernel in range(numKernels):
stream.write(np.uint16(nxp))
stream.write(np.uint16(nyp))
stream.write(np.uint32(0)) #Offset is always 0 for kernels
stream.write(data["values"][dataFrame, dataArbor, dataKernel, :, :, :])
#Sparse values
elif data["header"]['filetype'] == 6:
(numFrames, numData) = data["values"].shape
# Copied from filetype 4
for headerEntry in hPattern:
stream.write(headerEntry[1](data["header"][headerEntry[0]]))
for dataFrame in range(numFrames):
frameVals = data["values"].getrow(dataFrame)
count = frameVals.nnz
index = frameVals.indices
value = frameVals.data
#Write time first, followed by count, followed by values
stream.write(data["time"][dataFrame])
stream.write(np.uint32(count))
for i in range(count):
stream.write(np.uint32(index[i]))
stream.write(np.float32(value[i]))
if __name__ == "__main__":
data = {}
values = np.ones((2, 10))
data["values"] = sp.coo_matrix(values)
data["time"] = range(2)
writepvpfile("test.pvp", data, shape=(2, 5, 1))
| epl-1.0 | 6,473,318,771,748,574,000 | 43.447257 | 150 | 0.57566 | false |
thinkAmi-sandbox/Bottle-sample | e.g._bbs_app/bbs.py | 1 | 1890 | import datetime
import pickle
from pathlib import Path
from bottle import Bottle, run, get, post, redirect, request, response, jinja2_template
class Message(object):
def __init__(self, title, handle, message):
self.title = title
self.handle = handle
self.message = message
self.created_at = datetime.datetime.now().strftime('%Y/%m/%d %H:%M:%S')
# Set the instance on the module-level variable app so it can be used from test code
app = Bottle()
@app.get('/')
def get_form():
    # Use the getunicode() method to read the cookie value as Unicode
    # https://bottlepy.org/docs/dev/tutorial.html#introducing-formsdict
    # https://bottlepy.org/docs/dev/api.html#bottle.FormsDict
    # handle = request.get_cookie('handle')  #=> would end up storing mojibake such as 'ã'
handle = request.cookies.getunicode('handle', default='')
messages = read_messages()
return jinja2_template('bbs.html', handle=handle, messages=messages)
@app.post('/')
def post_form():
response.set_cookie('handle', request.forms.get('handle'))
message = Message(
        # Here too, use getunicode() instead of get() to avoid mojibake in the rendered HTML
title=request.forms.getunicode('title'),
handle=request.forms.getunicode('handle'),
message=request.forms.getunicode('message'),
)
messages = read_messages()
messages.append(message)
with open('bbs.pickle', mode='wb') as f:
pickle.dump(messages, f)
redirect('/')
@app.get('/delete_cookie')
def delete_cookie():
response.delete_cookie('handle')
redirect('/')
def read_messages():
if Path('bbs.pickle').exists():
with open('bbs.pickle', mode='rb') as f:
return pickle.load(f)
return []
if __name__ == "__main__":
run(app, host="localhost", port=8080, debug=True, reloader=True) | unlicense | -4,043,259,285,766,576,000 | 27.8 | 87 | 0.653735 | false |
capitalone/cloud-custodian | tools/c7n_azure/c7n_azure/provisioning/deployment_unit.py | 1 | 1645 | import logging
from abc import ABCMeta, abstractmethod
from c7n.utils import local_session
from c7n_azure.session import Session
class DeploymentUnit(metaclass=ABCMeta):
log = logging.getLogger('custodian.azure.deployment_unit.DeploymentUnit')
def __init__(self, client):
self.type = ""
self.session = local_session(Session)
self.client = self.session.client(client)
def get(self, params):
result = self._get(params)
if result:
self.log.info('Found %s "%s".' % (self.type, params['name']))
else:
self.log.info('%s "%s" not found.' % (self.type, params['name']))
return result
def check_exists(self):
return self.get() is not None
def provision(self, params):
self.log.info('Creating %s "%s"' % (self.type, params['name']))
result = self._provision(params)
if result:
self.log.info('%s "%s" successfully created' % (self.type, params['name']))
else:
self.log.info('Failed to create %s "%s"' % (self.type, params['name']))
return result
def provision_if_not_exists(self, params):
result = self.get(params)
if result is None:
if 'id' in params.keys():
raise Exception('%s with %s id is not found' % (self.type, params['id']))
result = self.provision(params)
return result
@abstractmethod
def _get(self, params):
raise NotImplementedError()
@abstractmethod
def _provision(self, params):
raise NotImplementedError()
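# Illustrative sketch (added for clarity, not part of c7n_azure): a hypothetical
# concrete unit showing the contract subclasses fulfil. The client path string
# and the return values below are placeholders, not real Azure SDK names.
class _ExampleDeploymentUnit(DeploymentUnit):
    def __init__(self):
        super(_ExampleDeploymentUnit, self).__init__('example.sdk.ExampleManagementClient')
        self.type = 'Example Resource'
    def _get(self, params):
        # Look the resource up by params['name']; return None when it does not exist.
        return None
    def _provision(self, params):
        # Create the resource and return the provider's response object.
        return {'name': params['name']}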
| apache-2.0 | -3,710,982,216,230,997,000 | 30.254902 | 89 | 0.579331 | false |
iYgnohZ/crack-geetest | geetest/geetest.py | 1 | 4035 | # -*- coding: utf-8 -*-
import time
import uuid
import StringIO
from PIL import Image
from selenium.webdriver.common.action_chains import ActionChains
class BaseGeetestCrack(object):
"""验证码破解基础类"""
def __init__(self, driver):
self.driver = driver
self.driver.maximize_window()
def input_by_id(self, text=u"中国移动", element_id="keyword_qycx"):
"""输入查询关键词
:text: Unicode, 要输入的文本
:element_id: 输入框网页元素id
"""
input_el = self.driver.find_element_by_id(element_id)
input_el.clear()
input_el.send_keys(text)
time.sleep(3.5)
def click_by_id(self, element_id="popup-submit"):
"""点击查询按钮
:element_id: 查询按钮网页元素id
"""
search_el = self.driver.find_element_by_id(element_id)
search_el.click()
time.sleep(3.5)
def calculate_slider_offset(self):
"""计算滑块偏移位置,必须在点击查询按钮之后调用
:returns: Number
"""
img1 = self.crop_captcha_image()
self.drag_and_drop(x_offset=5)
img2 = self.crop_captcha_image()
w1, h1 = img1.size
w2, h2 = img2.size
if w1 != w2 or h1 != h2:
return False
left = 0
flag = False
for i in xrange(45, w1):
for j in xrange(h1):
if not self.is_pixel_equal(img1, img2, i, j):
left = i
flag = True
break
if flag:
break
if left == 45:
left -= 2
return left
def is_pixel_equal(self, img1, img2, x, y):
pix1 = img1.load()[x, y]
pix2 = img2.load()[x, y]
        if abs(pix1[0] - pix2[0]) < 60 and abs(pix1[1] - pix2[1]) < 60 and abs(pix1[2] - pix2[2]) < 60:
return True
else:
return False
def crop_captcha_image(self, element_id="gt_box"):
"""截取验证码图片
:element_id: 验证码图片网页元素id
:returns: StringIO, 图片内容
"""
captcha_el = self.driver.find_element_by_class_name(element_id)
location = captcha_el.location
size = captcha_el.size
left = int(location['x'])
top = int(location['y'])
left = 1010
top = 535
# right = left + int(size['width'])
# bottom = top + int(size['height'])
right = left + 523
bottom = top + 235
print(left, top, right, bottom)
screenshot = self.driver.get_screenshot_as_png()
screenshot = Image.open(StringIO.StringIO(screenshot))
captcha = screenshot.crop((left, top, right, bottom))
captcha.save("%s.png" % uuid.uuid4().get_hex())
return captcha
def get_browser_name(self):
"""获取当前使用浏览器名称
:returns: TODO
"""
return str(self.driver).split('.')[2]
def drag_and_drop(self, x_offset=0, y_offset=0, element_class="gt_slider_knob"):
"""拖拽滑块
:x_offset: 相对滑块x坐标偏移
:y_offset: 相对滑块y坐标偏移
:element_class: 滑块网页元素CSS类名
"""
dragger = self.driver.find_element_by_class_name(element_class)
action = ActionChains(self.driver)
action.drag_and_drop_by_offset(dragger, x_offset, y_offset).perform()
        # This delay is required: wait for the widget to reset after the drag
time.sleep(8)
def move_to_element(self, element_class="gt_slider_knob"):
"""鼠标移动到网页元素上
:element: 目标网页元素
"""
time.sleep(3)
element = self.driver.find_element_by_class_name(element_class)
action = ActionChains(self.driver)
action.move_to_element(element).perform()
time.sleep(4.5)
def crack(self):
"""执行破解程序
"""
raise NotImplementedError
| mit | 1,481,177,789,686,042,400 | 25.435714 | 105 | 0.543367 | false |
j5shi/Thruster | pylibs/idlelib/IdleHistory.py | 1 | 4239 | "Implement Idle Shell history mechanism with History class"
from idlelib.configHandler import idleConf
class History:
''' Implement Idle Shell history mechanism.
store - Store source statement (called from PyShell.resetoutput).
fetch - Fetch stored statement matching prefix already entered.
history_next - Bound to <<history-next>> event (default Alt-N).
history_prev - Bound to <<history-prev>> event (default Alt-P).
'''
def __init__(self, text):
'''Initialize data attributes and bind event methods.
.text - Idle wrapper of tk Text widget, with .bell().
.history - source statements, possibly with multiple lines.
.prefix - source already entered at prompt; filters history list.
.pointer - index into history.
.cyclic - wrap around history list (or not).
'''
self.text = text
self.history = []
self.prefix = None
self.pointer = None
self.cyclic = idleConf.GetOption("main", "History", "cyclic", 1, "bool")
text.bind("<<history-previous>>", self.history_prev)
text.bind("<<history-next>>", self.history_next)
def history_next(self, event):
"Fetch later statement; start with ealiest if cyclic."
self.fetch(reverse=False)
return "break"
def history_prev(self, event):
"Fetch earlier statement; start with most recent."
self.fetch(reverse=True)
return "break"
def fetch(self, reverse):
        '''Fetch statement and replace current line in text widget.
Set prefix and pointer as needed for successive fetches.
Reset them to None, None when returning to the start line.
Sound bell when return to start line or cannot leave a line
because cyclic is False.
'''
nhist = len(self.history)
pointer = self.pointer
prefix = self.prefix
if pointer is not None and prefix is not None:
if self.text.compare("insert", "!=", "end-1c") or \
self.text.get("iomark", "end-1c") != self.history[pointer]:
pointer = prefix = None
self.text.mark_set("insert", "end-1c") # != after cursor move
if pointer is None or prefix is None:
prefix = self.text.get("iomark", "end-1c")
if reverse:
pointer = nhist # will be decremented
else:
if self.cyclic:
pointer = -1 # will be incremented
else: # abort history_next
self.text.bell()
return
nprefix = len(prefix)
while 1:
pointer += -1 if reverse else 1
if pointer < 0 or pointer >= nhist:
self.text.bell()
if not self.cyclic and pointer < 0: # abort history_prev
return
else:
if self.text.get("iomark", "end-1c") != prefix:
self.text.delete("iomark", "end-1c")
self.text.insert("iomark", prefix)
pointer = prefix = None
break
item = self.history[pointer]
if item[:nprefix] == prefix and len(item) > nprefix:
self.text.delete("iomark", "end-1c")
self.text.insert("iomark", item)
break
self.text.see("insert")
self.text.tag_remove("sel", "1.0", "end")
self.pointer = pointer
self.prefix = prefix
def store(self, source):
"Store Shell input statement into history list."
source = source.strip()
if len(source) > 2:
# avoid duplicates
try:
self.history.remove(source)
except ValueError:
pass
self.history.append(source)
self.pointer = None
self.prefix = None
if __name__ == "__main__":
from test import test_support as support
support.use_resources = ['gui']
from unittest import main
main('idlelib.idle_test.test_idlehistory', verbosity=2, exit=False)
| gpl-2.0 | 6,686,773,657,945,853,000 | 37.990566 | 80 | 0.548714 | false |
Youwotma/splash | splash/kernel/kernel.py | 1 | 9476 | # -*- coding: utf-8 -*-
from __future__ import absolute_import
import os
import six
import sys
import lupa
from ipykernel.kernelapp import IPKernelApp
from ipykernel.eventloops import loop_qt5
from jupyter_client.kernelspec import install_kernel_spec
from twisted.internet import defer
import splash
from splash.lua import get_version, get_main_sandboxed, get_main
from splash.browser_tab import BrowserTab
from splash.lua_runtime import SplashLuaRuntime
from splash.qtrender_lua import (
Splash, MainCoroutineRunner, StoredExceptions, Extras
)
from splash.qtutils import init_qt_app
from splash.render_options import RenderOptions
from splash import defaults
from splash.kernel.kernelbase import Kernel
from splash.utils import BinaryCapsule
from splash.kernel.completer import Completer
from splash.kernel.inspections import Inspector
from splash.kernel.errors import error_repr
import splash.server as server
def install(user=True):
""" Install IPython kernel specification """
name = 'splash-py2' if six.PY2 else 'splash-py3'
folder = os.path.join(os.path.dirname(__file__), 'kernels', name)
install_kernel_spec(folder, kernel_name="splash", user=user, replace=True)
def init_browser(network_manager_factory):
# TODO: support the same command-line options as HTTP server.
# from splash.server import start_logging
# class opts(object):
# logfile = "./kernel.log"
# start_logging(opts)
proxy_factory = None # TODO
data = {}
data['uid'] = id(data)
tab = BrowserTab(
network_manager=network_manager_factory(),
splash_proxy_factory=proxy_factory,
verbosity=2, # TODO
render_options=RenderOptions(data, defaults.MAX_TIMEOUT), # TODO: timeout
visible=True,
)
return tab
class DeferredSplashRunner(object):
def __init__(self, lua, splash, sandboxed, log=None, render_options=None):
self.lua = lua
self.splash = splash
self.sandboxed = sandboxed
if log is None:
self.log = self.splash.tab.logger.log
else:
self.log = log
self.runner = MainCoroutineRunner(
lua=self.lua,
log=self.log,
splash=splash,
sandboxed=self.sandboxed,
)
def run(self, main_coro):
"""
Run main_coro Lua coroutine, passing it a Splash
instance as an argument. Return a Deferred.
"""
d = defer.Deferred()
def return_result(result):
d.callback(result)
def return_error(err):
d.errback(err)
self.runner.start(
main_coro=main_coro,
return_result=return_result,
return_error=return_error,
)
return d
class SplashKernel(Kernel):
implementation = 'Splash'
implementation_version = splash.__version__
language = 'Lua'
language_version = get_version()
language_info = {
'name': 'Splash',
'mimetype': 'application/x-lua',
'display_name': 'Splash',
'language': 'lua',
'codemirror_mode': {
"name": "text/x-lua",
},
'file_extension': '.lua',
'pygments_lexer': 'lua',
'version': get_version(),
}
banner = "Splash kernel - write browser automation scripts interactively"
help_links = [
{
'text': "Splash Tutorial",
'url': 'http://splash.readthedocs.org/en/latest/scripting-tutorial.html'
},
{
'text': "Splash Reference",
'url': 'http://splash.readthedocs.org/en/latest/scripting-ref.html'
},
{
'text': "Programming in Lua",
'url': 'http://www.lua.org/pil/contents.html'
},
{
'text': "Lua 5.2 Manual",
'url': 'http://www.lua.org/manual/5.2/'
},
]
sandboxed = False
def __init__(self, **kwargs):
super(SplashKernel, self).__init__(**kwargs)
self.tab = init_browser(SplashKernel.network_manager_factory)
self.lua = SplashLuaRuntime(self.sandboxed, "", ())
self.exceptions = StoredExceptions()
self.splash = Splash(
lua=self.lua,
exceptions=self.exceptions,
tab=self.tab
)
self.lua.add_to_globals("splash", self.splash.get_wrapped())
self.extras = Extras(self.lua, self.exceptions)
self.extras.inject_to_globals()
self.runner = DeferredSplashRunner(self.lua, self.splash, self.sandboxed) #, self.log_msg)
self.completer = Completer(self.lua)
self.inspector = Inspector(self.lua)
#
# try:
# sys.stdout.write = self._print
# sys.stderr.write = self._print
# except:
# pass # Can't change stdout
def send_execute_reply(self, stream, ident, parent, md, reply_content):
def done(result):
reply, result, ct = result
if result:
data = {
'text/plain': result if isinstance(result, six.text_type) else str(result),
}
if isinstance(result, BinaryCapsule):
if result.content_type in {'image/png', 'image/jpeg'}:
data[result.content_type] = result.as_b64()
self._publish_execute_result(parent, data, {}, self.execution_count)
super(SplashKernel, self).send_execute_reply(stream, ident, parent, md, reply)
assert isinstance(reply_content, defer.Deferred)
reply_content.addCallback(done)
def do_execute(self, code, silent, store_history=True, user_expressions=None,
allow_stdin=False):
def success(res):
result, content_type, headers, status_code = res
reply = {
'status': 'ok',
'execution_count': self.execution_count,
'payload': [],
'user_expressions': {},
}
return reply, result, content_type or 'text/plain'
def error(failure):
text = "<unknown error>"
try:
failure.raiseException()
except Exception as e:
text = error_repr(e)
reply = {
'status': 'error',
'execution_count': self.execution_count,
'ename': '',
'evalue': text,
'traceback': []
}
return reply, text, 'text/plain'
try:
try:
# XXX: this ugly formatting is important for exception
# line numbers to be displayed properly!
lua_source = 'local repr = require("repr"); function main(splash) return repr(%s) end' % code
main_coro = self._get_main(lua_source)
except lupa.LuaSyntaxError:
try:
lines = code.splitlines(False)
lua_source = '''local repr = require("repr"); function main(splash) %s
return repr(%s)
end
''' % ("\n".join(lines[:-1]), lines[-1])
main_coro = self._get_main(lua_source)
except lupa.LuaSyntaxError:
lua_source = "function main(splash) %s end" % code
main_coro = self._get_main(lua_source)
except (lupa.LuaSyntaxError, lupa.LuaError) as e:
d = defer.Deferred()
d.addCallbacks(success, error)
d.errback(e)
return d
except Exception:
d = defer.Deferred()
d.addCallbacks(success, error)
d.errback()
return d
d = self.runner.run(main_coro)
d.addCallbacks(success, error)
return d
def do_complete(self, code, cursor_pos):
return self.completer.complete(code, cursor_pos)
def do_inspect(self, code, cursor_pos, detail_level=0):
return self.inspector.help(code, cursor_pos, detail_level)
def _publish_execute_result(self, parent, data, metadata, execution_count):
msg = {
u'data': data,
u'metadata': metadata,
u'execution_count': execution_count
}
self.session.send(self.iopub_socket, u'execute_result', msg,
parent=parent, ident=self._topic('execute_result')
)
def log_msg(self, text, min_level=2):
self._print(text + "\n")
def _print(self, message):
stream_content = {'name': 'stdout', 'text': message, 'metadata': dict()}
self.log.debug('Write: %s' % message)
self.send_response(self.iopub_socket, 'stream', stream_content)
def _get_main(self, lua_source):
if self.sandboxed:
main, env = get_main_sandboxed(self.lua, lua_source)
else:
main, env = get_main(self.lua, lua_source)
return self.lua.create_coroutine(main)
def server_factory(network_manager_factory, verbosity, **kwargs):
init_qt_app(verbose=verbosity >= 5)
SplashKernel.network_manager_factory = network_manager_factory
kernel = IPKernelApp.instance(kernel_class=SplashKernel)
kernel.initialize()
kernel.kernel.eventloop = loop_qt5
kernel.start()
def start():
splash_args = os.environ.get('SPLASH_ARGS', '').split()
server.main(jupyter=True, argv=splash_args, server_factory=server_factory)
| bsd-3-clause | 2,472,990,847,262,979,600 | 32.249123 | 109 | 0.577353 | false |
cmclaughlin/cabot | setup.py | 1 | 1818 | #!/usr/bin/env python
from setuptools import setup, find_packages
from os import environ as env
# pull in active plugins
plugins = env['CABOT_PLUGINS_ENABLED'].split(',') if 'CABOT_PLUGINS_ENABLED' in env else ["cabot_alert_hipchat", "cabot_alert_twilio", "cabot_alert_email"]
setup(
name='cabot',
version='0.0.1-dev',
description="Self-hosted, easily-deployable monitoring and alerts service"
" - like a lightweight PagerDuty",
long_description=open('README.md').read(),
author="Arachnys",
author_email='[email protected]',
url='http://cabotapp.com',
license='MIT',
install_requires=[
'Django==1.6.8',
'Markdown==2.5',
'PyJWT==0.1.2',
'South==1.0',
'amqp==1.4.9',
'anyjson==0.3.3',
'argparse==1.2.1',
'billiard==3.3.0.23',
'celery==3.1.23',
'distribute==0.7.3',
'dj-database-url==0.2.2',
'django-appconf==0.6',
'django-celery==3.1.1',
'django-celery-with-redis==3.0',
'django-compressor==1.4',
'django-filter==0.7',
'django-jsonify==0.2.1',
'django-polymorphic==0.5.6',
'django-redis==1.4.5',
'django-smtp-ssl==1.0',
'djangorestframework==2.4.2',
'gunicorn==18.0',
'gevent==1.0.1',
'hiredis==0.1.1',
'httplib2==0.7.7',
'icalendar==3.2',
'kombu==3.0.34',
'mock==1.0.1',
'psycogreen==1.0',
'psycopg2==2.5.1',
'pytz==2014.10',
'redis==2.9.0',
'requests==2.9.1',
'six==1.5.1',
'twilio==3.4.1',
'wsgiref==0.1.2',
'python-dateutil==2.1',
'django-auth-ldap==1.2.6',
] + plugins,
packages=find_packages(),
include_package_data=True,
zip_safe=False,
)
| mit | -5,933,584,139,439,844,000 | 28.803279 | 155 | 0.528603 | false |
smARTLab-liv/smartlabatwork-release | slaw_smach/src/slaw_smach/slaw_smach.py | 1 | 11996 | #!/usr/bin/env python
import rospy
from ArmStates import *
from MoveStates import *
from ObjectDetectState import *
from DecisionStates import *
from std_srvs.srv import Empty, EmptyResponse
from std_msgs.msg import Bool
## TODO after Eindhoven: Add failsafe if hole not detected
## add states if object too far or too close to gripper
class Smach():
def __init__(self):
rospy.init_node('slaw_smach')
self.sm = smach.StateMachine(outcomes=['end'])
with self.sm:
### MOVE STATE WITH RECOVER
smach.StateMachine.add('MoveToNext', MoveStateUserData(), transitions = {'reached':'DecideAfterMove', 'not_reached': 'RecoverMove', 'failed': 'DeleteCurGoal'}, remapping = {'pose_in':'pose', 'pose_out':'pose'})
smach.StateMachine.add('RecoverMove', RecoverState(), transitions = {'done':'MoveToNext'}, remapping = {'pose_in':'pose', 'pose_out': 'pose'})
### END MOVE STATE WITH RECOVER
##Decision state after Move:
smach.StateMachine.add('DecideAfterMove', DecideAfterMoveState(),transitions = {'BNT': 'ScanMatcher_BNT', 'Pickup':'ScanMatcher_Pickup', 'Place':'ScanMatcher_Place', 'End':'end'}, remapping = {'pose_in':'pose', 'pose_out':'pose'})
######BNT SPECIFIC
smach.StateMachine.add('ScanMatcher_BNT', ScanMatcher(), transitions = {'reached':'SleepState', 'not_reached':'ScanMatcher_BNT', 'failed':'SleepState'}, remapping = {'pose_in':'pose', 'pose_out':'pose'})
smach.StateMachine.add('SleepState', SleepState(), transitions = {'done':'DeleteCurGoal'}, remapping = {'pose_in':'pose', 'pose_out':'pose'})
########END BNT
##### DELETE CURRENT GOAL OR GET NEXT GOAL
smach.StateMachine.add('DeleteCurGoal', DeleteCurrentGoalState(), transitions = {'done':'MoveToNext'}, remapping = {'pose_in':'pose', 'pose_out':'pose'})
smach.StateMachine.add('GetNextGoal', GetNextGoalState(), transitions = {'done':'MoveToNext'}, remapping = {'pose_in':'pose','object_in':'object', 'pose_out':'pose'})
##### END DELETE CURRENT GOAL OR GET NEXT GOAL
### PICKUP
smach.StateMachine.add('ScanMatcher_Pickup', ScanMatcher(), transitions = {'reached':'DecideBeforePreGrip', 'not_reached':'ScanMatcher_Pickup', 'failed':'MoveToNext'}, remapping = {'pose_in':'pose', 'pose_out':'pose'})
#smach.StateMachine.add('ScanMatcher_Pickup', ScanMatcher(), transitions = {'reached':'ScanMatcher_Align', 'not_reached':'ScanMatcher_Pickup', 'failed':'MoveToNext'}, remapping = {'pose_in':'pose', 'pose_out':'pose'})
#smach.StateMachine.add('ScanMatcher_Align', AlignState(), transitions = {'done':'DecideBeforePreGrip'})
##
#Either CBT Pickup or normal Pickup
smach.StateMachine.add('DecideBeforePreGrip', DecideBeforePreGripState(),transitions = {'CBT': 'PreGrip_CBT', 'Pickup':'PreGrip'}, remapping = {'pose_in':'pose', 'pose_out':'pose', 'dist_out':'dist'})
######CBT STUFF
smach.StateMachine.add('PreGrip_CBT', PreGripCBT(), transitions = {'success':'ScanForObjectCBT', 'failed':'TuckArmPreGripCBT'},remapping = {'pose_in':'pose', 'pose_out':'pose'})
smach.StateMachine.add('TuckArmPreGripCBT', TuckArm(), transitions = {'success':'PreGrip_CBT', 'not_reached':'TuckArmPreGripCBT','failed':'end'})
smach.StateMachine.add('ScanForObjectCBT', ScanForObjectCBT(), transitions = {'success':'GripCBT'})
smach.StateMachine.add('GripCBT', GripCBT(), transitions = {'end':'DeleteCurGoal'})
#### END CBT Stuff
### NORMAL PICKUP
smach.StateMachine.add('PreGrip', PreGrip(), transitions = {'success':'Scan', 'failed':'TuckArmPreGrip'},remapping = {'pose_in':'pose', 'pose_out':'pose'})
smach.StateMachine.add('TuckArmPreGrip', TuckArm(), transitions = {'success':'PreGrip', 'not_reached':'TuckArmPreGrip','failed':'end'})
#scan
smach.StateMachine.add("Scan", ScanForObjectsState(), transitions = {'success': 'Grip', 'failed':'TuckArmMoveNext','nothing_found': 'TuckArmDelete'}, remapping = {'pose_in':'pose', 'pose_out':'pose', 'object_out':'object', 'point_out':'point', 'dist_in':'dist','dist_out':'dist'})
#if misdetection try again
smach.StateMachine.add('TuckArmMoveNext', TuckArm(), transitions = {'success':'MoveToNext', 'not_reached':'TuckArmMoveNext','failed':'end'})
#if nothing found try next Goal
smach.StateMachine.add('TuckArmDelete', TuckArm(), transitions = {'success':'DeleteCurGoal', 'not_reached':'TuckArmDelete','failed':'end'})
#Grip Object
smach.StateMachine.add("Grip", Grip(), transitions = {'success':'DecideRV20', 'too_far':'ScanMatcher_Pickup', 'failed':'TuckArmFailGrip', 'failed_after_grip':'TuckArmGrip'}, remapping = {'pose_in':'pose', 'object_in':'object', 'point_in':'point','pose_out':'pose', 'object_out':'object', 'point_out':'point'})
#Decide RV20:
smach.StateMachine.add('DecideRV20', DecideRV20State(),transitions = {'RV20': 'TuckForDriveAfterGrip', 'Normal':'TuckForDriveAfterGrip'}, remapping = {'object_in':'object', 'object_out':'object'})
#smach.StateMachine.add('DecideRV20', DecideRV20State(),transitions = {'RV20': 'RV20CheckArm', 'Normal':'TuckForDriveAfterGrip'}, remapping = {'object_in':'object', 'object_out':'object', 'pose_out':'pose'})
####CHECK if RV20 which one
smach.StateMachine.add('RV20CheckArm', RV20CheckState(), transitions = {'success':'RV20CheckVision','failed':'TuckArmPreCheckArm'}, remapping = {'pose_in':'pose'})
smach.StateMachine.add('TuckArmPreCheckArm', TuckArm(), transitions = {'success':'RV20CheckArm', 'not_reached':'TuckArmPreCheckArm','failed':'end'})
#smach.StateMachine.add('RV20CheckVision', RV20CheckVision(), transitions = {'success':'RV20RotateTake','failed':'RV20RotateReplace'}, remapping = {'pose_in':'pose', 'object_in':'object', 'pose_out':'pose'})
smach.StateMachine.add('RV20CheckVision', RV20CheckVision(), transitions = {'success':'RV20RotateTake','failed':'RV20Trash'}, remapping = {'pose_in':'pose', 'object_in':'object', 'pose_out':'pose'})
smach.StateMachine.add('RV20Trash', RV20Trash(), transitions = {'done':'PreGrip'})
#smach.StateMachine.add('RV20RotateReplace', RV20ReplaceObjectRotate(), transitions = {'success':'RV20Replace','failed':'RV20Replace'}, remapping = {'pose_in':'pose'})
smach.StateMachine.add('RV20RotateTake', RV20ReplaceObjectRotate(), transitions = {'success':'TuckForDriveAfterGrip','failed':'TuckForDriveAfterGrip'}, remapping = {'pose_in':'pose'})
#smach.StateMachine.add('RV20Replace', FinePlace(), transitions = {'success':'RV20ReplaceUp', 'failed':'TuckArmFailPlace_RV20', 'too_far':'RV20Replace','failed_after_place':'TuckArmFailPlace_RV20'}, remapping = {'object_in':'object','pose_in':'pose', 'pose_out':'pose', 'point_in':'point'})
#smach.StateMachine.add('TuckArmFailPlace_RV20', TuckArm(), transitions = {'success':'RV20Replace', 'not_reached':'TuckArmFailPlace_RV20','failed':'end'})
#smach.StateMachine.add('RV20ReplaceUp', RV20ReplaceUp(), transitions = {'done':'MoveBack10'})
#MoveBack 10 to skip object and resume scanning
#smach.StateMachine.add('MoveBack10', MoveBack(0.10), transitions = {'done':'Remove10'}, remapping = {'pose_in':'pose', 'pose_out':'pose'})
#smach.StateMachine.add('Remove10', RemoveDist(0.10), transitions = {'done':'PreGrip'}, remapping = {'dist_in':'dist', 'dist_out':'dist'})
#Tuck and Move away
##Tuck For Drive
smach.StateMachine.add('TuckForDriveAfterGrip', TuckForDrive(), transitions={'done':'MoveAwayFromPlatform'}, remapping = {'pose_in':'pose'} )
smach.StateMachine.add('TuckArmGrip', TuckArm(), transitions = {'success':'MoveAwayFromPlatform', 'not_reached':'TuckArmGrip','failed':'end'})
smach.StateMachine.add('TuckArmFailGrip', TuckArm(), transitions = {'success':'MoveToNext', 'not_reached':'TuckArmFailGrip','failed':'end'})
smach.StateMachine.add('MoveAwayFromPlatform', RecoverState(), transitions = {'done':'MoveToPlace'})
### Move to Place location
smach.StateMachine.add('MoveToPlace', MoveStateUserData(), transitions = {'reached': 'ScanMatcher_Place', 'not_reached': 'MoveAwayFromPlatform', 'failed': 'MoveAwayFromPlatform'}, remapping = {'pose_in':'pose', 'pose_out':'pose'})
smach.StateMachine.add('ScanMatcher_Place', ScanMatcher(), transitions = {'reached':'DecideBeforePlace', 'not_reached':'ScanMatcher_Place', 'failed':'DecideBeforePlace'}, remapping = {'pose_in':'pose', 'suffix_in':'suffix', 'pose_out':'pose'})
#### Decide either Normal place or PPT place
smach.StateMachine.add('DecideBeforePlace', DecideBeforePlaceState(),transitions = {'PPT': 'PreScanHole', 'Normal':'MoveBack'}, remapping = {'object_in':'object', 'object_out':'object'})
####PPT
smach.StateMachine.add('PreScanHole', PreGrip(), transitions = {'success':'ScanHole', 'failed':'TuckArmPreScan'},remapping = {'pose_in':'pose', 'pose_out':'pose'})
smach.StateMachine.add('TuckArmPreScan', TuckArm(), transitions = {'success':'PreScanHole', 'not_reached':'TuckArmPreScan','failed':'end'})
smach.StateMachine.add("ScanHole", ScanForHoles(), transitions = {'success': 'FinePlace', 'failed':'ScanMatcher_Place','nothing_found': 'ScanMatcher_Place'}, remapping = {'pose_in':'pose', 'pose_out':'pose', 'object_in':'object', 'object_out':'object', 'point_out':'point'})
smach.StateMachine.add('FinePlace', FinePlace(), transitions = {'success':'TuckForDriveAfterPlace', 'failed':'TuckArmFailPlace_PPT', 'too_far':'ScanMatcher_Place','failed_after_place':'TuckArmFailPlace_PPT'}, remapping = {'object_in':'object','pose_in':'pose', 'pose_out':'pose', 'point_in':'point'})
smach.StateMachine.add('TuckArmFailPlace_PPT', TuckArm(), transitions = {'success':'FinePlace', 'not_reached':'TuckArmFailPlace_PPT','failed':'end'})
### END PPT
##NORMAL PLACE
smach.StateMachine.add('MoveBack', MoveBack(0.25), transitions = {'done':'Place'}, remapping = {'pose_in':'pose', 'pose_out':'pose'})
smach.StateMachine.add('Place', Place(), transitions = {'success':'TuckForDriveAfterPlace', 'failed':'TuckArmFailPlace'}, remapping = {'pose_in':'pose', 'pose_out':'pose'})
##Tuck For Drive
smach.StateMachine.add('TuckForDriveAfterPlace', TuckForDrive(), transitions={'done':'MoveAwayFromPlatformAfterPlace'}, remapping = {'pose_in':'pose'} )
smach.StateMachine.add('TuckArmFailPlace', TuckArm(), transitions = {'success':'Place', 'not_reached':'TuckArmFailPlace','failed':'end'})
smach.StateMachine.add('MoveAwayFromPlatformAfterPlace', RecoverState(), transitions = {'done':'GetNextGoal'})
# Create and start the introspection server
self.sis = smach_ros.IntrospectionServer('server_name', self.sm, '/SLAW_SMACH')
self.sis.start()
self.serv = rospy.Service("/start_SMACH", Empty, self.go)
def go(self, req):
#sm.userdata.pose = "D2"
print "Starting SMACH"
locations = rospy.get_param('locations')
self.sm.userdata.pose = locations[0]
#self.sm.userdata.suffix = "_grip"
self.sm.execute()
return EmptyResponse()
def stop(self):
self.sis.stop()
if __name__ == '__main__':
smach = Smach()
rospy.spin()
smach.stop()
| mit | -3,893,603,533,906,580,000 | 68.744186 | 321 | 0.635128 | false |
mtrdesign/pylogwatch | pylogwatch/logwlib.py | 1 | 5917 | # Python 2.5 compatibility
from __future__ import with_statement
# Python version
import sys
if sys.version_info < (2, 5):
raise "Required python 2.5 or greater"
import os, sqlite3, itertools, time
from datetime import datetime
PROJECT_DIR = os.path.abspath(os.path.dirname(__file__))
proj_path = lambda x: os.path.abspath(os.path.join(PROJECT_DIR,x))
# Check if we are bundled together with raven, and add our dir to the pythonpath if we are
if os.path.exists(proj_path( 'raven')):
sys.path.append(PROJECT_DIR)
from raven import Client
def item_import(name):
d = name.rfind(".")
classname = name[d+1:]
m = __import__(name[:d], globals(), locals(), [classname])
return getattr(m, classname)
class PyLog (object):
def __init__ (self, filenames, dbname = 'logw.db', filetable = 'file_cursor', eventtable = 'events'):
self._filetable = filetable
self._eventtable = eventtable
self.conn = self.init_db(dbname)
self.curs = self.conn.cursor()
self.fnames = filenames
def init_db (self, dbname):
"""Set up the DB"""
conn = sqlite3.connect (dbname)
curs = conn.cursor()
sql = 'create table if not exists file_cursor (filename TEXT PRIMARY KEY, inode INTEGER, lastbyte INTEGER, updated INTEGER)'
curs.execute (sql)
sql = 'create table if not exists events (event TEXT PRIMARY KEY, args TEXT, updated INTEGER)'
curs.execute (sql)
conn.commit()
return conn
def readlines (self, f, lastpos = 0):
"""Read full lines from the file object f starting from lastpos"""
self.save_fileinfo (f.name, os.stat(f.name)[1], lastpos)
f.seek(lastpos)
result = []
for line in f:
# handle lines that are not yet finished (no \n)
curpos = f.tell()
if not line.endswith('\n'):
f.seek(curpos)
raise StopIteration
yield line
def get_fileinfo (self, fname):
self.curs.execute ('SELECT filename, inode, lastbyte from file_cursor where filename=?', [fname,])
result = self.curs.fetchone()
if result and len(result)==3:
f, inode, lastbyte = result
return inode,lastbyte
else:
return None,0
def save_fileinfo (self, fname, inode, lastbyte):
self.curs.execute ("REPLACE into file_cursor (filename, inode, lastbyte, updated) \
values (?,?,?,datetime())", [fname,inode, lastbyte ])
self.conn.commit()
return
def update_bytes (self,fname, lastbyte):
"""
Only updates the lastbyte property of a file, without touching the inode.
Meant for calling after each line is processed
"""
        self.curs.execute ("UPDATE file_cursor SET lastbyte=? WHERE filename=?",
                           [lastbyte, fname])
        self.conn.commit()
        return
def process_lines (self, fname, lines):
"""Dummy line processor - should be overridden"""
raise NotImplementedError
def open_rotated_version(self, fname):
sufxs = ['.1','.1.gz','.0']
for sufx in sufxs:
newname = fname + sufx
if not os.path.exists (newname):
continue
try:
f = open(newname)
return f
except:
continue
def run (self):
for fn in self.fnames:
if not os.path.exists (fn):
continue
newlines = []
rotated = None
lastinode, lastbyte = self.get_fileinfo (fn)
if lastbyte and not lastinode == os.stat(fn)[1]:
# handle rotated files
rotated = self.open_rotated_version(fn)
if rotated:
newlines = self.readlines (rotated, lastbyte)
lastbyte = 0
self.process_lines (fn, rotated, newlines)
try:
f = open(fn)
except:
continue
self.process_lines (fn, f, self.readlines (f, lastbyte))
lastbyte = f.tell()
lastinode = os.stat(fn)[1]
f.close()
self.save_fileinfo (fn, lastinode, lastbyte)
if rotated:
rotated.close()
class PyLogConf (PyLog):
def __init__ (self, conf):
"""
Initialize object based on the provided configuration
"""
self.conf = conf
self.client = Client (conf.RAVEN['dsn'])
self.formatters = {}
for k,v in self.conf.FILE_FORMATTERS.iteritems():
if isinstance(v,str):
raise ValueError ('Please use a list or a tuple for the file formatters values')
self.formatters[k] = [item_import(i)() for i in v]
dbname = os.path.join(os.path.dirname(conf.__file__),'pylogwatch.db')
return super(PyLogConf, self).__init__ (self.conf.FILE_FORMATTERS.keys(), dbname = dbname)
def process_lines (self, fname, fileobject, lines):
"""Main workhorse. Called with the filename that is being logged and an iterable of lines"""
for line in lines:
paramdict = {}
data = {'event_type':'Message', 'message': line.replace('%','%%'), 'data' :{'logger':fname}}
for fobj in self.formatters[fname]:
fobj.format_line(line, data, paramdict)
if not data.pop('_do_not_send', False): # Skip lines that have the '_do_not_send' key
if paramdict:
data['params'] = tuple([paramdict[i] for i in sorted(paramdict.keys())])
if self.conf.DEBUG:
print data
self.client.capture(**data)
self.update_bytes(fname, fileobject.tell())
| gpl-3.0 | -7,848,765,276,239,736,000 | 36.449367 | 132 | 0.564982 | false |
googlemaps/google-maps-services-python | googlemaps/convert.py | 1 | 10197 | #
# Copyright 2014 Google Inc. All rights reserved.
#
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
#
"""Converts Python types to string representations suitable for Maps API server.
For example:
sydney = {
"lat" : -33.8674869,
"lng" : 151.2069902
}
convert.latlng(sydney)
# '-33.8674869,151.2069902'
"""
def format_float(arg):
"""Formats a float value to be as short as possible.
Truncates float to 8 decimal places and trims extraneous
trailing zeros and period to give API args the best
possible chance of fitting within 2000 char URL length
restrictions.
For example:
format_float(40) -> "40"
format_float(40.0) -> "40"
format_float(40.1) -> "40.1"
format_float(40.001) -> "40.001"
format_float(40.0010) -> "40.001"
format_float(40.000000001) -> "40"
format_float(40.000000009) -> "40.00000001"
:param arg: The lat or lng float.
:type arg: float
:rtype: string
"""
return ("%.8f" % float(arg)).rstrip("0").rstrip(".")
def latlng(arg):
"""Converts a lat/lon pair to a comma-separated string.
For example:
sydney = {
"lat" : -33.8674869,
"lng" : 151.2069902
}
convert.latlng(sydney)
# '-33.8674869,151.2069902'
For convenience, also accepts lat/lon pair as a string, in
which case it's returned unchanged.
:param arg: The lat/lon pair.
:type arg: string or dict or list or tuple
"""
if is_string(arg):
return arg
normalized = normalize_lat_lng(arg)
return "%s,%s" % (format_float(normalized[0]), format_float(normalized[1]))
def normalize_lat_lng(arg):
"""Take the various lat/lng representations and return a tuple.
Accepts various representations:
1) dict with two entries - "lat" and "lng"
2) list or tuple - e.g. (-33, 151) or [-33, 151]
:param arg: The lat/lng pair.
:type arg: dict or list or tuple
:rtype: tuple (lat, lng)
"""
if isinstance(arg, dict):
if "lat" in arg and "lng" in arg:
return arg["lat"], arg["lng"]
if "latitude" in arg and "longitude" in arg:
return arg["latitude"], arg["longitude"]
# List or tuple.
if _is_list(arg):
return arg[0], arg[1]
raise TypeError(
"Expected a lat/lng dict or tuple, "
"but got %s" % type(arg).__name__)
def location_list(arg):
"""Joins a list of locations into a pipe separated string, handling
the various formats supported for lat/lng values.
For example:
p = [{"lat" : -33.867486, "lng" : 151.206990}, "Sydney"]
convert.waypoint(p)
# '-33.867486,151.206990|Sydney'
:param arg: The lat/lng list.
:type arg: list
:rtype: string
"""
if isinstance(arg, tuple):
# Handle the single-tuple lat/lng case.
return latlng(arg)
else:
return "|".join([latlng(location) for location in as_list(arg)])
def join_list(sep, arg):
"""If arg is list-like, then joins it with sep.
:param sep: Separator string.
:type sep: string
:param arg: Value to coerce into a list.
:type arg: string or list of strings
:rtype: string
"""
return sep.join(as_list(arg))
def as_list(arg):
"""Coerces arg into a list. If arg is already list-like, returns arg.
Otherwise, returns a one-element list containing arg.
:rtype: list
"""
if _is_list(arg):
return arg
return [arg]
def _is_list(arg):
"""Checks if arg is list-like. This excludes strings and dicts."""
if isinstance(arg, dict):
return False
if isinstance(arg, str): # Python 3-only, as str has __iter__
return False
return _has_method(arg, "__getitem__") if not _has_method(arg, "strip") else _has_method(arg, "__iter__")
def is_string(val):
"""Determines whether the passed value is a string, safe for 2/3."""
try:
basestring
except NameError:
return isinstance(val, str)
return isinstance(val, basestring)
def time(arg):
"""Converts the value into a unix time (seconds since unix epoch).
For example:
convert.time(datetime.now())
# '1409810596'
:param arg: The time.
:type arg: datetime.datetime or int
"""
# handle datetime instances.
if _has_method(arg, "timestamp"):
arg = arg.timestamp()
if isinstance(arg, float):
arg = int(arg)
return str(arg)
def _has_method(arg, method):
"""Returns true if the given object has a method with the given name.
:param arg: the object
:param method: the method name
:type method: string
:rtype: bool
"""
return hasattr(arg, method) and callable(getattr(arg, method))
def components(arg):
"""Converts a dict of components to the format expected by the Google Maps
server.
For example:
c = {"country": "US", "postal_code": "94043"}
convert.components(c)
# 'country:US|postal_code:94043'
:param arg: The component filter.
:type arg: dict
:rtype: basestring
"""
# Components may have multiple values per type, here we
# expand them into individual key/value items, eg:
# {"country": ["US", "AU"], "foo": 1} -> "country:AU", "country:US", "foo:1"
def expand(arg):
for k, v in arg.items():
for item in as_list(v):
yield "%s:%s" % (k, item)
if isinstance(arg, dict):
return "|".join(sorted(expand(arg)))
raise TypeError(
"Expected a dict for components, "
"but got %s" % type(arg).__name__)
def bounds(arg):
"""Converts a lat/lon bounds to a comma- and pipe-separated string.
Accepts two representations:
1) string: pipe-separated pair of comma-separated lat/lon pairs.
2) dict with two entries - "southwest" and "northeast". See convert.latlng
for information on how these can be represented.
For example:
sydney_bounds = {
"northeast" : {
"lat" : -33.4245981,
"lng" : 151.3426361
},
"southwest" : {
"lat" : -34.1692489,
"lng" : 150.502229
}
}
convert.bounds(sydney_bounds)
    # '-34.1692489,150.502229|-33.4245981,151.3426361'
:param arg: The bounds.
:type arg: dict
"""
if is_string(arg) and arg.count("|") == 1 and arg.count(",") == 2:
return arg
elif isinstance(arg, dict):
if "southwest" in arg and "northeast" in arg:
return "%s|%s" % (latlng(arg["southwest"]),
latlng(arg["northeast"]))
raise TypeError(
"Expected a bounds (southwest/northeast) dict, "
"but got %s" % type(arg).__name__)
def size(arg):
    """Converts a size int or (width, height) pair to a "WIDTHxHEIGHT" string.
    :param arg: The size.
    :type arg: int or list
    :rtype: string
    """
if isinstance(arg, int):
return "%sx%s" % (arg, arg)
elif _is_list(arg):
return "%sx%s" % (arg[0], arg[1])
raise TypeError(
"Expected a size int or list, "
"but got %s" % type(arg).__name__)
def decode_polyline(polyline):
"""Decodes a Polyline string into a list of lat/lng dicts.
See the developer docs for a detailed description of this encoding:
https://developers.google.com/maps/documentation/utilities/polylinealgorithm
:param polyline: An encoded polyline
:type polyline: string
:rtype: list of dicts with lat/lng keys
"""
points = []
index = lat = lng = 0
while index < len(polyline):
result = 1
shift = 0
while True:
b = ord(polyline[index]) - 63 - 1
index += 1
result += b << shift
shift += 5
if b < 0x1f:
break
lat += (~result >> 1) if (result & 1) != 0 else (result >> 1)
result = 1
shift = 0
while True:
b = ord(polyline[index]) - 63 - 1
index += 1
result += b << shift
shift += 5
if b < 0x1f:
break
lng += ~(result >> 1) if (result & 1) != 0 else (result >> 1)
points.append({"lat": lat * 1e-5, "lng": lng * 1e-5})
return points
def encode_polyline(points):
"""Encodes a list of points into a polyline string.
See the developer docs for a detailed description of this encoding:
https://developers.google.com/maps/documentation/utilities/polylinealgorithm
:param points: a list of lat/lng pairs
:type points: list of dicts or tuples
:rtype: string
"""
last_lat = last_lng = 0
result = ""
for point in points:
ll = normalize_lat_lng(point)
lat = int(round(ll[0] * 1e5))
lng = int(round(ll[1] * 1e5))
d_lat = lat - last_lat
d_lng = lng - last_lng
for v in [d_lat, d_lng]:
v = ~(v << 1) if v < 0 else v << 1
while v >= 0x20:
result += (chr((0x20 | (v & 0x1f)) + 63))
v >>= 5
result += (chr(v + 63))
last_lat = lat
last_lng = lng
return result
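# Illustrative round-trip check (added note, not part of the original module):
# encoding a short path and decoding it again reproduces the points to within
# the 1e-5 degree resolution of the polyline format.
#   pts = [{"lat": 38.5, "lng": -120.2}, {"lat": 40.7, "lng": -120.95}]
#   decode_polyline(encode_polyline(pts))
#   # -> [{'lat': 38.5, 'lng': -120.2}, {'lat': 40.7, 'lng': -120.95}]  (up to float rounding)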
def shortest_path(locations):
"""Returns the shortest representation of the given locations.
The Elevations API limits requests to 2000 characters, and accepts
multiple locations either as pipe-delimited lat/lng values, or
an encoded polyline, so we determine which is shortest and use it.
:param locations: The lat/lng list.
:type locations: list
:rtype: string
"""
if isinstance(locations, tuple):
# Handle the single-tuple lat/lng case.
locations = [locations]
encoded = "enc:%s" % encode_polyline(locations)
unencoded = location_list(locations)
if len(encoded) < len(unencoded):
return encoded
else:
return unencoded
| apache-2.0 | 7,628,449,255,522,140,000 | 25.417098 | 109 | 0.5939 | false |
mo-ki/pypgpwords | pypgpwords.py | 1 | 13257 | #!/usr/bin/python3
"""Define a PGPWords object inherited from bytearray.
Adding initialization via hex-, or pgp-word-string,
adding .hex() method and
overriding __str__
Mainline code:
Convert pgp words to hex strings and vice versa.
Example:
$ pypgpwords.py DEAD 1337
tactics perceptive Aztec consensus
or
$ pypgpwords.py absurd bodyguard baboon unicorn
0116 14EC
[email protected]
"""
from __future__ import print_function
import sys
SEPARATOR = " "
EVEN = ("aardvark",
"absurd",
"accrue",
"acme",
"adrift",
"adult",
"afflict",
"ahead",
"aimless",
"Algol",
"allow",
"alone",
"ammo",
"ancient",
"apple",
"artist",
"assume",
"Athens",
"atlas",
"Aztec",
"baboon",
"backfield",
"backward",
"banjo",
"beaming",
"bedlamp",
"beehive",
"beeswax",
"befriend",
"Belfast",
"berserk",
"billiard",
"bison",
"blackjack",
"blockade",
"blowtorch",
"bluebird",
"bombast",
"bookshelf",
"brackish",
"breadline",
"breakup",
"brickyard",
"briefcase",
"Burbank",
"button",
"buzzard",
"cement",
"chairlift",
"chatter",
"checkup",
"chisel",
"choking",
"chopper",
"Christmas",
"clamshell",
"classic",
"classroom",
"cleanup",
"clockwork",
"cobra",
"commence",
"concert",
"cowbell",
"crackdown",
"cranky",
"crowfoot",
"crucial",
"crumpled",
"crusade",
"cubic",
"dashboard",
"deadbolt",
"deckhand",
"dogsled",
"dragnet",
"drainage",
"dreadful",
"drifter",
"dropper",
"drumbeat",
"drunken",
"Dupont",
"dwelling",
"eating",
"edict",
"egghead",
"eightball",
"endorse",
"endow",
"enlist",
"erase",
"escape",
"exceed",
"eyeglass",
"eyetooth",
"facial",
"fallout",
"flagpole",
"flatfoot",
"flytrap",
"fracture",
"framework",
"freedom",
"frighten",
"gazelle",
"Geiger",
"glitter",
"glucose",
"goggles",
"goldfish",
"gremlin",
"guidance",
"hamlet",
"highchair",
"hockey",
"indoors",
"indulge",
"inverse",
"involve",
"island",
"jawbone",
"keyboard",
"kickoff",
"kiwi",
"klaxon",
"locale",
"lockup",
"merit",
"minnow",
"miser",
"Mohawk",
"mural",
"music",
"necklace",
"Neptune",
"newborn",
"nightbird",
"Oakland",
"obtuse",
"offload",
"optic",
"orca",
"payday",
"peachy",
"pheasant",
"physique",
"playhouse",
"Pluto",
"preclude",
"prefer",
"preshrunk",
"printer",
"prowler",
"pupil",
"puppy",
"python",
"quadrant",
"quiver",
"quota",
"ragtime",
"ratchet",
"rebirth",
"reform",
"regain",
"reindeer",
"rematch",
"repay",
"retouch",
"revenge",
"reward",
"rhythm",
"ribcage",
"ringbolt",
"robust",
"rocker",
"ruffled",
"sailboat",
"sawdust",
"scallion",
"scenic",
"scorecard",
"Scotland",
"seabird",
"select",
"sentence",
"shadow",
"shamrock",
"showgirl",
"skullcap",
"skydive",
"slingshot",
"slowdown",
"snapline",
"snapshot",
"snowcap",
"snowslide",
"solo",
"southward",
"soybean",
"spaniel",
"spearhead",
"spellbind",
"spheroid",
"spigot",
"spindle",
"spyglass",
"stagehand",
"stagnate",
"stairway",
"standard",
"stapler",
"steamship",
"sterling",
"stockman",
"stopwatch",
"stormy",
"sugar",
"surmount",
"suspense",
"sweatband",
"swelter",
"tactics",
"talon",
"tapeworm",
"tempest",
"tiger",
"tissue",
"tonic",
"topmost",
"tracker",
"transit",
"trauma",
"treadmill",
"Trojan",
"trouble",
"tumor",
"tunnel",
"tycoon",
"uncut",
"unearth",
"unwind",
"uproot",
"upset",
"upshot",
"vapor",
"village",
"virus",
"Vulcan",
"waffle",
"wallet",
"watchword",
"wayside",
"willow",
"woodlark",
"Zulu")
ODD = ("adroitness",
"adviser",
"aftermath",
"aggregate",
"alkali",
"almighty",
"amulet",
"amusement",
"antenna",
"applicant",
"Apollo",
"armistice",
"article",
"asteroid",
"Atlantic",
"atmosphere",
"autopsy",
"Babylon",
"backwater",
"barbecue",
"belowground",
"bifocals",
"bodyguard",
"bookseller",
"borderline",
"bottomless",
"Bradbury",
"bravado",
"Brazilian",
"breakaway",
"Burlington",
"businessman",
"butterfat",
"Camelot",
"candidate",
"cannonball",
"Capricorn",
"caravan",
"caretaker",
"celebrate",
"cellulose",
"certify",
"chambermaid",
"Cherokee",
"Chicago",
"clergyman",
"coherence",
"combustion",
"commando",
"company",
"component",
"concurrent",
"confidence",
"conformist",
"congregate",
"consensus",
"consulting",
"corporate",
"corrosion",
"councilman",
"crossover",
"crucifix",
"cumbersome",
"customer",
"Dakota",
"decadence",
"December",
"decimal",
"designing",
"detector",
"detergent",
"determine",
"dictator",
"dinosaur",
"direction",
"disable",
"disbelief",
"disruptive",
"distortion",
"document",
"embezzle",
"enchanting",
"enrollment",
"enterprise",
"equation",
"equipment",
"escapade",
"Eskimo",
"everyday",
"examine",
"existence",
"exodus",
"fascinate",
"filament",
"finicky",
"forever",
"fortitude",
"frequency",
"gadgetry",
"Galveston",
"getaway",
"glossary",
"gossamer",
"graduate",
"gravity",
"guitarist",
"hamburger",
"Hamilton",
"handiwork",
"hazardous",
"headwaters",
"hemisphere",
"hesitate",
"hideaway",
"holiness",
"hurricane",
"hydraulic",
"impartial",
"impetus",
"inception",
"indigo",
"inertia",
"infancy",
"inferno",
"informant",
"insincere",
"insurgent",
"integrate",
"intention",
"inventive",
"Istanbul",
"Jamaica",
"Jupiter",
"leprosy",
"letterhead",
"liberty",
"maritime",
"matchmaker",
"maverick",
"Medusa",
"megaton",
"microscope",
"microwave",
"midsummer",
"millionaire",
"miracle",
"misnomer",
"molasses",
"molecule",
"Montana",
"monument",
"mosquito",
"narrative",
"nebula",
"newsletter",
"Norwegian",
"October",
"Ohio",
"onlooker",
"opulent",
"Orlando",
"outfielder",
"Pacific",
"pandemic",
"Pandora",
"paperweight",
"paragon",
"paragraph",
"paramount",
"passenger",
"pedigree",
"Pegasus",
"penetrate",
"perceptive",
"performance",
"pharmacy",
"phonetic",
"photograph",
"pioneer",
"pocketful",
"politeness",
"positive",
"potato",
"processor",
"provincial",
"proximate",
"puberty",
"publisher",
"pyramid",
"quantity",
"racketeer",
"rebellion",
"recipe",
"recover",
"repellent",
"replica",
"reproduce",
"resistor",
"responsive",
"retraction",
"retrieval",
"retrospect",
"revenue",
"revival",
"revolver",
"sandalwood",
"sardonic",
"Saturday",
"savagery",
"scavenger",
"sensation",
"sociable",
"souvenir",
"specialist",
"speculate",
"stethoscope",
"stupendous",
"supportive",
"surrender",
"suspicious",
"sympathy",
"tambourine",
"telephone",
"therapist",
"tobacco",
"tolerance",
"tomorrow",
"torpedo",
"tradition",
"travesty",
"trombonist",
"truncated",
"typewriter",
"ultimate",
"undaunted",
"underfoot",
"unicorn",
"unify",
"universe",
"unravel",
"upcoming",
"vacancy",
"vagabond",
"vertigo",
"Virginia",
"visitor",
"vocalist",
"voyager",
"warranty",
"Waterloo",
"whimsical",
"Wichita",
"Wilmington",
"Wyoming",
"yesteryear",
"Yucatan")
class InvalidWordError(ValueError):
pass
def words_to_int(word_iter, odd=False):
"""Generator yielding integer indices for each word in word_iter.
:param word_iter: iterable of pgp words
:type word_iter: iterable
:param odd: start with odd word list
:type odd: boolean
:return: integer
:rtype: generator
"""
for word in word_iter:
try:
yield (ODD if odd else EVEN).index(word)
except ValueError:
msg = "not in {} word list: '{}'"
raise InvalidWordError(msg.format("odd" if odd else "even", word))
# toggle odd/even
odd = not odd
def ints_to_word(int_iter, odd=False):
"""Generator yielding PGP words for each byte/int in int_iter.
:param int_iter: iterable of integers between 0 and 255
:type int_iter: iterable
:param odd: start with odd word list
:type odd: boolean
:return: pgp words
:rtype: generator
"""
for idx in int_iter:
yield (ODD if odd else EVEN)[idx]
# toggle odd/even
odd = not odd
class PGPWords(bytearray):
"""Inherits from bytearray. Add .hex() method and overwrite __str__"""
def __init__(self, source, **kwargs):
"""Initiate bytearray. Added initialization styles:
E.g.:
p = PGPWords("absurd bodyguard baboon", encoding="pgp-words")
p = PGPWords("DEAD 1337", encoding="hex")
"""
enc = kwargs.get("encoding")
if enc == "pgp-words":
kwargs.pop("encoding")
source = words_to_int(source.split(SEPARATOR), **kwargs)
kwargs = {}
elif enc == "hex" or source.startswith('0x'):
kwargs.pop("encoding")
tmp = source.replace("0x", '').replace(' ', '')
source = (int(tmp[i:i+2], 16) for i in range(0, len(tmp), 2))
super(PGPWords, self).__init__(source, **kwargs)
def __str__(self):
"""Return corresponding pgp words, separated by SEPARATOR."""
gen = ints_to_word(self)
return SEPARATOR.join(gen)
def hex(self):
"""Return corresponding hex representation as string"""
tmp = ''.join([hex(i).split('x')[1].zfill(2) for i in self])
gen = (tmp[i:i+4].upper() for i in range(0, len(tmp), 4))
return SEPARATOR.join(gen)
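# Illustrative usage (mirrors the examples in the module docstring; this
# comment is an added note, not part of the original class):
#   str(PGPWords("DEAD 1337", encoding="hex"))
#   # -> 'tactics perceptive Aztec consensus'
#   PGPWords("absurd bodyguard baboon unicorn", encoding="pgp-words").hex()
#   # -> '0116 14EC'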
def main():
"""Try to convert arguments in either direction."""
if len(sys.argv) < 2 or sys.argv[1].startswith('-'):
print(__doc__.split("Mainline code:\n\n")[1], file=sys.stderr)
exit(-1)
arg_str = ' '.join(sys.argv[1:])
try:
result = PGPWords(arg_str, encoding="hex")
print(result)
except ValueError as err1:
try:
result = PGPWords(arg_str, encoding="pgp-words").hex()
print(result)
except InvalidWordError as err2:
print(err1, file=sys.stderr)
print(err2, file=sys.stderr)
exit(-1)
if __name__ == "__main__":
main()
| mit | -7,093,815,584,150,289,000 | 19.746479 | 78 | 0.45063 | false |
petezybrick/iote2e | iote2e-pyclient/src/iote2epyclient/test/testhatsensors.py | 1 | 3137 | # Copyright 2016, 2017 Peter Zybrick and others.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
testhatsensors - Test RPi HAT sensors
:author: Pete Zybrick
:contact: [email protected]
:version: 1.0.0
"""
import sys
import datetime
from sense_hat import SenseHat
from time import sleep
def main(conf_file):
import logging.config
logging.config.fileConfig( conf_file, disable_existing_loggers=False)
logger = logging.getLogger(__name__)
logger.info('Starting')
sense = SenseHat()
#showMessages(sense)
#showLetters(sense)
#showPixels(sense)
showTemperature(sense)
#showJoystickPoll(sense)
#showJoystickWait(sense)
sense.clear()
logger.info('Done')
def showJoystickPoll(sense):
while True:
for event in sense.stick.get_events():
print("The joystick was {} {}".format(event.action,event.direction))
sleep(.25)
print('poll')
def showJoystickWait(sense):
while True:
event = sense.stick.wait_for_event()
if "middle" == event.direction:
if "pressed" == event.action:
print("1");
elif "released" == event.action:
print("0");
#print("The joystick was {} {}".format(event.action,event.direction))
def showTemperature(sense):
for i in range(0,5):
t = round(sense.get_temperature(),2)
print(t)
sense.show_message("{}".format(t), scroll_speed=.1)
sleep(1)
def showMessages(sense):
sense.show_message("Watson, come here. I need you.", scroll_speed=.025);
def showLetters(sense):
sense.show_letter("R", text_colour=[255,0,0],back_colour=[0,0,0]);
sleep(1.5)
sense.show_letter("G", text_colour=[0,255,0],back_colour=[0,0,0]);
sleep(1.5)
sense.show_letter("B", text_colour=[0,0,255],back_colour=[0,0,0]);
sleep(1.5)
def showPixels(sense):
b = [0,0,255]
y = [255,255,0]
e = [0,0,0]
image = [
b,b,e,b,b,e,y,y,
b,b,e,b,b,e,y,y,
e,e,e,e,e,e,e,e,
b,b,e,b,b,e,b,b,
b,b,e,b,b,e,b,b,
e,e,e,e,e,e,e,e,
b,b,e,b,b,e,b,b,
b,b,e,b,b,e,b,b
]
sense.set_pixels(image)
angles = [0,90,180,270,0,90,180,270]
for angle in angles:
sense.set_rotation(angle)
sleep(2)
if __name__ == '__main__':
sys.argv = ['testhatsensors.py', '/home/pete/iote2epyclient/log-configs/client_consoleonly.conf']
if( len(sys.argv) < 2 ):
print('Invalid format, execution cancelled')
print('Correct format: python <consoleConfigFile.conf>')
sys.exit(8)
main(sys.argv[1])
| apache-2.0 | 3,881,030,705,390,971,000 | 27.008929 | 101 | 0.620019 | false |
tensorflow/ngraph-bridge | test/python/test_fusedConv2D.py | 1 | 5531 | # ==============================================================================
# Copyright 2018-2020 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""nGraph TensorFlow bridge fusedConv2D tests.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import pytest
import platform
import tensorflow as tf
from tensorflow.python.framework import constant_op
from tensorflow.python.ops import nn_ops
from tensorflow.python.ops import nn_impl
from tensorflow.python.ops import array_ops
from common import NgraphTest
from tensorflow.python.framework import dtypes
import numpy as np
class TestFusedConv2D(NgraphTest):
INPUT_SIZES = [3, 1, 6, 2]
FILTER_SIZES = [1, 1, 2, 2]
BIAS_SIZES = [2]
def get_relu_op(self, relutype):
return {
'relu': nn_ops.relu,
'relu6': nn_ops.relu6,
'': (lambda x: x)
}[relutype]
@pytest.mark.parametrize(("relutype",), (
('relu',),
('relu6',),
('',),
))
@pytest.mark.skipif(platform.system() == 'Darwin', reason='Only for Linux')
def test_fusedconv2d_bias_relu(self, relutype):
inp_values = np.random.rand(*self.INPUT_SIZES)
filt_values = np.random.rand(*self.FILTER_SIZES)
bias_values = np.random.rand(*self.BIAS_SIZES)
def run_test(sess):
inp = array_ops.placeholder(dtypes.float32)
filt = array_ops.placeholder(dtypes.float32)
bias = array_ops.placeholder(dtypes.float32)
relu_op = self.get_relu_op(relutype)
return sess.run(
relu_op(
nn_ops.bias_add(
nn_ops.conv2d(
inp, filt, strides=[1, 1, 1, 1], padding="SAME"),
bias)), {
inp: inp_values,
filt: filt_values,
bias: bias_values,
})
assert np.allclose(
self.without_ngraph(run_test), self.with_ngraph(run_test))
@pytest.mark.parametrize(("relutype",), (
('relu',),
('relu6',),
('',),
))
@pytest.mark.skipif(platform.system() == 'Darwin', reason='Only for Linux')
def test_fusedconv2d_batchnorm(self, relutype):
inp_values = np.random.rand(*self.INPUT_SIZES)
filt_values = np.random.rand(*self.FILTER_SIZES)
scale_values = np.random.rand(*self.BIAS_SIZES)
offset_values = np.random.rand(*self.BIAS_SIZES)
mean_values = np.random.rand(*self.BIAS_SIZES)
variance_values = np.random.rand(*self.BIAS_SIZES)
def run_test(sess):
inp = array_ops.placeholder(dtypes.float32)
filt = array_ops.placeholder(dtypes.float32)
scale = array_ops.placeholder(dtypes.float32)
offset = array_ops.placeholder(dtypes.float32)
mean = array_ops.placeholder(dtypes.float32)
variance = array_ops.placeholder(dtypes.float32)
relu_op = self.get_relu_op(relutype)
bn, _, _ = nn_impl.fused_batch_norm(
nn_ops.conv2d(inp, filt, strides=[1, 1, 1, 1], padding="SAME"),
scale,
offset,
mean,
variance,
epsilon=0.02,
is_training=False)
return sess.run(
relu_op(bn), {
inp: inp_values,
filt: filt_values,
scale: scale_values,
offset: offset_values,
mean: mean_values,
variance: variance_values,
})
assert np.allclose(
self.without_ngraph(run_test),
self.with_ngraph(run_test),
rtol=0,
atol=5e-5)
@pytest.mark.skipif(platform.system() == 'Darwin', reason='Only for Linux')
def test_fusedconv2d_squeeze_bias(self):
inp_values = np.random.rand(*self.INPUT_SIZES)
filt_values = np.random.rand(*self.FILTER_SIZES)
bias_values = np.random.rand(*self.BIAS_SIZES)
squeeze_dim = [1]
def run_test(sess):
inp = array_ops.placeholder(dtypes.float32)
filt = array_ops.placeholder(dtypes.float32)
bias = array_ops.placeholder(dtypes.float32)
return sess.run(
nn_ops.bias_add(
array_ops.squeeze(
nn_ops.conv2d(
inp, filt, strides=[1, 1, 1, 1], padding="SAME"),
squeeze_dim), bias), {
inp: inp_values,
filt: filt_values,
bias: bias_values,
})
assert np.allclose(
self.without_ngraph(run_test), self.with_ngraph(run_test))
| apache-2.0 | -2,680,302,436,829,461,500 | 36.371622 | 80 | 0.540047 | false |
Goyatuzo/Challenges | HackerRank/Algorithms/Sorting/Insertion Sort Part 1/insertion_sort_p1.py | 1 | 1113 | def insertion_sort(lst):
"""Instead of just inserting the value where it should be at,
it shifts the entire array until the location is found. It prints
out all the intermediate steps, but the final step is actually just
returned, so the output must be manually printed.
:param lst: The list of values to be sorted by insertion."""
# The value to be inserted.
to_insert = lst[-1]
n = len(lst)
# Remove the element to be added and replace with last element.
del lst[-1]
lst.append(lst[-1])
print(" ".join(map(str, lst)))
for i in range(n - 2, -1, -1):
# If it's at the beginning of the list, just insert it.
if i <= 0:
lst.insert(0, to_insert)
del lst[1]
break
# If it's in the middle of the list.
elif lst[i - 1] <= to_insert and lst[i] >= to_insert:
lst.insert(i, to_insert)
del lst[i + 1]
break
else:
lst.insert(i, lst[i - 1])
del lst[i + 1]
print(" ".join(map(str, lst)))
return " ".join(map(str, lst))
| mit | 3,846,452,538,363,552,000 | 29.916667 | 71 | 0.562444 | false |
Loisel/tmr3 | tmr.py | 1 | 15096 | #!/usr/bin/python
"""
A module to calculate the current, the conductance and the TMR from
a set of rate arrays.
The rate arrays are supposed to be stored in a h5 file in the job directory.
The result is stored in a h5 file. The name of the dataset contains all
parameters. They are also stored as attributes in the dataset.
The conductance in the two lead configurations (parallel/anti-parallel)
is stored in arrays in the dataset.
Usage:
./tmr.py <jobname>
"""
import numpy as np
from numpy import linalg
import time
import sys
import getopt
import h5py
import os
# We are picky about possible floating point overflows
# to avoid calculating NaNs
np.seterr(divide="raise")
np.seterr(invalid="raise")
# A helper module to calculate the populations.
import pop
# The configuration module
import cfg
# path to the dat directory
datpath = "dat/"
# name of the temporary file where the rates are stored
ratefile = "running_calc.h5"
# name of the h5 file to store the conductance for the two configuration
# and the configuraion parameters.
hdffile = "simdata_new.h5"
def save_hdf5(fname,G_P,G_AP):
"""
Store the conductance and the configuration to the h5 file.
Args:
fname: filename of the h5 file
G_P: the conductance for leads with parallel magnetization
G_AP: the conductance for leads with anti-parallel magnetization
"""
print "Shape of GP {}".format(G_P.shape)
fileh = h5py.File(fname,"a")
# Note that the selection of parameters to construct the name of the
# dataset should be chosen such that this string is unique!
# That is, it should contain all running parameters.
dset_name = "G={}_kbT={}_Ec={}_E0={}_Pol={}_PolOrb={}_SO={}_tau={}_DS={}_B_P={}_B_AP={}_B_ORB_P={}_B_ORB_AP={}_W_e={}_W_0={}".format(cfg.conf['G_scale'],cfg.conf['kBT'],cfg.conf['E_C'],cfg.conf['E_0'],cfg.conf['Pol'],cfg.conf['OrbPol'],cfg.conf['SO'],cfg.conf['tau_r'],cfg.conf['D_S_factor'],cfg.conf['B_P'],cfg.conf['B_AP'],cfg.conf['B_ORB_P'],cfg.conf['B_ORB_AP'],cfg.conf['W_E'],cfg.conf['W_0'])
try:
# we create the dataset
dset = fileh.create_dataset(dset_name,data=np.vstack((G_P,G_AP)))
# and store the config attributes
dset.attrs['alpha'] = cfg.conf['ALPHA']
dset.attrs['temperature'] = cfg.conf['kBT']
dset.attrs['coupling'] = cfg.conf['G_scale']
dset.attrs['electron_number'] = cfg.conf['N_0']
dset.attrs['charging_energy'] = cfg.conf['E_C']
dset.attrs['level_spacing'] = cfg.conf['E_0']
dset.attrs['polarization_spin'] = cfg.conf['Pol']
dset.attrs['polarization_orbit'] = cfg.conf['OrbPol']
dset.attrs['spinorbit'] = cfg.conf['SO']
dset.attrs['stonershift'] = cfg.conf['D_S_factor']
dset.attrs['tau_r'] = cfg.conf['tau_r']
dset.attrs['vg_min'] = cfg.conf['V_g_min']
dset.attrs['vg_max'] = cfg.conf['V_g_max']
dset.attrs['b_p'] = cfg.conf['B_P']
dset.attrs['b_ap'] = cfg.conf['B_AP']
dset.attrs['b_orb_p'] = cfg.conf['B_ORB_P']
dset.attrs['b_orb_ap'] = cfg.conf['B_ORB_AP']
dset.attrs['w_0'] = cfg.conf['W_0']
dset.attrs['w_e'] = cfg.conf['W_E']
dset.attrs['timestamp'] = time.time()
except KeyError:
# If the choice was not unique we complain but continue.
print "Dataset exists."
fileh.close()
def eval_DENKER(GM,GP,configuration):
"""
Evaluate the density matrix kernel using the in- and out-tunneling rates.
Args:
GM,GP: numpy arrays containing in- and out-tunneling rates
in the order of cfg.TLIST.
configuration: integer determining parallel (0) or anti-parallel(1)
configuration
Returns:
the density matrix as a square 2-d numpy array that is NP**2 in size,
where NP is the number of states in the groundstatespace.
"""
# we get a view on the transition list and, for simplicity, its transpose
TLIST = cfg.TLIST[configuration]
TLIST_T = np.transpose(TLIST)
# from all transitions we extract all groundstates in the statespace
# this is probably a complicated way to do it
PLIST = list(set(TLIST_T[0]).union(TLIST_T[1]))
# ... and sort it by index
PLIST.sort()
# the number of groundstates
NP = len(PLIST)
# let's create an empty density matrix
ME = np.zeros((NP,NP))
# we create a version of the transition list that does not contain
# the indices in terms of the energy array (see cfg.py), but
# in terms of the number in the state list (plist)
# (the transition list can then be used to denote non-zero matrix elements)
TMP = np.copy(TLIST)
for idx,val in enumerate(PLIST):
TMP[TLIST == val] = idx
# We calculate diagonal elements of the density matrix:
# TLIST_T[1] == num selects the correct in-tunneling rates for the
# state with label num
# have a look at numpy.where to understand this line
for idx,num in enumerate(PLIST):
ME[idx,idx] = -np.sum(np.where(TLIST_T[1] == num,GP,0.)) - np.sum(np.where(TLIST_T[0] == num,GM,0.))
# for the off diagonal elements we can directly use the generated TMP
# transition list
for k,tup in enumerate(TMP):
ME[tup[0],tup[1]] = GP[k]
ME[tup[1],tup[0]] = GM[k]
# print "tup: {} and matrix element {}".format(tup,ME[tuple(tup)])
return ME
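# Added sanity-check note (not in the original source): provided each
# (initial, final) pair occurs only once in TLIST, every column of the kernel
# returned above sums to zero -- the diagonal collects minus all in- and
# out-rates of a state while the off-diagonals redistribute them -- so
# np.allclose(ME.sum(axis=0), 0.) is a cheap consistency test after changes.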
def eval_CURKER(GM,GP,configuration):
"""
Evaluate the current kernel using the in- and out-tunneling rates.
Args:
GM,GP: numpy arrays containing in- and out-tunneling rates
in the order of cfg.TLIST.
configuration: integer determining parallel (0) or anti-parallel(1)
configuration
Returns:
the current kernel as a 1-d numpy array.
"""
# We get a view on the transition list and its transpose
TLIST = cfg.TLIST[configuration]
TLIST_T = np.transpose(TLIST)
# ... and extract the list of groundstates (see also eval_DENKER)
PLIST = list(set(TLIST_T[0]).union(TLIST_T[1]))
PLIST.sort()
# this determines the size of the statespace
NP = len(PLIST)
CUR = np.zeros(NP)
# Note that the current kernel can be calculated by summing the diagonal elements
# of the density matrix with opposite sign
# compare eval_DENKER
for idx,num in enumerate(PLIST):
CUR[idx] = np.sum(np.where(TLIST_T[1] == num,GP,0.)) - np.sum(np.where(TLIST_T[0] == num,GM,0.))
return CUR
def current(GP,GM,POP,configuration):
"""
Calculate the current using the rates and populations.
Args:
GP, GM: np-arrays containing in- and out-tunneling rates.
POP: np-array for the populations
configuration: integer determining parallel (0) or anti-parallel(1)
configuration
Returns:
current as a float.
"""
# We calculate the current kernel
CURKER = eval_CURKER(GM,GP,configuration)
# and vector-multiply it with the population vector
I = -np.sum(cfg.conf["ELE"]*np.dot( CURKER, POP))
return I
def eval_tmr(fname,plotname,pop):
"""
Calculates the TMR by evaluating conductance through
parallel and anti-parallel polarized contacts.
Args:
fname: the h5 file to load the rates from.
plotname: A name for the pdf output to produce.
pop: If True, we plot the populations, too.
"""
# We prepare the current and conductance vectors for different
# values of gate and bias voltage
C_p = np.zeros((cfg.conf['NV'],cfg.conf['NVb']))
C_ap = np.zeros((cfg.conf['NV'],cfg.conf['NVb']))
G_p = np.zeros((cfg.conf['NV'],cfg.conf['NVb']-1))
G_ap = np.zeros((cfg.conf['NV'],cfg.conf['NVb']-1))
dVb = cfg.conf['Vb_range'][1]- cfg.conf['Vb_range'][0]
# the population vectors, for all values of gate and bias
POP_p = np.zeros((cfg.conf['NVb'],cfg.conf['NV'],cfg.N_GS[0]))
POP_ap = np.zeros((cfg.conf['NVb'],cfg.conf['NV'],cfg.N_GS[1]))
# We iterate over two bias values first
for nV,Vb in enumerate(cfg.conf["Vb_range"]):
# now the rates are loaded from the h5 file
# note that the label of the specific rate arrays are fixed
with h5py.File(fname) as file:
GP0_p = np.array(file['par_P0_V{}'.format(Vb)])
GP0_ap = np.array(file['apa_P0_V{}'.format(Vb)])
GP1_p = np.array(file['par_P1_V{}'.format(Vb)])
GP1_ap = np.array(file['apa_P1_V{}'.format(Vb)])
GM0_p = np.array(file['par_M0_V{}'.format(Vb)])
GM0_ap = np.array(file['apa_M0_V{}'.format(Vb)])
GM1_p = np.array(file['par_M1_V{}'.format(Vb)])
GM1_ap = np.array(file['apa_M1_V{}'.format(Vb)])
# for the density kernel, we sum all rates over both leads
DENKER_p = np.array([eval_DENKER(GM0_p[n]+GM1_p[n],GP0_p[n]+GP1_p[n],0)for n in range(cfg.conf["NV"])])
DENKER_ap = np.array([eval_DENKER(GM0_ap[n]+GM1_ap[n],GP0_ap[n]+GP1_ap[n],1)for n in range(cfg.conf["NV"])])
# the populations are calculated from the density kernel by an asymptotic
# approximation scheme
POP_ap[nV] = np.array([pop.asymptotic_ssp(DENKER_ap[n]) for n in range(cfg.conf["NV"])])
POP_p[nV] = np.array([pop.asymptotic_ssp(DENKER_p[n]) for n in range(cfg.conf["NV"])])
# note that the current is calculated from the rates in one of the leads only
C_p[:,nV] = np.array([ current(GP0_p[n],GM0_p[n],POP_p[nV,n],0) for n in np.arange(cfg.conf["NV"]) ])
C_ap[:,nV] = np.array([ current(GP0_ap[n],GM0_ap[n],POP_ap[nV,n],1) for n in np.arange(cfg.conf["NV"]) ])
# the numerical derivative gives the conductance
G_p = np.diff(C_p).flatten()/dVb
G_ap = np.diff(C_ap).flatten()/dVb
# we save the conductance traces to a h5 file specified as a global variable
# hdffile in the path datpath
# It is possible that the dataset already exists. In this case, we issue a warning.
try:
save_hdf5("{}{}".format(datpath,hdffile),G_p,G_ap)
except RuntimeError:
print "Unable to save to {}, maybe there is already a dataset with similar parameters...".format(hdffile)
# the tmr and conductance graphs are plotted to a pdf file for review.
plot_tmr_pdf(G_p,G_ap,plotname)
# if the pop flag is set, we also plot the population for one bias value
if pop:
plot_population([POP_p[0],POP_ap[0]],os.path.splitext(plotname)[0]+"_POP.pdf")
def plot_tmr_pdf(C_p,C_ap,fname):
"""
A helper routine to plot the conductance and TMR to a pdf file in the datpath.
Args:
C_p, C_ap: the parallel and anti-parallel conductance.
fname: the filename to plot to
"""
import matplotlib as mpl
mpl.use('Agg')
import matplotlib.pyplot as plt
# we plot the conductance graph on top, p and ap with different colors
Axes1 = plt.subplot(2,1,1)
Axes1.set_xticklabels([])
plt.ylabel("Conductance (e^2/h)")
plt.title("Conductance at zero bias")
# parallel is plotted in red, and anti-parallel as blue dashed line
plt.plot( cfg.conf["V_g"],C_p,'r',cfg.conf["V_g"], C_ap, 'b--')
# on the second panel, the TMR is plotted
Axes2 = plt.subplot(2,1,2)
plt.xlabel("gate voltage (V)")
plt.ylabel("TMR")
plt.title("TMR")
plt.ylim((-0.3,1.5))
TMR = np.zeros(cfg.conf["NV"])
for i in range(cfg.conf["NV"]):
try:
TMR[i] = C_p[i]/C_ap[i]-1.
except ZeroDivisionError:
print "Zero Division, returning null."
TMR[i] = 0.
plt.plot( cfg.conf["V_g"], TMR)
plt.savefig(fname, bbox_inches='tight')
def plot_population(POP, fname):
"""
Calculates and plots selected populations of the quantum dot
with gate voltage. The edge states N=-1 and 5 are neglected.
Args:
POP: a list with the two population vectors
for parallel and anti-parallel configurations
fname: the filename to plot to
"""
import matplotlib.pyplot as plt
NV = cfg.conf["NV"]
print "Calculating populations..."
# We plot the populations for both configurations
# the parallel populations on top
# the anti-parallel on bottom
Ax = [plt.subplot(2,1,1),plt.subplot(2,1,2)]
cm = plt.get_cmap('gist_rainbow')
PopPlots = [1,4,8,12,17,18]
NP = len(PopPlots)
for gamidx in range(2):
TLIST = cfg.TLIST[gamidx]
TLIST_T = np.transpose(TLIST)
PLIST = list(set(TLIST_T[0]).union(TLIST_T[1]))
PLIST.sort()
# we cycle through the linecolors to distinguish the different
# groundstates
Ax[gamidx].set_color_cycle([cm(1.*k/NP) for k in range(NP)])
for i in PopPlots:
color = cm(1.*i/NP)
LABEL = "P_{}".format(cfg.int_to_state(PLIST[i]))
Ax[gamidx].plot( cfg.conf["V_g"], POP[gamidx][:,i],label=LABEL)
lines =Ax[gamidx].get_lines()
labels = [l.get_label() for l in lines]
leg = plt.figlegend(lines,labels,loc='upper right')
plt.savefig(fname)
plt.show()
class Usage(Exception):
def __init__(self, msg):
self.msg = msg
def main(argv=None):
"""
Interface routine to call the tmr module.
Example:
./tmr.py <jobname>
In principle, there were routines to plot rates, populations,
conductances etc. but apart from the population plotting,
none of the use cases was needed anymore.
"""
POP = False
# The default config file is called cnt.conf
cfile = "cnt.conf"
rlist = [0.,]
if argv is None:
argv = sys.argv
try:
try:
opts, args = getopt.getopt(argv[1:], "hc:P", ["help","config=","pop"])
except getopt.error, msg:
raise Usage(msg)
for o,a in opts:
if o in ('-h','--help'):
usage()
exit()
elif o in ('-c','--config'):
cfile = a
elif o in ('-P','--pop'):
POP = True
else:
raise Usage('Invalid argument.')
# we parse the config and initialize it
cfg.parse_conf("dat/{0}/{1}".format(args[0],cfile))
cfg.init()
h5file = "{}{}/{}".format(datpath,args[0],ratefile)
pdffile = "{}{}.pdf".format(datpath,args[0])
print "Try to open {}".format(h5file)
eval_tmr(h5file,pdffile,POP)
except Usage, err:
print >>sys.stderr, err.msg
print >>sys.stderr, "for help use --help"
return 2
def usage():
print "This is a tool to process rate files.\n\
\n\
usage: tmr.py [-hP] [--pop] jobname\n\
\n\
--pop or -P: Plot the populations.\n\
\n\
jobname: The name of the directory for the rate files.\n\
\n\
The script searches for files dat/jobname/running_calc.h5\n\
and dat/jobname/cnt.conf"
if __name__ == "__main__":
sys.exit(main())
| gpl-3.0 | 38,346,425,765,791,370 | 32.105263 | 402 | 0.610029 | false |
ict-felix/stack | vt_manager_kvm/src/python/vt_manager_kvm/controller/dispatchers/ui/GUIdispatcher.py | 1 | 17272 | from django.core.urlresolvers import reverse
from django.forms.models import modelformset_factory
from django.shortcuts import get_object_or_404
from django.http import HttpResponseRedirect, HttpResponse
from django.views.generic import simple
from django.views.generic import list_detail, simple
from django.views.generic.create_update import apply_extra_context
from vt_manager_kvm.models import *
from vt_manager_kvm.communication.utils.XmlHelper import XmlHelper
import uuid, time, logging
from django.template import loader, RequestContext
from django.core.xheaders import populate_xheaders
from django.contrib import messages
#News
from vt_manager_kvm.controller.drivers.VTDriver import VTDriver
from vt_manager_kvm.utils.HttpUtils import HttpUtils
from vt_manager_kvm.models.NetworkInterface import NetworkInterface
from vt_manager_kvm.models.MacRange import MacRange
from vt_manager_kvm.controller.dispatchers.xmlrpc.InformationDispatcher import InformationDispatcher
from vt_manager_kvm.controller.dispatchers.forms.NetworkInterfaceForm import MgmtBridgeForm
from vt_manager_kvm.controller.dispatchers.forms.ServerForm import ServerForm
from django.db import transaction
def userIsIslandManager(request):
if (not request.user.is_superuser):
return simple.direct_to_template(request,
template = 'not_admin.html',
extra_context = {'user':request.user},
)
@transaction.commit_on_success
def servers_crud(request, server_id=None):
"""Show a page for the user to add/edit an VTServer """
if (not request.user.is_superuser):
return simple.direct_to_template(request,
template = 'not_admin.html',
extra_context = {'user':request.user},
)
vmProjects = {}
vmSlices = {}
try:
for vm in VTDriver.getVMsInServer(VTDriver.getServerById(server_id)):
if vm.projectName not in vmProjects:
vmProjects[vm.projectName] = vm.projectId
if vm.sliceName not in vmSlices:
vmSlices[vm.sliceName] = vm.sliceId
except Exception as e:
print e
pass
serverFormClass = HttpUtils.getFormFromModel(VTServer)
ifaceFormClass = HttpUtils.getFormFromModel(NetworkInterface)
IfaceFormSetClass = modelformset_factory(NetworkInterface)
if server_id != None:
server = get_object_or_404(VTServer, pk=server_id)
else:
server = None
if request.method == "GET":
#serverForm = serverFormClass(instance=server)
serverForm = ServerForm(instance=server, prefix ="server")
if server != None:
mgmt = server.getNetworkInterfaces().filter(isMgmt = True)
if mgmt:
mgmt = mgmt.get()
mgmtIfaceForm = MgmtBridgeForm({'mgmtBridge-name':mgmt.getName(), 'mgmtBridge-mac':mgmt.getMacStr()}, prefix ="mgmtBridge")
else:
mgmtIfaceForm = MgmtBridgeForm(prefix ="mgmtBridge")
data = server.getNetworkInterfaces().filter(isMgmt = False)
if data:
IfaceFormSetClass = modelformset_factory(NetworkInterface,extra = 0)
ifaceformset = IfaceFormSetClass(queryset= data)
else:
mgmtIfaceForm = MgmtBridgeForm(prefix ="mgmtBridge")
ifaceformset = IfaceFormSetClass(queryset= NetworkInterface.objects.none())
elif request.method == "POST":
#serverForm = serverFormClass(request.POST, instance=server)
serverForm = ServerForm(request.POST, instance=server, prefix ="server")
ifaceformset = IfaceFormSetClass(request.POST)
mgmtIfaceForm = MgmtBridgeForm(request.POST, prefix ="mgmtBridge")
if serverForm.is_valid() and ifaceformset.is_valid() and mgmtIfaceForm.is_valid():
ifaces = ifaceformset.save(commit = False)
if server == None:
server = serverForm.save(commit = False)
try:
server = VTDriver.crudServerFromInstance(server)
VTDriver.setMgmtBridge(request, server)
VTDriver.crudDataBridgeFromInstance(server, ifaces,request.POST.getlist("DELETE"))
except Exception as e:
print e
e = HttpUtils.processException(e)
context = {"exception":e, "serverForm": serverForm, 'vmProjects': vmProjects, 'vmSlices': vmSlices,'ifaceformset' : ifaceformset, 'mgmtIfaceForm' : mgmtIfaceForm}
if server_id != None: context["server"] = server
return simple.direct_to_template(
request,
template="servers/servers_crud.html",
extra_context=context,
)
# Returns to server's admin page and rollback transactions
return HttpResponseRedirect('/servers/admin/')
else:
return HttpResponseNotAllowed("GET", "POST")
context = {"serverForm": serverForm, 'vmProjects': vmProjects, 'vmSlices': vmSlices,'ifaceformset' : ifaceformset, 'mgmtIfaceForm' : mgmtIfaceForm}
if server_id != None: context["server"] = server
return simple.direct_to_template(
request,
template="servers/servers_crud.html",
extra_context=context,
)
def admin_servers(request):
if (not request.user.is_superuser):
return simple.direct_to_template(request,
template = 'not_admin.html',
extra_context = {'user':request.user},
)
servers = VTDriver.getAllServers()
return simple.direct_to_template(
request, template="servers/admin_servers.html",
extra_context={"servers_ids": servers})
def delete_server(request, server_id):
if (not request.user.is_superuser):
return simple.direct_to_template(request,
template = 'not_admin.html',
extra_context = {'user':request.user},
)
if request.method == 'POST':
try:
VTDriver.deleteServer(VTDriver.getServerById(server_id))
return HttpResponseRedirect(reverse('dashboard'))
except Exception as e:
logging.error(e)
e = HttpUtils.processException(e)
return simple.direct_to_template(request,
template = 'servers/delete_server.html',
extra_context = {'user':request.user, 'exception':e, 'next':reverse("admin_servers")},
)
elif request.method == 'GET':
return simple.direct_to_template(request,
template = 'servers/delete_server.html',
extra_context = {'user':request.user, 'next':reverse("admin_servers"),'object':VTDriver.getServerById(server_id)},
)
def action_vm(request, server_id, vm_id, action):
if (not request.user.is_superuser):
return simple.direct_to_template(request,
template = 'not_admin.html',
extra_context = {'user':request.user},
)
if(action == 'list'):
return simple.direct_to_template(
request, template="servers/server_vm_details.html",
extra_context={"vm": VTDriver.getVMbyId(vm_id), "server_id":server_id}
)
elif(action == 'check_status'):
#XXX: Do this function if needed
return simple.direct_to_template(
request, template="servers/list_vm.html",
extra_context={"vm": VM.objects.get(id = vm_id)}
)
elif(action == 'force_update_server'):
InformationDispatcher.forceListActiveVMs(serverID=server_id)
elif(action == 'force_update_vm'):
InformationDispatcher.forceListActiveVMs(vmID=vm_id)
else:
#XXX: serverUUID should be passed in a different way
VTDriver.PropagateActionToProvisioningDispatcher(vm_id, VTServer.objects.get(id=server_id).uuid, action)
#return HttpResponseRedirect(reverse('edit_server', args = [server_id]))
return HttpResponse("")
def subscribeEthernetRanges(request, server_id):
if (not request.user.is_superuser):
return simple.direct_to_template(request,
template = 'not_admin.html',
extra_context = {'user':request.user},
)
macRanges = MacRange.objects.all()
if server_id != None:
server = get_object_or_404(VTServer, pk=server_id)
else:
raise Exception ("NO SERVER")
if request.method == "GET":
return simple.direct_to_template(request,
template = 'servers/servers_subscribeEthernetRanges.html',
extra_context = {'server': server, 'macRanges':macRanges},
)
elif request.method=='POST':
VTDriver.manageEthernetRanges(request,server,macRanges)
return HttpResponseRedirect(reverse('edit_server', args = [server_id]))
else:
return HttpResponseNotAllowed("GET", "POST")
def subscribeIp4Ranges(request, server_id):
if (not request.user.is_superuser):
return simple.direct_to_template(request,
template = 'not_admin.html',
extra_context = {'user':request.user},
)
ipRanges = Ip4Range.objects.all()
if server_id != None:
server = get_object_or_404(VTServer, pk=server_id)
else:
raise Exception ("NO SERVER")
if request.method == "GET":
return simple.direct_to_template(request,
template = 'servers/servers_subscribeIp4Ranges.html',
extra_context = {'server': server, 'ipRanges':ipRanges},
)
elif request.method=='POST':
VTDriver.manageIp4Ranges(request,server,ipRanges)
return HttpResponseRedirect(reverse('edit_server', args = [server_id]))
else:
return HttpResponseNotAllowed("GET", "POST")
def list_vms(request, server_id):
if (not request.user.is_superuser):
return simple.direct_to_template(request,
template = 'not_admin.html',
extra_context = {'user':request.user},
)
vmProjects = {}
vmSlices = {}
try:
for vm in VTDriver.getVMsInServer(VTDriver.getServerById(server_id)):
if vm.projectName not in vmProjects:
vmProjects[vm.projectName] = vm.projectId
if vm.sliceName not in vmSlices:
vmSlices[vm.sliceName] = vm.sliceId
except Exception as e:
print e
pass
server = get_object_or_404(VTServer, pk=server_id)
context = { 'vmProjects': vmProjects, 'vmSlices': vmSlices,'server':server}
return simple.direct_to_template(
request,
template="servers/servers_list_vms.html",
extra_context=context,
)
'''
Networking point of entry
'''
from vt_manager_kvm.controller.networking.EthernetController import EthernetController
from vt_manager_kvm.controller.networking.Ip4Controller import Ip4Controller
from vt_manager_kvm.models.MacRange import MacRange
NETWORKING_ACTION_ADD="add"
NETWORKING_ACTION_EDIT="edit"
NETWORKING_ACTION_DELETE="delete"
NETWORKING_ACTION_SHOW="show"
NETWORKING_ACTION_ADDEXCLUDED="addExcluded"
NETWORKING_ACTION_REMOVEXCLUDED="removeExcluded"
NETWORKING_POSSIBLE_ACTIONS=(NETWORKING_ACTION_ADD,NETWORKING_ACTION_DELETE,NETWORKING_ACTION_EDIT,NETWORKING_ACTION_SHOW,NETWORKING_ACTION_ADDEXCLUDED,NETWORKING_ACTION_REMOVEXCLUDED,None)
def networkingDashboard(request):#,rangeId):
extra_context = {"section": "networking","subsection":"None"}
extra_context["macRanges"] = EthernetController.listRanges()
extra_context["MacRange"] = MacRange
extra_context["ip4Ranges"] = Ip4Controller.listRanges()
extra_context["Ip4Range"] = Ip4Range
template = "networking/index.html"
return simple.direct_to_template(
request,
extra_context=extra_context,
template=template,
)
def manageIp4(request,rangeId=None,action=None,ip4Id=None):
if not action in NETWORKING_POSSIBLE_ACTIONS:
raise Exception("Unknown action")
#Define context
extra_context = {"section": "networking","subsection":"ip4"+str(action),}
#Add process
if (action == NETWORKING_ACTION_ADD):
if request.method == "GET":
#Show form
extra_context["form"] = HttpUtils.getFormFromModel(Ip4Range)
return simple.direct_to_template(
request,
extra_context = extra_context,
template="networking/ip4/rangeCrud.html",
)
return
# return HttpResponseRedirect("/networking/ip4/")
elif request.method == "POST":
try:
instance = HttpUtils.getInstanceFromForm(request,Ip4Range)
#Create Range
Ip4Controller.createRange(instance)
return HttpResponseRedirect("/networking/ip4/")
except Exception as e:
print e
extra_context["form"] = HttpUtils.processExceptionForm(e,request,Ip4Range)
#Process creation query
#return HttpResponseRedirect("/networking/ip4/")
return simple.direct_to_template(
request,
extra_context = extra_context,
template="networking/ip4/rangeCrud.html",
)
#Show
if ((action == None) or (action==NETWORKING_ACTION_SHOW)) and (not rangeId==None):
instance = Ip4Controller.getRange(rangeId)
extra_context["range"] = instance
#return HttpResponseRedirect("/networking/ip4/")
return simple.direct_to_template(
request,
extra_context = extra_context,
template="networking/ip4/rangeDetail.html",
)
#Edit
#TODO
#Add excluded Ip
if (action == NETWORKING_ACTION_ADDEXCLUDED) and (request.method == "POST"):
if not request.method == "POST":
raise Exception("Invalid method")
try:
instance = Ip4Controller.getRange(rangeId)
extra_context["range"] = instance
#Create excluded
Ip4Controller.addExcludedIp4(instance,request)
return HttpResponseRedirect("/networking/ip4/"+rangeId)
except Exception as e:
print e
extra_context["errors"] = HttpUtils.processException(e)
pass
return simple.direct_to_template(
request,
extra_context = extra_context,
template="networking/ip4/rangeDetail.html",
)
#Release excluded Ip
if (action == NETWORKING_ACTION_REMOVEXCLUDED) and (request.method == "POST"):
try:
instance = Ip4Controller.getRange(rangeId)
#Create excluded
Ip4Controller.removeExcludedIp4(instance,ip4Id)
#FIXME: Why initial instance is not refreshed?
instance = Ip4Controller.getRange(rangeId)
extra_context["range"] = instance
return HttpResponseRedirect("/networking/ip4/"+rangeId)
except Exception as e:
print e
extra_context["errors"] = HttpUtils.processException(e)
pass
return simple.direct_to_template(
request,
extra_context = extra_context,
template="networking/ip4/rangeDetail.html",
)
#Delete
if (action == NETWORKING_ACTION_DELETE) and (request.method == "POST"):
try:
Ip4Controller.deleteRange(rangeId)
return HttpResponseRedirect("/networking/ip4/")
except Exception as e:
print e
extra_context["errors"] = HttpUtils.processException(e)
pass
extra_context["ranges"] = Ip4Controller.listRanges()
template = "networking/ip4/index.html"
return simple.direct_to_template(
request,
extra_context = extra_context,
template=template,
)
def manageEthernet(request,rangeId=None,action=None,macId=None):
if not action in NETWORKING_POSSIBLE_ACTIONS:
raise Exception("Unknown action")
#Define context
extra_context = {"section": "networking","subsection":"ethernet",}
#Add process
if (action == NETWORKING_ACTION_ADD):
if request.method == "GET":
#Show form
extra_context["form"] = HttpUtils.getFormFromModel(MacRange)
return simple.direct_to_template(
request,
extra_context = extra_context,
template="networking/ethernet/rangeCrud.html",
)
return
elif request.method == "POST":
try:
instance = HttpUtils.getInstanceFromForm(request,MacRange)
#Create Range
EthernetController.createRange(instance)
return HttpResponseRedirect("/networking/ethernet/")
except Exception as e:
print e
extra_context["form"] = HttpUtils.processExceptionForm(e,request,MacRange)
#Process creation query
return simple.direct_to_template(
request,
extra_context = extra_context,
template="networking/ethernet/rangeCrud.html",
)
#Show
if ((action == None) or (action==NETWORKING_ACTION_SHOW)) and (not rangeId==None):
instance = EthernetController.getRange(rangeId)
extra_context["range"] = instance
#return HttpResponseRedirect("/networking/ethernet/")
return simple.direct_to_template(
request,
extra_context = extra_context,
template="networking/ethernet/rangeDetail.html",
)
#Edit
#TODO
#Add excluded Mac
if (action == NETWORKING_ACTION_ADDEXCLUDED) and (request.method == "POST"):
if not request.method == "POST":
raise Exception("Invalid method")
try:
instance = EthernetController.getRange(rangeId)
extra_context["range"] = instance
#Create excluded
EthernetController.addExcludedMac(instance,request)
return HttpResponseRedirect("/networking/ethernet/"+rangeId)
except Exception as e:
print e
extra_context["errors"] = HttpUtils.processException(e)
pass
return simple.direct_to_template(
request,
extra_context = extra_context,
template="networking/ethernet/rangeDetail.html",
)
#Release excluded Mac
if (action == NETWORKING_ACTION_REMOVEXCLUDED) and (request.method == "POST"):
try:
instance = EthernetController.getRange(rangeId)
#Create excluded
#FIXME: Why initial instance is not refreshed?
EthernetController.removeExcludedMac(instance,macId)
instance = EthernetController.getRange(rangeId)
extra_context["range"] = instance
return HttpResponseRedirect("/networking/ethernet/"+rangeId)
except Exception as e:
print e
extra_context["errors"] = HttpUtils.processException(e)
pass
return simple.direct_to_template(
request,
extra_context = extra_context,
template="networking/ethernet/rangeDetail.html",
)
#Delete
if (action == NETWORKING_ACTION_DELETE) and (request.method == "POST"):
try:
EthernetController.deleteRange(rangeId)
return HttpResponseRedirect("/networking/ethernet/")
except Exception as e:
print e
extra_context["errors"] = HttpUtils.processException(e)
pass
#Listing ranges
extra_context["ranges"] = EthernetController.listRanges()
return simple.direct_to_template(
request,
extra_context = extra_context,
template = "networking/ethernet/index.html",
)
| apache-2.0 | -5,939,845,024,590,403,000 | 29.898032 | 189 | 0.721688 | false |
edx-solutions/discussion-edx-platform-extensions | social_engagement/engagement.py | 1 | 14753 | """
Business logic tier regarding social engagement scores
"""
import logging
import sys
from collections import defaultdict
from datetime import datetime
import pytz
from django.conf import settings
from django.db.models.signals import post_save, pre_save
from django.dispatch import receiver
from django.http import HttpRequest
import openedx.core.djangoapps.django_comment_common.comment_client as cc
from edx_notifications.data import NotificationMessage
from edx_notifications.lib.publisher import (get_notification_type,
publish_notification_to_user)
from edx_solutions_api_integration.utils import get_aggregate_exclusion_user_ids
from lms.djangoapps.discussion.rest_api.exceptions import (CommentNotFoundError,
ThreadNotFoundError)
from opaque_keys import InvalidKeyError
from opaque_keys.edx.keys import CourseKey
from openedx.core.djangoapps.django_comment_common.comment_client.user import get_course_social_stats
from openedx.core.djangoapps.django_comment_common.comment_client.utils import CommentClientRequestError
from requests.exceptions import ConnectionError
from xmodule.modulestore.django import modulestore
from .models import StudentSocialEngagementScore
log = logging.getLogger(__name__)
def update_course_engagement(course_id, compute_if_closed_course=False, course_descriptor=None):
"""
Compute and save engagement scores and stats for whole course.
"""
if not settings.FEATURES.get('ENABLE_SOCIAL_ENGAGEMENT', False):
return
course_key = course_id if isinstance(course_id, CourseKey) else CourseKey.from_string(course_id)
# cs_comment_service works is slash separated course_id strings
slash_course_id = str(course_key)
if not course_descriptor:
        # if the course descriptor was not passed in (as an optimization)
course_descriptor = modulestore().get_course(course_key)
if not course_descriptor:
# couldn't find course?!?
return
if not compute_if_closed_course and course_descriptor.end:
# if course is closed then don't bother. Note we can override this if we want to force update
now_utc = datetime.now(pytz.UTC)
if now_utc > course_descriptor.end:
log.info('update_user_engagement_score() is skipping because the course is closed...')
return
score_update_count = 0
try:
for user_id, social_stats in _get_course_social_stats(slash_course_id):
log.info('Updating social engagement score for user_id {} in course_key {}'.format(user_id, course_key))
current_score = _compute_social_engagement_score(social_stats)
StudentSocialEngagementScore.save_user_engagement_score(
course_key, user_id, current_score, social_stats
)
score_update_count += 1
except (CommentClientRequestError, ConnectionError) as error:
log.exception(error)
return score_update_count
def _get_course_social_stats(course_id):
""""
Yield user and user's stats for whole course from Forum API.
"""
stats = get_course_social_stats(course_id)
yield from stats.items()
def get_social_metric_points():
"""
Get custom or default social metric points.
"""
return getattr(
settings,
'SOCIAL_METRIC_POINTS',
{
'num_threads': 10,
'num_comments': 15,
'num_replies': 15,
'num_upvotes': 25,
'num_thread_followers': 5,
'num_comments_generated': 15,
}
)
def _compute_social_engagement_score(social_metrics):
"""
For a list of social_stats, compute the social score
"""
social_metric_points = get_social_metric_points()
social_total = 0
for key, val in social_metric_points.items():
social_total += social_metrics.get(key, 0) * val
return social_total
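# Worked example (added note, assuming the default SOCIAL_METRIC_POINTS above):
# a user with {'num_threads': 2, 'num_upvotes': 3} scores 2 * 10 + 3 * 25 = 95;
# metrics absent from the stats dict simply contribute 0.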
#
# Support for Notifications, these two receivers should actually be migrated into a new Leaderboard django app.
# For now, put the business logic here, but it is pretty decoupled through event signaling
# so we should be able to move these files easily when we are able to do so
#
@receiver(pre_save, sender=StudentSocialEngagementScore)
def handle_progress_pre_save_signal(sender, instance, **kwargs):
"""
Handle the pre-save ORM event on StudentSocialEngagementScore
"""
if settings.FEATURES['ENABLE_NOTIFICATIONS']:
# If notifications feature is enabled, then we need to get the user's
# rank before the save is made, so that we can compare it to
# after the save and see if the position changes
instance.presave_leaderboard_rank = StudentSocialEngagementScore.get_user_leaderboard_position(
instance.course_id,
user_id=instance.user.id,
exclude_users=get_aggregate_exclusion_user_ids(instance.course_id)
)['position']
@receiver(post_save, sender=StudentSocialEngagementScore)
def handle_progress_post_save_signal(sender, instance, **kwargs):
"""
    Handle the post-save ORM event on StudentSocialEngagementScore
"""
if settings.FEATURES['ENABLE_NOTIFICATIONS']:
        # If the notifications feature is enabled, get the user's rank after the
        # save and compare it with the rank captured in pre-save to see if the
        # leaderboard position changed
leaderboard_rank = StudentSocialEngagementScore.get_user_leaderboard_position(
instance.course_id,
user_id=instance.user.id,
exclude_users=get_aggregate_exclusion_user_ids(instance.course_id)
)['position']
if leaderboard_rank == 0:
# quick escape when user is not in the leaderboard
# which means rank = 0. Trouble is 0 < 3, so unfortunately
# the semantics around 0 don't match the logic below
return
# logic for Notification trigger is when a user enters into the Leaderboard
leaderboard_size = getattr(settings, 'LEADERBOARD_SIZE', 3)
presave_leaderboard_rank = instance.presave_leaderboard_rank if instance.presave_leaderboard_rank else sys.maxsize
if leaderboard_rank <= leaderboard_size and presave_leaderboard_rank > leaderboard_size:
try:
notification_msg = NotificationMessage(
msg_type=get_notification_type('open-edx.lms.leaderboard.engagement.rank-changed'),
namespace=str(instance.course_id),
payload={
'_schema_version': '1',
'rank': leaderboard_rank,
'leaderboard_name': 'Engagement',
}
)
#
# add in all the context parameters we'll need to
# generate a URL back to the website that will
# present the new course announcement
#
# IMPORTANT: This can be changed to msg.add_click_link() if we
# have a particular URL that we wish to use. In the initial use case,
# we need to make the link point to a different front end website
# so we need to resolve these links at dispatch time
#
notification_msg.add_click_link_params({
'course_id': str(instance.course_id),
})
publish_notification_to_user(int(instance.user.id), notification_msg)
except Exception as ex:
# Notifications are never critical, so we don't want to disrupt any
# other logic processing. So log and continue.
log.exception(ex)
def get_involved_users_in_thread(request, thread):
"""
Compute all the users involved in the children of a specific thread.
"""
params = {"thread_id": thread.id, "page_size": 100}
is_question = getattr(thread, "thread_type", None) == "question"
author_id = getattr(thread, 'user_id', None)
results = _detail_results_factory()
if is_question:
# get users of the non-endorsed comments in thread
params.update({"endorsed": False})
_get_details_for_deletion(_get_request(request, params), results=results, is_thread=True)
# get users of the endorsed comments in thread
if getattr(thread, 'has_endorsed', False):
params.update({"endorsed": True})
_get_details_for_deletion(_get_request(request, params), results=results, is_thread=True)
else:
_get_details_for_deletion(_get_request(request, params), results=results, is_thread=True)
users = results['users']
if author_id:
users[author_id]['num_upvotes'] += thread.votes.get('count', 0)
users[author_id]['num_threads'] += 1
users[author_id]['num_comments_generated'] += results['all_comments']
users[author_id]['num_thread_followers'] += thread.get_num_followers()
if thread.abuse_flaggers:
users[author_id]['num_flagged'] += 1
return users
def get_involved_users_in_comment(request, comment):
"""
Method used to extract the involved users in the comment.
This method also returns the creator of the post.
"""
params = {"page_size": 100}
comment_author_id = getattr(comment, 'user_id', None)
thread_author_id = None
if hasattr(comment, 'thread_id'):
thread_author_id = _get_author_of_thread(comment.thread_id)
results = _get_details_for_deletion(_get_request(request, params), comment.id, nested=True)
users = results['users']
if comment_author_id:
users[comment_author_id]['num_upvotes'] += comment.votes.get('count', 0)
if getattr(comment, 'parent_id', None):
# It's a reply.
users[comment_author_id]['num_replies'] += 1
else:
# It's a comment.
users[comment_author_id]['num_comments'] += 1
if comment.abuse_flaggers:
users[comment_author_id]['num_flagged'] += 1
if thread_author_id:
users[thread_author_id]['num_comments_generated'] += results['replies'] + 1
return users
def _detail_results_factory():
"""
Helper method to maintain organized result structure while getting involved users.
"""
return {
'replies': 0,
'all_comments': 0,
'users': defaultdict(lambda: defaultdict(int)),
}
def _get_users_in_thread(request):
from lms.djangoapps.discussion.rest_api.views import CommentViewSet
users = set()
response_page = 1
has_results = True
while has_results:
try:
params = {"page": response_page}
response = CommentViewSet().list(
_get_request(request, params)
)
for comment in response.data["results"]:
users.add(comment["author"])
if comment["child_count"] > 0:
users.update(_get_users_in_comment(request, comment["id"]))
has_results = response.data["pagination"]["next"]
response_page += 1
except (ThreadNotFoundError, InvalidKeyError):
return users
return users
def _get_users_in_comment(request, comment_id):
from lms.djangoapps.discussion.rest_api.views import CommentViewSet
users = set()
response_page = 1
has_results = True
while has_results:
try:
response = CommentViewSet().retrieve(_get_request(request, {"page": response_page}), comment_id)
for comment in response.data["results"]:
users.add(comment["author"])
if comment["child_count"] > 0:
users.update(_get_users_in_comment(request, comment["id"]))
has_results = response.data["pagination"]["next"]
response_page += 1
except (ThreadNotFoundError, InvalidKeyError):
return users
return users
def _get_request(incoming_request, params):
request = HttpRequest()
request.method = 'GET'
request.user = incoming_request.user
request.META = incoming_request.META.copy()
request.GET = incoming_request.GET.copy()
request.GET.update(params)
return request
def _get_author_of_comment(parent_id):
comment = cc.Comment.find(parent_id)
if comment and hasattr(comment, 'user_id'):
return comment.user_id
def _get_author_of_thread(thread_id):
thread = cc.Thread.find(thread_id)
if thread and hasattr(thread, 'user_id'):
return thread.user_id
def _get_details_for_deletion(request, comment_id=None, results=None, nested=False, is_thread=False):
"""
Get details of comment or thread and related users that are required for deletion purposes.
"""
if not results:
results = _detail_results_factory()
for page, response in enumerate(_get_paginated_results(request, comment_id, is_thread)):
if page == 0:
results['all_comments'] += response.data['pagination']['count']
if results['replies'] == 0:
results['replies'] = response.data['pagination']['count']
for comment in response.data['results']:
_extract_stats_from_comment(request, comment, results, nested)
return results
def _get_paginated_results(request, comment_id, is_thread):
"""
Yield paginated comments of comment or thread.
"""
from lms.djangoapps.discussion.rest_api.views import CommentViewSet
response_page = 1
has_next = True
while has_next:
try:
if is_thread:
response = CommentViewSet().list(_get_request(request, {"page": response_page}))
else:
response = CommentViewSet().retrieve(_get_request(request, {"page": response_page}), comment_id)
except (ThreadNotFoundError, CommentNotFoundError, InvalidKeyError):
raise StopIteration
has_next = response.data["pagination"]["next"]
response_page += 1
yield response
def _extract_stats_from_comment(request, comment, results, nested):
"""
Extract results from comment and its nested comments.
"""
user_id = comment.serializer.instance['user_id']
if not nested:
results['users'][user_id]['num_comments'] += 1
else:
results['users'][user_id]['num_replies'] += 1
results['users'][user_id]['num_upvotes'] += comment['vote_count']
if comment.serializer.instance['abuse_flaggers']:
results['users'][user_id]['num_flagged'] += 1
if comment['child_count'] > 0:
_get_details_for_deletion(request, comment['id'], results, nested=True)
| agpl-3.0 | -3,449,035,946,806,721,000 | 35.790524 | 122 | 0.638582 | false |
egnyte/gitlabform | gitlabform/gitlabform/test/test_group_shared_with.py | 1 | 5232 | import pytest
from gitlabform.gitlabform.test import (
run_gitlabform,
)
@pytest.fixture(scope="function")
def one_owner(gitlab, group, groups, users):
gitlab.add_member_to_group(group, users[0], 50)
gitlab.remove_member_from_group(group, "root")
yield group
# we are running tests with root's token, so every group is created
# with a single user - root as owner. we restore the group to
# this state here.
gitlab.add_member_to_group(group, "root", 50)
# we try to remove all users, not just those added above,
# on purpose, as more may have been added in the tests
for user in users:
gitlab.remove_member_from_group(group, user)
for share_with in groups:
gitlab.remove_share_from_group(group, share_with)
class TestGroupSharedWith:
def test__add_group(self, gitlab, group, users, groups, one_owner):
no_of_members_before = len(gitlab.get_group_members(group))
add_shared_with = f"""
projects_and_groups:
{group}/*:
group_members:
{users[0]}:
access_level: 50
group_shared_with:
{groups[0]}:
group_access_level: 30
{groups[1]}:
group_access_level: 30
"""
run_gitlabform(add_shared_with, group)
members = gitlab.get_group_members(group)
assert len(members) == no_of_members_before, members
shared_with = gitlab.get_group_shared_with(group)
assert len(shared_with) == 2
def test__remove_group(self, gitlab, group, users, groups, one_owner):
gitlab.add_share_to_group(group, groups[0], 50)
gitlab.add_share_to_group(group, groups[1], 50)
no_of_members_before = len(gitlab.get_group_members(group))
no_of_shared_with_before = len(gitlab.get_group_shared_with(group))
remove_group = f"""
projects_and_groups:
{group}/*:
enforce_group_members: true
group_members:
{users[0]}:
access_level: 50
group_shared_with:
{groups[0]}:
group_access_level: 30
"""
run_gitlabform(remove_group, group)
members = gitlab.get_group_members(group)
assert len(members) == no_of_members_before
shared_with = gitlab.get_group_shared_with(group)
assert len(shared_with) == no_of_shared_with_before - 1
assert [sw["group_name"] for sw in shared_with] == [groups[0]]
def test__not_remove_groups_with_enforce_false(
self, gitlab, group, users, groups, one_owner
):
no_of_members_before = len(gitlab.get_group_members(group))
no_of_shared_with_before = len(gitlab.get_group_shared_with(group))
setups = [
# flag explicitly set to false
f"""
projects_and_groups:
{group}/*:
enforce_group_members: false
group_members:
{users[0]}:
access_level: 50
group_shared_with: []
""",
# flag not set at all (but the default is false)
f"""
projects_and_groups:
{group}/*:
group_members:
{users[0]}:
access_level: 50
group_shared_with: []
""",
]
for setup in setups:
run_gitlabform(setup, group)
members = gitlab.get_group_members(group)
assert len(members) == no_of_members_before
members_usernames = {member["username"] for member in members}
assert members_usernames == {
f"{users[0]}",
}
shared_with = gitlab.get_group_shared_with(group)
assert len(shared_with) == no_of_shared_with_before
def test__change_group_access(self, gitlab, group, groups, users, one_owner):
change_some_users_access = f"""
projects_and_groups:
{group}/*:
group_members:
{users[0]}:
access_level: 50
group_shared_with:
{groups[0]}:
group_access_level: 30
{groups[1]}:
group_access_level: 50
"""
run_gitlabform(change_some_users_access, group)
shared_with = gitlab.get_group_shared_with(group)
for shared_with_group in shared_with:
if shared_with_group["group_name"] == f"{groups[0]}":
assert shared_with_group["group_access_level"] == 30
if shared_with_group["group_name"] == f"{groups[1]}":
assert shared_with_group["group_access_level"] == 50
def test__remove_all(self, gitlab, group, users, one_owner):
no_shared_with = f"""
projects_and_groups:
{group}/*:
enforce_group_members: true
group_members:
{users[0]}:
access_level: 50
group_shared_with: []
"""
run_gitlabform(no_shared_with, group)
shared_with = gitlab.get_group_shared_with(group)
assert len(shared_with) == 0
| mit | -4,561,875,075,274,683,400 | 30.518072 | 81 | 0.546445 | false |
arkanister/minitickets | lib/utils/html/templatetags/icons.py | 1 | 2009 | # -*- coding: utf-8 -*-
from django import template
from django.template import TemplateSyntaxError, Node
from ..icons.base import Icon
from ..tags import token_kwargs, resolve_kwargs
register = template.Library()
class IconNode(Node):
def __init__(self, _icon, kwargs=None):
super(IconNode, self).__init__()
self.icon = _icon
self.kwargs = kwargs or {}
def render(self, context):
icon = self.icon.resolve(context)
if isinstance(icon, Icon):
return icon.as_html()
attrs = resolve_kwargs(self.kwargs, context)
prefix = attrs.pop('prefix', None)
content = attrs.pop('content', None)
html_tag = attrs.pop('html_tag', None)
icon = Icon(icon, prefix=prefix, content=content,
html_tag=html_tag, attrs=attrs)
return icon.as_html()
@register.tag
def icon(parser, token):
"""
Render a HTML icon.
The tag can be given either a `.Icon` object or a name of the icon.
An optional second argument can specify the icon prefix to use.
An optional third argument can specify the icon html tag to use.
An optional fourth argument can specify the icon content to use.
Others arguments can specify any html attribute to use.
Example::
{% icon 'icon' 'kwarg1'='value1' 'kwarg2'='value2' ... %}
{% icon 'icon' 'prefix'='fa-' 'kwarg1'='value1' 'kwarg2'='value2' ... %}
{% icon 'icon' 'prefix'='fa-' 'html_tag'='b' 'kwarg1'='value1' 'kwarg2'='value2' ... %}
{% icon 'icon' 'prefix'='fa-' 'html_tag'='b' 'content'='R$' 'kwarg1'='value1' 'kwarg2'='value2' ... %}
"""
bits = token.split_contents()
try:
tag, _icon = bits.pop(0), parser.compile_filter(bits.pop(0))
except ValueError:
raise TemplateSyntaxError("'%s' must be given a icon." % bits[0])
kwargs = {}
# split optional args
if len(bits):
kwargs = token_kwargs(bits, parser)
return IconNode(_icon, kwargs=kwargs) | apache-2.0 | 8,514,085,994,507,164,000 | 29.923077 | 110 | 0.610254 | false |
isaacbernat/awis | setup.py | 1 | 1887 | from setuptools import setup, find_packages
# from codecs import open
# from os import path
# here = path.abspath(path.dirname(__file__))
# # Get the long description from the README file
# with open(path.join(here, 'README.rst'), encoding='utf-8') as f:
# long_description = f.read()
setup(
# Application name:
name="myawis",
# Version number (initial):
version="0.2.4",
# Application author details:
author="Ashim Lamichhane",
author_email="[email protected]",
# Packages
packages=['myawis'],
# data_files
data_files=[('awis', ['LICENSE.txt', 'README.rst'])],
# Include additional files into the package
include_package_data=True,
# Details
url="https://github.com/ashim888/awis",
# Keywords
keywords='python awis api call',
#
license='GNU General Public License v3.0',
description="A simple AWIS python wrapper",
long_description=open('README.rst').read(),
classifiers=[
# How mature is this project? Common values are
# 3 - Alpha
# 4 - Beta
# 5 - Production/Stable
'Development Status :: 2 - Pre-Alpha',
# Indicate who your project is intended for
'Intended Audience :: Developers',
'Topic :: Software Development :: Libraries :: Python Modules',
# Pick your license as you wish (should match "license" above)
'License :: Public Domain',
# Specify the Python versions you support here. In particular, ensure
# that you indicate whether you support Python 2, Python 3 or both.
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
],
install_requires=[
"requests",
"beautifulsoup4",
"lxml",
],
entry_points={
'console_scripts': [
'myawis=myawis:main',
],
},
)
| gpl-3.0 | -1,147,754,553,041,353,200 | 27.590909 | 77 | 0.608903 | false |
BeeeOn/server | t/gws/t1006-sensor-data-export.py | 1 | 4343 | #! /usr/bin/env python3
import config
config.import_libs()
import unittest
import websocket
import json
import time
import uuid
from gws import assureIsClosed, assureNotClosed, registerGateway, ZMQConnection
class TestSensorData(unittest.TestCase):
def setUp(self):
self.zmq = ZMQConnection(config.gws_zmq_endpoint)
self.zmq.accept(lambda m: "gateway_id" in m)
self.zmq.open()
self.ws = websocket.WebSocket()
self.ws.connect(config.gws_ws_uri)
registerGateway(self, self.ws, config.gateway_id)
event = self.zmq.pop_data()
self.assertEqual("on-connected", event["event"])
self.assertEqual(config.gateway_id, event["gateway_id"])
def tearDown(self):
self.ws.close()
try:
event = self.zmq.pop_data()
self.assertEqual("on-disconnected", event["event"])
self.assertEqual(config.gateway_id, event["gateway_id"])
finally:
self.zmq.close()
"""
Server just confirms that it received valid sensor data message,
but nothing more can be determined from its response
"""
def test1_export_successful(self):
id = str(uuid.uuid4())
timestamp = int(time.time() * 1000000)
msg = json.dumps(
{
"message_type" : "sensor_data_export",
"id" : id,
"data" : [
{
"device_id" : "0xa32d27aa5e94ecfd",
"timestamp" : timestamp,
"values": [
{
"module_id" : "0",
"value" : 30.0,
"valid" : "true"
},
{
"module_id" : "1",
"valid" : "false"
},
{
"module_id" : "2",
"value" : 60.0,
"valid" : "true"
}
]
}
]
}
)
self.ws.send(msg)
msg = json.loads(self.ws.recv())
self.assertEqual("sensor_data_confirm", msg["message_type"])
self.assertEqual(id, msg["id"])
assureNotClosed(self, self.ws)
event = self.zmq.pop_data()
self.assertEqual("on-sensor-data", event["event"])
self.assertEqual(config.gateway_id, event["gateway_id"])
self.assertEqual("0xa32d27aa5e94ecfd", event["device_id"])
self.assertEqual(timestamp, event["timestamp"])
self.assertEqual(30, event["data"]["0"])
self.assertIsNone(event["data"]["1"])
self.assertEqual(60, event["data"]["2"])
"""
Even if we send an invalid export message, we get just "confirm" response.
This test is semi-automatic, it requires to check the server log.
"""
def test2_export_fails_due_to_unexisting_device(self):
id = str(uuid.uuid4())
msg = json.dumps(
{
"message_type" : "sensor_data_export",
"id" : id,
"data" : [
{
"device_id" : "0xa32d275555555555",
"timestamp" : 0,
"values": [
{
"module_id" : "5",
"value" : -1230.0,
"valid" : "true"
},
{
"module_id" : "22",
"valid" : "false"
},
{
"module_id" : "89",
"value" : 3460.132,
"valid" : "true"
}
]
}
]
}
)
self.ws.send(msg)
msg = json.loads(self.ws.recv())
self.assertEqual("sensor_data_confirm", msg["message_type"])
self.assertEqual(id, msg["id"])
assureNotClosed(self, self.ws)
"""
Send conflicting data (same timestamp). We cannot test anything there
automatically. But it allows at least a semi-automatic test.
"""
def test3_export_fails_due_to_conflicts(self):
id = str(uuid.uuid4())
timestamp = int(time.time() * 1000000)
msg = json.dumps(
{
"message_type" : "sensor_data_export",
"id" : id,
"data" : [
{
"device_id" : "0xa32d27aa5e94ecfd",
"timestamp" : timestamp,
"values": [
{
"module_id" : "0",
"value" : 30.0,
"valid" : "true"
},
{
"module_id" : "0",
"valid" : "false"
}
]
}
]
}
)
self.ws.send(msg)
msg = json.loads(self.ws.recv())
self.assertEqual("sensor_data_confirm", msg["message_type"])
self.assertEqual(id, msg["id"])
assureNotClosed(self, self.ws)
event = self.zmq.pop_data()
self.assertEqual("on-sensor-data", event["event"])
self.assertEqual(config.gateway_id, event["gateway_id"])
self.assertEqual("0xa32d27aa5e94ecfd", event["device_id"])
self.assertEqual(timestamp, event["timestamp"])
self.assertEqual(30, event["data"]["0"])
if __name__ == '__main__':
import sys
import taprunner
unittest.main(testRunner=taprunner.TAPTestRunner(stream = sys.stdout))
| bsd-3-clause | -1,322,154,437,284,042,500 | 22.603261 | 79 | 0.596822 | false |
google-research/google-research | kws_streaming/models/lstm.py | 1 | 3941 | # coding=utf-8
# Copyright 2021 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""LSTM with Mel spectrum and fully connected layers."""
from kws_streaming.layers import lstm
from kws_streaming.layers import modes
from kws_streaming.layers import speech_features
from kws_streaming.layers import stream
from kws_streaming.layers.compat import tf
import kws_streaming.models.model_utils as utils
def model_parameters(parser_nn):
"""LSTM model parameters."""
parser_nn.add_argument(
'--lstm_units',
type=str,
default='500',
help='Output space dimensionality of lstm layer ',
)
parser_nn.add_argument(
'--return_sequences',
type=str,
default='0',
help='Whether to return the last output in the output sequence,'
'or the full sequence',
)
parser_nn.add_argument(
'--stateful',
type=int,
default='1',
help='If True, the last state for each sample at index i'
'in a batch will be used as initial state for the sample '
'of index i in the following batch',
)
parser_nn.add_argument(
'--num_proj',
type=str,
default='200',
help='The output dimensionality for the projection matrices.',
)
parser_nn.add_argument(
'--use_peepholes',
type=int,
default='1',
help='True to enable diagonal/peephole connections',
)
parser_nn.add_argument(
'--dropout1',
type=float,
default=0.3,
help='Percentage of data dropped',
)
parser_nn.add_argument(
'--units1',
type=str,
default='',
help='Number of units in the last set of hidden layers',
)
parser_nn.add_argument(
'--act1',
type=str,
default='',
help='Activation function of the last set of hidden layers',
)
def model(flags):
"""LSTM model.
Similar model in papers:
Convolutional Recurrent Neural Networks for Small-Footprint Keyword Spotting
https://arxiv.org/pdf/1703.05390.pdf (with no conv layer)
Model topology is similar with "Hello Edge: Keyword Spotting on
Microcontrollers" https://arxiv.org/pdf/1711.07128.pdf
Args:
flags: data/model parameters
Returns:
Keras model for training
"""
input_audio = tf.keras.layers.Input(
shape=modes.get_input_data_shape(flags, modes.Modes.TRAINING),
batch_size=flags.batch_size)
net = input_audio
if flags.preprocess == 'raw':
# it is a self contained model, user need to feed raw audio only
net = speech_features.SpeechFeatures(
speech_features.SpeechFeatures.get_params(flags))(
net)
for units, return_sequences, num_proj in zip(
utils.parse(flags.lstm_units), utils.parse(flags.return_sequences),
utils.parse(flags.num_proj)):
net = lstm.LSTM(
units=units,
return_sequences=return_sequences,
stateful=flags.stateful,
use_peepholes=flags.use_peepholes,
num_proj=num_proj)(
net)
net = stream.Stream(cell=tf.keras.layers.Flatten())(net)
net = tf.keras.layers.Dropout(rate=flags.dropout1)(net)
for units, activation in zip(
utils.parse(flags.units1), utils.parse(flags.act1)):
net = tf.keras.layers.Dense(units=units, activation=activation)(net)
net = tf.keras.layers.Dense(units=flags.label_count)(net)
if flags.return_softmax:
net = tf.keras.layers.Activation('softmax')(net)
return tf.keras.Model(input_audio, net)
| apache-2.0 | 9,010,055,643,208,554,000 | 30.031496 | 78 | 0.678508 | false |
Hybrid-Cloud/conveyor | conveyor/tests/unit/clone/drivers/openstack/test_driver.py | 1 | 5751 | # Copyright 2016 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from conveyor.clone.drivers import driver as base_driver
from conveyor.clone.drivers.openstack import driver
from conveyor.clone.resources import common
from conveyor.common import config
from conveyor.conveyoragentclient.v1 import client as birdiegatewayclient
from conveyor.conveyorheat.api import api
from conveyor.resource import resource
from conveyor.tests import test
from conveyor.tests.unit import fake_constants
from conveyor import context
from conveyor import utils
CONF = config.CONF
class OpenstackDriverTestCase(test.TestCase):
def setUp(self):
super(OpenstackDriverTestCase, self).setUp()
self.context = context.RequestContext('fake', 'fake', is_admin=False)
self.manager = driver.OpenstackDriver()
def test_handle_resources(self):
pass
@mock.patch.object(base_driver.BaseDriver, '_handle_dv_for_svm')
def test_add_extra_properties_for_server(self, mock_svm):
template = fake_constants.FAKE_INSTANCE_TEMPLATE['template']
template['resources']['server_0']['extra_properties'].pop('gw_url')
res_map = {}
for key, value in template['resources'].items():
res_map[key] = resource.Resource.from_dict(value)
undo_mgr = utils.UndoManager()
utils.get_next_vgw = mock.MagicMock()
utils.get_next_vgw.return_value = ('123', '10.0.0.1')
self.assertEqual(
None,
self.manager.add_extra_properties_for_server(
self.context, res_map['server_0'], res_map,
False, True, undo_mgr))
@mock.patch.object(base_driver.BaseDriver, '_handle_dv_for_svm')
def test_add_extra_properties_for_server_with_active(self, mock_svm):
template = fake_constants.FAKE_INSTANCE_TEMPLATE['template']
template['resources']['server_0']['extra_properties'].pop('gw_url')
template['resources']['server_0']['extra_properties']['vm_state'] = \
'active'
res_map = {}
for key, value in template['resources'].items():
res_map[key] = resource.Resource.from_dict(value)
undo_mgr = utils.UndoManager()
utils.get_next_vgw = mock.MagicMock()
utils.get_next_vgw.return_value = ('123', '10.0.0.1')
self.assertEqual(
None,
self.manager.add_extra_properties_for_server(
self.context, res_map['server_0'], res_map,
False, True, undo_mgr))
def test_add_extra_properties_for_stack(self):
undo_mgr = utils.UndoManager()
template = fake_constants.FAKE_PLAN['updated_resources']
stack = resource.Resource.from_dict(template['stack_0'])
self.manager.heat_api.get_resource = mock.MagicMock()
self.manager.heat_api.get_resource.return_value = \
api.Resource(api.format_resource(fake_constants.FAKE_RESOURCE))
self.manager.compute_api.get_server = mock.MagicMock()
self.manager.compute_api.get_server.return_value = \
{'OS-EXT-STS:vm_state': 'active'}
self.assertEqual(
None,
self.manager.add_extra_properties_for_stack(
self.context, stack, False, True, undo_mgr
))
@mock.patch.object(base_driver.BaseDriver, '_wait_for_volume_status')
@mock.patch.object(birdiegatewayclient, 'get_birdiegateway_client')
def test_handle_server_after_clone(self, mock_client, mock_wait):
template = \
fake_constants.FAKE_INSTANCE_TEMPLATE['template']['resources']
template['volume_1']['extra_properties']['sys_clone'] = True
self.manager.compute_api.migrate_interface_detach = mock.MagicMock()
self.manager.compute_api.migrate_interface_detach.return_value = None
mock_client.return_value = birdiegatewayclient.Client()
mock_client.return_value.vservices._force_umount_disk = \
mock.MagicMock()
mock_client.return_value.vservices._force_umount_disk.return_value = \
None
self.manager.compute_api.stop_server = mock.MagicMock()
self.manager.compute_api.stop_server.return_value = None
self.manager.compute_api.detach_volume = mock.MagicMock()
self.manager.compute_api.detach_volume.return_value = None
common.ResourceCommon._await_instance_status = mock.MagicMock()
common.ResourceCommon._await_instance_status.return_value = None
self.manager.compute_api.attach_volume = mock.MagicMock()
self.manager.compute_api.attach_volume.return_value = None
self.manager.compute_api.start_server = mock.MagicMock()
self.manager.compute_api.start_server.return_value = None
self.assertEqual(
None,
self.manager.handle_server_after_clone(
self.context, template['server_0'], template
))
def test_handle_stack_after_clone(self):
template = \
fake_constants.FAKE_PLAN['updated_resources']['stack_0']
self.assertEqual(
None,
self.manager.handle_stack_after_clone(
self.context, template, {}
))
| apache-2.0 | 7,160,822,864,810,321,000 | 43.929688 | 78 | 0.657799 | false |
fake-name/ReadableWebProxy | WebMirror/management/GravityTalesManage.py | 1 | 1202 |
import calendar
import datetime
import json
import os
import os.path
import shutil
import traceback
from concurrent.futures import ThreadPoolExecutor
import urllib.error
import urllib.parse
from sqlalchemy import and_
from sqlalchemy import or_
import sqlalchemy.exc
from sqlalchemy_continuum_vendored.utils import version_table
if __name__ == "__main__":
import logSetup
logSetup.initLogging()
import common.database as db
import common.Exceptions
import common.management.file_cleanup
import Misc.HistoryAggregator.Consolidate
import flags
import pprint
import config
from config import C_RAW_RESOURCE_DIR
import WebMirror.OutputFilters.rss.FeedDataParser
def exposed_delete_gravitytales_bot_blocked_pages():
'''
Delete the "checking you're not a bot" garbage pages
that sometimes get through the gravitytales scraper.
'''
with db.session_context() as sess:
tables = [
db.WebPages.__table__,
version_table(db.WebPages.__table__)
]
for ctbl in tables:
update = ctbl.delete() \
.where(ctbl.c.netloc == "gravitytales.com") \
.where(ctbl.c.content.like('%<div id="bot-alert" class="alert alert-info">%'))
print(update)
sess.execute(update)
sess.commit()
| bsd-3-clause | 2,929,935,509,704,662,000 | 21.259259 | 82 | 0.75624 | false |
McIntyre-Lab/papers | newman_t1d_cases_2017/scripts/bwa_sam_parse.py | 1 | 2304 | #!/usr/bin/env python
import argparse
## This script parses a sam file from BWA-MEM and outputs a log of alignment counts and percentages.
# Parse command line arguments
parser = argparse.ArgumentParser(description='Parse sam file to get alignment counts.')
parser.add_argument('-sam','--sam_file',dest='sam', action='store', required=True, help='A Sam file to parse [Required]')
parser.add_argument('-o','--out', dest='out', action='store', required=True, help='Output file for alignment log [Required]')
args = parser.parse_args()
flags=list()
# Open sam file and create a list that contains only the second column from the sam file, (the bitwise flags).
with open(args.sam,'r') as sam:
for line in sam.readlines():
cols=line.split('\t')
flags.append(cols[1])
# Count the flags. These flags are based on BWA sam output, may not be the same for other aligners.
# The flags are different for paired data. There is another python script 'bwa_sam_parse_se.py' for single-end alignments.
unaln=flags.count('77') + flags.count('141') + flags.count('181') + flags.count('121') + flags.count('133') + flags.count('117') + flags.count('69')
aln=flags.count('99') + flags.count('73') + flags.count('185') + flags.count('147') + flags.count('83') + flags.count('163') + flags.count('97') + flags.count('137') + flags.count('145') + flags.count('81') + flags.count('161')+ flags.count('177') + flags.count('113') + flags.count('65') + flags.count('129')
ambig=flags.count('337') + flags.count('417') + flags.count('369') + flags.count('433') + flags.count('353') + flags.count('401') + flags.count('371')+ flags.count('355') + flags.count('403') + flags.count('419') + flags.count('339') + flags.count('387') + flags.count('385') + flags.count('323') + flags.count('435') + flags.count('321')
total = unaln + aln
# Get percentages
percent_aln = float (aln) / (total) * 100
percent_unaln = float (unaln) / (total) * 100
percent_ambig = float (ambig) / (total) * 100
# Write the counts to the output.
with open(args.out,'w') as dataout:
dataout.write('Total reads '+str(total)+'\nAligned '+str(aln)+'\nUnaligned '+str(unaln)+'\nAmbiguous '+str(ambig)+'\nPercent aligned '+str(percent_aln)+'\nPercent unaligned '+str(percent_unaln)+'\nPercent ambiguous '+str(percent_ambig))
| lgpl-3.0 | -7,280,314,419,809,801,000 | 52.581395 | 338 | 0.680122 | false |
Alignak-monitoring-contrib/alignak-app | test/test_panel_widget.py | 1 | 9176 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2015-2018:
# Matthieu Estrada, [email protected]
#
# This file is part of (AlignakApp).
#
# (AlignakApp) is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# (AlignakApp) is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with (AlignakApp). If not, see <http://www.gnu.org/licenses/>.
import sys
import unittest2
from PyQt5.Qt import QApplication, QItemSelectionModel
from alignak_app.utils.config import settings
from alignak_app.backend.datamanager import data_manager
from alignak_app.items.host import Host
from alignak_app.items.service import Service
from alignak_app.qobjects.panel import PanelQWidget
class TestPanelQWidget(unittest2.TestCase):
"""
This file test the PanelQWidget class.
"""
settings.init_config()
# Host data test
host_list = []
for i in range(0, 10):
host = Host()
host.create(
'_id%d' % i,
{
'name': 'host%d' % i,
'alias': 'Host %d' % i,
'_id': '_id%d' % i,
'ls_downtimed': False,
'ls_acknowledged': False,
'ls_state': 'UNREACHABLE',
'ls_output': 'output host %d' % i,
'ls_last_check': '',
'_realm': '59c4e38535d17b8dcb0bed42',
'address': '127.0.0.1',
'business_impact': '2',
'notes': 'host notes',
'passive_checks_enabled': False,
'active_checks_enabled': True,
'_overall_state_id': 1,
'customs': {}
},
'host%d' % i
)
host_list.append(host)
# Service data test
service_list = []
for i in range(0, 10):
service = Service()
service.create(
'_id%d' % i,
{
'name': 'service%d' % i,
'alias': 'Service %d' % i,
'host': '_id%d' % i,
'_id': '_id%d' % i,
'ls_acknowledged': False,
'ls_downtimed': False,
'ls_state': 'CRITICAL',
'ls_output': 'output host %d' % i,
'aggregation': 'disk',
'_overall_state_id': 4,
'passive_checks_enabled': False,
'active_checks_enabled': True,
},
'service%d' % i
)
service_list.append(service)
@classmethod
def setUpClass(cls):
"""Create QApplication"""
try:
cls.app = QApplication(sys.argv)
except:
pass
def test_create_widget(self):
"""Inititalize PanelQWidget"""
# Add problems
data_manager.update_database('host', self.host_list)
data_manager.database['problems'] = []
for item in self.host_list:
data_manager.database['problems'].append(item)
for item in self.service_list:
data_manager.database['problems'].append(item)
for item in self.host_list:
assert 'host' in item.item_type
under_test = PanelQWidget()
self.assertIsNotNone(under_test.layout)
self.assertIsNotNone(under_test.dashboard_widget)
self.assertIsNotNone(under_test.synthesis_widget)
self.assertIsNotNone(under_test.spy_widget)
self.assertFalse(under_test.hostnames_list)
under_test.initialize()
self.assertIsNotNone(under_test.layout)
self.assertIsNotNone(under_test.dashboard_widget)
self.assertIsNotNone(under_test.synthesis_widget)
self.assertIsNotNone(under_test.spy_widget)
self.assertEqual(
['host0', 'host1', 'host2', 'host3', 'host4', 'host5',
'host6', 'host7', 'host8', 'host9'],
under_test.hostnames_list
)
def test_spy_host(self):
"""Panel Add Spy Host"""
# init_event_widget()
under_test = PanelQWidget()
under_test.initialize()
# Host is not in hostname_list
under_test.synthesis_widget.line_search.setText('no_host')
under_test.spy_host()
spy_index = under_test.get_tab_order().index('s')
self.assertTrue(under_test.synthesis_widget.host_widget.spy_btn.isEnabled())
self.assertEqual('Spy Hosts', under_test.tab_widget.tabText(spy_index))
# Host Id is not added in spied_hosts of SpyQWidget.SpyQListWidget
self.assertFalse('_id0' in under_test.spy_widget.spy_list_widget.spied_hosts)
def test_update_panels(self):
"""Update QTabPanel Problems"""
data_manager.database['problems'] = []
data_manager.update_database('host', self.host_list)
for item in self.host_list:
data_manager.database['problems'].append(item)
for item in self.service_list:
data_manager.database['problems'].append(item)
under_test = PanelQWidget()
under_test.initialize()
# 20 problems for CRITICAL services and UNREACHABLE hosts
problems_index = under_test.get_tab_order().index('p')
self.assertEqual('Problems (20)', under_test.tab_widget.tabText(problems_index))
# Remove a service from problems
data_manager.database['problems'].remove(self.service_list[0])
under_test.tab_widget.widget(problems_index).update_problems_data()
# There are only 9 services in CRITICAL condition
self.assertEqual('Problems (19)', under_test.tab_widget.tabText(problems_index))
def test_display_host(self):
"""Display Host in Panel"""
under_test = PanelQWidget()
under_test.initialize()
self.assertTrue(under_test.synthesis_widget.host_widget.spy_btn.isEnabled())
self.assertEqual(
'Host Synthesis',
under_test.tab_widget.tabText(
under_test.tab_widget.indexOf(under_test.synthesis_widget))
)
under_test.display_host()
# Host is not spied, so button is enable
self.assertTrue(under_test.synthesis_widget.host_widget.spy_btn.isEnabled())
# No customs, so button is not enabled
self.assertTrue(under_test.synthesis_widget.host_widget.customs_btn.isEnabled())
# Host and Services Qwidgets are hidden
self.assertTrue(under_test.synthesis_widget.host_widget.isHidden())
self.assertTrue(under_test.synthesis_widget.services_widget.isHidden())
# Hint QWidget is shown
self.assertFalse(under_test.synthesis_widget.hint_widget.isHidden())
self.assertEqual(
'Host Synthesis',
under_test.tab_widget.tabText(
under_test.tab_widget.indexOf(under_test.synthesis_widget))
)
under_test.synthesis_widget.line_search.setText(self.host_list[0].name)
under_test.display_host()
# Host is not spied, so button is enable
self.assertTrue(under_test.synthesis_widget.host_widget.spy_btn.isEnabled())
# No customs, so button is not enabled
self.assertFalse(under_test.synthesis_widget.host_widget.customs_btn.isEnabled())
# Host and Services Qwidgets are displayed
self.assertFalse(under_test.synthesis_widget.host_widget.isHidden())
self.assertFalse(under_test.synthesis_widget.services_widget.isHidden())
# Hint QWidget is hidden
self.assertTrue(under_test.synthesis_widget.hint_widget.isHidden())
self.assertEqual(
'Host "Host 0"',
under_test.tab_widget.tabText(
under_test.tab_widget.indexOf(under_test.synthesis_widget))
)
def test_set_host_from_problems(self):
"""Set Host in Panel from Problems QWidget"""
under_test = PanelQWidget()
under_test.initialize()
self.assertEqual('', under_test.synthesis_widget.line_search.text())
self.assertIsNone(under_test.problems_widget.get_current_user_role_item())
# Make an item as current in problems table
under_test.problems_widget.problems_table.update_view({'problems': [self.host_list[8]]})
index_test = under_test.problems_widget.problems_table.model().index(0, 0)
under_test.problems_widget.problems_table.selectionModel().setCurrentIndex(
index_test,
QItemSelectionModel.SelectCurrent
)
self.assertIsNotNone(under_test.problems_widget.get_current_user_role_item())
self.assertEqual('', under_test.synthesis_widget.line_search.text())
under_test.set_host_from_problems()
# Host is set in line search
self.assertEqual('host8', under_test.synthesis_widget.line_search.text())
| agpl-3.0 | -1,336,474,038,663,556,400 | 35.125984 | 96 | 0.612249 | false |
golharam/rgtools | scripts/galaxy/api/addFilesToLibrary.py | 1 | 4458 | #!/usr/bin/env python
"""
Author: Ryan Golhar <[email protected]>
Date: 06/24/15
This script adds readme.txt, *.fastq.gz to a Galaxy Library
Usage: addFilesToLibrary [-h] [--api-key <API_KEY>] [--api-url <API_URL>] <path of directory to scan> <library_name>
"""
import ConfigParser
import os
import argparse
import sys
from common import display
from common import submit
import re
from bioblend import galaxy
import time
_debug = 1
_filesUploaded = 0
def uploadFile(fileToUpload, galaxyInstance, galaxyLibrary, destFolder = '/'):
# Note: Right now, Galaxy strips .gz files of .gz. So when searching of files, make sure to compare to data_set file_name
libraryContents = galaxyInstance.libraries.show_library(galaxyLibrary['id'], contents = True)
# Get the folder
galaxyFolder_id = None
for libraryEntry in libraryContents:
if libraryEntry['name'] == destFolder and libraryEntry['type'] == 'folder':
galaxyFolder_id = libraryEntry['id']
break
# Make sure the file doesn't exist in the destFolder
for libraryEntry in libraryContents:
if libraryEntry['type'] == 'file':
dataset = galaxyInstance.libraries.show_dataset(galaxyLibrary['id'], libraryEntry['id'])
if fileToUpload == dataset['file_name']:
print "File already exists in library: %s. Skipping." % libraryEntry['name']
return
# Upload file
if os.access(fileToUpload, os.R_OK):
print "Uploading file %s -> %s:%s" % (fileToUpload, galaxyLibrary['name'], destFolder)
result = galaxyInstance.libraries.upload_from_galaxy_filesystem(galaxyLibrary['id'], fileToUpload, galaxyFolder_id, file_type='fastq', link_data_only='link_to_files')
print result
global _filesUploaded
_filesUploaded = _filesUploaded+1
else:
print "%s is not accessbile" % fileToUpload
def main():
if _debug == 1:
print 'Galaxy API URL: %s' % args.api_url
print 'Galaxy API Key: %s' % args.api_key
print 'Path to Upload: %s' % args.pathToUpload
print 'Library: %s' % args.library
# 1. Make sure Galaxy library exist
# 2. Scan path for readme.txt and *.fastq.gz and upload files to library
# 1.
gi = galaxy.GalaxyInstance(url=args.api_url, key=args.api_key)
galaxyLibraries = gi.libraries.get_libraries(name=args.library, deleted=False)
for library in galaxyLibraries:
if library['deleted'] == False:
galaxyLibrary = library
if galaxyLibrary == None:
print "library %s not found" % args.library
exit(-1)
# 2. Scan the path for readme.txt, *.fastq.gz, *.fq.gz and upload to library
if os.path.isfile(args.pathToUpload):
uploadFile(args.pathToUpload, gi, galaxyLibrary)
elif os.path.isdir(args.pathToUpload):
# if args.pathToUpload.endswith('/'):
# Upload files in directory to dest
# 3. Scan the directory for *.fastq.gz and add each file
for root, dirs, files in os.walk(args.pathToUpload):
#for dir in dirs:
for file in files:
if (file.endswith('.gz')):
fileToUpload = os.path.join(root, file)
uploadFile(fileToUpload, gi, galaxyLibrary)
# else:
# Upload directory and contents
# print "make directory and upload to directory"
print "Uploaded %s files." % _filesUploaded
if __name__ == '__main__':
# Get defaults from ~/.galaxy.ini
config = ConfigParser.RawConfigParser()
if os.path.exists(os.path.expanduser("~/.galaxy.ini")):
config.read(os.path.expanduser("~/.galaxy.ini"))
_api_key = config.get('default', 'api_key')
_api_url = config.get('default', 'api_url')
else:
_api_key = None
_api_url = None
# Parse Command-Line Arguments
parser = argparse.ArgumentParser()
parser.add_argument('--api-url', help="Galaxy URL", default=_api_url)
parser.add_argument('--api-key', help="User's Galaxy Key", default=_api_key)
parser.add_argument('pathToUpload', help="File or Directory to upload")
parser.add_argument('library', help="Name of Library to add data to")
args = parser.parse_args()
# Do work
main()
| lgpl-3.0 | -6,479,788,889,856,856,000 | 38.451327 | 174 | 0.61799 | false |
sbg/sevenbridges-python | sevenbridges/meta/collection.py | 1 | 4097 | from sevenbridges.errors import PaginationError, SbgError
from sevenbridges.models.compound.volumes.volume_object import VolumeObject
from sevenbridges.models.compound.volumes.volume_prefix import VolumePrefix
from sevenbridges.models.link import Link, VolumeLink
class Collection(list):
"""
Wrapper for SevenBridges pageable resources.
Among the actual collection items it contains information regarding
the total number of entries available in on the server and resource href.
"""
resource = None
def __init__(self, resource, href, total, items, links, api):
super().__init__(items)
self.resource = resource
self.href = href
self.links = links
self._items = items
self._total = total
self._api = api
@property
def total(self):
return int(self._total)
def all(self):
"""
Fetches all available items.
:return: Collection object.
"""
page = self._load(self.href)
while True:
try:
for item in page._items:
yield item
page = page.next_page()
except PaginationError:
break
def _load(self, url):
if self.resource is None:
raise SbgError('Undefined collection resource.')
else:
response = self._api.get(url, append_base=False)
data = response.json()
total = response.headers['x-total-matching-query']
items = [
self.resource(api=self._api, **group)
for group in data['items']
]
links = [Link(**link) for link in data['links']]
href = data['href']
return Collection(
resource=self.resource, href=href, total=total,
items=items, links=links, api=self._api
)
def next_page(self):
"""
Fetches next result set.
:return: Collection object.
"""
for link in self.links:
if link.rel.lower() == 'next':
return self._load(link.href)
raise PaginationError('No more entries.')
def previous_page(self):
"""
Fetches previous result set.
:return: Collection object.
"""
for link in self.links:
if link.rel.lower() == 'prev':
return self._load(link.href)
raise PaginationError('No more entries.')
def __repr__(self):
return (
f'<Collection: total={self.total}, available={len(self._items)}>'
)
class VolumeCollection(Collection):
def __init__(self, href, items, links, prefixes, api):
super().__init__(
VolumeObject, href, 0, items, links, api)
self.prefixes = prefixes
@property
def total(self):
return -1
def next_page(self):
"""
Fetches next result set.
:return: VolumeCollection object.
"""
for link in self.links:
if link.next:
return self._load(link.next)
raise PaginationError('No more entries.')
def previous_page(self):
raise PaginationError('Cannot paginate backwards')
def _load(self, url):
if self.resource is None:
raise SbgError('Undefined collection resource.')
else:
response = self._api.get(url, append_base=False)
data = response.json()
items = [
self.resource(api=self._api, **group) for group in
data['items']
]
prefixes = [
VolumePrefix(api=self._api, **prefix) for prefix in
data['prefixes']
]
links = [VolumeLink(**link) for link in data['links']]
href = data['href']
return VolumeCollection(
href=href, items=items, links=links,
prefixes=prefixes, api=self._api
)
def __repr__(self):
return f'<VolumeCollection: items={len(self._items)}>'
| apache-2.0 | -4,023,204,058,258,332,700 | 30.037879 | 77 | 0.547962 | false |
RNAcentral/rnacentral-webcode | rnacentral/portal/management/commands/update_example_locations.py | 1 | 4551 | """
Copyright [2009-2017] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from __future__ import print_function
from django.core.management.base import BaseCommand
from portal.models import EnsemblAssembly
from portal.models import SequenceRegion
example_locations = {
'homo_sapiens': {
'chromosome': 'X',
'start': 73819307,
'end': 73856333,
},
'mus_musculus': {
'chromosome': 1,
'start': 86351908,
'end': 86352200,
},
'danio_rerio': {
'chromosome': 9,
'start': 7633910,
'end': 7634210,
},
'bos_taurus': {
'chromosome': 15,
'start': 82197673,
'end': 82197837,
},
'rattus_norvegicus': {
'chromosome': 'X',
'start': 118277628,
'end': 118277850,
},
'felis_catus': {
'chromosome': 'X',
'start': 18058223,
'end': 18058546,
},
'macaca_mulatta': {
'chromosome': 1,
'start': 146238837,
'end': 146238946,
},
'pan_troglodytes': {
'chromosome': 11,
'start': 78369004,
'end': 78369219,
},
'canis_familiaris': {
'chromosome': 19,
'start': 22006909,
'end': 22007119,
},
'gallus_gallus': {
'chromosome': 9,
'start': 15676031,
'end': 15676160,
},
'xenopus_tropicalis': {
'chromosome': 'NC_006839',
'start': 11649,
'end': 11717,
},
'saccharomyces_cerevisiae': {
'chromosome': 'XII',
'start': 856709,
'end': 856919,
},
'schizosaccharomyces_pombe': {
'chromosome': 'I',
'start': 540951,
'end': 544327,
},
'triticum_aestivum': {
'chromosome': '6A',
'start': 100656614,
'end': 100656828,
},
'caenorhabditis_elegans': {
'chromosome': 'III',
'start': 11467363,
'end': 11467705,
},
'drosophila_melanogaster': {
'chromosome': '3R',
'start': 7474331,
'end': 7475217,
},
'bombyx_mori': {
'chromosome': 'scaf16',
'start': 6180018,
'end': 6180422,
},
'anopheles_gambiae': {
'chromosome': '2R',
'start': 34644956,
'end': 34645131,
},
'dictyostelium_discoideum': {
'chromosome': 2,
'start': 7874546,
'end': 7876498,
},
'plasmodium_falciparum': {
'chromosome': 13,
'start': 2796339,
'end': 2798488,
},
'arabidopsis_thaliana': {
'chromosome': 2,
'start': 18819643,
'end': 18822629,
}
}
def update_example_locations():
"""
"""
for assembly in EnsemblAssembly.objects.filter().all():
print(assembly.ensembl_url)
if assembly.ensembl_url in example_locations:
assembly.example_chromosome = example_locations[assembly.ensembl_url]['chromosome']
assembly.example_start = example_locations[assembly.ensembl_url]['start']
assembly.example_end = example_locations[assembly.ensembl_url]['end']
assembly.save()
continue
try:
region = SequenceRegion.objects.filter(assembly_id=assembly.assembly_id).all()[:1].get()
assembly.example_chromosome = region.chromosome
assembly.example_start = region.region_start
assembly.example_end = region.region_stop
print('\t', assembly.assembly_id, region.chromosome, region.region_start, region.region_stop)
assembly.save()
except SequenceRegion.DoesNotExist:
print('No regions found {}'.format(assembly.ensembl_url))
except SequenceRegion.MultipleObjectsReturned:
print('Multiple assemblies found {}'.format(assembly.ensembl_url))
class Command(BaseCommand):
"""
Usage:
python manage.py update_example_locations
"""
def handle(self, *args, **options):
"""Main function, called by django."""
update_example_locations()
| apache-2.0 | -2,314,792,031,606,948,400 | 27.622642 | 105 | 0.5735 | false |
mikelum/pyspeckit | pyspeckit/spectrum/readers/read_class.py | 1 | 67070 | """
------------------------
GILDAS CLASS file reader
------------------------
Read a CLASS file into an :class:`pyspeckit.spectrum.ObsBlock`
"""
from __future__ import print_function
from astropy.extern.six.moves import xrange
from astropy.extern.six import iteritems
try:
import astropy.io.fits as pyfits
except ImportError:
import pyfits
import numpy
import numpy as np
from numpy import pi
from astropy import log
# from astropy.time import Time
from astropy import units as u
import pyspeckit
import sys
import re
try:
from astropy.utils.console import ProgressBar
except ImportError:
ProgressBar = lambda x: None
ProgressBar.update = lambda x: None
import struct
import time
# keep a reference to the builtin: 'range' is reused as a keyword (argument name)
# further down in this module
irange = range
def print_timing(func):
"""
Prints execution time of decorated function.
Included here because CLASS files can take a little while to read;
this should probably be replaced with a progressbar
"""
    def wrapper(*args, **kwargs):
        t1 = time.time()
        res = func(*args, **kwargs)
        t2 = time.time()
        # use func.__name__ rather than the Python-2-only func.func_name so the
        # decorator works under both Python 2 and 3
        log.info('%s took %0.5g s' % (func.__name__, (t2-t1)))
return res
wrapper.__doc__ = func.__doc__
return wrapper
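# Usage sketch for the decorator above (the function name here is made up; in this
# module the decorated functions are the slow, file-level readers defined further
# down):
#
#     @print_timing
#     def my_class_reader(filename):
#         ...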
""" Specification: http://iram.fr/IRAMFR/GILDAS/doc/html/class-html/node58.html """
filetype_dict = {'1A ':'Multiple_IEEE','1 ':'Multiple_Vax','1B ':'Multiple_EEEI',
'2A ':'v2','2 ':'v2','2B ':'v2',
'9A ':'Single_IEEE','9 ':'Single_Vax','9B ':'Single_EEEI'}
fileversion_dict = {'1A ':'v1',
'2A ':'v2'}
record_lengths = {'1A': 512,
'2A': 1024*4}
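# Illustrative sketch of how the tables above come into play (not used by the
# reader itself, which does this with proper error handling further down).  As the
# dictionaries suggest, a CLASS file opens with a short ASCII version code such as
# '1A' (old format, 512-byte records) or '2A' (new format, 4096-byte records), and
# that code selects the record length used to step through the file.
def _sketch_record_length(filename):
    """Return the record length in bytes implied by a file's leading code (sketch)."""
    with open(filename, 'rb') as f:
        code = f.read(4).decode('ascii', 'replace').strip()
    # record_lengths is keyed on the two-character code without padding
    return record_lengths.get(code[:2])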
header_id_numbers = {0: 'USER CODE',
-1: 'COMMENT',
-2: 'GENERAL',
-3: 'POSITION',
-4: 'SPECTRO',
-5: 'BASELINE',
-6: 'HISTORY',
# -8: 'SWITCH',
-10: 'DRIFT',
-14: 'CALIBRATION',
}
header_id_lengths = {-2: 9, # may really be 10?
-3: 17,
-4: 17,
-5: None, # variable length
-6: 3, # variable length
-14: 25,
}
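# The section lengths above are counted in 4-byte words: e.g. GENERAL's 9 words are
# UT(2) + ST(2) + AZ + EL + TAU + TSYS + TIME in the keys_lengths table below, and
# the "may really be 10?" caveat corresponds to the optional XUNIT word noted there.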
# from packages/classic/lib/classic_mod.f90
filedescv2_nw1 = 14
"""
GENERAL
integer(kind=obsnum_length) :: num ! [ ] Observation number
integer(kind=4) :: ver ! [ ] Version number
integer(kind=4) :: teles(3) ! [ ] Telescope name
integer(kind=4) :: dobs ! [MJD-60549] Date of observation
integer(kind=4) :: dred ! [MJD-60549] Date of reduction
integer(kind=4) :: typec ! [ code] Type of coordinates
integer(kind=4) :: kind ! [ code] Type of data
integer(kind=4) :: qual ! [ code] Quality of data
integer(kind=4) :: subscan ! [ ] Subscan number
integer(kind=obsnum_length) :: scan ! [ ] Scan number
! Written in the entry
real(kind=8) :: ut ! 1-2 [ rad] UT of observation
real(kind=8) :: st ! 3-4 [ rad] LST of observation
real(kind=4) :: az ! 5 [ rad] Azimuth
real(kind=4) :: el ! 6 [ rad] Elevation
real(kind=4) :: tau ! 7 [neper] Opacity
real(kind=4) :: tsys ! 8 [ K] System temperature
real(kind=4) :: time ! 9 [ s] Integration time
! Not in this section in file
integer(kind=4) :: xunit ! [ code] X unit (if X coordinates section is present)
! NOT in data ---
character(len=12) :: cdobs ! [string] Duplicate of dobs
character(len=12) :: cdred ! [string] Duplicate of dred
"""
keys_lengths = {
'unknown': [
( 'NUM' ,1,'int32'), # Observation number
( 'VER' ,1,'int32'), # Version number
( 'TELES' ,3,'|S12') , # Telescope name
( 'DOBS' ,1,'int32'), # Date of observation
( 'DRED' ,1,'int32'), # Date of reduction
( 'TYPEC' ,1,'int32'), # Type of coordinates
( 'KIND' ,1,'int32'), # Type of data
( 'QUAL' ,1,'int32'), # Quality of data
( 'SCAN' ,1,'int32'), # Scan number
( 'SUBSCAN' ,1,'int32'), # Subscan number
],
'COMMENT': [ # -1
('LTEXT',1,'int32'), # integer(kind=4) :: ltext ! Length of comment
     ('CTEXT',1024//4,'|S1024'), # character ctext*1024 ! Comment string (// keeps the word count an int under Python 3)
],
'GENERAL': [ # -2
( 'UT' ,2,'float64'), # rad UT of observation
( 'ST' ,2,'float64'), # rad LST of observation
( 'AZ' ,1,'float32'), # rad Azimuth
( 'EL' ,1,'float32'), # rad Elevation
( 'TAU' ,1,'float32'), # neper Opacity
( 'TSYS' ,1,'float32'), # K System temperature
( 'TIME' ,1,'float32'), # s Integration time
# XUNIT should not be there?
#( 'XUNIT' ,1,'int32'), # code X unit (if xcoord_sec is present)
] ,
'POSITION': [ # -3
('SOURC',3,'|S12') , # [ ] Source name
('EPOCH',1,'float32'), # [ ] Epoch of coordinates
('LAM' ,2,'float64'), #[rad] Lambda
('BET' ,2,'float64'), #[rad] Beta
('LAMOF',1,'float32'), # [rad] Offset in Lambda
('BETOF',1,'float32'), # [rad] Offset in Beta
('PROJ' ,1,'int32') , # [rad] Projection system
('SL0P' ,1,'float64'), # lambda of descriptive system # MAY NOT EXIST IN OLD CLASS
('SB0P' ,1,'float64'), # beta of descriptive system # MAY NOT EXIST IN OLD CLASS
('SK0P' ,1,'float64'), # angle of descriptive system # MAY NOT EXIST IN OLD CLASS
],
'SPECTRO': [ # -4
#('align' ,1,'int32'), # [ ] Alignment padding
('LINE' ,3,'|S12'), # [ ] Line name
('RESTF' ,2,'float64'), # [ MHz] Rest frequency
('NCHAN' ,1,'int32'), # [ ] Number of channels
('RCHAN' ,1,'float32'), # [ ] Reference channels
('FRES' ,1,'float32'), # [ MHz] Frequency resolution
('FOFF' ,1,'float32'), # [ MHz] Frequency offset
('VRES' ,1,'float32'), # [km/s] Velocity resolution
('VOFF' ,1,'float32'), # [km/s] Velocity at reference channel
('BAD' ,1,'float32'), # [ ] Blanking value
#('ALIGN_1',1,'int32'), # [ ] Alignment padding
('IMAGE' ,2,'float64'), # [ MHz] Image frequency
#('ALIGN_2',1,'int32'), # [ ] Alignment padding
('VTYPE' ,1,'int32'), # [code] Type of velocity
('DOPPLER',2,'float64'), # [ ] Doppler factor = -V/c (CLASS convention)
],
'CALIBRATION': [ # -14
('ALIGN',1,'int32'), # BUFFER (it's a zero - it is not declared in the docs!!!!)
('BEEFF',1,'float32'), # [ ] Beam efficiency
('FOEFF',1,'float32'), # [ ] Forward efficiency
('GAINI',1,'float32'), # [ ] Image/Signal gain ratio
('H2OMM',1,'float32'), # [ mm] Water vapor content
('PAMB',1,'float32'), # [ hPa] Ambient pressure
('TAMB',1,'float32'), # [ K] Ambient temperature
('TATMS',1,'float32'), # [ K] Atmosphere temp. in signal band
('TCHOP',1,'float32'), # [ K] Chopper temperature
('TCOLD',1,'float32'), # [ K] Cold load temperature
('TAUS',1,'float32'), # [neper] Opacity in signal band
('TAUI',1,'float32'), # [neper] Opacity in image band
('TATMI',1,'float32'), # [ K] Atmosphere temp. in image band
('TREC',1,'float32'), # [ K] Receiver temperature
('CMODE',1,'int32'), # [ code] Calibration mode
('ATFAC',1,'float32'), # [ ] Applied calibration factor
('ALTI',1,'float32'), # [ m] Site elevation
('COUNT',3,'3float32'), # [count] Power of Atm., Chopp., Cold
('LCALOF',1,'float32'), # [ rad] Longitude offset for sky measurement
('BCALOF',1,'float32'), # [ rad] Latitude offset for sky measurement
('GEOLONG',1,'float64'), # [ rad] Geographic longitude of observatory # MAY NOT EXIST IN OLD CLASS
('GEOLAT',1,'float64'), # [ rad] Geographic latitude of observatory # MAY NOT EXIST IN OLD CLASS
],
'BASELINE':[
('DEG',1,'int32'), #! [ ] Degree of last baseline
('SIGFI',1,'float32'), #! [Int. unit] Sigma
('AIRE',1,'float32'), #! [Int. unit] Area under windows
('NWIND',1,'int32'), #! [ ] Number of line windows
# WARNING: These should probably have 'n', the second digit, = NWIND
# The docs are really unclear about this, they say "W1(MWIND)"
('W1MWIND',1,'float32'), #! [km/s] Lower limits of windows
('W2MWIND',1,'float32'), #! [km/s] Upper limits of windows
('SINUS',3,'float32'), #![] Sinus baseline results
],
'DRIFT':[ # 16?
('FREQ',1,'float64') , #! [ MHz] Rest frequency real(kind=8) ::
('WIDTH',1,'float32'), #! [ MHz] Bandwidth real(kind=4) ::
('NPOIN',1,'int32') , #! [ ] Number of data points integer(kind=4) ::
('RPOIN',1,'float32'), #! [ ] Reference point real(kind=4) ::
('TREF',1,'float32') , #! [ ?] Time at reference real(kind=4) ::
('AREF',1,'float32') , #! [ rad] Angular offset at ref. real(kind=4) ::
('APOS',1,'float32') , #! [ rad] Position angle of drift real(kind=4) ::
('TRES',1,'float32') , #! [ ?] Time resolution real(kind=4) ::
('ARES',1,'float32') , #! [ rad] Angular resolution real(kind=4) ::
('BAD',1,'float32') , #! [ ] Blanking value real(kind=4) ::
('CTYPE',1,'int32') , #! [code] Type of offsets integer(kind=4) ::
('CIMAG',1,'float64'), #! [ MHz] Image frequency real(kind=8) ::
('COLLA',1,'float32'), #! [ ?] Collimation error Az real(kind=4) ::
('COLLE',1,'float32'), #! [ ?] Collimation error El real(kind=4) ::
],
}
def _read_bytes(f, n):
'''Read the next `n` bytes (from idlsave)'''
return f.read(n)
"""
Warning: UNCLEAR what endianness should be!
Numpy seemed to get it right, and I think numpy assumes NATIVE endianness
"""
def _read_byte(f):
'''Read a single byte (from idlsave)'''
return numpy.uint8(struct.unpack('=B', f.read(4)[:1])[0])
def _read_int16(f):
'''Read a signed 16-bit integer (from idlsave)'''
return numpy.int16(struct.unpack('=h', f.read(4)[2:4])[0])
def _read_int32(f):
'''Read a signed 32-bit integer (from idlsave)'''
return numpy.int32(struct.unpack('=i', f.read(4))[0])
def _read_int64(f):
'''Read a signed 64-bit integer '''
return numpy.int64(struct.unpack('=q', f.read(8))[0])
def _read_float32(f):
'''Read a 32-bit float (from idlsave)'''
return numpy.float32(struct.unpack('=f', f.read(4))[0])
def _align_32(f):
'''Align to the next 32-bit position in a file (from idlsave)'''
pos = f.tell()
if pos % 4 != 0:
f.seek(pos + 4 - pos % 4)
return
def _read_word(f,length):
if length > 0:
chars = _read_bytes(f, length)
_align_32(f)
else:
chars = None
return chars
def _read_int(f):
return struct.unpack('i',f.read(4))
def is_ascii(s):
try:
s.decode('ascii')
return True
except UnicodeDecodeError:
return False
except UnicodeEncodeError:
return False
def is_all_null(s):
return all(x=='\x00' for x in s)
"""
from clic_file.f90: v1, v2
integer(kind=4) :: bloc ! 1 : observation address [records] integer(kind=8) :: bloc ! 1- 2: observation address [records] integer(kind=4) :: bloc ! 1 : block read from index
integer(kind=4) :: num ! 2 : observation number integer(kind=4) :: word ! 3 : address offset [4-bytes] integer(kind=4) :: num ! 2 : number read
integer(kind=4) :: ver ! 3 : observation version integer(kind=4) :: ver ! 4 : observation version integer(kind=4) :: ver ! 3 : version read from index
integer(kind=4) :: sourc(3) ! 4- 6: source name integer(kind=8) :: num ! 5- 6: observation number character(len=12) :: csour ! 4- 6: source read from index
integer(kind=4) :: line(3) ! 7- 9: line name integer(kind=4) :: sourc(3) ! 7- 9: source name character(len=12) :: cline ! 7- 9: line read from index
integer(kind=4) :: teles(3) ! 10-12: telescope name integer(kind=4) :: line(3) ! 10-12: line name character(len=12) :: ctele ! 10-12: telescope read from index
integer(kind=4) :: dobs ! 13 : observation date [class_date] integer(kind=4) :: teles(3) ! 13-15: telescope name integer(kind=4) :: dobs ! 13 : date obs. read from index
integer(kind=4) :: dred ! 14 : reduction date [class_date] integer(kind=4) :: dobs ! 16 : observation date [class_date] integer(kind=4) :: dred ! 14 : date red. read from index
real(kind=4) :: off1 ! 15 : lambda offset [radian] integer(kind=4) :: dred ! 17 : reduction date [class_date] real(kind=4) :: off1 ! 15 : read offset 1
real(kind=4) :: off2 ! 16 : beta offset [radian] real(kind=4) :: off1 ! 18 : lambda offset [radian] real(kind=4) :: off2 ! 16 : read offset 2
integer(kind=4) :: typec ! 17 : coordinates types real(kind=4) :: off2 ! 19 : beta offset [radian] integer(kind=4) :: type ! 17 : type of read offsets
integer(kind=4) :: kind ! 18 : data kind integer(kind=4) :: typec ! 20 : coordinates types integer(kind=4) :: kind ! 18 : type of observation
integer(kind=4) :: qual ! 19 : data quality integer(kind=4) :: kind ! 21 : data kind integer(kind=4) :: qual ! 19 : Quality read from index
integer(kind=4) :: scan ! 20 : scan number integer(kind=4) :: qual ! 22 : data quality integer(kind=4) :: scan ! 20 : Scan number read from index
integer(kind=4) :: proc ! 21 : procedure type integer(kind=4) :: scan ! 23 : scan number real(kind=4) :: posa ! 21 : Position angle
integer(kind=4) :: itype ! 22 : observation type integer(kind=4) :: proc ! 24 : procedure type integer(kind=4) :: subscan ! 22 : Subscan number
real(kind=4) :: houra ! 23 : hour angle [radian] integer(kind=4) :: itype ! 25 : observation type integer(kind=4) :: pad(10) ! 23-32: Pad to 32 words
integer(kind=4) :: project ! 24 : project name real(kind=4) :: houra ! 26 : hour angle [radian]
integer(kind=4) :: pad1 ! 25 : unused word integer(kind=4) :: project(2) ! 27 : project name
integer(kind=4) :: bpc ! 26 : baseline bandpass cal status integer(kind=4) :: bpc ! 29 : baseline bandpass cal status
integer(kind=4) :: ic ! 27 : instrumental cal status integer(kind=4) :: ic ! 30 : instrumental cal status
integer(kind=4) :: recei ! 28 : receiver number integer(kind=4) :: recei ! 31 : receiver number
real(kind=4) :: ut ! 29 : UT [s] real(kind=4) :: ut ! 32 : UT [s]
integer(kind=4) :: pad2(3) ! 30-32: padding to 32 4-bytes word
equivalently
integer(kind=obsnum_length) :: num ! [ ] Observation number
integer(kind=4) :: ver ! [ ] Version number
integer(kind=4) :: teles(3) ! [ ] Telescope name
integer(kind=4) :: dobs ! [MJD-60549] Date of observation
integer(kind=4) :: dred ! [MJD-60549] Date of reduction
integer(kind=4) :: typec ! [ code] Type of coordinates
integer(kind=4) :: kind ! [ code] Type of data
integer(kind=4) :: qual ! [ code] Quality of data
integer(kind=4) :: subscan ! [ ] Subscan number
integer(kind=obsnum_length) :: scan ! [ ] Scan number
"""
"""
index.f90:
call conv%read%i8(data(1), indl%bloc, 1) ! bloc
call conv%read%i4(data(3), indl%word, 1) ! word
call conv%read%i8(data(4), indl%num, 1) ! num
call conv%read%i4(data(6), indl%ver, 1) ! ver
call conv%read%cc(data(7), indl%csour, 3) ! csour
call conv%read%cc(data(10),indl%cline, 3) ! cline
call conv%read%cc(data(13),indl%ctele, 3) ! ctele
call conv%read%i4(data(16),indl%dobs, 1) ! dobs
call conv%read%i4(data(17),indl%dred, 1) ! dred
call conv%read%r4(data(18),indl%off1, 1) ! off1
call conv%read%r4(data(19),indl%off2, 1) ! off2
call conv%read%i4(data(20),indl%type, 1) ! type
call conv%read%i4(data(21),indl%kind, 1) ! kind
call conv%read%i4(data(22),indl%qual, 1) ! qual
call conv%read%r4(data(23),indl%posa, 1) ! posa
call conv%read%i8(data(24),indl%scan, 1) ! scan
call conv%read%i4(data(26),indl%subscan,1) ! subscan
if (isv3) then
call conv%read%r8(data(27),indl%ut, 1) ! ut
else
"""
def _read_indices(f, file_description):
#if file_description['version'] in (1,2):
# extension_positions = (file_description['aex']-1)*file_description['reclen']*4
# all_indices = {extension:
# [_read_index(f,
# filetype=file_description['version'],
# entry=ii,
# #position=position,
# )
# for ii in range(file_description['lex1'])]
# for extension,position in enumerate(extension_positions)
# if position > 0
# }
#elif file_description['version'] == 1:
extension_positions = ((file_description['aex'].astype('int64')-1)
*file_description['reclen']*4)
all_indices = [_read_index(f,
filetype=file_description['version'],
# 1-indexed files
entry_number=ii+1,
file_description=file_description,
)
for ii in range(file_description['xnext']-1)]
#else:
# raise ValueError("Invalid file version {0}".format(file_description['version']))
return all_indices
def _find_index(entry_number, file_description, return_position=False):
if file_description['gex'] == 10:
kex=(entry_number-1)/file_description['lex1'] + 1
else:
# exponential growth:
#kex = gi8_dicho(file_description['nex'], file_description['lexn'], entry_number) - 1
kex = len([xx for xx in file_description['lexn'] if xx<entry_number])
ken = entry_number - file_description['lexn'][kex-1]
#! Find ken (relative entry number in the extension, starts from 1)
#ken = entry_num - file%desc%lexn(kex-1)
kb = ((ken-1)*file_description['lind'])/file_description['reclen']
#kb = ((ken-1)*file%desc%lind)/file%desc%reclen ! In the extension, the
# ! relative record position (as an offset, starts from 0) where the
# ! Entry Index starts. NB: there can be a non-integer number of Entry
# ! Indexes per record
# Subtract 1: 'aex' is 1-indexed
kbl = (file_description['aex'][kex-1]+kb)-1
# kbl = file%desc%aex(kex)+kb ! The absolute record number where the Entry Index goes
k = ((ken-1)*file_description['lind']) % file_description['reclen']
#k = mod((ken-1)*file%desc%lind,file%desc%reclen)+1 ! = in the record, the
# ! first word of the Entry Index of the entry number 'entry_num'
if return_position:
return (kbl*file_description['reclen']+k)*4
else:
return kbl,k
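# Worked example (added sketch, hypothetical file_description values; the
# arithmetic assumes the integer division of the Fortran original): with
# gex=10, lex1=4, lind=32, reclen=128, lexn=[0, 4, 8] and aex=[3, 10],
# entry_number=6 falls in extension kex=2 as its ken=2nd entry, giving
# kb=0, kbl=10+0-1=9 and k=32, i.e. a byte position of (9*128 + 32)*4 = 4736.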
def _read_index(f, filetype='v1', DEBUG=False, clic=False, position=None,
entry_number=None, file_description=None):
if position is not None:
f.seek(position)
if entry_number is not None:
indpos = _find_index(entry_number, file_description, return_position=True)
f.seek(indpos)
x0 = f.tell()
if filetype in ('1A ','v1', 1):
log.debug('Index filetype 1A')
index = {
"XBLOC":_read_int32(f),
"XNUM":_read_int32(f),
"XVER":_read_int32(f),
"XSOURC":_read_word(f,12),
"XLINE":_read_word(f,12),
"XTEL":_read_word(f,12),
"XDOBS":_read_int32(f),
"XDRED":_read_int32(f),
"XOFF1":_read_float32(f),# first offset (real, radians)
"XOFF2":_read_float32(f),# second offset (real, radians)
"XTYPE":_read_int32(f),# coordinate system ('EQ'', 'GA', 'HO')
"XKIND":_read_int32(f),# Kind of observation (0: spectral, 1: continuum, )
"XQUAL":_read_int32(f),# Quality (0-9)
"XSCAN":_read_int32(f),# Scan number
}
index['BLOC'] = index['XBLOC'] # v2 compatibility
index['WORD'] = 1 # v2 compatibility
index['SOURC'] = index['CSOUR'] = index['XSOURC']
index['DOBS'] = index['CDOBS'] = index['XDOBS']
index['CTELE'] = index['XTEL']
index['LINE'] = index['XLINE']
index['OFF1'] = index['XOFF1']
index['OFF2'] = index['XOFF2']
index['QUAL'] = index['XQUAL']
index['SCAN'] = index['XSCAN']
index['KIND'] = index['XKIND']
if clic: # use header set up in clic
nextchunk = {
"XPROC":_read_int32(f),# "procedure type"
"XITYPE":_read_int32(f),#
"XHOURANG":_read_float32(f),#
"XPROJNAME":_read_int32(f),#
"XPAD1":_read_int32(f),
"XBPC" :_read_int32(f),
"XIC" :_read_int32(f),
"XRECEI" :_read_int32(f),
"XUT":_read_float32(f),
"XPAD2":numpy.fromfile(f,count=3,dtype='int32') # BLANK is NOT ALLOWED!!! It is a special KW
}
else:
nextchunk = {"XPOSA":_read_float32(f),
"XSUBSCAN":_read_int32(f),
'XPAD2': numpy.fromfile(f,count=10,dtype='int32'),
}
nextchunk['SUBSCAN'] = nextchunk['XSUBSCAN']
nextchunk['POSA'] = nextchunk['XPOSA']
index.update(nextchunk)
if (f.tell() - x0 != 128):
        missed_bytes = (f.tell()-x0)
        X = f.read(128-missed_bytes)
        if DEBUG: print("read_index missed %i bytes: %s" % (128-missed_bytes,X))
#raise IndexError("read_index did not successfully read 128 bytes at %i. Read %i bytes." % (x0,f.tell()-x0))
if any(not is_ascii(index[x]) for x in ('XSOURC','XLINE','XTEL')):
raise ValueError("Invalid index read from {0}.".format(x0))
elif filetype in ('2A ','v2', 2):
log.debug('Index filetype 2A')
index = {
"BLOC" : _read_int64(f) , #(data(1), 1) ! bloc
"WORD" : _read_int32(f) , #(data(3), 1) ! word
"NUM" : _read_int64(f) , #(data(4), 1) ! num
"VER" : _read_int32(f) , #(data(6), 1) ! ver
"CSOUR" : _read_word(f,12), #(data(7), 3) ! csour
"CLINE" : _read_word(f,12), #(data(10), 3) ! cline
"CTELE" : _read_word(f,12), #(data(13), 3) ! ctele
"DOBS" : _read_int32(f) , #(data(16), 1) ! dobs
"DRED" : _read_int32(f) , #(data(17), 1) ! dred
"OFF1" : _read_float32(f), #(data(18), 1) ! off1
"OFF2" : _read_float32(f), #(data(19), 1) ! off2
"TYPE" : _read_int32(f) , #(data(20), 1) ! type
"KIND" : _read_int32(f) , #(data(21), 1) ! kind
"QUAL" : _read_int32(f) , #(data(22), 1) ! qual
"POSA" : _read_float32(f), #(data(23), 1) ! posa
"SCAN" : _read_int64(f) , #(data(24), 1) ! scan
"SUBSCAN": _read_int32(f) , #(data(26), 1) ! subscan
}
#last24bits = f.read(24)
#log.debug("Read 24 bits: '{0}'".format(last24bits))
if any((is_all_null(index[x]) or not is_ascii(index[x]))
for x in ('CSOUR','CLINE','CTELE')):
raise ValueError("Invalid index read from {0}.".format(x0))
index['SOURC'] = index['XSOURC'] = index['CSOUR']
index['LINE'] = index['XLINE'] = index['CLINE']
index['XKIND'] = index['KIND']
try:
index['DOBS'] = index['XDOBS'] = index['CDOBS']
except KeyError:
index['CDOBS'] = index['XDOBS'] = index['DOBS']
else:
raise NotImplementedError("Filetype {0} not implemented.".format(filetype))
# from kernel/lib/gsys/date.f90: gag_julda
class_dobs = index['DOBS']
index['DOBS'] = ((class_dobs + 365*2025)/365.2425 + 1)
# SLOW
#index['DATEOBS'] = Time(index['DOBS'], format='jyear')
#index['DATEOBSS'] = index['DATEOBS'].iso
log.debug("Indexing finished at {0}".format(f.tell()))
return index
def _read_header(f, type=0, position=None):
"""
Read a header entry from a CLASS file
(helper function)
"""
if position is not None:
f.seek(position)
if type in keys_lengths:
hdrsec = [(x[0],numpy.fromfile(f,count=1,dtype=x[2])[0])
for x in keys_lengths[type]]
return dict(hdrsec)
    else:
        # Unknown section type: return an empty header section
        return {}
def _read_first_record(f):
f.seek(0)
filetype = f.read(4)
if fileversion_dict[filetype] == 'v1':
return _read_first_record_v1(f)
else:
return _read_first_record_v2(f)
def _read_first_record_v1(f, record_length_words=128):
r"""
Position & Parameter & Fortran Kind & Purpose \\
\hline
1 & {\tt code} & Character*4 & File code \\
2 & {\tt next} & Integer*4 & Next free record \\
3 & {\tt lex} & Integer*4 & Length of first extension (number of entries) \\
4 & {\tt nex} & Integer*4 & Number of extensions \\
5 & {\tt xnext} & Integer*4 & Next available entry number \\
6:2*{\tt reclen} & {\tt ex(:)} & Integer*4 & Array of extension addresses
from classic_mod.f90:
integer(kind=4) :: code ! 1 File code
integer(kind=4) :: next ! 2 Next free record
integer(kind=4) :: lex ! 3 Extension length (number of entries)
integer(kind=4) :: nex ! 4 Number of extensions
integer(kind=4) :: xnext ! 5 Next available entry number
integer(kind=4) :: aex(mex_v1) ! 6:256 Extension addresses
from old (<dec2013) class, file.f90:
read(ilun,rec=1,err=11,iostat=ier) ibx%code,ibx%next, &
& ibx%ilex,ibx%imex,ibx%xnext
also uses filedesc_v1tov2 from classic/lib/file.f90
"""
# OLD NOTES
# hdr = header
# hdr.update(obshead) # re-overwrite things
# hdr.update({'OBSNUM':obsnum,'RECNUM':spcount})
# hdr.update({'RA':hdr['LAM']/pi*180,'DEC':hdr['BET']/pi*180})
# hdr.update({'RAoff':hdr['LAMOF']/pi*180,'DECoff':hdr['BETOF']/pi*180})
# hdr.update({'OBJECT':hdr['SOURC'].strip()})
# hdr.update({'BUNIT':'Tastar'})
# hdr.update({'EXPOSURE':hdr['TIME']})
f.seek(0)
file_description = {
'code': f.read(4),
'next': _read_int32(f),
'lex': _read_int32(f),
'nex': _read_int32(f),
'xnext': _read_int32(f),
'gex': 10.,
'vind': 1, # classic_vind_v1 packages/classic/lib/classic_mod.f90
'version': 1,
'nextrec': 3,
'nextword': 1,
'lind': 32, #classic_lind_v1 packages/classic/lib/classic_mod.f90
'kind': 'unknown',
'flags': 0,
}
file_description['reclen'] = record_length_words # should be 128w = 512 bytes
ex = np.fromfile(f, count=(record_length_words*2-5), dtype='int32')
file_description['ex'] = ex[ex!=0]
file_description['nextrec'] = file_description['next'] # this can't be...
file_description['lex1'] = file_description['lex'] # number of entries
file_description['lexn'] = (np.arange(file_description['nex']+1) *
file_description['lex1'])
file_description['nentries'] = np.sum(file_description['lexn'])
file_description['aex'] = file_description['ex'][:file_description['nex']]
#file_description['version'] = fileversion_dict[file_description['code']]
assert f.tell() == 1024
# Something is not quite right with the 'ex' parsing
#assert len(file_description['ex']) == file_description['nex']
return file_description
def _read_first_record_v2(f):
r""" packages/classic/lib/file.f90
Position & Parameter & Fortran Kind & Purpose & Unit \\
\hline
1 & {\tt code} & Character*4 & File code & - \\
2 & {\tt reclen} & Integer*4 & Record length & words \\
3 & {\tt kind} & Integer*4 & File kind & - \\
4 & {\tt vind} & Integer*4 & Index version & - \\
5 & {\tt lind} & Integer*4 & Index length & words \\
6 & {\tt flags} & Integer*4 & Bit flags. \#1: single or multiple, & - \\
& & & \#2-32: provision (0-filled) & \\
\hline
7:8 & {\tt xnext} & Integer*8 & Next available entry number & - \\
9:10 & {\tt nextrec} & Integer*8 & Next record which contains free space & record \\
11 & {\tt nextword} & Integer*4 & Next free word in this record & word \\
\hline
12 & {\tt lex1} & Integer*4 & Length of first extension index & entries \\
13 & {\tt nex} & Integer*4 & Number of extensions & - \\
14 & {\tt gex} & Integer*4 & Extension growth rule & - \\
15:{\tt reclen} & {\tt aex(:)} & Integer*8 & Array of extension addresses & record
"""
f.seek(0)
file_description = {
'code': f.read(4),
'reclen': _read_int32(f),
'kind': _read_int32(f),
'vind': _read_int32(f),
'lind': _read_int32(f),
'flags': _read_int32(f),
'xnext': _read_int64(f),
'nextrec': _read_int64(f),
'nextword': _read_int32(f),
'lex1': _read_int32(f),
'nex': _read_int32(f),
'gex': _read_int32(f),
}
file_description['lexn'] = [0]
if file_description['gex'] == 10:
for ii in range(1, file_description['nex']+1):
file_description['lexn'].append(file_description['lexn'][-1]+file_description['lex1'])
else:
#! Exponential growth. Only growth with mantissa 2.0 is supported
for ii in range(1, file_description['nex']):
# I don't know what the fortran does here!!!
# ahh, maybe 2_8 means int(2, dtype='int64')
nent = int(file_description['lex1'] * 2**(ii-1))
#nent = int(file%desc%lex1,kind=8) * 2_8**(iex-1)
file_description['lexn'].append(file_description['lexn'][-1]+nent)
#file%desc%lexn(iex) = file%desc%lexn(iex-1) + nent
file_description['nentries'] = np.sum(file_description['lexn'])
record_length_words = file_description['reclen']
aex = numpy.fromfile(f, count=(record_length_words-15)/2, dtype='int64')
file_description['aex'] = aex[aex!=0]
assert len(file_description['aex']) == file_description['nex']
file_description['version'] = 2
return file_description
def gi8_dicho(ninp,lexn,xval,ceil=True):
"""
! @ public
! Find ival such as
! X(ival-1) < xval <= X(ival) (ceiling mode)
! or
! X(ival) <= xval < X(ival+1) (floor mode)
! for input data ordered. Use a dichotomic search for that.
call gi8_dicho(nex,file%desc%lexn,entry_num,.true.,kex,error)
"""
#integer(kind=size_length), intent(in) :: np ! Number of input points
#integer(kind=8), intent(in) :: x(np) ! Input ordered Values
#integer(kind=8), intent(in) :: xval ! The value we search for
#logical, intent(in) :: ceil ! Ceiling or floor mode?
#integer(kind=size_length), intent(out) :: ival ! Position in the array
#logical, intent(inout) :: error ! Logical error flag
iinf = 1
isup = ninp
#! Ceiling mode
while isup > (iinf+1):
imid = int(np.floor((isup + iinf)/2.))
if (lexn[imid-1] < xval):
iinf = imid
else:
isup = imid
ival = isup
return ival
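# Added sketch: a small self-check of the dichotomic search above, in the same
# spirit as test_downsample1d below. The boundary table is hypothetical (three
# extensions of 10 entries each), not read from a real file.
def test_gi8_dicho():
    lexn = [0, 10, 20, 30]
    assert gi8_dicho(len(lexn), lexn, 1) == 2   # ceiling mode: X(ival-1) < xval <= X(ival)
    assert gi8_dicho(len(lexn), lexn, 10) == 2
    assert gi8_dicho(len(lexn), lexn, 11) == 3
    assert gi8_dicho(len(lexn), lexn, 30) == 4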
def _read_obshead(f, file_description, position=None):
if file_description['version'] == 1:
return _read_obshead_v1(f, position=position)
if file_description['version'] == 2:
return _read_obshead_v2(f, position=position)
else:
raise ValueError("Invalid file version {0}.".
format(file_description['version']))
def _read_obshead_v2(f, position=None):
"""
! Version 2 (public)
integer(kind=4), parameter :: entrydescv2_nw1=11 ! Number of words, in 1st part
integer(kind=4), parameter :: entrydescv2_nw2=5 ! Number of words for 1 section in 2nd part
type classic_entrydesc_t
sequence
integer(kind=4) :: code ! 1 : code observation icode
integer(kind=4) :: version ! 2 : observation version
integer(kind=4) :: nsec ! 3 : number of sections
integer(kind=4) :: pad1 ! - : memory padding (not in data)
integer(kind=8) :: nword ! 4- 5: number of words
integer(kind=8) :: adata ! 6- 7: data address
integer(kind=8) :: ldata ! 8- 9: data length
integer(kind=8) :: xnum ! 10-11: entry number
! Out of the 'sequence' block:
integer(kind=4) :: msec ! Not in data: maximum number of sections the
! Observation Index can hold
integer(kind=4) :: pad2 ! Memory padding for 8 bytes alignment
integer(kind=4) :: seciden(classic_maxsec) ! Section Numbers (on disk: 1 to ed%nsec)
integer(kind=8) :: secleng(classic_maxsec) ! Section Lengths (on disk: 1 to ed%nsec)
integer(kind=8) :: secaddr(classic_maxsec) ! Section Addresses (on disk: 1 to ed%nsec)
end type classic_entrydesc_t
"""
if position is not None:
f.seek(position)
else:
position = f.tell()
IDcode = f.read(4)
if IDcode.strip() != '2':
raise IndexError("Observation Header reading failure at {0}. "
"Record does not appear to be an observation header.".
format(position))
f.seek(position)
entrydescv2_nw1=11
entrydescv2_nw2=5
obshead = {
'CODE': f.read(4),
'VERSION': _read_int32(f),
'NSEC': _read_int32(f),
#'_blank': _read_int32(f),
'NWORD': _read_int64(f),
'ADATA': _read_int64(f),
'LDATA': _read_int64(f),
'XNUM': _read_int64(f),
#'MSEC': _read_int32(f),
#'_blank2': _read_int32(f),
}
section_numbers = np.fromfile(f, count=obshead['NSEC'], dtype='int32')
section_lengths = np.fromfile(f, count=obshead['NSEC'], dtype='int64')
section_addresses = np.fromfile(f, count=obshead['NSEC'], dtype='int64')
return obshead['XNUM'],obshead,dict(zip(section_numbers,section_addresses))
def _read_obshead_v1(f, position=None, verbose=False):
"""
Read the observation header of a CLASS file
(helper function for read_class; should not be used independently)
"""
if position is not None:
f.seek(position)
IDcode = f.read(4)
if IDcode.strip() != '2':
raise IndexError("Observation Header reading failure at {0}. "
"Record does not appear to be an observation header.".
format(f.tell() - 4))
(nblocks, nbyteob, data_address, nheaders, data_length, obindex, nsec,
obsnum) = numpy.fromfile(f, count=8, dtype='int32')
if verbose:
print("nblocks,nbyteob,data_address,data_length,nheaders,obindex,nsec,obsnum",nblocks,nbyteob,data_address,data_length,nheaders,obindex,nsec,obsnum)
print("DATA_LENGTH: ",data_length)
seccodes = numpy.fromfile(f,count=nsec,dtype='int32')
# Documentation says addresses then length: It is apparently wrong
seclen = numpy.fromfile(f,count=nsec,dtype='int32')
secaddr = numpy.fromfile(f,count=nsec,dtype='int32')
if verbose: print("Section codes, addresses, lengths: ",seccodes,secaddr,seclen)
hdr = {'NBLOCKS':nblocks, 'NBYTEOB':nbyteob, 'DATAADDR':data_address,
'DATALEN':data_length, 'NHEADERS':nheaders, 'OBINDEX':obindex,
'NSEC':nsec, 'OBSNUM':obsnum}
#return obsnum,seccodes
return obsnum,hdr,dict(zip(seccodes,secaddr))
# THIS IS IN READ_OBSHEAD!!!
# def _read_preheader(f):
# """
# Not entirely clear what this is, but it is stuff that precedes the actual data
#
# Looks something like this:
# array([ 1, -2, -3, -4, -14,
# 9, 17, 18, 25, 55,
# 64, 81, 99, -1179344801, 979657591,
#
# -2, -3, -4, -14 indicate the 4 header types
# 9,17,18,25 *MAY* indicate the number of bytes in each
#
#
# HOW is it indicated how many entries there are?
# """
# # 13 comes from counting 1, -2,....99 above
# numbers = np.fromfile(f, count=13, dtype='int32')
# sections = [n for n in numbers if n in header_id_numbers]
# return sections
def downsample_1d(myarr,factor,estimator=np.mean, weight=None):
"""
Downsample a 1D array by averaging over *factor* pixels.
Crops right side if the shape is not a multiple of factor.
This code is pure numpy and should be fast.
keywords:
estimator - default to mean. You can downsample by summing or
something else if you want a different estimator
(e.g., downsampling error: you want to sum & divide by sqrt(n))
weight: np.ndarray
An array of weights to use for the downsampling. If None,
assumes uniform 1
"""
if myarr.ndim != 1:
raise ValueError("Only works on 1d data. Says so in the title.")
xs = myarr.size
crarr = myarr[:xs-(xs % int(factor))]
if weight is None:
dsarr = estimator(np.concatenate([[crarr[i::factor] for i in
range(factor)]]),axis=0)
else:
dsarr = estimator(np.concatenate([[crarr[i::factor]*weight[i::factor] for i in
range(factor)]]),axis=0)
warr = estimator(np.concatenate([[weight[i::factor] for i in
range(factor)]]),axis=0)
dsarr = dsarr/warr
return dsarr
# unit test
def test_downsample1d():
data = np.arange(10)
weight = np.ones(10)
weight[5]=0
assert np.all(downsample_1d(data, 2, weight=weight, estimator=np.mean) ==
np.array([ 0.5, 2.5, 4. , 6.5, 8.5]))
def read_observation(f, obsid, file_description=None, indices=None,
my_memmap=None, memmap=True):
    if isinstance(f, str):
        filename = f
        f = open(filename, 'rb')
        opened = True
        if memmap:
            my_memmap = numpy.memmap(filename, offset=0, dtype='float32',
                                     mode='r')
else:
my_memmap = None
elif my_memmap is None and memmap:
raise ValueError("Must pass in a memmap object if passing in a file object.")
else:
opened = False
if file_description is None:
file_description = _read_first_record(f)
if indices is None:
indices = _read_indices(f, file_description)
index = indices[obsid]
obs_position = (index['BLOC']-1)*file_description['reclen']*4 + (index['WORD']-1)*4
obsnum,obshead,sections = _read_obshead(f, file_description,
position=obs_position)
header = obshead
datastart = 0
for section_id,section_address in iteritems(sections):
# Section addresses are 1-indexed byte addresses
# in the current "block"
sec_position = obs_position + (section_address-1)*4
temp_hdr = _read_header(f, type=header_id_numbers[section_id],
position=sec_position)
header.update(temp_hdr)
datastart = max(datastart,f.tell())
hdr = header
hdr.update(obshead) # re-overwrite things
hdr.update({'OBSNUM':obsnum,'RECNUM':obsid})
hdr.update({'RA':hdr['LAM']/pi*180,'DEC':hdr['BET']/pi*180})
hdr.update({'RAoff':hdr['LAMOF']/pi*180,'DECoff':hdr['BETOF']/pi*180})
hdr.update({'OBJECT':hdr['SOURC'].strip()})
hdr.update({'BUNIT':'Tastar'})
hdr.update({'EXPOSURE':float(hdr['TIME'])})
hdr['HDRSTART'] = obs_position
hdr['DATASTART'] = datastart
hdr.update(indices[obsid])
# Apparently the data are still valid in this case?
#if hdr['XNUM'] != obsid+1:
# log.error("The spectrum read was {0} but {1} was requested.".
# format(hdr['XNUM']-1, obsid))
if hdr['KIND'] == 1: # continuum
nchan = hdr['NPOIN']
elif 'NCHAN' in hdr:
nchan = hdr['NCHAN']
else:
log.error("No NCHAN in header. This is not a spectrum.")
import ipdb; ipdb.set_trace()
# There may be a 1-channel offset? CHECK!!!
# (changed by 1 pixel - October 14, 2014)
# (changed back - October 21, 2014 - I think the ends are just bad, but not
# zero.)
f.seek(datastart-1)
spec = _read_spectrum(f, position=datastart-1, nchan=nchan,
memmap=memmap, my_memmap=my_memmap)
if opened:
f.close()
return spec, hdr
def _read_spectrum(f, position, nchan, my_memmap=None, memmap=True):
if position != f.tell():
        log.warning("Reading data from {0}, but the file is wound "
                    "to {1}.".format(position, f.tell()))
if memmap:
here = position
#spectrum = numpy.memmap(filename, offset=here, dtype='float32',
# mode='r', shape=(nchan,))
spectrum = my_memmap[here/4:here/4+nchan]
f.seek(here+nchan*4)
else:
f.seek(position)
spectrum = numpy.fromfile(f,count=nchan,dtype='float32')
return spectrum
def _spectrum_from_header(fileobj, header, memmap=None):
return _read_spectrum(fileobj, position=header['DATASTART'],
                          nchan=header['NCHAN'] if 'NCHAN' in header else header['NPOIN'],
my_memmap=memmap)
def clean_header(header):
newheader = {}
for k in header:
if not isinstance(header[k], (int, float, str)):
if isinstance(header[k], np.ndarray) and header[k].size > 1:
if header[k].size > 10:
raise ValueError("Large array being put in header. That's no good. key={0}".format(k))
for ii,val in enumerate(header[k]):
newheader[k[:7]+str(ii)] = val
else:
newheader[k[:8]] = str(header[k])
else:
newheader[k[:8]] = header[k]
return newheader
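# Example (added sketch, hypothetical values): array-valued entries are split
# into numbered keys so they fit FITS-style 8-character keywords, while plain
# scalars and strings pass through unchanged:
#   clean_header({'COUNT': np.array([1., 2., 3.]), 'LINE': 'CO(2-1)'})
#   # -> {'COUNT0': 1.0, 'COUNT1': 2.0, 'COUNT2': 3.0, 'LINE': 'CO(2-1)'}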
class ClassObject(object):
def __init__(self, filename, verbose=False):
t0 = time.time()
self._file = open(filename, 'rb')
self.file_description = _read_first_record(self._file)
self.allind = _read_indices(self._file, self.file_description)
self._data = np.memmap(self._file, dtype='float32', mode='r')
if verbose: log.info("Setting _spectra")
self._spectra = LazyItem(self)
t1 = time.time()
if verbose: log.info("Setting posang. t={0}".format(t1-t0))
self.set_posang()
t2 = time.time()
if verbose: log.info("Identifying otf scans. t={0}".format(t2-t1))
self._identify_otf_scans(verbose=verbose)
t3 = time.time()
#self._load_all_spectra()
if verbose:
log.info("Loaded CLASS object with {3} indices. Time breakdown:"
" {0}s for indices, "
"{1}s for posang, and {2}s for OTF scan identification"
.format(t1-t0, t2-t1, t3-t2, len(self.allind)))
def __repr__(self):
s = "\n".join(["{k}: {v}".format(k=k,v=v)
for k,v in iteritems(self.getinfo())])
return "ClassObject({id}) with {nspec} entries\n".format(id=id(self),
nspec=len(self.allind)) + s
def getinfo(self, allsources=False):
info = dict(
tels = self.tels,
lines = self.lines,
scans = self.scans,
sources = self.sources if allsources else self.sci_sources,
)
return info
def set_posang(self):
h0 = self.headers[0]
for h in self.headers:
dx = h['OFF1'] - h0['OFF1']
dy = h['OFF2'] - h0['OFF2']
h['COMPPOSA'] = np.arctan2(dy,dx)*180/np.pi
h0 = h
def _identify_otf_scans(self, verbose=False):
h0 = self.allind[0]
st = 0
otfscan = 0
posangs = [h['COMPPOSA'] for h in self.allind]
if verbose:
pb = ProgressBar(len(self.allind))
for ii,h in enumerate(self.allind):
if (h['SCAN'] != h0['SCAN']
or h['SOURC'] != h0['SOURC']):
h0['FIRSTSCAN'] = st
cpa = np.median(posangs[st:ii])
for hh in self.allind[st:ii]:
hh['SCANPOSA'] = cpa % 180
st = ii
if h['SCAN'] == h0['SCAN']:
h0['OTFSCAN'] = otfscan
otfscan += 1
h['OTFSCAN'] = otfscan
else:
otfscan = 0
h['OTFSCAN'] = otfscan
else:
h['OTFSCAN'] = otfscan
if verbose:
pb.update(ii)
def listscans(self, source=None, telescope=None, out=sys.stdout):
minid=0
scan = -1
sourc = ""
#tel = ''
minoff1,maxoff1 = np.inf,-np.inf
minoff2,maxoff2 = np.inf,-np.inf
ttlangle,nangle = 0.0,0
print("{entries:15s} {SOURC:12s} {XTEL:12s} {SCAN:>8s} {SUBSCAN:>8s} "
"[ {RAmin:>12s}, {RAmax:>12s} ] "
"[ {DECmin:>12s}, {DECmax:>12s} ] "
"{angle:>12s} {SCANPOSA:>12s} {OTFSCAN:>8s} {TSYS:>8s} {UTD:>12s}"
.format(entries='Scans', SOURC='Source', XTEL='Telescope',
SCAN='Scan', SUBSCAN='Subscan',
RAmin='min(RA)', RAmax='max(RA)',
DECmin='min(DEC)', DECmax='max(DEC)',
SCANPOSA='Scan PA',
angle='Angle', OTFSCAN='OTFscan',
TSYS='TSYS', UTD='UTD'),
file=out)
data_rows = []
for ii,row in enumerate(self.headers):
if (row['SCAN'] == scan
and row['SOURC'] == sourc
#and row['XTEL'] == tel
):
minoff1 = min(minoff1, row['OFF1'])
maxoff1 = max(maxoff1, row['OFF1'])
minoff2 = min(minoff2, row['OFF2'])
maxoff2 = max(maxoff2, row['OFF2'])
ttlangle += np.arctan2(row['OFF2'] - prevrow['OFF2'],
row['OFF1'] - prevrow['OFF1'])%np.pi
nangle += 1
prevrow = row
else:
if scan == -1:
scan = row['SCAN']
sourc = row['SOURC']
#tel = row['XTEL']
prevrow = row
continue
ok = True
if source is not None:
if isinstance(source, (list,tuple)):
ok = ok and any(re.search((s), prevrow['SOURC'])
for s in source)
else:
ok = ok and re.search((source), prevrow['SOURC'])
if telescope is not None:
ok = ok and re.search((telescope), prevrow['XTEL'])
if ok:
data = dict(RAmin=minoff1*180/np.pi*3600,
RAmax=maxoff1*180/np.pi*3600,
DECmin=minoff2*180/np.pi*3600,
DECmax=maxoff2*180/np.pi*3600,
angle=(ttlangle/nangle)*180/np.pi if nangle>0 else 0,
e0=minid,
e1=ii-1,
#TSYS=row['TSYS'] if 'TSYS' in row else '--',
UTD=row['DOBS']+row['UT'] if 'UT' in row else -99,
**prevrow)
print("{e0:7d}-{e1:7d} {SOURC:12s} {XTEL:12s} {SCAN:8d} {SUBSCAN:8d} "
"[ {RAmin:12f}, {RAmax:12f} ] "
"[ {DECmin:12f}, {DECmax:12f} ] "
"{angle:12.1f} {SCANPOSA:12.1f} {OTFSCAN:8d}"
" {TSYS:>8.1f} {UTD:12f}".
format(**data),
file=out)
data_rows.append(data)
minoff1,maxoff1 = np.inf,-np.inf
minoff2,maxoff2 = np.inf,-np.inf
ttlangle,nangle = 0.0,0
scan = row['SCAN']
sourc = row['SOURC']
#tel = row['XTEL']
minid = ii
        return data_rows
@property
def tels(self):
if hasattr(self,'_tels'):
return self._tels
else:
self._tels = set([h['XTEL'] for h in self.allind])
return self._tels
@property
def sources(self):
if hasattr(self,'_source'):
return self._source
else:
self._source = set([h['SOURC'] for h in self.allind])
return self._source
@property
def scans(self):
if hasattr(self,'_scan'):
return self._scan
else:
self._scan = set([h['SCAN'] for h in self.allind])
return self._scan
@property
def sci_sources(self):
return set([s for s in self.sources
if s[:4] not in ('SKY-', 'TSYS', 'TCAL', 'TREC', 'HOT-',
'COLD')])
@property
def lines(self):
if hasattr(self,'_lines'):
return self._lines
else:
self._lines = set([h['LINE'] for h in self.allind])
return self._lines
def _load_all_spectra(self, indices=None):
if indices is None:
indices = range(self.file_description['xnext']-1)
if hasattr(self, '_loaded_indices'):
indices_set = set(indices)
indices_to_load = (indices_set.difference(self._loaded_indices))
self._loaded_indices = self._loaded_indices.union(indices_set)
            if len(indices_to_load) > 0:
pb = ProgressBar(len(indices_to_load))
                for ii,k in enumerate(indices_to_load):
self._spectra[k]
pb.update(ii)
else:
self._loaded_indices = set(indices)
self._spectra.load_all()
@property
def spectra(self):
return [x[0] for x in self._spectra]
@property
def headers(self):
return [self._spectra[ii][1]
if ii in self._spectra else x
for ii,x in enumerate(self.allind)]
def select_spectra(self,
all=None,
line=None,
linere=None,
linereflags=re.IGNORECASE,
number=None,
scan=None,
offset=None,
source=None,
sourcere=None,
sourcereflags=re.IGNORECASE,
range=None,
quality=None,
telescope=None,
telescopere=None,
telescopereflags=re.IGNORECASE,
subscan=None,
entry=None,
posang=None,
#observed=None,
#reduced=None,
frequency=None,
section=None,
user=None,
include_old_versions=False,
):
"""
Parameters
----------
include_old_versions: bool
Include spectra with XVER numbers <0? These are CLASS spectra that
have been "overwritten" (re-reduced?)
"""
if entry is not None and len(entry)==2:
return irange(entry[0], entry[1])
if frequency is not None:
self._load_all_spectra()
sel = [(re.search(re.escape(line), h['LINE'], re.IGNORECASE)
if line is not None else True) and
(re.search(linere, h['LINE'], linereflags)
if linere is not None else True) and
(h['SCAN'] == scan if scan is not None else True) and
((h['OFF1'] == offset or
h['OFF2'] == offset) if offset is not None else True) and
(re.search(re.escape(source), h['CSOUR'], re.IGNORECASE)
if source is not None else True) and
(re.search(sourcere, h['CSOUR'], sourcereflags)
if sourcere is not None else True) and
(h['OFF1']>range[0] and h['OFF1'] < range[1] and
h['OFF2']>range[2] and h['OFF2'] < range[3]
if range is not None and len(range)==4 else True) and
(h['QUAL'] == quality if quality is not None else True) and
(re.search(re.escape(telescope), h['CTELE'], re.IGNORECASE)
if telescope is not None else True) and
(re.search(telescopere, h['CTELE'], telescopereflags)
if telescopere is not None else True) and
(h['SUBSCAN']==subscan if subscan is not None else True) and
(h['NUM'] >= number[0] and h['NUM'] < number[1]
if number is not None else True) and
('RESTF' in h and # Need to check that it IS a spectrum: continuum data can't be accessed this way
h['RESTF'] > frequency[0] and
h['RESTF'] < frequency[1]
if frequency is not None and len(frequency)==2
else True) and
(h['COMPPOSA']%180 > posang[0] and
h['COMPPOSA']%180 < posang[1]
if posang is not None and len(posang)==2
else True) and
(h['XVER'] > 0 if not include_old_versions else True)
for h in self.headers
]
return [ii for ii,k in enumerate(sel) if k]
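    # Example (added sketch; the line/telescope strings are hypothetical):
    #   obs = ClassObject('session_001.smt')
    #   idx = obs.select_spectra(line='CO(2-1)', telescope='SMT', quality=0)
    # 'idx' holds positions into obs.headers that match every given criterion.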
def get_spectra(self, progressbar=True, **kwargs):
selected_indices = self.select_spectra(**kwargs)
        if len(selected_indices) == 0:
            raise ValueError("Selection did not match any spectra.")
self._spectra.load(selected_indices, progressbar=progressbar)
return [self._spectra[ii] for ii in selected_indices]
def get_pyspeckit_spectra(self, progressbar=True, **kwargs):
spdata = self.get_spectra(progressbar=progressbar, **kwargs)
spectra = [pyspeckit.Spectrum(data=data,
xarr=make_axis(header),
header=clean_header(header))
for data,header in spdata]
return spectra
def read_observations(self, observation_indices, progressbar=True):
self._spectra.load(observation_indices, progressbar=progressbar)
return [self._spectra[ii] for ii in observation_indices]
@print_timing
def read_class(filename, downsample_factor=None, sourcename=None,
telescope=None, posang=None, verbose=False,
flag_array=None):
"""
Read a binary class file.
Based on the
`GILDAS CLASS file type Specification
<http://iram.fr/IRAMFR/GILDAS/doc/html/class-html/node58.html>`_
Parameters
----------
filename: str
downsample_factor: None or int
Factor by which to downsample data by averaging. Useful for
overresolved data.
sourcename: str or list of str
Source names to match to the data (uses regex)
telescope: str or list of str
'XTEL' or 'TELE' parameters: the telescope & instrument
flag_array: np.ndarray
An array with the same shape as the data used to flag out
(remove) data when downsampling. True = flag out
"""
classobj = ClassObject(filename)
if not isinstance(sourcename, (list,tuple)):
sourcename = [sourcename]
if not isinstance(telescope, (list,tuple)):
telescope = [telescope]
spectra,headers = [],[]
if verbose:
log.info("Reading...")
selection = [ii
for source in sourcename
for tel in telescope
for ii in classobj.select_spectra(sourcere=source,
telescope=tel,
posang=posang)]
sphdr = classobj.read_observations(selection)
if len(sphdr) == 0:
return None
spec,hdr = zip(*sphdr)
spectra += spec
headers += hdr
indexes = headers
weight = ~flag_array if flag_array is not None else None
if downsample_factor is not None:
if verbose:
log.info("Downsampling...")
spectra = [downsample_1d(spec, downsample_factor,
weight=weight)
for spec in ProgressBar(spectra)]
headers = [downsample_header(h, downsample_factor)
for h in ProgressBar(headers)]
return spectra,headers,indexes
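# Example usage (added sketch; the file name and source/telescope strings are
# hypothetical):
#   spectra, headers, indexes = read_class('session_001.smt',
#                                          sourcename=['W51'],
#                                          telescope=['SMT-F1M-VU'],
#                                          downsample_factor=2)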
def downsample_header(hdr, downsample_factor):
for k in ('NCHAN','NPOIN','DATALEN'):
if k in hdr:
hdr[k] = hdr[k] / downsample_factor
# maybe wrong? h['RCHAN'] = (h['RCHAN']-1) / downsample_factor + 1
scalefactor = 1./downsample_factor
hdr['RCHAN'] = (hdr['RCHAN']-1)*scalefactor + 0.5 + scalefactor/2.
for kw in ['FRES','VRES']:
if kw in hdr:
hdr[kw] *= downsample_factor
return hdr
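# Worked example (added sketch): a 128-channel spectrum with RCHAN=64.5 (the grid
# centre) downsampled by a factor of 2 gives NCHAN=64 and
# RCHAN = (64.5-1)*0.5 + 0.5 + 0.25 = 32.5, so the reference stays at the centre
# of the coarser grid while FRES and VRES double.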
def make_axis(header,imagfreq=False):
"""
Create a :class:`pyspeckit.spectrum.units.SpectroscopicAxis` from the CLASS "header"
"""
from .. import units
rest_frequency = header.get('RESTF')
xunits = 'MHz'
nchan = header.get('NCHAN')
voff = header.get('VOFF')
foff = header.get('FOFF')
doppler = header.get('DOPPLER')
fres = header.get('FRES')
refchan = header.get('RCHAN')
imfreq = header.get('IMAGE')
if foff in (None, 0.0) and voff not in (None, 0.0):
# Radio convention
foff = -voff/2.997924580e5 * rest_frequency
if not imagfreq:
xarr = rest_frequency + foff + (numpy.arange(1, nchan+1) - refchan) * fres
XAxis = units.SpectroscopicAxis(xarr,unit='MHz',refX=rest_frequency*u.MHz)
else:
xarr = imfreq - (numpy.arange(1, nchan+1) - refchan) * fres
XAxis = units.SpectroscopicAxis(xarr,unit='MHz',refX=imfreq*u.MHz)
return XAxis
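# Worked example (added sketch, hypothetical header values): with RESTF=230538.0,
# NCHAN=4, RCHAN=2.5, FRES=0.5 (MHz) and FOFF=VOFF=0, the signal-frequency axis is
#   230538.0 + 0.0 + (numpy.arange(1, 5) - 2.5) * 0.5
#   -> [230537.25, 230537.75, 230538.25, 230538.75] MHz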
@print_timing
def class_to_obsblocks(filename, telescope, line, datatuple=None, source=None,
imagfreq=False, DEBUG=False, **kwargs):
"""
Load an entire CLASS observing session into a list of ObsBlocks based on
matches to the 'telescope', 'line' and 'source' names
Parameters
----------
filename : string
The Gildas CLASS data file to read the spectra from.
telescope : list
List of telescope names to be matched.
line : list
List of line names to be matched.
source : list (optional)
List of source names to be matched. Defaults to None.
imagfreq : bool
Create a SpectroscopicAxis with the image frequency.
"""
if datatuple is None:
spectra,header,indexes = read_class(filename,DEBUG=DEBUG, **kwargs)
else:
spectra,header,indexes = datatuple
obslist = []
lastscannum = -1
spectrumlist = None
for sp,hdr,ind in zip(spectra,header,indexes):
hdr.update(ind)
# this is slow but necessary...
H = pyfits.Header()
for k,v in iteritems(hdr):
if hasattr(v,"__len__") and not isinstance(v,str):
if len(v) > 1:
for ii,vv in enumerate(v):
H.update(k[:7]+str(ii),vv)
else:
H.update(k,v[0])
elif pyfits.Card._comment_FSC_RE.match(str(v)) is not None:
H.update(k,v)
scannum = hdr['SCAN']
if 'XTEL' in hdr and hdr['XTEL'].strip() not in telescope:
continue
if hdr['LINE'].strip() not in line:
continue
if (source is not None) and (hdr['SOURC'].strip() not in source):
continue
hdr.update({'RESTFREQ':hdr.get('RESTF')})
H.update('RESTFREQ',hdr.get('RESTF'))
#print "Did not skip %s,%s. Scannum, last: %i,%i" % (hdr['XTEL'],hdr['LINE'],scannum,lastscannum)
if scannum != lastscannum:
lastscannum = scannum
if spectrumlist is not None:
obslist.append(pyspeckit.ObsBlock(spectrumlist))
xarr = make_axis(hdr,imagfreq=imagfreq)
spectrumlist = [(
pyspeckit.Spectrum(xarr=xarr,
header=H,
data=sp))]
else:
spectrumlist.append(
pyspeckit.Spectrum(xarr=xarr,
header=H,
data=sp))
return obslist
class LazyItem(object):
"""
Simple lazy spectrum-retriever wrapper
"""
def __init__(self, parent):
self.parent = parent
self.sphdr = {}
self.nind = len(self.parent.allind)
self.nloaded = 0
def __repr__(self):
return ("Set of {0} spectra & headers, {1} loaded"
" ({2:0.2f}%)".format(self.nind, self.nloaded,
(float(self.nloaded)/self.nind)*100))
def load_all(self, progressbar=True):
self.load(range(self.nind))
def load(self, indices, progressbar=True):
pb = ProgressBar(len(indices))
counter = 0
for k in indices:
self[k]
counter += 1
pb.update(counter)
def __getitem__(self, key):
if key in self.sphdr:
return self.sphdr[key]
elif isinstance(key, slice):
return [self[k] for k in xrange(key.start or 0,
                                            key.stop or len(self.parent.allind),
key.step or 1)]
else:
sphd = read_observation(self.parent._file, key,
file_description=self.parent.file_description,
indices=self.parent.allind,
my_memmap=self.parent._data)
# Update the header with OTFSCAN and POSANG info
sphd[1].update(self.parent.allind[key])
self.sphdr[key] = sphd
self.nloaded += 1
return sphd
    def __iter__(self):
        return self.__next__()
    def __next__(self):
        for k in self.sphdr:
            yield self.sphdr[k]
def __contains__(self, key):
return key in self.sphdr
@print_timing
def class_to_spectra(filename, datatuple=None, **kwargs):
"""
Load each individual spectrum within a CLASS file into a list of Spectrum
objects
"""
if datatuple is None:
spectra,header,indexes = read_class(filename, **kwargs)
else:
spectra,header,indexes = datatuple
spectrumlist = []
for sp,hdr,ind in zip(spectra,header,indexes):
hdr.update(ind)
xarr = make_axis(hdr)
spectrumlist.append(
pyspeckit.Spectrum(xarr=xarr,
header=hdr,
data=sp))
return pyspeckit.Spectra(spectrumlist)
def tests():
"""
Tests are specific to the machine on which this code was developed.
"""
fn1 = '/Users/adam/work/bolocam/hht/class_003.smt'
#fn1 = '/Users/adam/work/bolocam/hht/class_001.smt'
#fn1 = '/Users/adam/work/bolocam/hht/test_SMT-F1M-VU-20824-073.cls'
#fn2 = '/Users/adam/work/bolocam/hht/test_SMT-F1M-VU-79472+203.cls'
#F1 = read_class(fn1)#,DEBUG=True)
#F2 = read_class(fn2)
n2hp = class_to_obsblocks(fn1,telescope=['SMT-F1M-HU','SMT-F1M-VU'],line=['N2HP(3-2)','N2H+(3-2)'])
hcop = class_to_obsblocks(fn1,telescope=['SMT-F1M-HL','SMT-F1M-VL'],line=['HCOP(3-2)','HCO+(3-2)'])
| mit | -4,625,620,523,675,145,000 | 41.584127 | 220 | 0.516267 | false |
xbuf/blender_io_xbuf | protocol.py | 1 | 4603 | # This file is part of blender_io_xbuf. blender_io_xbuf is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright David Bernard
# <pep8 compliant>
import struct
import asyncio
import atexit
import xbuf
import xbuf.datas_pb2
import xbuf.cmds_pb2
from . import xbuf_export # pylint: disable=W0406
# TODO better management of the event loop (eg on unregister)
loop = asyncio.get_event_loop()
atexit.register(loop.close)
class Kind:
pingpong = 0x01
logs = 0x02
ask_screenshot = 0x03
raw_screenshot = 0x04
msgpack = 0x05
xbuf_cmd = 0x06
class Client:
def __init__(self):
self.writer = None
self.reader = None
self.host = None
self.port = None
def __del__(self):
self.close()
def close(self):
if self.writer is not None:
print('Close the socket/writer')
self.writer.write_eof()
self.writer.close()
self.writer = None
self.reader = None
@asyncio.coroutine
def connect(self, host, port):
if (host != self.host) or (port != self.port):
self.close()
if self.writer is None:
self.host = host
self.port = port
(self.reader, self.writer) = yield from asyncio.open_connection(host, port, loop=loop)
return self
@asyncio.coroutine
def readHeader(reader):
"""return (size, kind)"""
header = yield from reader.readexactly(5)
return struct.unpack('>iB', header)
@asyncio.coroutine
def readMessage(reader):
"""return (kind, raw_message)"""
(size, kind) = yield from readHeader(reader)
# kind = header[4]
raw = yield from reader.readexactly(size)
return (kind, raw)
def writeMessage(writer, kind, body):
writer.write((len(body)).to_bytes(4, byteorder='big'))
writer.write((kind).to_bytes(1, byteorder='big'))
writer.write(body)
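# Added sketch (not part of the original protocol module): a tiny in-memory check
# of the wire format used above -- a 4-byte big-endian body length, a 1-byte kind
# code, then the body. It relies only on writeMessage()/Kind from this module and
# the standard library.
def _demo_wire_format():
    import io
    buf = io.BytesIO()
    # BytesIO exposes .write(), which is all writeMessage needs
    writeMessage(buf, Kind.logs, b'hello')
    raw = buf.getvalue()
    size, kind = struct.unpack('>iB', raw[:5])  # same unpacking readHeader() performs
    assert (size, kind, raw[5:]) == (5, Kind.logs, b'hello')
    return raw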
def askScreenshot(writer, width, height):
b = bytearray()
b.extend((width).to_bytes(4, byteorder='big'))
b.extend((height).to_bytes(4, byteorder='big'))
writeMessage(writer, Kind.ask_screenshot, b)
def setEye(writer, location, rotation, projection_matrix, near, far, is_ortho):
# sendCmd(writer, 'updateCamera', (_encode_vec3(location), _encode_quat(rotation), _encode_mat4(projection_matrix)))
cmd = xbuf.cmds_pb2.Cmd()
# cmd.setCamera = xbuf.cmds_pb2.SetCamera()
xbuf_export.cnv_translation(location, cmd.setEye.location)
xbuf_export.cnv_quatZupToYup(rotation, cmd.setEye.rotation)
xbuf_export.cnv_mat4(projection_matrix, cmd.setEye.projection)
cmd.setEye.near = near
cmd.setEye.far = far
cmd.setEye.projMode = xbuf.cmds_pb2.SetEye.orthographic if is_ortho else xbuf.cmds_pb2.SetEye.perspective
writeMessage(writer, Kind.xbuf_cmd, cmd.SerializeToString())
def setData(writer, scene, cfg):
cmd = xbuf.cmds_pb2.Cmd()
xbuf_export.export(scene, cmd.setData, cfg)
send = (len(cmd.setData.relations) > 0 or
len(cmd.setData.tobjects) > 0 or
len(cmd.setData.geometries) > 0 or
len(cmd.setData.materials) > 0 or
len(cmd.setData.lights) > 0
)
if send:
# print("send setData")
writeMessage(writer, Kind.xbuf_cmd, cmd.SerializeToString())
def changeAssetFolders(writer, cfg):
cmd = xbuf.cmds_pb2.Cmd()
cmd.changeAssetFolders.path.append(cfg.assets_path)
cmd.changeAssetFolders.register = True
cmd.changeAssetFolders.unregisterOther = True
writeMessage(writer, Kind.xbuf_cmd, cmd.SerializeToString())
def playAnimation(writer, ref, anims):
cmd = xbuf.cmds_pb2.Cmd()
cmd.playAnimation.ref = ref
cmd.playAnimation.animationsNames.extend(anims)
writeMessage(writer, Kind.xbuf_cmd, cmd.SerializeToString())
def run_until_complete(f, *args, **kwargs):
if asyncio.iscoroutine(f):
loop.run_until_complete(f)
else:
coro = asyncio.coroutine(f)
future = coro(*args, **kwargs)
loop.run_until_complete(future)
| gpl-3.0 | -6,606,697,315,505,430,000 | 30.101351 | 120 | 0.669781 | false |
ModulousSmash/Modulous | KerbalStuff/blueprints/mods.py | 1 | 19423 | from flask import Blueprint, render_template, request, g, Response, redirect, session, abort, send_file, make_response, url_for
from flask.ext.login import current_user
from sqlalchemy import desc
from KerbalStuff.objects import User, Mod, ModVersion, DownloadEvent, FollowEvent, ReferralEvent, Featured, Media, GameVersion, Category, Report
from KerbalStuff.email import send_update_notification, send_autoupdate_notification
from KerbalStuff.database import db
from KerbalStuff.common import *
from KerbalStuff.config import _cfg
from KerbalStuff.blueprints.api import default_description
from KerbalStuff.ckan import send_to_ckan
from werkzeug.utils import secure_filename
from datetime import datetime, timedelta
from shutil import rmtree, copyfile
from urllib.parse import urlparse
import os
import zipfile
import urllib
import random
mods = Blueprint('mods', __name__, template_folder='../../templates/mods')
@mods.route("/random")
def random_mod():
mods = Mod.query.filter(Mod.published == True).all()
mod = random.choice(mods)
return redirect(url_for("mods.mod", id=mod.id, mod_name=mod.name))
@mods.route("/mod/<int:id>/<path:mod_name>/update")
def update(id, mod_name):
mod = Mod.query.filter(Mod.id == id).first()
if not mod:
abort(404)
editable = False
if current_user.admin:
editable = True
if current_user.id == mod.user_id:
editable = True
if any([u.accepted and u.user == current_user for u in mod.shared_authors]):
editable = True
if not editable:
abort(401)
return render_template("update.html", mod=mod, game_versions=GameVersion.query.order_by(desc(GameVersion.id)).all())
@mods.route("/mod/<int:id>.rss", defaults={'mod_name': None})
@mods.route("/mod/<int:id>/<path:mod_name>.rss")
def mod_rss(id, mod_name):
mod = Mod.query.filter(Mod.id == id).first()
if not mod:
abort(404)
return render_template("rss-mod.xml", mod=mod)
@mods.route("/mod/<int:id>", defaults={'mod_name': None})
@mods.route("/mod/<int:id>/<path:mod_name>")
@with_session
def mod(id, mod_name):
mod = Mod.query.filter(Mod.id == id).first()
if not mod:
abort(404)
editable = False
if current_user:
if current_user.admin:
editable = True
if current_user.id == mod.user_id:
editable = True
if not mod.published and not editable:
abort(401)
latest = mod.default_version()
referral = request.referrer
if referral:
host = urllib.parse.urlparse(referral).hostname
event = ReferralEvent.query\
.filter(ReferralEvent.mod_id == mod.id)\
.filter(ReferralEvent.host == host)\
.first()
if not event:
event = ReferralEvent()
event.mod = mod
event.events = 1
event.host = host
db.add(event)
db.flush()
db.commit()
mod.referrals.append(event)
else:
event.events += 1
download_stats = None
follower_stats = None
referrals = None
json_versions = None
thirty_days_ago = datetime.now() - timedelta(days=30)
referrals = list()
for r in ReferralEvent.query\
.filter(ReferralEvent.mod_id == mod.id)\
.order_by(desc(ReferralEvent.events)):
referrals.append( { 'host': r.host, 'count': r.events } )
download_stats = list()
for d in DownloadEvent.query\
.filter(DownloadEvent.mod_id == mod.id)\
.filter(DownloadEvent.created > thirty_days_ago)\
.order_by(DownloadEvent.created):
download_stats.append(dumb_object(d))
follower_stats = list()
for f in FollowEvent.query\
.filter(FollowEvent.mod_id == mod.id)\
.filter(FollowEvent.created > thirty_days_ago)\
.order_by(FollowEvent.created):
follower_stats.append(dumb_object(f))
json_versions = list()
for v in mod.versions:
json_versions.append({ 'name': v.friendly_version, 'id': v.id })
if request.args.get('noedit') != None:
editable = False
forumThread = False
if mod.external_link != None:
try:
u = urlparse(mod.external_link)
if u.netloc == 'forum.kerbalspaceprogram.com':
forumThread = True
        except Exception as e:
            print(e)
total_authors = 1
pending_invite = False
owner = editable
for a in mod.shared_authors:
if a.accepted:
total_authors += 1
if current_user:
if current_user.id == a.user_id and not a.accepted:
pending_invite = True
if current_user.id == a.user_id and a.accepted:
editable = True
game_versions = GameVersion.query.order_by(desc(GameVersion.id)).all()
outdated = False
if latest:
outdated = game_versions[0].friendly_version != latest.ksp_version
return render_template("mod.html",
**{
'mod': mod,
'latest': latest,
'safe_name': secure_filename(mod.name)[:64],
'featured': any(Featured.query.filter(Featured.mod_id == mod.id).all()),
'editable': editable,
'owner': owner,
'pending_invite': pending_invite,
'download_stats': download_stats,
'follower_stats': follower_stats,
'referrals': referrals,
'json_versions': json_versions,
'thirty_days_ago': thirty_days_ago,
'share_link': urllib.parse.quote_plus(_cfg("protocol") + "://" + _cfg("domain") + "/mod/" + str(mod.id)),
'game_versions': game_versions,
'outdated': outdated,
'forum_thread': forumThread,
'new': request.args.get('new') != None,
'stupid_user': request.args.get('stupid_user') != None,
'total_authors': total_authors
})
@mods.route("/mod/<int:id>/<path:mod_name>/edit", methods=['GET', 'POST'])
@with_session
@loginrequired
def edit_mod(id, mod_name):
mod = Mod.query.filter(Mod.id == id).first()
if not mod:
abort(404)
editable = False
if current_user.admin:
editable = True
if current_user.id == mod.user_id:
editable = True
if any([u.accepted and u.user == current_user for u in mod.shared_authors]):
editable = True
if not editable:
abort(401)
if request.method == 'GET':
return render_template("edit_mod.html", mod=mod, original=mod.user == current_user, categories = Category.query.all())
else:
short_description = request.form.get('short-description')
tags = request.form.get('tags')
other_authors = request.form.get('other-authors')
print(request.form.get('other-authors'))
tags_array = request.form.get('tags')
modmm = request.form.get('modmm')
if modmm == None:
modmm = False
else:
modmm = (modmm.lower() == "true" or modmm.lower() == "yes" or modmm.lower() == "on")
license = request.form.get('license')
category = request.form.get('category')
donation_link = request.form.get('donation-link')
external_link = request.form.get('external-link')
source_link = request.form.get('source-link')
description = request.form.get('description')
background = request.form.get('background')
bgOffsetY = request.form.get('bg-offset-y')
if not license or license == '':
return render_template("edit_mod.html", mod=mod, error="All mods must have a license.")
if not category or category == '':
abort(401)
else:
category = Category.query.filter(Category.name == category).first()
mod.short_description = short_description
mod.license = license
mod.donation_link = donation_link
mod.external_link = external_link
mod.source_link = source_link
mod.description = description
mod.tags = tags
mod.modmm = modmm
mod.category = category
if other_authors == 'None' or other_authors == '':
mod.other_authors = None
else:
mod.other_authors = other_authors
if background and background != '':
mod.background = background
try:
mod.bgOffsetY = int(bgOffsetY)
except:
pass
return redirect(url_for("mods.mod", id=mod.id, mod_name=mod.name))
@mods.route("/create/mod")
@loginrequired
@with_session
def create_mod():
return render_template("create.html", **{ 'game_versions': GameVersion.query.order_by(desc(GameVersion.id)).all(), 'categories': Category.query.all()})
@mods.route("/mod/<int:mod_id>/stats/downloads", defaults={'mod_name': None})
@mods.route("/mod/<int:mod_id>/<path:mod_name>/stats/downloads")
def export_downloads(mod_id, mod_name):
mod = Mod.query.filter(Mod.id == mod_id).first()
if not mod:
abort(404)
download_stats = DownloadEvent.query\
.filter(DownloadEvent.mod_id == mod.id)\
.order_by(DownloadEvent.created)
response = make_response(render_template("downloads.csv", stats=download_stats))
response.headers['Content-Type'] = 'text/csv'
response.headers['Content-Disposition'] = 'attachment;filename=downloads.csv'
return response
@mods.route("/mod/<int:mod_id>/stats/followers", defaults={'mod_name': None})
@mods.route("/mod/<int:mod_id>/<path:mod_name>/stats/followers")
def export_followers(mod_id, mod_name):
mod = Mod.query.filter(Mod.id == mod_id).first()
if not mod:
abort(404)
follower_stats = FollowEvent.query\
.filter(FollowEvent.mod_id == mod.id)\
.order_by(FollowEvent.created)
response = make_response(render_template("followers.csv", stats=follower_stats))
response.headers['Content-Type'] = 'text/csv'
response.headers['Content-Disposition'] = 'attachment;filename=followers.csv'
return response
@mods.route("/mod/<int:mod_id>/stats/referrals", defaults={'mod_name': None})
@mods.route("/mod/<mod_id>/<path:mod_name>/stats/referrals")
def export_referrals(mod_id, mod_name):
mod = Mod.query.filter(Mod.id == mod_id).first()
if not mod:
abort(404)
referral_stats = ReferralEvent.query\
.filter(ReferralEvent.mod_id == mod.id)\
.order_by(desc(ReferralEvent.events))
response = make_response(render_template("referrals.csv", stats=referral_stats))
response.headers['Content-Type'] = 'text/csv'
response.headers['Content-Disposition'] = 'attachment;filename=referrals.csv'
return response
@mods.route("/mod/<int:mod_id>/delete", methods=['POST'])
@loginrequired
@with_session
def delete(mod_id):
mod = Mod.query.filter(Mod.id == mod_id).first()
if not mod:
abort(404)
editable = False
if current_user:
if current_user.admin:
editable = True
if current_user.id == mod.user_id:
editable = True
if not editable:
abort(401)
db.delete(mod)
for feature in Featured.query.filter(Featured.mod_id == mod.id).all():
db.delete(feature)
for media in Media.query.filter(Media.mod_id == mod.id).all():
db.delete(media)
for version in ModVersion.query.filter(ModVersion.mod_id == mod.id).all():
db.delete(version)
base_path = os.path.join(secure_filename(mod.user.username) + '_' + str(mod.user.id), secure_filename(mod.name))
full_path = os.path.join(_cfg('storage'), base_path)
db.commit()
rmtree(full_path)
return redirect("/profile/" + current_user.username)
@mods.route("/mod/<int:mod_id>/follow", methods=['POST'])
@loginrequired
@json_output
@with_session
def follow(mod_id):
mod = Mod.query.filter(Mod.id == mod_id).first()
if not mod:
abort(404)
if any(m.id == mod.id for m in current_user.following):
abort(418)
event = FollowEvent.query\
.filter(FollowEvent.mod_id == mod.id)\
.order_by(desc(FollowEvent.created))\
.first()
# Events are aggregated hourly
if not event or ((datetime.now() - event.created).seconds / 60 / 60) >= 1:
event = FollowEvent()
event.mod = mod
event.delta = 1
event.events = 1
db.add(event)
db.flush()
db.commit()
mod.follow_events.append(event)
else:
event.delta += 1
event.events += 1
mod.follower_count += 1
current_user.following.append(mod)
return { "success": True }
@mods.route("/mod/<int:mod_id>/unfollow", methods=['POST'])
@loginrequired
@json_output
@with_session
def unfollow(mod_id):
mod = Mod.query.filter(Mod.id == mod_id).first()
if not mod:
abort(404)
if not any(m.id == mod.id for m in current_user.following):
abort(418)
event = FollowEvent.query\
.filter(FollowEvent.mod_id == mod.id)\
.order_by(desc(FollowEvent.created))\
.first()
# Events are aggregated hourly
if not event or ((datetime.now() - event.created).seconds / 60 / 60) >= 1:
event = FollowEvent()
event.mod = mod
event.delta = -1
event.events = 1
mod.follow_events.append(event)
db.add(event)
else:
event.delta -= 1
event.events += 1
mod.follower_count -= 1
current_user.following = [m for m in current_user.following if m.id != int(mod_id)]
return { "success": True }
@mods.route('/mod/<int:mod_id>/feature', methods=['POST'])
@adminrequired
@json_output
@with_session
def feature(mod_id):
mod = Mod.query.filter(Mod.id == mod_id).first()
if not mod:
abort(404)
if any(Featured.query.filter(Featured.mod_id == mod_id).all()):
abort(409)
feature = Featured()
feature.mod = mod
db.add(feature)
return { "success": True }
@mods.route('/mod/<int:mod_id>/unfeature', methods=['POST'])
@adminrequired
@json_output
@with_session
def unfeature(mod_id):
mod = Mod.query.filter(Mod.id == mod_id).first()
if not mod:
abort(404)
feature = Featured.query.filter(Featured.mod_id == mod_id).first()
if not feature:
abort(404)
db.delete(feature)
return { "success": True }
@mods.route('/mod/<int:mod_id>/<path:mod_name>/publish')
@with_session
@loginrequired
def publish(mod_id, mod_name):
mod = Mod.query.filter(Mod.id == mod_id).first()
if not mod:
abort(404)
if current_user.id != mod.user_id:
abort(401)
if mod.description == default_description:
return redirect(url_for("mods.mod", id=mod.id, mod_name=mod.name, stupid_user=True))
mod.published = True
mod.updated = datetime.now()
send_to_ckan(mod)
return redirect(url_for("mods.mod", id=mod.id, mod_name=mod.name))
@mods.route('/mod/<int:mod_id>/download/<version>', defaults={ 'mod_name': None })
@mods.route('/mod/<int:mod_id>/<path:mod_name>/download/<version>')
@with_session
def download(mod_id, mod_name, version):
mod = Mod.query.filter(Mod.id == mod_id).first()
if not mod:
abort(404)
if not mod.published and (not current_user or current_user.id != mod.user_id):
abort(401)
version = ModVersion.query.filter(ModVersion.mod_id == mod_id, \
ModVersion.friendly_version == version).first()
if not version:
abort(404)
download = DownloadEvent.query\
        .filter(DownloadEvent.mod_id == mod.id, DownloadEvent.version_id == version.id)\
.order_by(desc(DownloadEvent.created))\
.first()
if not os.path.isfile(os.path.join(_cfg('storage'), version.download_path)):
abort(404)
if not 'Range' in request.headers:
# Events are aggregated hourly
if not download or ((datetime.now() - download.created).seconds / 60 / 60) >= 1:
download = DownloadEvent()
download.mod = mod
download.version = version
download.downloads = 1
db.add(download)
db.flush()
db.commit()
mod.downloads.append(download)
else:
download.downloads += 1
mod.download_count += 1
response = make_response(send_file(os.path.join(_cfg('storage'), version.download_path), as_attachment = True))
if _cfg("use-x-accel") == 'true':
response = make_response("")
response.headers['Content-Type'] = 'application/zip'
response.headers['Content-Disposition'] = 'attachment; filename=' + os.path.basename(version.download_path)
response.headers['X-Accel-Redirect'] = '/internal/' + version.download_path
return response
@mods.route('/mod/<int:mod_id>/version/<version_id>/delete', methods=['POST'])
@with_session
@loginrequired
def delete_version(mod_id, version_id):
mod = Mod.query.filter(Mod.id == mod_id).first()
if not mod:
abort(404)
editable = False
if current_user:
if current_user.admin:
editable = True
if current_user.id == mod.user_id:
editable = True
if any([u.accepted and u.user == current_user for u in mod.shared_authors]):
editable = True
if not editable:
abort(401)
version = [v for v in mod.versions if v.id == int(version_id)]
if len(mod.versions) == 1:
abort(400)
if len(version) == 0:
abort(404)
if version[0].id == mod.default_version_id:
abort(400)
db.delete(version[0])
mod.versions = [v for v in mod.versions if v.id != int(version_id)]
db.commit()
return redirect(url_for("mods.mod", id=mod.id, mod_name=mod.name))
@mods.route('/mod/<int:mod_id>/<mod_name>/edit_version', methods=['POST'])
@mods.route('/mod/<int:mod_id>/edit_version', methods=['POST'], defaults={ 'mod_name': None })
@with_session
@loginrequired
def edit_version(mod_name, mod_id):
mod = Mod.query.filter(Mod.id == mod_id).first()
if not mod:
abort(404)
editable = False
if current_user:
if current_user.admin:
editable = True
if current_user.id == mod.user_id:
editable = True
if any([u.accepted and u.user == current_user for u in mod.shared_authors]):
editable = True
if not editable:
abort(401)
version_id = int(request.form.get('version-id'))
changelog = request.form.get('changelog')
version = [v for v in mod.versions if v.id == version_id]
if len(version) == 0:
abort(404)
version = version[0]
version.changelog = changelog
return redirect(url_for("mods.mod", id=mod.id, mod_name=mod.name))
@mods.route('/mod/<int:mod_id>/autoupdate', methods=['POST'])
@with_session
@loginrequired
def autoupdate(mod_id):
mod = Mod.query.filter(Mod.id == mod_id).first()
if not mod:
abort(404)
editable = False
if current_user:
if current_user.admin:
editable = True
if current_user.id == mod.user_id:
editable = True
if any([u.accepted and u.user == current_user for u in mod.shared_authors]):
editable = True
if not editable:
abort(401)
default = mod.default_version()
default.ksp_version = GameVersion.query.order_by(desc(GameVersion.id)).first().friendly_version
send_autoupdate_notification(mod)
return redirect(url_for("mods.mod", id=mod.id, mod_name=mod.name))
| mit | -6,892,305,928,326,822,000 | 35.855787 | 155 | 0.615713 | false |
samstern/Greengraph | Greengraph/tests/test_maps.py | 1 | 2937 | from ..greengraph import Greengraph
from ..map import Map
import geopy
from nose.tools import assert_equal, assert_almost_equal
import numpy.testing as np_test
from mock import Mock, patch
import requests
from matplotlib import image
import yaml
import os
import numpy as np
#@patch.object(Greengraph, 'location_sequence')
#@patch.object(Map, 'count_green')
def test_map_constructor():
    mock_image = open(os.path.join(os.path.dirname(__file__), 'fixtures', 'NY_2.png'), 'rb')
with patch.object(requests,'get',return_value=Mock(content=mock_image.read())) as mock_get:
with patch.object(image,'imread') as mock_img:
#default
Map(40.7127837, -74.0059413) # New York
            # London: Map(51.5073509, -0.1277583)
mock_get.assert_called_with(
"http://maps.googleapis.com/maps/api/staticmap?",
params={
'sensor':'false',
'zoom':10,
'size':'400x400',
'center':'40.7127837,-74.0059413',
'style':'feature:all|element:labels|visibility:off',
'maptype': 'satellite'
}
)
#changing parameters
            Map(41.8781136, -87.6297982, satellite=False, zoom=15, size=(500, 350), sensor=True) # Chicago
mock_get.assert_called_with(
"http://maps.googleapis.com/maps/api/staticmap?",
params={
'sensor':'true',
'zoom':15,
'size':'500x350',
'center':'41.8781136,-87.6297982',
'style':'feature:all|element:labels|visibility:off',
#'maptype': 'satellite'
}
)
def test_green():
    mock_image = open(os.path.join(os.path.dirname(__file__), 'fixtures', 'NY_2.png'), 'rb')
fixture_green = np.load(os.path.join(os.path.dirname(__file__),'fixtures','ny_green.npy'))
threshold = 1.1
with patch('requests.get', return_value=Mock(content=mock_image.read())) as mock_get:
        testMap = Map(41.8781136, -87.6297982) # Chicago
assert_equal(fixture_green.shape,testMap.green(threshold).shape)
assert (testMap.green(threshold) == fixture_green).all() == True
assert (testMap.green(1.5) == fixture_green).all() == False
def test_count_green():
    mock_image = open(os.path.join(os.path.dirname(__file__), 'fixtures', 'NY_2.png'), 'rb')
fixture_green = np.load(os.path.join(os.path.dirname(__file__),'fixtures','ny_green.npy'))
threshold = 1.1
with patch('requests.get', return_value=Mock(content=mock_image.read())) as mock_get:
        testMap = Map(41.8781136, -87.6297982) # Chicago
count_from_fixture=np.sum(fixture_green)
assert (testMap.count_green() == count_from_fixture)
assert (testMap.count_green(1.5) != count_from_fixture) | mit | 2,774,564,202,367,730,000 | 43.515152 | 106 | 0.58461 | false |
ratschlab/RNA-geeq | SAFT/find_optimal_param_set.py | 1 | 11493 | """
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3 of the License, or
(at your option) any later version.
Written (W) 2009-2010 Andre Kahles
Copyright (C) 2009-2010 by Friedrich Miescher Laboratory, Tuebingen, Germany
This script finds an optimal parameter set to maximize the performance of a
given intronfeature file.
For detailed usage information type:
python find_optimal_param_set.py
"""
import sys
import cPickle
class Feature(object):
"""Is an intron feature object"""
def __init__(self, max_mm=80, feature_string=''):
if feature_string == '':
self.alignment_support = 0
self.submission_support = 1
self.mm_ex = dict()
self.max_mm = max_mm + 1
else:
self.alignment_support = int(feature_string[0])
self.submission_support = int(feature_string[1])
self.mm_ex = dict()
self.max_mm = max_mm + 1
for _sl in feature_string[2:]:
(key, value) = _sl.split(':')
self.mm_ex[key] = int(value)
def merge_features(self, feature_string):
"""Merges information in feature_string into current feature object"""
self.alignment_support += int(feature_string[0])
self.submission_support += int(feature_string[1])
for _sl in feature_string[2:]:
(key, value) = _sl.split(':')
try:
self.mm_ex[key] += int(value)
except KeyError:
self.mm_ex[key] = int(value)
def add_mm_ex(self, ex, mm):
"""Adds mm ex information"""
self.alignment_support += 1
try:
self.mm_ex[(ex*self.max_mm) + mm] += 1
except KeyError:
self.mm_ex[(ex*self.max_mm) + mm] = 1
def get_feature_string(self):
"""Returns string with mm ex elements."""
_line = (str(self.alignment_support) + '\t' + str(self.submission_support) + '\t')
for key in self.mm_ex:
_line += (str(key) + ':' + str(self.mm_ex[key]) + '\t')
return _line[:-1]
def get_submission_support(self):
"""Returns submission support"""
return int(self.submission_support)
def is_valid(self, mm, ex, mc, options):
"""Returns true, if at least one alignment fulfills the requirements with respect to mm, ex, and mc. False otherwise."""
if self.alignment_support < mc:
return False
is_valid = False
for key in self.mm_ex.keys():
_ex = int(key) / (options.max_feat_mismatches + 1)
_mm = int(key) % (options.max_feat_mismatches + 1)
if _mm <= mm and _ex >= ex:
is_valid = True
break
return is_valid
def parse_options(argv):
"""Parses options from the command line """
from optparse import OptionParser, OptionGroup
parser = OptionParser()
required = OptionGroup(parser, 'REQUIRED')
required.add_option('-b', '--best_score', dest='best_scores', metavar='FILE', help='file to store the best scoring parameters', default='-')
required.add_option('-m', '--matrix', dest='matrix', metavar='FILE', help='file to store the full performance matrix', default='-')
required.add_option('-f', '--features', dest='features', metavar='FILE', help='alignment intron features', default='-')
required.add_option('-i', '--annotation_introns', dest='anno_int', metavar='FILE', help='annotation intron list', default='-')
optional = OptionGroup(parser, 'OPTIONAL')
optional.add_option('-E', '--exclude_introns', dest='exclude_introns', metavar='STRINGLIST', help='list of comma separated intron files to exclude from submitted features', default='-')
optional.add_option('-I', '--max_intron_len', dest='max_intron_len', metavar='INT', type='int', help='maximal intron length [10000000]', default=10000000)
optional.add_option('-s', '--ignore_strand', dest='ignore_strand', action='store_true', help='ignore strand information present in annotation', default=False)
    optional.add_option('-X', '--max_feat_mismatches', dest='max_feat_mismatches', metavar='INT', type='int', help='max number of mismatches for feature generation [80] (only change this if you are absolutely sure!)', default=80)
optional.add_option('-v', '--verbose', dest='verbose', action='store_true', help='verbosity', default=False)
parser.add_option_group(required)
parser.add_option_group(optional)
(options, args) = parser.parse_args()
if len(argv) < 2:
parser.print_help()
sys.exit(2)
return options
def get_performance_value(full_features, mm, ex, mc, annotation_list, options):
"""Builds up a filtered intron list from the given alignment features and compares to the annotation."""
alignment_list = dict()
for feat in full_features.keys():
chrm = feat[0]
intron = (0, int(feat[1]), int(feat[2]))
### filter step
if (intron[2] - intron[1]) > options.max_intron_len:
continue
if not full_features[feat].is_valid(mm, ex, mc, options):
continue
try:
alignment_list[chrm][intron] = 0
except KeyError:
alignment_list[chrm] = {intron:0}
### match intron lists
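    # precision and recall are computed per chromosome and macro-averaged
    # over all chromosomes that contribute at least one matching intron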
total_precision = float(0)
total_recall = float(0)
key_count = 0
for chrm in annotation_list.keys():
if alignment_list.has_key(chrm):
matches = len(set(annotation_list[chrm].keys()).intersection(set(alignment_list[chrm].keys())))
total_precision += (float(matches) / float(max(1, len(alignment_list[chrm].keys()))))
total_recall += (float(matches) / float(max(1, len(annotation_list[chrm].keys()))))
### do not include chromosomes with zero values into average
if matches > 0:
key_count += 1
total_precision /= max(1.0, float(key_count))
total_recall /= max(1.0, float(key_count))
return (total_precision, total_recall)
def main():
"""Main function extracting intron features."""
options = parse_options(sys.argv)
### get list of annotated introns
annotation_list = cPickle.load(open(options.anno_int, 'r'))
if options.ignore_strand:
for chrm in annotation_list.keys():
skiplist = set()
for intron in annotation_list[chrm].keys():
if intron[0] == 0:
continue
annotation_list[chrm][(0, intron[1], intron[2])] = annotation_list[chrm][intron]
skiplist.add(intron)
for intron in skiplist:
del annotation_list[chrm][intron]
del skiplist
### filter annotation for max intron length
print '\nFiltering intron list for max intron len'
print '-----------------------------------------'
skipped = 0
for chrm in annotation_list.keys():
skiplist = set()
for intron in annotation_list[chrm].keys():
if (intron[2] - intron[1]) > options.max_intron_len:
skiplist.add(intron)
for intron in skiplist:
del annotation_list[chrm][intron]
skipped += len(skiplist)
print '%s introns removed from annotation' % skipped
del skiplist
full_features = dict()
if options.verbose:
print 'Parsing %s' % options.features
line_counter = 0
for line in open(options.features, 'r'):
if options.verbose and line_counter % 1000 == 0:
print 'parsed %i features from %s' % (line_counter, options.features)
line_counter += 1
sl = line.strip().split('\t')
(chrm, start, stop) = sl[:3]
try:
            full_features[(chrm, start, stop)].merge_features(sl[3:])
except KeyError:
full_features[(chrm, start, stop)] = Feature(80, sl[3:])
### filter full feature list for excluded introns
if options.exclude_introns != '-':
_ex_introns = options.exclude_introns.strip().split(',')
### handle leading or trailing commas
if _ex_introns[0] == '':
_ex_introns = _ex_introns[1:]
if _ex_introns[-1] == '':
_ex_introns = _ex_introns[:-1]
for _infile in _ex_introns:
_ex_intron = cPickle.load(open(_infile, 'r'))
for chrm in _ex_intron.keys():
for _intron in _ex_intron[chrm].keys():
try:
del full_features[(chrm, str(_intron[1]), str(_intron[2]))]
except KeyError:
continue
del _ex_intron
if options.verbose:
print 'Parsing completed.'
print 'parsed %i features from %s' % (line_counter, options.features)
### SEARCH SPACE
### iterate over different filter dimensions
#ex_list = [2, 4, 6, 8, 10, 12, 15, 20, 25, 30] # 10
#ex_list = [2, 4, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18 ] # 15
ex_list = [1, 2, 4, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 18 ] # 15
mm_list = [0, 1, 2, 3, 4, 5, 6] # 7
mc_list = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10] # 10 ==> 700 combinations
checked_combs = 0
# pre rec fsc
max_pre = (0.0, 0.0, 0.0)
max_rec = (0.0, 0.0, 0.0)
max_fsc = (0.0, 0.0, 0.0)
max_pre_idx = (0, 0, 0)
max_rec_idx = (0, 0, 0)
max_fsc_idx = (0, 0, 0)
matrix_file = open(options.matrix, 'w')
for ex in ex_list:
for mm in mm_list:
for mc in mc_list:
if options.verbose and checked_combs % 10 == 0:
print 'checked %i parameter combinations' % checked_combs
print 'best scores so far:\n \tbest fScore: %0.2f, best recall: %0.2f, best precision: %0.2f' % (max_fsc[2], max_rec[1], max_pre[0])
checked_combs += 1
(pre, rec) = get_performance_value(full_features, mm, ex, mc, annotation_list, options)
if float(rec) + float(pre) > 0:
fsc = (2 * float(rec) * float(pre)) / (float(rec) + float(pre))
else:
fsc = 0.0
if pre > max_pre[0]:
max_pre = (pre, rec, fsc)
max_pre_idx = (ex, mm, mc)
if rec > max_rec[1]:
max_rec = (pre, rec, fsc)
max_rec_idx = (ex, mm, mc)
if fsc > max_fsc[2]:
max_fsc = (pre, rec, fsc)
max_fsc_idx = (ex, mm, mc)
### store information
### ex mm mc pre rec fsc
print >> matrix_file, '%s\t%s\t%s\t%s\t%s\t%s' % (ex, mm, mc, pre, rec, fsc)
matrix_file.close()
best_file = open(options.best_scores, 'w')
# best precision
print >> best_file, '%s\t%s\t%s\t%s\t%s\t%s' % (max_pre_idx[0], max_pre_idx[1], max_pre_idx[2], max_pre[0], max_pre[1], max_pre[2])
# best recall
print >> best_file, '%s\t%s\t%s\t%s\t%s\t%s' % (max_rec_idx[0], max_rec_idx[1], max_rec_idx[2], max_rec[0], max_rec[1], max_rec[2])
# best fScore
print >> best_file, '%s\t%s\t%s\t%s\t%s\t%s' % (max_fsc_idx[0], max_fsc_idx[1], max_fsc_idx[2], max_fsc[0], max_fsc[1], max_fsc[2])
best_file.close()
if __name__ == "__main__":
main()
| mit | 5,016,354,970,529,404,000 | 37.69697 | 225 | 0.560515 | false |
intel-ctrlsys/actsys | datastore/datastore/database_schema/schema_migration/versions/d43655797899_changing_table_name_from_group_to_.py | 1 | 2060 | """Changing table name from 'group' to 'device_group'
Revision ID: d43655797899
Revises: 38f3c80e9932
Create Date: 2017-08-24 15:17:10.671537
"""
import textwrap
from alembic import op
# revision identifiers, used by Alembic.
revision = 'd43655797899'
down_revision = '38f3c80e9932'
branch_labels = None
depends_on = None
def upgrade():
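    # rename the table and recreate the upsert helper so that it writes to
    # the new table name (public.device_group)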
op.execute(textwrap.dedent("""ALTER TABLE public.group RENAME TO device_group;"""))
op.execute(textwrap.dedent("""
CREATE OR REPLACE FUNCTION public.upsert_group(p_group_name character varying, p_device_list character varying)
RETURNS integer AS
$BODY$
DECLARE num_rows integer;
BEGIN
INSERT INTO public.device_group AS gro (group_name, device_list)
VALUES (p_group_name, p_device_list)
ON CONFLICT (group_name) DO UPDATE
SET
device_list = p_device_list
WHERE gro.group_name = p_group_name;
GET DIAGNOSTICS num_rows = ROW_COUNT;
RETURN num_rows;
END;
$BODY$
LANGUAGE plpgsql VOLATILE
COST 100;"""))
def downgrade():
op.execute(textwrap.dedent("""ALTER TABLE device_group RENAME TO "group";"""))
op.execute(textwrap.dedent("""
CREATE OR REPLACE FUNCTION public.upsert_group(p_group_name character varying, p_device_list character varying)
RETURNS integer AS
$BODY$
DECLARE num_rows integer;
BEGIN
INSERT INTO public.group AS gro (group_name, device_list)
VALUES (p_group_name, p_device_list)
ON CONFLICT (group_name) DO UPDATE
SET
device_list = p_device_list
WHERE gro.group_name = p_group_name;
GET DIAGNOSTICS num_rows = ROW_COUNT;
RETURN num_rows;
END;
$BODY$
LANGUAGE plpgsql VOLATILE
COST 100;"""))
| apache-2.0 | -1,239,599,158,944,896,800 | 33.333333 | 123 | 0.571845 | false |
NYU-DevOps-S17/Orders | tests/test_server.py | 1 | 6572 | # Test cases can be run with either of the following:
# python -m unittest discover
# nosetests -v --rednose --nologcapture
import unittest
import logging
import json
from app import server
# Status Codes
HTTP_200_OK = 200
HTTP_201_CREATED = 201
HTTP_204_NO_CONTENT = 204
HTTP_400_BAD_REQUEST = 400
HTTP_404_NOT_FOUND = 404
HTTP_409_CONFLICT = 409
######################################################################
# T E S T C A S E S
######################################################################
class TestOrderServer(unittest.TestCase):
def setUp(self):
server.app.debug = True
server.app.logger.addHandler(logging.StreamHandler())
server.app.logger.setLevel(logging.CRITICAL)
self.app = server.app.test_client()
server.inititalize_redis()
server.data_reset()
server.data_load({"customer_name": "Tom", "amount_paid": "200"})
server.data_load({"customer_name": "Bob", "amount_paid": "300"})
def test_index(self):
resp = self.app.get('/')
self.assertEqual( resp.status_code, HTTP_200_OK )
self.assertTrue ('Order Demo REST API Service' in resp.data)
def test_get_order_list(self):
resp = self.app.get('/orders')
self.assertEqual( resp.status_code, HTTP_200_OK )
self.assertTrue( len(resp.data) > 0 )
def test_get_order(self):
resp = self.app.get('/orders/2')
#print 'resp_data: ' + resp.data
self.assertEqual( resp.status_code, HTTP_200_OK )
data = json.loads(resp.data)
self.assertEqual (data['customer_name'], 'Bob')
def test_get_order_not_found(self):
resp = self.app.get('/orders/0')
self.assertEqual( resp.status_code, HTTP_404_NOT_FOUND )
def test_create_order(self):
        # save the current number of orders for later comparison
order_count = self.get_order_count()
# add a new order
new_order = {'customer_name': 'Kate', 'amount_paid': '400'}
data = json.dumps(new_order)
resp = self.app.post('/orders', data=data, content_type='application/json')
self.assertEqual( resp.status_code, HTTP_201_CREATED )
# Make sure location header is set
location = resp.headers.get('Location', None)
self.assertTrue( location != None)
# Check the data is correct
new_json = json.loads(resp.data)
self.assertEqual (new_json['customer_name'], 'Kate')
# check that count has gone up and includes Kate
resp = self.app.get('/orders')
# print 'resp_data(2): ' + resp.data
data = json.loads(resp.data)
self.assertEqual( resp.status_code, HTTP_200_OK )
self.assertEqual( len(data), order_count + 1 )
self.assertIn( new_json, data )
def test_update_order(self):
new_order = {'customer_name': 'Bob', 'amount_paid': '500'}
data = json.dumps(new_order)
resp = self.app.put('/orders/2', data=data, content_type='application/json')
self.assertEqual( resp.status_code, HTTP_200_OK )
resp = self.app.get('/orders/2', content_type='application/json')
self.assertEqual( resp.status_code, HTTP_200_OK )
new_json = json.loads(resp.data)
self.assertEqual (new_json['amount_paid'], '500')
def test_update_order_with_no_customer_name(self):
new_order = {'amount_paid': '200'}
data = json.dumps(new_order)
resp = self.app.put('/orders/2', data=data, content_type='application/json')
self.assertEqual( resp.status_code, HTTP_400_BAD_REQUEST )
def test_update_order_not_found(self):
new_order = {"customer_name": "ossso", "amount_paid": '3000'}
data = json.dumps(new_order)
resp = self.app.put('/orders/0', data=data, content_type='application/json')
self.assertEquals( resp.status_code, HTTP_404_NOT_FOUND )
def test_delete_order(self):
        # save the current number of orders for later comparison
order_count = self.get_order_count()
# delete a order
resp = self.app.delete('/orders/2', content_type='application/json')
self.assertEqual( resp.status_code, HTTP_204_NO_CONTENT )
self.assertEqual( len(resp.data), 0 )
new_count = self.get_order_count()
self.assertEqual( new_count, order_count - 1)
def test_create_order_with_no_customer_name(self):
new_order = {'amount_paid': '200'}
data = json.dumps(new_order)
resp = self.app.post('/orders', data=data, content_type='application/json')
self.assertEqual( resp.status_code, HTTP_400_BAD_REQUEST )
def test_create_order_with_no_content_type(self):
new_order = {'amount_paid': '200'}
data = json.dumps(new_order)
resp = self.app.post('/orders', data=data)
self.assertEqual( resp.status_code, HTTP_400_BAD_REQUEST )
def test_get_nonexisting_order(self):
resp = self.app.get('/orders/5')
self.assertEqual( resp.status_code, HTTP_404_NOT_FOUND )
def test_query_order_list(self):
resp = self.app.get('/orders', query_string='customer_name=Tom')
self.assertEqual( resp.status_code, HTTP_200_OK )
self.assertTrue( len(resp.data) > 0 )
self.assertTrue( 'Tom' in resp.data)
self.assertFalse( 'Bob' in resp.data)
data = json.loads(resp.data)
query_item = data[0]
self.assertEqual(query_item['customer_name'], 'Tom')
def test_duplicate_nonexisting_order(self):
resp = self.app.put('/orders/11111/duplicate')
self.assertEqual(resp.status_code, HTTP_400_BAD_REQUEST)
def test_duplicate_existing_order(self):
order_count_old = self.get_order_count()
resp = self.app.put('/orders/1/duplicate')
self.assertEqual(resp.status_code,HTTP_201_CREATED)
order_count_new = self.get_order_count()
self.assertEqual(order_count_new,order_count_old+1)
######################################################################
# Utility functions
######################################################################
def get_order_count(self):
# save the current number of orders
resp = self.app.get('/orders')
self.assertEqual( resp.status_code, HTTP_200_OK )
data = json.loads(resp.data)
return len(data)
######################################################################
# M A I N
######################################################################
if __name__ == '__main__':
unittest.main()
| apache-2.0 | 801,794,700,130,095,400 | 39.319018 | 84 | 0.585514 | false |
jorik041/stackprinter | app/lib/deliciousapi.py | 2 | 50450 | """
Unofficial Python API for retrieving data from Delicious.com.
This module provides the following features plus some more:
* retrieving a URL's full public bookmarking history including
* users who bookmarked the URL including tags used for such bookmarks
and the creation time of the bookmark (up to YYYY-MM-DD granularity)
* top tags (up to a maximum of 10) including tag count
* title as stored on Delicious.com
* total number of bookmarks/users for this URL at Delicious.com
* retrieving a user's full bookmark collection, including any private bookmarks
if you know the corresponding password
* retrieving a user's full public tagging vocabulary, i.e. tags and tag counts
* retrieving a user's network information (network members and network fans)
* HTTP proxy support
* updated to support Delicious.com "version 2" (mini-relaunch as of August 2008)
The official Delicious.com API and the JSON/RSS feeds do not provide all
the functionality mentioned above, and in such cases this module will query
the Delicious.com *website* directly and extract the required information
by parsing the HTML code of the resulting Web pages (a kind of poor man's
web mining). The module is able to detect IP throttling, which is employed
by Delicious.com to temporarily block abusive HTTP request behavior, and
will raise a custom Python error to indicate that. Please be a nice netizen
and do not stress the Delicious.com service more than necessary.
It is strongly advised that you read the Delicious.com Terms of Use
before using this Python module. In particular, read section 5
'Intellectual Property'.
The code is licensed to you under version 2 of the GNU General Public
License.
More information about this module can be found at
http://www.michael-noll.com/wiki/Del.icio.us_Python_API
Changelog is available at
http://code.michael-noll.com/?p=deliciousapi;a=log
Copyright 2006-2010 Michael G. Noll <http://www.michael-noll.com/>
"""
__author__ = "Michael G. Noll"
__copyright__ = "(c) 2006-2010 Michael G. Noll"
__description__ = "Unofficial Python API for retrieving data from Delicious.com"
__email__ = "coding[AT]michael-REMOVEME-noll[DOT]com"
__license__ = "GPLv2"
__maintainer__ = "Michael G. Noll"
__status__ = "Development"
__url__ = "http://www.michael-noll.com/"
__version__ = "1.6.3"
import base64
import cgi
import datetime
import hashlib
from operator import itemgetter
import re
import socket
import time
import urllib2
import xml.dom.minidom
try:
from BeautifulSoup import BeautifulSoup
except:
print "ERROR: could not import BeautifulSoup Python module"
print
print "You can download BeautifulSoup from the Python Cheese Shop at"
print "http://cheeseshop.python.org/pypi/BeautifulSoup/"
print "or directly from http://www.crummy.com/software/BeautifulSoup/"
print
raise
try:
from app.lib import simplejson
except:
print "ERROR: could not import simplejson module"
print
print "Since version 1.5.0, DeliciousAPI requires the simplejson module."
print "You can download simplejson from the Python Cheese Shop at"
print "http://pypi.python.org/pypi/simplejson"
print
raise
class DeliciousUser(object):
"""This class wraps all available information about a user into one object.
Variables:
bookmarks:
A list of (url, tags, title, comment, timestamp) tuples representing
a user's bookmark collection.
url is a 'unicode'
tags is a 'list' of 'unicode' ([] if no tags)
title is a 'unicode'
comment is a 'unicode' (u"" if no comment)
timestamp is a 'datetime.datetime'
tags (read-only property):
A list of (tag, tag_count) tuples, aggregated over all a user's
retrieved bookmarks. The tags represent a user's tagging vocabulary.
username:
The Delicious.com account name of the user.
"""
def __init__(self, username, bookmarks=None):
assert username
self.username = username
self.bookmarks = bookmarks or []
def __str__(self):
total_tag_count = 0
total_tags = set()
for url, tags, title, comment, timestamp in self.bookmarks:
if tags:
total_tag_count += len(tags)
for tag in tags:
total_tags.add(tag)
return "[%s] %d bookmarks, %d tags (%d unique)" % \
(self.username, len(self.bookmarks), total_tag_count, len(total_tags))
def __repr__(self):
return self.username
def get_tags(self):
"""Returns a dictionary mapping tags to their tag count.
For example, if the tag count of tag 'foo' is 23, then
        23 of the user's bookmarks were annotated with 'foo'. A
        different way to put it is that this user applied the tag
        'foo' 23 times.
"""
total_tags = {}
for url, tags, title, comment, timestamp in self.bookmarks:
for tag in tags:
total_tags[tag] = total_tags.get(tag, 0) + 1
return total_tags
tags = property(fget=get_tags, doc="Returns a dictionary mapping tags to their tag count")
class DeliciousURL(object):
"""This class wraps all available information about a web document into one object.
Variables:
bookmarks:
A list of (user, tags, comment, timestamp) tuples, representing a
document's bookmark history. Generally, this variable is populated
via get_url(), so the number of bookmarks available in this variable
depends on the parameters of get_url(). See get_url() for more
information.
user is a 'unicode'
tags is a 'list' of 'unicode's ([] if no tags)
comment is a 'unicode' (u"" if no comment)
timestamp is a 'datetime.datetime' (granularity: creation *day*,
i.e. the day but not the time of day)
tags (read-only property):
A list of (tag, tag_count) tuples, aggregated over all a document's
retrieved bookmarks.
top_tags:
A list of (tag, tag_count) tuples, representing a document's so-called
"top tags", i.e. the up to 10 most popular tags for this document.
url:
The URL of the document.
hash (read-only property):
The MD5 hash of the URL.
title:
The document's title.
total_bookmarks:
The number of total bookmarks (posts) of the document.
Note that the value of total_bookmarks can be greater than the
length of "bookmarks" depending on how much (detailed) bookmark
data could be retrieved from Delicious.com.
Here's some more background information:
The value of total_bookmarks is the "real" number of bookmarks of
URL "url" stored at Delicious.com as reported by Delicious.com
itself (so it's the "ground truth"). On the other hand, the length
of "bookmarks" depends on iteratively scraped bookmarking data.
            Since scraping Delicious.com's Web pages has its limits in practice,
this means that DeliciousAPI could most likely not retrieve all
available bookmarks. In such a case, the value reported by
total_bookmarks is greater than the length of "bookmarks".
"""
def __init__(self, url, top_tags=None, bookmarks=None, title=u"", total_bookmarks=0):
assert url
self.url = url
self.top_tags = top_tags or []
self.bookmarks = bookmarks or []
self.title = title
self.total_bookmarks = total_bookmarks
def __str__(self):
total_tag_count = 0
total_tags = set()
for user, tags, comment, timestamp in self.bookmarks:
if tags:
total_tag_count += len(tags)
for tag in tags:
total_tags.add(tag)
return "[%s] %d total bookmarks (= users), %d tags (%d unique), %d out of 10 max 'top' tags" % \
(self.url, self.total_bookmarks, total_tag_count, \
len(total_tags), len(self.top_tags))
def __repr__(self):
return self.url
def get_tags(self):
"""Returns a dictionary mapping tags to their tag count.
For example, if the tag count of tag 'foo' is 23, then
23 bookmarks were annotated with 'foo'. A different way
to put it is that 23 users used the tag 'foo' when
bookmarking the URL.
@return: Dictionary mapping tags to their tag count.
"""
total_tags = {}
for user, tags, comment, timestamp in self.bookmarks:
for tag in tags:
total_tags[tag] = total_tags.get(tag, 0) + 1
return total_tags
tags = property(fget=get_tags, doc="Returns a dictionary mapping tags to their tag count")
def get_hash(self):
m = hashlib.md5()
m.update(self.url)
return m.hexdigest()
hash = property(fget=get_hash, doc="Returns the MD5 hash of the URL of this document")
class DeliciousAPI(object):
"""
This class provides a custom, unofficial API to the Delicious.com service.
Instead of using just the functionality provided by the official
Delicious.com API (which has limited features), this class retrieves
information from the Delicious.com website directly and extracts data from
the Web pages.
Note that Delicious.com will block clients with too many queries in a
certain time frame (similar to their API throttling). So be a nice citizen
and don't stress their website.
"""
def __init__(self,
http_proxy="",
tries=3,
wait_seconds=3,
user_agent="DeliciousAPI/%s (+http://www.michael-noll.com/wiki/Del.icio.us_Python_API)" % __version__,
timeout=30,
):
"""Set up the API module.
@param http_proxy: Optional, default: "".
Use an HTTP proxy for HTTP connections. Proxy support for
HTTPS is not available yet.
Format: "hostname:port" (e.g., "localhost:8080")
@type http_proxy: str
@param tries: Optional, default: 3.
Try the specified number of times when downloading a monitored
document fails. tries must be >= 1. See also wait_seconds.
@type tries: int
@param wait_seconds: Optional, default: 3.
Wait the specified number of seconds before re-trying to
download a monitored document. wait_seconds must be >= 0.
See also tries.
@type wait_seconds: int
@param user_agent: Optional, default: "DeliciousAPI/<version>
(+http://www.michael-noll.com/wiki/Del.icio.us_Python_API)".
The User-Agent HTTP Header to use when querying Delicous.com.
@type user_agent: str
@param timeout: Optional, default: 30.
Set network timeout. timeout must be >= 0.
@type timeout: int
"""
assert tries >= 1
assert wait_seconds >= 0
assert timeout >= 0
self.http_proxy = http_proxy
self.tries = tries
self.wait_seconds = wait_seconds
self.user_agent = user_agent
self.timeout = timeout
#socket.setdefaulttimeout(self.timeout)
def _query(self, path, host="delicious.com", user=None, password=None, use_ssl=False):
"""Queries Delicious.com for information, specified by (query) path.
@param path: The HTTP query path.
@type path: str
@param host: The host to query, default: "delicious.com".
@type host: str
@param user: The Delicious.com username if any, default: None.
@type user: str
@param password: The Delicious.com password of user, default: None.
@type password: unicode/str
@param use_ssl: Whether to use SSL encryption or not, default: False.
@type use_ssl: bool
@return: None on errors (i.e. on all HTTP status other than 200).
On success, returns the content of the HTML response.
"""
opener = None
handlers = []
# add HTTP Basic authentication if available
if user and password:
pwd_mgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
pwd_mgr.add_password(None, host, user, password)
basic_auth_handler = urllib2.HTTPBasicAuthHandler(pwd_mgr)
handlers.append(basic_auth_handler)
# add proxy support if requested
if self.http_proxy:
proxy_handler = urllib2.ProxyHandler({'http': 'http://%s' % self.http_proxy})
handlers.append(proxy_handler)
if handlers:
opener = urllib2.build_opener(*handlers)
else:
opener = urllib2.build_opener()
opener.addheaders = [('User-agent', self.user_agent)]
data = None
tries = self.tries
if use_ssl:
protocol = "https"
else:
protocol = "http"
url = "%s://%s%s" % (protocol, host, path)
while tries > 0:
try:
f = opener.open(url)
data = f.read()
f.close()
break
except urllib2.HTTPError, e:
if e.code == 301:
raise DeliciousMovedPermanentlyWarning, "Delicious.com status %s - url moved permanently" % e.code
if e.code == 302:
raise DeliciousMovedTemporarilyWarning, "Delicious.com status %s - url moved temporarily" % e.code
elif e.code == 401:
raise DeliciousUnauthorizedError, "Delicious.com error %s - unauthorized (authentication failed?)" % e.code
elif e.code == 403:
raise DeliciousForbiddenError, "Delicious.com error %s - forbidden" % e.code
elif e.code == 404:
raise DeliciousNotFoundError, "Delicious.com error %s - url not found" % e.code
elif e.code == 500:
raise Delicious500Error, "Delicious.com error %s - server problem" % e.code
elif e.code == 503 or e.code == 999:
raise DeliciousThrottleError, "Delicious.com error %s - unable to process request (your IP address has been throttled/blocked)" % e.code
else:
raise DeliciousUnknownError, "Delicious.com error %s - unknown error" % e.code
break
except urllib2.URLError, e:
time.sleep(self.wait_seconds)
except socket.error, msg:
# sometimes we get a "Connection Refused" error
# wait a bit and then try again
time.sleep(self.wait_seconds)
#finally:
# f.close()
tries -= 1
return data
def get_url(self, url, max_bookmarks=50, sleep_seconds=1):
"""
Returns a DeliciousURL instance representing the Delicious.com history of url.
Generally, this method is what you want for getting title, bookmark, tag,
and user information about a URL.
Delicious only returns up to 50 bookmarks per URL. This means that
we have to do subsequent queries plus parsing if we want to retrieve
more than 50. Roughly speaking, the processing time of get_url()
increases linearly with the number of 50-bookmarks-chunks; i.e.
it will take 10 times longer to retrieve 500 bookmarks than 50.
@param url: The URL of the web document to be queried for.
@type url: str
@param max_bookmarks: Optional, default: 50.
See the documentation of get_bookmarks() for more information
as get_url() uses get_bookmarks() to retrieve a url's
bookmarking history.
@type max_bookmarks: int
@param sleep_seconds: Optional, default: 1.
See the documentation of get_bookmarks() for more information
as get_url() uses get_bookmarks() to retrieve a url's
bookmarking history. sleep_seconds must be >= 1 to comply with
Delicious.com's Terms of Use.
@type sleep_seconds: int
@return: DeliciousURL instance representing the Delicious.com history
of url.
"""
# we must wait at least 1 second between subsequent queries to
# comply with Delicious.com's Terms of Use
assert sleep_seconds >= 1
document = DeliciousURL(url)
m = hashlib.md5()
m.update(url)
hash = m.hexdigest()
path = "/v2/json/urlinfo/%s" % hash
data = self._query(path, host="feeds.delicious.com")
if data:
urlinfo = {}
try:
urlinfo = simplejson.loads(data)
if urlinfo:
urlinfo = urlinfo[0]
else:
urlinfo = {}
except TypeError:
pass
try:
document.title = urlinfo['title'] or u""
except KeyError:
pass
try:
top_tags = urlinfo['top_tags'] or {}
if top_tags:
document.top_tags = sorted(top_tags.iteritems(), key=itemgetter(1), reverse=True)
else:
document.top_tags = []
except KeyError:
pass
try:
document.total_bookmarks = int(urlinfo['total_posts'])
except (KeyError, ValueError):
pass
document.bookmarks = self.get_bookmarks(url=url, max_bookmarks=max_bookmarks, sleep_seconds=sleep_seconds)
return document
def get_network(self, username):
"""
Returns the user's list of followees and followers.
Followees are users in his Delicious "network", i.e. those users whose
bookmark streams he's subscribed to. Followers are his Delicious.com
"fans", i.e. those users who have subscribed to the given user's
bookmark stream).
Example:
A --------> --------> C
D --------> B --------> E
F --------> --------> F
followers followees
of B of B
Arrows from user A to user B denote that A has subscribed to B's
bookmark stream, i.e. A is "following" or "tracking" B.
Note that user F is both a followee and a follower of B, i.e. F tracks
B and vice versa. In Delicious.com terms, F is called a "mutual fan"
of B.
Comparing this network concept to information retrieval, one could say
that followers are incoming links and followees outgoing links of B.
@param username: Delicous.com username for which network information is
retrieved.
@type username: unicode/str
        @return: Tuple of two lists ([<followees>], [<followers>]), where each list
contains tuples of (username, tracking_since_timestamp).
If a network is set as private, i.e. hidden from public view,
(None, None) is returned.
If a network is public but empty, ([], []) is returned.
"""
assert username
followees = followers = None
# followees (network members)
path = "/v2/json/networkmembers/%s" % username
data = None
try:
data = self._query(path, host="feeds.delicious.com")
except DeliciousForbiddenError:
pass
if data:
followees = []
users = []
try:
users = simplejson.loads(data)
except TypeError:
pass
uname = tracking_since = None
for user in users:
# followee's username
try:
uname = user['user']
except KeyError:
pass
# try to convert uname to Unicode
if uname:
try:
# we assume UTF-8 encoding
uname = uname.decode('utf-8')
except UnicodeDecodeError:
pass
# time when the given user started tracking this user
try:
tracking_since = datetime.datetime.strptime(user['dt'], "%Y-%m-%dT%H:%M:%SZ")
except KeyError:
pass
if uname:
followees.append( (uname, tracking_since) )
# followers (network fans)
path = "/v2/json/networkfans/%s" % username
data = None
try:
data = self._query(path, host="feeds.delicious.com")
except DeliciousForbiddenError:
pass
if data:
followers = []
users = []
try:
users = simplejson.loads(data)
except TypeError:
pass
uname = tracking_since = None
for user in users:
# fan's username
try:
uname = user['user']
except KeyError:
pass
# try to convert uname to Unicode
if uname:
try:
# we assume UTF-8 encoding
uname = uname.decode('utf-8')
except UnicodeDecodeError:
pass
# time when fan started tracking the given user
try:
tracking_since = datetime.datetime.strptime(user['dt'], "%Y-%m-%dT%H:%M:%SZ")
except KeyError:
pass
if uname:
followers.append( (uname, tracking_since) )
return ( followees, followers )
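    # Usage sketch for get_network() (hypothetical username):
    #   followees, followers = api.get_network("someuser")
    #   (None, None) means the network is hidden from public view; otherwise
    #   each list holds (username, tracking_since_datetime) tuples.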
def get_bookmarks(self, url=None, username=None, max_bookmarks=50, sleep_seconds=1):
"""
Returns the bookmarks of url or user, respectively.
Delicious.com only returns up to 50 bookmarks per URL on its website.
This means that we have to do subsequent queries plus parsing if
we want to retrieve more than 50. Roughly speaking, the processing
time of get_bookmarks() increases linearly with the number of
50-bookmarks-chunks; i.e. it will take 10 times longer to retrieve
500 bookmarks than 50.
@param url: The URL of the web document to be queried for.
Cannot be used together with 'username'.
@type url: str
@param username: The Delicious.com username to be queried for.
Cannot be used together with 'url'.
@type username: str
@param max_bookmarks: Optional, default: 50.
Maximum number of bookmarks to retrieve. Set to 0 to disable
this limitation/the maximum and retrieve all available
bookmarks of the given url.
Bookmarks are sorted so that newer bookmarks are first.
Setting max_bookmarks to 50 means that get_bookmarks() will retrieve
the 50 most recent bookmarks of the given url.
In the case of getting bookmarks of a URL (url is set),
get_bookmarks() will take *considerably* longer to run
for pages with lots of bookmarks when setting max_bookmarks
to a high number or when you completely disable the limit.
Delicious returns only up to 50 bookmarks per result page,
so for example retrieving 250 bookmarks requires 5 HTTP
connections and parsing 5 HTML pages plus wait time between
queries (to comply with delicious' Terms of Use; see
also parameter 'sleep_seconds').
In the case of getting bookmarks of a user (username is set),
the same restrictions as for a URL apply with the exception
that we can retrieve up to 100 bookmarks per HTTP query
(instead of only up to 50 per HTTP query for a URL).
@type max_bookmarks: int
@param sleep_seconds: Optional, default: 1.
Wait the specified number of seconds between subsequent
queries in case that there are multiple pages of bookmarks
for the given url. sleep_seconds must be >= 1 to comply with
Delicious.com's Terms of Use.
See also parameter 'max_bookmarks'.
@type sleep_seconds: int
@return: Returns the bookmarks of url or user, respectively.
For urls, it returns a list of (user, tags, comment, timestamp)
tuples.
For users, it returns a list of (url, tags, title, comment,
timestamp) tuples.
Bookmarks are sorted "descendingly" by creation time, i.e. newer
bookmarks come first.
"""
# we must wait at least 1 second between subsequent queries to
# comply with delicious' Terms of Use
assert sleep_seconds >= 1
# url XOR username
assert bool(username) is not bool(url)
# maximum number of urls/posts Delicious.com will display
# per page on its website
max_html_count = 100
# maximum number of pages that Delicious.com will display;
# currently, the maximum number of pages is 20. Delicious.com
# allows to go beyond page 20 via pagination, but page N (for
# N > 20) will always display the same content as page 20.
max_html_pages = 20
path = None
if url:
m = hashlib.md5()
m.update(url)
hash = m.hexdigest()
            # path will change later on if there are multiple pages of bookmarks
# for the given url
path = "/url/%s" % hash
elif username:
            # path will change later on if there are multiple pages of bookmarks
# for the given username
path = "/%s?setcount=%d" % (username, max_html_count)
else:
raise Exception('You must specify either url or user.')
page_index = 1
bookmarks = []
while path and page_index <= max_html_pages:
data = self._query(path)
path = None
if data:
# extract bookmarks from current page
if url:
bookmarks.extend(self._extract_bookmarks_from_url_history(data))
else:
bookmarks.extend(self._extract_bookmarks_from_user_history(data))
# stop scraping if we already have as many bookmarks as we want
if (len(bookmarks) >= max_bookmarks) and max_bookmarks != 0:
break
else:
# check if there are multiple pages of bookmarks for this
# url on Delicious.com
soup = BeautifulSoup(data)
paginations = soup.findAll("div", id="pagination")
if paginations:
# find next path
nexts = paginations[0].findAll("a", attrs={ "class": "pn next" })
if nexts and (max_bookmarks == 0 or len(bookmarks) < max_bookmarks) and len(bookmarks) > 0:
# e.g. /url/2bb293d594a93e77d45c2caaf120e1b1?show=all&page=2
path = nexts[0]['href']
if username:
path += "&setcount=%d" % max_html_count
page_index += 1
# wait one second between queries to be compliant with
# delicious' Terms of Use
time.sleep(sleep_seconds)
if max_bookmarks > 0:
return bookmarks[:max_bookmarks]
else:
return bookmarks
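    # Usage sketch for get_bookmarks() (hypothetical inputs) -- query either a
    # URL's bookmarking history or a user's public bookmarks, never both:
    #   url_posts  = api.get_bookmarks(url="http://example.org/", max_bookmarks=100)
    #   user_posts = api.get_bookmarks(username="someuser", max_bookmarks=0)  # 0 = no limit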
def _extract_bookmarks_from_url_history(self, data):
"""
Extracts user bookmarks from a URL's history page on Delicious.com.
The Python library BeautifulSoup is used to parse the HTML page.
@param data: The HTML source of a URL history Web page on Delicious.com.
@type data: str
@return: list of user bookmarks of the corresponding URL
"""
bookmarks = []
soup = BeautifulSoup(data)
bookmark_elements = soup.findAll("div", attrs={"class": re.compile("^bookmark\s*")})
timestamp = None
for bookmark_element in bookmark_elements:
# extract bookmark creation time
#
# this timestamp has to "persist" until a new timestamp is
# found (delicious only provides the creation time data for the
# first bookmark in the list of bookmarks for a given day
dategroups = bookmark_element.findAll("div", attrs={"class": "dateGroup"})
if dategroups:
spans = dategroups[0].findAll('span')
if spans:
date_str = spans[0].contents[0].strip()
timestamp = datetime.datetime.strptime(date_str, '%d %b %y')
# extract comments
comment = u""
datas = bookmark_element.findAll("div", attrs={"class": "data"})
if datas:
divs = datas[0].findAll("div", attrs={"class": "description"})
if divs:
comment = divs[0].contents[0].strip()
# extract tags
user_tags = []
tagdisplays = bookmark_element.findAll("div", attrs={"class": "tagdisplay"})
if tagdisplays:
spans = tagdisplays[0].findAll("span", attrs={"class": "tagItem"})
for span in spans:
tag = span.contents[0]
user_tags.append(tag)
# extract user information
metas = bookmark_element.findAll("div", attrs={"class": "meta"})
if metas:
links = metas[0].findAll("a", attrs={"class": "user user-tag"})
if links:
user_a = links[0]
spans = user_a.findAll('span')
if spans:
try:
user = spans[0].contents[0]
except IndexError:
# WORKAROUND: it seems there is a bug on Delicious.com where
# sometimes a bookmark is shown in a URL history without any
# associated Delicious username (username is empty); this could
# be caused by special characters in the username or other things
#
# this problem of Delicious is very rare, so we just skip such
# entries until they find a fix
pass
bookmarks.append( (user, user_tags, comment, timestamp) )
return bookmarks
def _extract_bookmarks_from_user_history(self, data):
"""
Extracts a user's bookmarks from his user page on Delicious.com.
The Python library BeautifulSoup is used to parse the HTML page.
@param data: The HTML source of a user page on Delicious.com.
@type data: str
@return: list of bookmarks of the corresponding user
"""
bookmarks = []
soup = BeautifulSoup(data)
ul = soup.find("ul", id="bookmarklist")
if ul:
bookmark_elements = ul.findAll("div", attrs={"class": re.compile("^bookmark\s*")})
timestamp = None
for bookmark_element in bookmark_elements:
# extract bookmark creation time
#
# this timestamp has to "persist" until a new timestamp is
# found (delicious only provides the creation time data for the
# first bookmark in the list of bookmarks for a given day
dategroups = bookmark_element.findAll("div", attrs={"class": "dateGroup"})
if dategroups:
spans = dategroups[0].findAll('span')
if spans:
date_str = spans[0].contents[0].strip()
timestamp = datetime.datetime.strptime(date_str, '%d %b %y')
# extract url, title and comments
url = u""
title = u""
comment = u""
datas = bookmark_element.findAll("div", attrs={"class": "data"})
if datas:
links = datas[0].findAll("a", attrs={"class": re.compile("^taggedlink\s*")})
if links:
title = links[0].contents[0].strip()
url = links[0]['href']
divs = datas[0].findAll("div", attrs={"class": "description"})
if divs:
comment = divs[0].contents[0].strip()
# extract tags
url_tags = []
tagdisplays = bookmark_element.findAll("div", attrs={"class": "tagdisplay"})
if tagdisplays:
spans = tagdisplays[0].findAll("span", attrs={"class": "tagItem"})
for span in spans:
tag = span.contents[0]
url_tags.append(tag)
bookmarks.append( (url, url_tags, title, comment, timestamp) )
return bookmarks
def get_user(self, username, password=None, max_bookmarks=50, sleep_seconds=1):
"""Retrieves a user's bookmarks from Delicious.com.
If a correct username AND password are supplied, a user's *full*
bookmark collection (which also includes private bookmarks) is
retrieved. Data communication is encrypted using SSL in this case.
If no password is supplied, only the *public* bookmarks of the user
are retrieved. Here, the parameter 'max_bookmarks' specifies how
many public bookmarks will be retrieved (default: 50). Set the
parameter to 0 to retrieve all public bookmarks.
This function can be used to backup all of a user's bookmarks if
called with a username and password.
@param username: The Delicious.com username.
@type username: str
@param password: Optional, default: None.
The user's Delicious.com password. If password is set,
all communication with Delicious.com is SSL-encrypted.
@type password: unicode/str
@param max_bookmarks: Optional, default: 50.
See the documentation of get_bookmarks() for more
information as get_url() uses get_bookmarks() to
retrieve a url's bookmarking history.
The parameter is NOT used when a password is specified
because in this case the *full* bookmark collection of
a user will be retrieved.
@type max_bookmarks: int
@param sleep_seconds: Optional, default: 1.
See the documentation of get_bookmarks() for more information as
get_url() uses get_bookmarks() to retrieve a url's bookmarking
history. sleep_seconds must be >= 1 to comply with Delicious.com's
Terms of Use.
@type sleep_seconds: int
@return: DeliciousUser instance
"""
assert username
user = DeliciousUser(username)
bookmarks = []
if password:
# We have username AND password, so we call
# the official Delicious.com API.
path = "/v1/posts/all"
data = self._query(path, host="api.del.icio.us", use_ssl=True, user=username, password=password)
if data:
soup = BeautifulSoup(data)
elements = soup.findAll("post")
for element in elements:
url = element["href"]
title = element["description"] or u""
comment = element["extended"] or u""
tags = []
if element["tag"]:
tags = element["tag"].split()
timestamp = datetime.datetime.strptime(element["time"], "%Y-%m-%dT%H:%M:%SZ")
bookmarks.append( (url, tags, title, comment, timestamp) )
user.bookmarks = bookmarks
else:
# We have only the username, so we extract data from
# the user's JSON feed. However, the feed is restricted
# to the most recent public bookmarks of the user, which
# is about 100 if any. So if we need more than 100, we start
# scraping the Delicious.com website directly
if max_bookmarks > 0 and max_bookmarks <= 100:
path = "/v2/json/%s/stackoverflow?count=100" % username
data = self._query(path, host="feeds.delicious.com", user=username)
if data:
posts = []
try:
posts = simplejson.loads(data)
except TypeError:
pass
url = timestamp = None
title = comment = u""
tags = []
for post in posts:
# url
try:
url = post['u']
except KeyError:
pass
# title
try:
title = post['d']
except KeyError:
pass
# tags
try:
tags = post['t']
except KeyError:
pass
if not tags:
tags = [u"system:unfiled"]
# comment / notes
try:
comment = post['n']
except KeyError:
pass
# bookmark creation time
try:
timestamp = datetime.datetime.strptime(post['dt'], "%Y-%m-%dT%H:%M:%SZ")
except KeyError:
pass
bookmarks.append( (url, tags, title, comment, timestamp) )
user.bookmarks = bookmarks[:max_bookmarks]
else:
# TODO: retrieve the first 100 bookmarks via JSON before
# falling back to scraping the delicous.com website
user.bookmarks = self.get_bookmarks(username=username, max_bookmarks=max_bookmarks, sleep_seconds=sleep_seconds)
return user
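    # Usage sketch for get_user() (hypothetical account): with a password the
    # full bookmark collection (including private bookmarks) is fetched via the
    # official API over SSL, otherwise only public bookmarks are retrieved:
    #   user = api.get_user("someuser", password="secret")
    #   for url, tags, title, comment, timestamp in user.bookmarks:
    #       print url, tags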
def get_urls(self, tag=None, popular=True, max_urls=100, sleep_seconds=1):
"""
Returns the list of recent URLs (of web documents) tagged with a given tag.
This is very similar to parsing Delicious' RSS/JSON feeds directly,
but this function will return up to 2,000 links compared to a maximum
of 100 links when using the official feeds (with query parameter
count=100).
The return list of links will be sorted by recency in descending order,
i.e. newest items first.
Note that even when setting max_urls, get_urls() cannot guarantee that
it can retrieve *at least* this many URLs. It is really just an upper
bound.
@param tag: Retrieve links which have been tagged with the given tag.
If tag is not set (default), links will be retrieved from the
Delicious.com front page (aka "delicious hotlist").
@type tag: unicode/str
@param popular: If true (default), retrieve only popular links (i.e.
/popular/<tag>). Otherwise, the most recent links tagged with
the given tag will be retrieved (i.e. /tag/<tag>).
            As of January 2009, it seems that Delicious.com modified the list
            of popular links to contain only up to a maximum of 15 URLs.
This also means that setting max_urls to values larger than 15
will not change the results of get_urls().
So if you are interested in more URLs, set the "popular" parameter
to false.
Note that if you set popular to False, the returned list of URLs
might contain duplicate items. This is due to the way Delicious.com
creates its /tag/<tag> Web pages. So if you need a certain
number of unique URLs, you have to take care of that in your
own code.
@type popular: bool
@param max_urls: Retrieve at most max_urls links. The default is 100,
which is the maximum number of links that can be retrieved by
parsing the official JSON feeds. The maximum value of max_urls
in practice is 2000 (currently). If it is set higher, Delicious
will return the same links over and over again, giving lots of
duplicate items.
@type max_urls: int
@param sleep_seconds: Optional, default: 1.
            Wait the specified number of seconds between subsequent queries in
            case there are multiple pages of URLs for the given tag.
Must be greater than or equal to 1 to comply with Delicious.com's
Terms of Use.
See also parameter 'max_urls'.
@type sleep_seconds: int
@return: The list of recent URLs (of web documents) tagged with a given tag.
"""
assert sleep_seconds >= 1
urls = []
path = None
if tag is None or (tag is not None and max_urls > 0 and max_urls <= 100):
# use official JSON feeds
max_json_count = 100
if tag:
# tag-specific JSON feed
if popular:
path = "/v2/json/popular/%s?count=%d" % (tag, max_json_count)
else:
path = "/v2/json/tag/%s?count=%d" % (tag, max_json_count)
else:
# Delicious.com hotlist
path = "/v2/json/?count=%d" % (max_json_count)
data = self._query(path, host="feeds.delicious.com")
if data:
posts = []
try:
posts = simplejson.loads(data)
except TypeError:
pass
for post in posts:
# url
try:
url = post['u']
if url:
urls.append(url)
except KeyError:
pass
else:
# maximum number of urls/posts Delicious.com will display
# per page on its website
max_html_count = 100
# maximum number of pages that Delicious.com will display;
# currently, the maximum number of pages is 20. Delicious.com
            # allows going beyond page 20 via pagination, but page N (for
# N > 20) will always display the same content as page 20.
max_html_pages = 20
if popular:
path = "/popular/%s?setcount=%d" % (tag, max_html_count)
else:
path = "/tag/%s?setcount=%d" % (tag, max_html_count)
page_index = 1
urls = []
while path and page_index <= max_html_pages:
data = self._query(path)
path = None
if data:
# extract urls from current page
soup = BeautifulSoup(data)
                    links = soup.findAll("a", attrs={"class": re.compile(r"^taggedlink\s*")})
for link in links:
try:
url = link['href']
if url:
urls.append(url)
except KeyError:
pass
                    # check if there are multiple pages of urls
soup = BeautifulSoup(data)
paginations = soup.findAll("div", id="pagination")
if paginations:
# find next path
nexts = paginations[0].findAll("a", attrs={ "class": "pn next" })
if nexts and (max_urls == 0 or len(urls) < max_urls) and len(urls) > 0:
# e.g. /url/2bb293d594a93e77d45c2caaf120e1b1?show=all&page=2
path = nexts[0]['href']
path += "&setcount=%d" % max_html_count
page_index += 1
# wait between queries to Delicious.com to be
# compliant with its Terms of Use
time.sleep(sleep_seconds)
if max_urls > 0:
return urls[:max_urls]
else:
return urls
def get_tags_of_user(self, username):
"""
        Retrieves a user's public tags and their tag counts from Delicious.com.
The tags represent a user's full public tagging vocabulary.
DeliciousAPI uses the official JSON feed of the user. We could use
RSS here, but the JSON feed has proven to be faster in practice.
@param username: The Delicious.com username.
@type username: str
@return: Dictionary mapping tags to their tag counts.
"""
tags = {}
path = "/v2/json/tags/%s" % username
data = self._query(path, host="feeds.delicious.com")
if data:
try:
tags = simplejson.loads(data)
except TypeError:
pass
return tags
def get_number_of_users(self, url):
"""get_number_of_users() is obsolete and has been removed. Please use get_url() instead."""
reason = "get_number_of_users() is obsolete and has been removed. Please use get_url() instead."
raise Exception(reason)
def get_common_tags_of_url(self, url):
"""get_common_tags_of_url() is obsolete and has been removed. Please use get_url() instead."""
reason = "get_common_tags_of_url() is obsolete and has been removed. Please use get_url() instead."
raise Exception(reason)
def _html_escape(self, s):
"""HTML-escape a string or object.
This converts any non-string objects passed into it to strings
(actually, using unicode()). All values returned are
non-unicode strings (using "&#num;" entities for all non-ASCII
characters).
None is treated specially, and returns the empty string.
@param s: The string that needs to be escaped.
@type s: str
@return: The escaped string.
"""
if s is None:
return ''
if not isinstance(s, basestring):
if hasattr(s, '__unicode__'):
s = unicode(s)
else:
s = str(s)
s = cgi.escape(s, True)
if isinstance(s, unicode):
s = s.encode('ascii', 'xmlcharrefreplace')
return s
class DeliciousError(Exception):
"""Used to indicate that an error occurred when trying to access Delicious.com via its API."""
class DeliciousWarning(Exception):
"""Used to indicate a warning when trying to access Delicious.com via its API.
Warnings are raised when it is useful to alert the user of some condition
where that condition doesn't warrant raising an exception and terminating
the program. For example, we issue a warning when Delicious.com returns a
HTTP status code for redirections (3xx).
"""
class DeliciousThrottleError(DeliciousError):
"""Used to indicate that the client computer (i.e. its IP address) has been temporarily blocked by Delicious.com."""
pass
class DeliciousUnknownError(DeliciousError):
"""Used to indicate that Delicious.com returned an (HTTP) error which we don't know how to handle yet."""
pass
class DeliciousUnauthorizedError(DeliciousError):
"""Used to indicate that Delicious.com returned a 401 Unauthorized error.
Most of the time, the user credentials for accessing restricted functions
of the official Delicious.com API are incorrect.
"""
pass
class DeliciousForbiddenError(DeliciousError):
"""Used to indicate that Delicious.com returned a 403 Forbidden error.
"""
pass
class DeliciousNotFoundError(DeliciousError):
"""Used to indicate that Delicious.com returned a 404 Not Found error.
Most of the time, retrying some seconds later fixes the problem
(because we only query existing pages with this API).
"""
pass
class Delicious500Error(DeliciousError):
"""Used to indicate that Delicious.com returned a 500 error.
Most of the time, retrying some seconds later fixes the problem.
"""
pass
class DeliciousMovedPermanentlyWarning(DeliciousWarning):
"""Used to indicate that Delicious.com returned a 301 Found (Moved Permanently) redirection."""
pass
class DeliciousMovedTemporarilyWarning(DeliciousWarning):
"""Used to indicate that Delicious.com returned a 302 Found (Moved Temporarily) redirection."""
pass
__all__ = ['DeliciousAPI', 'DeliciousURL', 'DeliciousError', 'DeliciousThrottleError', 'DeliciousUnauthorizedError', 'DeliciousUnknownError', 'DeliciousNotFoundError', 'Delicious500Error', 'DeliciousMovedTemporarilyWarning']
if __name__ == "__main__":
d = DeliciousAPI()
max_bookmarks = 50
url = 'http://www.michael-noll.com/wiki/Del.icio.us_Python_API'
print "Retrieving Delicious.com information about url"
print "'%s'" % url
print "Note: This might take some time..."
print "========================================================="
document = d.get_url(url, max_bookmarks=max_bookmarks)
print document
| bsd-3-clause | 3,970,781,760,770,663,400 | 39.263368 | 225 | 0.565451 | false |
hugobranquinho/ines | ines/__init__.py | 1 | 1198 | # -*- coding: utf-8 -*-
import datetime
import errno
from os import getpid, linesep, uname
from os.path import join as os_join
import sys
from tempfile import gettempdir
from time import time as _now_time
APPLICATIONS = {}
CAMELCASE_UPPER_WORDS = {'CSV'}
MARKER = object()
API_CONFIGURATION_EXTENSIONS = {}
DEFAULT_RENDERERS = {}
DEFAULT_METHODS = ['GET', 'PUT', 'POST', 'DELETE']
IGNORE_FULL_NAME_WORDS = ['de', 'da', 'e', 'do']
PROCESS_ID = getpid()
SYSTEM_NAME, DOMAIN_NAME, SYSTEM_RELEASE, SYSTEM_VERSION, MACHINE = uname()
DEFAULT_CACHE_DIRPATH = os_join(gettempdir(), 'ines-cache')
DEFAULT_RETRY_ERRNO = {errno.ESTALE}
DEFAULT_RETRY_ERRNO.add(116) # Stale NFS file handle
OPEN_BLOCK_SIZE = 2**18
# datetime now without microseconds
_now = datetime.datetime.now
NOW = lambda: _now().replace(microsecond=0)
# timestamp without microseconds
NOW_TIME = lambda: int(_now_time())
TODAY_DATE = datetime.date.today
HTML_NEW_LINE = '<br/>'
NEW_LINE = linesep
NEW_LINE_AS_BYTES = NEW_LINE.encode()
def lazy_import_module(name):
module = sys.modules.get(name, MARKER)
if module is not MARKER:
return module
else:
__import__(name)
return sys.modules[name]
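# Usage sketch (hedged; the module name is illustrative): repeated calls are
# cheap because the module is found in sys.modules after the first import.
#   json_module = lazy_import_module('json')
#   json_module.dumps({'ok': True})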
| mit | 6,989,389,053,462,082,000 | 23.958333 | 75 | 0.69616 | false |
Dwolla/arbalest | examples/s3_json_object_to_redshift.py | 1 | 2379 | #!/usr/bin/env python
import psycopg2
from arbalest.configuration import env
from arbalest.redshift import S3CopyPipeline
from arbalest.redshift.schema import JsonObject, Property
"""
**Example: Bulk copy JSON objects from S3 bucket to Redshift table**
Arbalest orchestrates data loading using pipelines. Each `Pipeline`
can have one or many steps that are made up of three parts:
metadata: Path in an S3 bucket to store information needed for the copy process.
`s3://{BUCKET_NAME}/path_to_save_pipeline_metadata`
source: Path in an S3 bucket where data to be copied from is located.
`s3://{BUCKET_NAME}/path_of_source_data` consisting of JSON files:
```
{
"id": "66bc8153-d6d9-4351-bada-803330f22db7",
"someNumber": 1
}
```
schema: Definition of JSON objects to map into Redshift rows using a
`JsonObject` mapper which consists of one or many `Property` declarations.
By default the name of the JSON property is used as the column, but can be set
to a custom column name.
"""
if __name__ == '__main__':
pipeline = S3CopyPipeline(
aws_access_key_id=env('AWS_ACCESS_KEY_ID'),
aws_secret_access_key=env('AWS_SECRET_ACCESS_KEY'),
bucket=env('BUCKET_NAME'),
db_connection=psycopg2.connect(env('REDSHIFT_CONNECTION')))
pipeline.bulk_copy(metadata='path_to_save_pipeline_metadata',
source='path_of_source_data',
schema=JsonObject('destination_table_name',
Property('id', 'VARCHAR(36)'),
Property('someNumber', 'INTEGER',
'custom_column_name')))
pipeline.manifest_copy(metadata='path_to_save_pipeline_metadata',
source='path_of_incremental_source_data',
schema=JsonObject('incremental_destination_table_name',
Property('id', 'VARCHAR(36)'),
Property('someNumber', 'INTEGER',
'custom_column_name')))
pipeline.sql(('SELECT someNumber + %s '
'INTO some_olap_table FROM destination_table_name', 1),
('SELECT * INTO destination_table_name_copy '
'FROM destination_table_name'))
pipeline.run()
| mit | 2,963,051,004,563,553,000 | 38.65 | 82 | 0.599412 | false |
hlmnrmr/superdesk-core | superdesk/tests/steps.py | 1 | 91099 | # -*- coding: utf-8; -*-
#
# This file is part of Superdesk.
#
# Copyright 2013, 2014 Sourcefabric z.u. and contributors.
#
# For the full copyright and license information, please see the
# AUTHORS and LICENSE files distributed with this source code, or
# at https://www.sourcefabric.org/superdesk/license
import os
import time
import shutil
from base64 import b64encode
from datetime import datetime, timedelta
from os.path import basename
from re import findall
from unittest.mock import patch
from urllib.parse import urlparse
import arrow
from behave import given, when, then # @UnresolvedImport
from bson import ObjectId
from eve.io.mongo import MongoJSONEncoder
from eve.methods.common import parse
from eve.utils import ParsedRequest, config
from flask import json
from wooper.assertions import (
assert_in, assert_equal, assertions
)
from wooper.general import (
fail_and_print_body, apply_path, parse_json_response,
WooperAssertionError
)
from wooper.expect import (
expect_status, expect_status_in,
expect_json, expect_json_length,
expect_json_contains, expect_json_not_contains,
expect_headers_contain,
)
import superdesk
from superdesk import tests
from superdesk.io import registered_feeding_services
from superdesk.io.commands.update_ingest import LAST_ITEM_UPDATE
from superdesk import default_user_preferences, get_resource_service, utc, etree
from superdesk.io.feed_parsers import XMLFeedParser, EMailRFC822FeedParser
from superdesk.utc import utcnow, get_expiry_date
from superdesk.tests import get_prefixed_url, set_placeholder
from apps.dictionaries.resource import DICTIONARY_FILE
from superdesk.filemeta import get_filemeta
external_url = 'http://thumbs.dreamstime.com/z/digital-nature-10485007.jpg'
DATETIME_FORMAT = "%Y-%m-%dT%H:%M:%S%z"
ANALYTICS_DATETIME_FORMAT = "%Y-%m-%d %H:00:00"
def test_json(context):
try:
response_data = json.loads(context.response.get_data())
except Exception:
fail_and_print_body(context.response, 'response is not valid json')
context_data = json.loads(apply_placeholders(context, context.text))
assert_equal(json_match(context_data, response_data), True,
msg=str(context_data) + '\n != \n' + str(response_data))
return response_data
def test_json_with_string_field_value(context, field):
try:
response_data = json.loads(context.response.get_data())
except Exception:
fail_and_print_body(context.response, 'response is not valid json')
context_data = json.loads(apply_placeholders(context, context.text))
assert_equal(json_match(context_data[field], response_data[field]), True,
msg=str(context_data) + '\n != \n' + str(response_data))
return response_data
def test_key_is_present(key, context, response):
"""Test if given key is present in response.
In case the context value is empty - "", {}, [] - it checks if it's non empty in response.
If it's set in context to false, it will check that it's falsy/empty in response too.
:param key
:param context
:param response
"""
assert not isinstance(context[key], bool) or not response[key], \
'"%s" should be empty or false, but it was "%s" in (%s)' % (key, response[key], response)
def test_key_is_not_present(key, response):
"""Test if given key is not present in response.
:param key
:param response
"""
assert key not in response, \
'"%s" should not be present, but it was "%s" in (%s)' % (key, response[key], response)
def assert_is_now(val, key):
"""Assert that given datetime value is now (with 2s tolerance).
:param val: datetime
:param key: val label - used for error reporting
"""
now = arrow.get()
val = arrow.get(val)
assert val + timedelta(seconds=2) > now, '%s should be now, it is %s' % (key, val)
def json_match(context_data, response_data):
if isinstance(context_data, dict):
if (not isinstance(response_data, dict)):
return False
for key in context_data:
if context_data[key] == "__none__":
assert response_data[key] is None
continue
if context_data[key] == "__no_value__":
test_key_is_not_present(key, response_data)
continue
if key not in response_data:
print(key, ' not in ', response_data)
return False
if context_data[key] == "__any_value__":
test_key_is_present(key, context_data, response_data)
continue
if context_data[key] == "__now__":
assert_is_now(response_data[key], key)
continue
if context_data[key] == "__empty__":
assert len(response_data[key]) == 0, '%s is not empty' % key
continue
if not json_match(context_data[key], response_data[key]):
return False
return True
elif isinstance(context_data, list):
for item_context in context_data:
found = False
for item_response in response_data:
if json_match(item_context, item_response):
found = True
break
if not found:
print(item_context, ' not in ', json.dumps(response_data, indent=2))
return False
return True
elif not isinstance(context_data, dict):
if context_data != response_data:
print('---' + str(context_data) + '---\n', ' != \n', '---' + str(response_data) + '---\n')
return context_data == response_data
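# Illustrative sketch of the placeholder semantics handled above (values are
# hypothetical): "__any_value__" only requires the key to be present,
# "__none__" requires None, "__now__" requires a timestamp within a couple of
# seconds of the current time, and "__no_value__" requires the key to be absent.
#   context_data = {'_id': '__any_value__', 'lock_user': '__none__'}
#   response_data = {'_id': 'abc123', 'lock_user': None}
#   assert json_match(context_data, response_data)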
def get_fixture_path(context, fixture):
path = context.app.settings['BEHAVE_TESTS_FIXTURES_PATH']
return os.path.join(path, fixture)
def get_macro_path(macro):
abspath = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
return os.path.join(abspath, 'macros', macro)
def get_self_href(resource, context):
    assert '_links' in resource, 'expected "_links", but got only %s' % (resource)
return resource['_links']['self']['href']
def get_res(url, context):
response = context.client.get(get_prefixed_url(context.app, url), headers=context.headers)
expect_status(response, 200)
return json.loads(response.get_data())
def parse_date(datestr):
return datetime.strptime(datestr, DATETIME_FORMAT)
def format_date(date_to_format):
return date_to_format.strftime(DATETIME_FORMAT)
def format_date_analytics(date_to_format):
return date_to_format.strftime(ANALYTICS_DATETIME_FORMAT)
def assert_200(response):
"""Assert we get status code 200."""
expect_status_in(response, (200, 201, 204))
def assert_404(response):
"""Assert we get status code 404."""
assert response.status_code == 404, 'Expected 404, got %d' % (response.status_code)
def assert_ok(response):
"""Assert we get ok status within api response."""
expect_status_in(response, (200, 201))
expect_json_contains(response, {'_status': 'OK'})
def get_json_data(response):
return json.loads(response.get_data())
def get_it(context):
it = context.data[0]
res = get_res('/%s/%s' % (context.resource, it['_id']), context)
return get_self_href(res, context), res.get('_etag')
def if_match(context, etag):
headers = []
if etag:
headers = [('If-Match', etag)]
headers = unique_headers(headers, context.headers)
return headers
def unique_headers(headers_to_add, old_headers):
headers = dict(old_headers)
for item in headers_to_add:
headers.update({item[0]: item[1]})
unique_headers = [(k, v) for k, v in headers.items()]
return unique_headers
def patch_current_user(context, data):
response = context.client.get(get_prefixed_url(context.app, '/users/%s' % context.user['_id']),
headers=context.headers)
user = json.loads(response.get_data())
headers = if_match(context, user.get('_etag'))
response = context.client.patch(get_prefixed_url(context.app, '/users/%s' % context.user['_id']),
data=data, headers=headers)
assert_ok(response)
return response
def apply_placeholders(context, text):
placeholders = getattr(context, 'placeholders', {})
for placeholder in findall('#([^#"]+)#', text):
if placeholder.startswith('DATE'):
value = utcnow()
unit = placeholder.find('+')
if unit != -1:
value += timedelta(days=int(placeholder[unit + 1]))
else:
unit = placeholder.find('-')
if unit != -1:
value -= timedelta(days=int(placeholder[unit + 1]))
if placeholder == 'ANALYTICS_DATE_FORMATTED':
value = format_date_analytics(value)
else:
value = format_date(value)
placeholders['LAST_DATE_VALUE'] = value
elif placeholder not in placeholders:
try:
resource_name, field_name = placeholder.split('.', maxsplit=1)
except Exception:
continue
resource = getattr(context, resource_name, None)
for name in field_name.split('.'):
if not resource:
break
resource = resource.get(name, None)
if not resource:
continue
if isinstance(resource, datetime):
value = format_date(resource)
else:
value = str(resource)
else:
value = placeholders[placeholder]
text = text.replace('#%s#' % placeholder, value)
return text
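# Illustrative sketch (hypothetical values): given a previous step that stored
# context.archive = {'_id': 'item1'}, placeholders are expanded like this:
#   apply_placeholders(context, '/archive/#archive._id#')   ->  '/archive/item1'
#   apply_placeholders(context, '{"date": "#DATE+1#"}')     ->  tomorrow, in DATETIME_FORMAT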
def get_resource_name(url):
parsed_url = urlparse(url)
return basename(parsed_url.path)
def format_items(items):
output = [''] # insert empty line
for item in items:
if item.get('formatted_item'):
item['formatted_item'] = json.loads(item['formatted_item'])
output.append(json.dumps(item, indent=4, sort_keys=True))
return ',\n'.join(output)
@given('empty "{resource}"')
def step_impl_given_empty(context, resource):
if not is_user_resource(resource):
with context.app.test_request_context(context.app.config['URL_PREFIX']):
get_resource_service(resource).delete_action()
@given('"{resource}"')
def step_impl_given_(context, resource):
data = apply_placeholders(context, context.text)
with context.app.test_request_context(context.app.config['URL_PREFIX']):
if not is_user_resource(resource):
get_resource_service(resource).delete_action()
items = [parse(item, resource) for item in json.loads(data)]
if is_user_resource(resource):
for item in items:
item.setdefault('needs_activation', False)
get_resource_service(resource).post(items)
context.data = items
context.resource = resource
try:
setattr(context, resource, items[-1])
except KeyError:
pass
@given('"{resource}" with objectid')
def step_impl_given_with_objectid(context, resource):
data = apply_placeholders(context, context.text)
with context.app.test_request_context(context.app.config['URL_PREFIX']):
items = [parse(item, resource) for item in json.loads(data)]
for item in items:
if '_id' in item:
item['_id'] = ObjectId(item['_id'])
get_resource_service(resource).post(items)
context.data = items
context.resource = resource
setattr(context, resource, items[-1])
@given('the "{resource}"')
def step_impl_given_the(context, resource):
with context.app.test_request_context(context.app.config['URL_PREFIX']):
if not is_user_resource(resource):
get_resource_service(resource).delete_action()
orig_items = {}
items = [parse(item, resource) for item in json.loads(context.text)]
get_resource_service(resource).post(items)
context.data = orig_items or items
context.resource = resource
@given('ingest from "{provider}"')
def step_impl_given_resource_with_provider(context, provider):
resource = 'ingest'
with context.app.test_request_context(context.app.config['URL_PREFIX']):
get_resource_service(resource).delete_action()
items = [parse(item, resource) for item in json.loads(context.text)]
ingest_provider = get_resource_service('ingest_providers').find_one(req=None,
_id=context.providers[provider])
for item in items:
item['ingest_provider'] = context.providers[provider]
item['source'] = ingest_provider.get('source')
get_resource_service(resource).post(items)
context.data = items
context.resource = resource
@given('config update')
def given_config_update(context):
diff = json.loads(context.text)
context.app.config.update(diff)
if 'AMAZON_CONTAINER_NAME' in diff:
from superdesk.storage import AmazonMediaStorage
context.app.media = AmazonMediaStorage(context.app)
m = patch.object(context.app.media, 'client')
m.start()
@given('config')
def step_impl_given_config(context):
tests.setup(context, json.loads(context.text))
tests.setup_auth_user(context)
@given('we have "{role_name}" role')
def step_impl_given_role(context, role_name):
with context.app.test_request_context(context.app.config['URL_PREFIX']):
role = get_resource_service('roles').find_one(name=role_name, req=None)
data = MongoJSONEncoder().encode({'role': role.get('_id')})
response = patch_current_user(context, data)
assert_ok(response)
@given('we have "{user_type}" as type of user')
def step_impl_given_user_type(context, user_type):
with context.app.test_request_context(context.app.config['URL_PREFIX']):
data = json.dumps({'user_type': user_type})
response = patch_current_user(context, data)
assert_ok(response)
@when('we post to auth_db')
def step_impl_when_auth(context):
data = context.text
context.response = context.client.post(
get_prefixed_url(context.app, '/auth_db'), data=data, headers=context.headers)
if context.response.status_code == 200 or context.response.status_code == 201:
item = json.loads(context.response.get_data())
if item.get('_id'):
set_placeholder(context, 'AUTH_ID', item['_id'])
context.headers.append(('Authorization', b'basic ' + b64encode(item['token'].encode('ascii') + b':')))
context.user = item['user']
@when('we sleep for {limit}s')
def when_we_sleep_for(context, limit):
time.sleep(int(limit))
@given('we create a new macro "{macro_name}"')
def step_create_new_macro(context, macro_name):
src = get_fixture_path(context, macro_name)
dst = get_macro_path(macro_name)
shutil.copyfile(src, dst)
@when('we fetch from "{provider_name}" ingest "{guid}"')
def step_impl_fetch_from_provider_ingest(context, provider_name, guid):
with context.app.test_request_context(context.app.config['URL_PREFIX']):
fetch_from_provider(context, provider_name, guid)
def embed_routing_scheme_rules(scheme):
"""Fetch all content filters referenced by the given routing scheme and embed those into scheme.
:param dict scheme: routing scheme configuration
"""
filters_service = superdesk.get_resource_service('content_filters')
rules_filters = (
(rule, str(rule['filter']))
for rule in scheme['rules'] if rule.get('filter'))
for rule, filter_id in rules_filters:
content_filter = filters_service.find_one(_id=filter_id, req=None)
rule['filter'] = content_filter
@when('we fetch from "{provider_name}" ingest "{guid}" using routing_scheme')
def step_impl_fetch_from_provider_ingest_using_routing(context, provider_name, guid):
with context.app.test_request_context(context.app.config['URL_PREFIX']):
_id = apply_placeholders(context, context.text)
routing_scheme = get_resource_service('routing_schemes').find_one(_id=_id, req=None)
embed_routing_scheme_rules(routing_scheme)
fetch_from_provider(context, provider_name, guid, routing_scheme)
@when('we ingest and fetch "{provider_name}" "{guid}" to desk "{desk}" stage "{stage}" using routing_scheme')
def step_impl_fetch_from_provider_ingest_using_routing_with_desk(context, provider_name, guid, desk, stage):
with context.app.test_request_context(context.app.config['URL_PREFIX']):
_id = apply_placeholders(context, context.text)
desk_id = apply_placeholders(context, desk)
stage_id = apply_placeholders(context, stage)
routing_scheme = get_resource_service('routing_schemes').find_one(_id=_id, req=None)
embed_routing_scheme_rules(routing_scheme)
fetch_from_provider(context, provider_name, guid, routing_scheme, desk_id, stage_id)
@when('we ingest with routing scheme "{provider_name}" "{guid}"')
def step_impl_ingest_with_routing_scheme(context, provider_name, guid):
with context.app.test_request_context(context.app.config['URL_PREFIX']):
_id = apply_placeholders(context, context.text)
routing_scheme = get_resource_service('routing_schemes').find_one(_id=_id, req=None)
embed_routing_scheme_rules(routing_scheme)
fetch_from_provider(context, provider_name, guid, routing_scheme)
def fetch_from_provider(context, provider_name, guid, routing_scheme=None, desk_id=None, stage_id=None):
ingest_provider_service = get_resource_service('ingest_providers')
provider = ingest_provider_service.find_one(name=provider_name, req=None)
provider['routing_scheme'] = routing_scheme
if 'rule_set' in provider:
rule_set = get_resource_service('rule_sets').find_one(_id=provider['rule_set'], req=None)
else:
rule_set = None
provider_service = registered_feeding_services[provider['feeding_service']]
provider_service = provider_service.__class__()
if provider.get('name', '').lower() in ('aap', 'dpa', 'ninjs', 'email'):
file_path = os.path.join(provider.get('config', {}).get('path', ''), guid)
feeding_parser = provider_service.get_feed_parser(provider)
if isinstance(feeding_parser, XMLFeedParser):
with open(file_path, 'rb') as f:
xml_string = etree.etree.fromstring(f.read())
items = [feeding_parser.parse(xml_string, provider)]
elif isinstance(feeding_parser, EMailRFC822FeedParser):
with open(file_path, 'rb') as f:
data = f.read()
items = feeding_parser.parse([(1, data)], provider)
else:
parsed = feeding_parser.parse(file_path, provider)
items = [parsed] if not isinstance(parsed, list) else parsed
else:
provider_service.provider = provider
provider_service.URL = provider.get('config', {}).get('url')
items = provider_service.fetch_ingest(guid)
for item in items:
item['versioncreated'] = utcnow()
item['expiry'] = utcnow() + timedelta(minutes=20)
if desk_id:
from bson.objectid import ObjectId
item['task'] = {'desk': ObjectId(desk_id), 'stage': ObjectId(stage_id)}
failed = context.ingest_items(items, provider, provider_service, rule_set=rule_set,
routing_scheme=provider.get('routing_scheme'))
assert len(failed) == 0, failed
provider = ingest_provider_service.find_one(name=provider_name, req=None)
ingest_provider_service.system_update(provider['_id'], {LAST_ITEM_UPDATE: utcnow()}, provider)
for item in items:
set_placeholder(context, '{}.{}'.format(provider_name, item['guid']), item['_id'])
@when('we post to "{url}"')
def step_impl_when_post_url(context, url):
post_data(context, url)
@when('we post to "{url}" with delay')
def step_impl_when_post_url_delay(context, url):
time.sleep(1)
post_data(context, url)
def set_user_default(url, data):
if is_user_resource(url):
user = json.loads(data)
user.setdefault('needs_activation', False)
data = json.dumps(user)
def get_response_etag(response):
return json.loads(response.get_data())['_etag']
@when('we save etag')
def step_when_we_save_etag(context):
context.etag = get_response_etag(context.response)
@then('we get same etag')
def step_then_we_get_same_etag(context):
assert context.etag == get_response_etag(context.response), 'etags not matching'
def store_placeholder(context, url):
if context.response.status_code in (200, 201):
item = json.loads(context.response.get_data())
if item['_status'] == 'OK' and item.get('_id'):
try:
setattr(context, get_resource_name(url), item)
except (IndexError, KeyError):
pass
def post_data(context, url, success=False):
with context.app.mail.record_messages() as outbox:
data = apply_placeholders(context, context.text)
url = apply_placeholders(context, url)
set_user_default(url, data)
context.response = context.client.post(get_prefixed_url(context.app, url),
data=data, headers=context.headers)
if success:
assert_ok(context.response)
item = json.loads(context.response.get_data())
context.outbox = outbox
store_placeholder(context, url)
return item
@when('we post to "{url}" with "{tag}" and success')
def step_impl_when_post_url_with_tag(context, url, tag):
item = post_data(context, url, True)
if item.get('_id'):
set_placeholder(context, tag, item.get('_id'))
@given('we have "{url}" with "{tag}" and success')
def step_impl_given_post_url_with_tag(context, url, tag):
item = post_data(context, url, True)
if item.get('_id'):
set_placeholder(context, tag, item.get('_id'))
@when('we post to "{url}" with success')
def step_impl_when_post_url_with_success(context, url):
post_data(context, url, True)
@when('we put to "{url}"')
def step_impl_when_put_url(context, url):
with context.app.mail.record_messages() as outbox:
        data = apply_placeholders(context, context.text)
        url = apply_placeholders(context, url)
        res = get_res(url, context)
        href = get_self_href(res, context)
        headers = if_match(context, res.get('_etag'))
        context.response = context.client.put(get_prefixed_url(context.app, href), data=data, headers=headers)
assert_ok(context.response)
context.outbox = outbox
@when('we get "{url}"')
def when_we_get_url(context, url):
url = apply_placeholders(context, url).encode('ascii').decode('unicode-escape')
headers = []
if context.text:
for line in context.text.split('\n'):
key, val = line.split(': ')
headers.append((key, val))
headers = unique_headers(headers, context.headers)
url = apply_placeholders(context, url)
context.response = context.client.get(get_prefixed_url(context.app, url), headers=headers)
@when('we get dictionary "{dictionary_id}"')
def when_we_get_dictionary(context, dictionary_id):
dictionary_id = apply_placeholders(context, dictionary_id)
url = '/dictionaries/' + dictionary_id + '?projection={"content": 1}'
return when_we_get_url(context, url)
@then('we get latest')
def step_impl_we_get_latest(context):
data = get_json_data(context.response)
href = get_self_href(data, context)
headers = if_match(context, data.get('_etag'))
href = get_prefixed_url(context.app, href)
context.response = context.client.get(href, headers=headers)
assert_200(context.response)
@when('we find for "{resource}" the id as "{name}" by "{search_criteria}"')
def when_we_find_for_resource_the_id_as_name_by_search_criteria(context, resource, name, search_criteria):
url = '/' + resource + '?' + search_criteria
context.response = context.client.get(get_prefixed_url(context.app, url), headers=context.headers)
if context.response.status_code == 200:
expect_json_length(context.response, 1, path='_items')
item = json.loads(context.response.get_data())
item = item['_items'][0]
if item.get('_id'):
set_placeholder(context, name, item['_id'])
@when('we delete "{url}"')
def step_impl_when_delete_url(context, url):
with context.app.mail.record_messages() as outbox:
url = apply_placeholders(context, url)
res = get_res(url, context)
href = get_self_href(res, context)
headers = if_match(context, res.get('_etag'))
href = get_prefixed_url(context.app, href)
context.response = context.client.delete(href, headers=headers)
context.outbox = outbox
@when('we delete link "{url}"')
def step_impl_when_delete_link_url(context, url):
with context.app.mail.record_messages() as outbox:
url = apply_placeholders(context, url)
headers = context.headers
context.response = context.client.delete(get_prefixed_url(context.app, url), headers=headers)
context.outbox = outbox
@when('we delete all sessions "{url}"')
def step_impl_when_delete_all_url(context, url):
with context.app.mail.record_messages() as outbox:
url = apply_placeholders(context, url)
headers = context.headers
href = get_prefixed_url(context.app, url)
context.response = context.client.delete(href, headers=headers)
context.outbox = outbox
@when('we delete latest')
def when_we_delete_it(context):
with context.app.mail.record_messages() as outbox:
res = get_json_data(context.response)
href = get_self_href(res, context)
headers = if_match(context, res.get('_etag'))
href = get_prefixed_url(context.app, href)
context.response = context.client.delete(href, headers=headers)
context.email = outbox
@when('we patch "{url}"')
def step_impl_when_patch_url(context, url):
with context.app.mail.record_messages() as outbox:
url = apply_placeholders(context, url)
res = get_res(url, context)
href = get_self_href(res, context)
headers = if_match(context, res.get('_etag'))
data = apply_placeholders(context, context.text)
href = get_prefixed_url(context.app, href)
context.response = context.client.patch(href, data=data, headers=headers)
context.outbox = outbox
@when('we patch latest')
def step_impl_when_patch_again(context):
with context.app.mail.record_messages() as outbox:
data = get_json_data(context.response)
href = get_prefixed_url(context.app, get_self_href(data, context))
headers = if_match(context, data.get('_etag'))
data2 = apply_placeholders(context, context.text)
context.response = context.client.patch(href, data=data2, headers=headers)
if context.response.status_code in (200, 201):
item = json.loads(context.response.get_data())
if item['_status'] == 'OK' and item.get('_id'):
setattr(context, get_resource_name(href), item)
assert_ok(context.response)
context.outbox = outbox
@when('we patch latest without assert')
def step_impl_when_patch_without_assert(context):
data = get_json_data(context.response)
href = get_prefixed_url(context.app, get_self_href(data, context))
headers = if_match(context, data.get('_etag'))
data2 = apply_placeholders(context, context.text)
context.response = context.client.patch(href, data=data2, headers=headers)
@when('we patch routing scheme "{url}"')
def step_impl_when_patch_routing_scheme(context, url):
with context.app.mail.record_messages() as outbox:
url = apply_placeholders(context, url)
res = get_res(url, context)
href = get_self_href(res, context)
headers = if_match(context, res.get('_etag'))
data = json.loads(apply_placeholders(context, context.text))
res.get('rules', []).append(data)
context.response = context.client.patch(get_prefixed_url(context.app, href),
data=json.dumps({'rules': res.get('rules', [])}),
headers=headers)
context.outbox = outbox
@when('we patch given')
def step_impl_when_patch(context):
with context.app.mail.record_messages() as outbox:
href, etag = get_it(context)
headers = if_match(context, etag)
context.response = context.client.patch(get_prefixed_url(context.app, href), data=context.text, headers=headers)
assert_ok(context.response)
context.outbox = outbox
@when('we get given')
def step_impl_when_get(context):
href, _etag = get_it(context)
context.response = context.client.get(get_prefixed_url(context.app, href), headers=context.headers)
@when('we restore version {version}')
def step_impl_when_restore_version(context, version):
data = get_json_data(context.response)
href = get_self_href(data, context)
headers = if_match(context, data.get('_etag'))
text = '{"type": "text", "old_version": %s, "last_version": %s}' % (version, data.get('_current_version'))
context.response = context.client.put(get_prefixed_url(context.app, href), data=text, headers=headers)
assert_ok(context.response)
@when('we upload a file "{filename}" to "{dest}"')
def step_impl_when_upload_image(context, filename, dest):
upload_file(context, dest, filename, 'media')
@when('we upload a binary file with cropping')
def step_impl_when_upload_with_crop(context):
data = {'CropTop': '0', 'CropLeft': '0', 'CropBottom': '333', 'CropRight': '333'}
upload_file(context, '/upload', 'bike.jpg', 'media', data)
@when('upload a file "{file_name}" to "{destination}" with "{guid}"')
def step_impl_when_upload_image_with_guid(context, file_name, destination, guid):
upload_file(context, destination, file_name, 'media', {'guid': guid})
if destination == 'archive':
set_placeholder(context, 'original.href', context.archive['renditions']['original']['href'])
set_placeholder(context, 'original.media', context.archive['renditions']['original']['media'])
@when('we upload a new dictionary with success')
def when_upload_dictionary(context):
data = json.loads(apply_placeholders(context, context.text))
upload_file(context, '/dictionaries', 'test_dict.txt', DICTIONARY_FILE, data)
assert_ok(context.response)
@when('we upload to an existing dictionary with success')
def when_upload_patch_dictionary(context):
data = json.loads(apply_placeholders(context, context.text))
url = apply_placeholders(context, '/dictionaries/#dictionaries._id#')
etag = apply_placeholders(context, '#dictionaries._etag#')
upload_file(context, url, 'test_dict2.txt', DICTIONARY_FILE, data, 'patch', [('If-Match', etag)])
assert_ok(context.response)
def upload_file(context, dest, filename, file_field, extra_data=None, method='post', user_headers=[]):
with open(get_fixture_path(context, filename), 'rb') as f:
data = {file_field: f}
if extra_data:
data.update(extra_data)
headers = [('Content-Type', 'multipart/form-data')]
headers.extend(user_headers)
headers = unique_headers(headers, context.headers)
url = get_prefixed_url(context.app, dest)
context.response = getattr(context.client, method)(url, data=data, headers=headers)
assert_ok(context.response)
store_placeholder(context, url)
@when('we upload a file from URL')
def step_impl_when_upload_from_url(context):
data = {'URL': external_url}
headers = [('Content-Type', 'multipart/form-data')]
headers = unique_headers(headers, context.headers)
context.response = context.client.post(get_prefixed_url(context.app, '/upload'), data=data, headers=headers)
@when('we upload a file from URL with cropping')
def step_impl_when_upload_from_url_with_crop(context):
data = {'URL': external_url,
'CropTop': '0',
'CropLeft': '0',
'CropBottom': '333',
'CropRight': '333'}
headers = [('Content-Type', 'multipart/form-data')]
headers = unique_headers(headers, context.headers)
context.response = context.client.post(get_prefixed_url(context.app, '/upload'), data=data, headers=headers)
@when('we get user profile')
def step_impl_when_get_user(context):
profile_url = '/%s/%s' % ('users', context.user['_id'])
context.response = context.client.get(get_prefixed_url(context.app, profile_url), headers=context.headers)
@then('we get new resource')
def step_impl_then_get_new(context):
assert_ok(context.response)
expect_json_contains(context.response, 'self', path='_links')
if context.text is not None:
return test_json(context)
@then('we get error {code}')
def step_impl_then_get_error(context, code):
expect_status(context.response, int(code))
if context.text:
test_json(context)
@then('we get list with {total_count} items')
def step_impl_then_get_list(context, total_count):
assert_200(context.response)
data = get_json_data(context.response)
int_count = int(total_count.replace('+', '').replace('<', ''))
if '+' in total_count:
assert int_count <= data['_meta']['total'], '%d items is not enough' % data['_meta']['total']
elif total_count.startswith('<'):
assert int_count > data['_meta']['total'], '%d items is too much' % data['_meta']['total']
else:
assert int_count == data['_meta']['total'], 'got %d: %s' % (data['_meta']['total'],
format_items(data['_items']))
if context.text:
test_json(context)
@then('we get list ordered by {field} with {total_count} items')
def step_impl_ordered_list(context, field, total_count):
step_impl_then_get_list(context, total_count)
data = get_json_data(context.response)
fields = []
for i in data['_items']:
fields.append(i[field])
assert sorted(fields) == fields
@then('we get "{value}" in formatted output')
def step_impl_then_get_formatted_output(context, value):
assert_200(context.response)
value = apply_placeholders(context, value)
data = get_json_data(context.response)
for item in data['_items']:
if value in item['formatted_item']:
return
assert False
@then('we get "{value}" in formatted output as "{group}" story for subscriber "{sub}"')
def step_impl_then_get_formatted_output_as_story(context, value, group, sub):
assert_200(context.response)
value = apply_placeholders(context, value)
data = get_json_data(context.response)
for item in data['_items']:
if item['subscriber_id'] != sub:
continue
try:
formatted_data = json.loads(item['formatted_item'])
except Exception:
continue
associations = formatted_data.get('associations', {})
for assoc_group in associations:
if assoc_group.startswith(group) and associations[assoc_group].get('guid', '') == value:
return
assert False
@then('we get "{value}" as "{group}" story for subscriber "{sub}" in package "{pck}"')
def step_impl_then_get_formatted_output_pck(context, value, group, sub, pck):
assert_200(context.response)
value = apply_placeholders(context, value)
data = get_json_data(context.response)
for item in data['_items']:
if item['item_id'] != pck:
continue
if item['subscriber_id'] != sub:
continue
try:
formatted_data = json.loads(item['formatted_item'])
except Exception:
continue
associations = formatted_data.get('associations', {})
for assoc_group in associations:
if assoc_group.startswith(group) and associations[assoc_group].get('guid', '') == value:
return
assert False
@then('we get "{value}" as "{group}" story for subscriber "{sub}" not in package "{pck}" version "{v}"')
def step_impl_then_get_formatted_output_pck_version(context, value, group, sub, pck, v):
assert_200(context.response)
value = apply_placeholders(context, value)
data = get_json_data(context.response)
for item in data['_items']:
if item['item_id'] == pck:
if item['subscriber_id'] == sub and str(item['item_version']) == v:
try:
formatted_data = json.loads(item['formatted_item'])
except Exception:
continue
associations = formatted_data.get('associations', {})
for assoc_group in associations:
if assoc_group.startswith(group) \
and associations[assoc_group].get('guid', '') == value:
assert False
assert True
return
assert False
@then('we get "{value}" in formatted output as "{group}" newsml12 story')
def step_impl_then_get_formatted_output_newsml(context, value, group):
assert_200(context.response)
value = apply_placeholders(context, value)
data = get_json_data(context.response)
for item in data['_items']:
if '<' + group + '>' + value + '</' + group + '>' in item['formatted_item']:
return
assert False
@then('we get no "{field}"')
def step_impl_then_get_nofield(context, field):
assert_200(context.response)
expect_json_not_contains(context.response, field)
@then('expect json in "{path}"')
def step_impl_then_get_nofield_in_path(context, path):
assert_200(context.response)
expect_json(context.response, context.text, path)
@then('we get existing resource')
def step_impl_then_get_existing(context):
assert_200(context.response)
test_json(context)
@then('we get existing saved search')
def step_impl_then_get_existing_saved_search(context):
assert_200(context.response)
test_json_with_string_field_value(context, 'filter')
@then('we get OK response')
def step_impl_then_get_ok(context):
assert_200(context.response)
@then('we get response code {code}')
def step_impl_then_get_code(context, code):
expect_status(context.response, int(code))
@then('we get updated response')
def step_impl_then_get_updated(context):
assert_ok(context.response)
if context.text:
test_json(context)
@then('we get "{key}" in "{url}"')
def step_impl_then_get_key_in_url(context, key, url):
url = apply_placeholders(context, url)
res = context.client.get(get_prefixed_url(context.app, url), headers=context.headers)
assert_200(res)
expect_json_contains(res, key)
@then('we get file metadata')
def step_impl_then_get_file_meta(context):
    assert len(
        json.loads(apply_path(
            parse_json_response(context.response),
            'filemeta_json'
        )).items()
    ) > 0, 'expected non empty metadata dictionary'
@then('we get "{filename}" metadata')
def step_impl_then_get_given_file_meta(context, filename):
if filename == 'bike.jpg':
metadata = {
'ycbcrpositioning': 1,
'imagelength': 2448,
'exifimagewidth': 2448,
'meteringmode': 2,
'datetimedigitized': '2013:08:01 16:19:28',
'exposuremode': 0,
'flashpixversion': '0100',
'isospeedratings': 80,
'length': 469900,
'imageuniqueid': 'f3533c05daef2debe6257fd99e058eec',
'datetimeoriginal': '2013:08:01 16:19:28',
'whitebalance': 0,
'exposureprogram': 3,
'colorspace': 1,
'exifimageheight': 3264,
'software': 'Google',
'resolutionunit': 2,
'make': 'SAMSUNG',
'maxaperturevalue': [276, 100],
'aperturevalue': [276, 100],
'scenecapturetype': 0,
'exposuretime': [1, 2004],
'datetime': '2013:08:01 16:19:28',
'exifoffset': 216,
'yresolution': [72, 1],
'orientation': 1,
'componentsconfiguration': '0000',
'exifversion': '0220',
'focallength': [37, 10],
'flash': 0,
'model': 'GT-I9300',
'xresolution': [72, 1],
'fnumber': [26, 10],
'imagewidth': 3264,
'brightnessvalue': [2362, 256],
'exposurebiasvalue': [0, 10],
'shutterspeedvalue': [2808, 256]
}
elif filename == 'green.ogg':
metadata = {
'producer': 'Lavf54.59.103',
'music_genre': 'New Age',
'sample_rate': '44100',
'artist': 'Maxime Abbey',
'length': 368058,
'bit_rate': '160000',
'title': 'Green Hills',
'mime_type': 'audio/vorbis',
'format_version': 'Vorbis version 0',
'compression': 'Vorbis',
'duration': '0:00:20.088163',
'endian': 'Little endian',
'nb_channel': '2'
}
elif filename == 'this_week_nasa.mp4':
metadata = {
'mime_type': 'video/mp4',
'creation_date': '1904-01-01T00:00:00+00:00',
'duration': '0:00:10.224000',
'width': '480',
'length': 877869,
'comment': 'User volume: 100.0%',
'height': '270',
'endian': 'Big endian',
'last_modification': '1904-01-01T00:00:00+00:00'
}
else:
raise NotImplementedError("No metadata for file '{}'.".format(filename))
assertions.maxDiff = None
data = json.loads(context.response.get_data())
filemeta = get_filemeta(data)
json_match(filemeta, metadata)
@then('we get "{type}" renditions')
def step_impl_then_get_renditions(context, type):
expect_json_contains(context.response, 'renditions')
renditions = apply_path(parse_json_response(context.response), 'renditions')
assert isinstance(renditions, dict), 'expected dict for image renditions'
for rend_name in context.app.config['RENDITIONS'][type]:
desc = renditions[rend_name]
assert isinstance(desc, dict), 'expected dict for rendition description'
assert 'href' in desc, 'expected href in rendition description'
assert 'media' in desc, 'expected media identifier in rendition description'
we_can_fetch_a_file(context, desc['href'], 'image/jpeg')
@then('we get "{crop_name}" in renditions')
def step_impl_then_get_crop_in_renditions(context, crop_name):
expect_json_contains(context.response, 'renditions')
renditions = apply_path(parse_json_response(context.response), 'renditions')
assert isinstance(renditions, dict), 'expected dict for image renditions'
desc = renditions[crop_name]
assert isinstance(desc, dict), 'expected dict for rendition description'
assert 'href' in desc, 'expected href in rendition description'
assert 'media' in desc, 'expected media identifier in rendition description'
we_can_fetch_a_file(context, desc['href'], 'image/jpeg')
@then('we get "{crop_name}" not in renditions')
def step_impl_then_get_crop_not_in_renditions(context, crop_name):
expect_json_contains(context.response, 'renditions')
renditions = apply_path(parse_json_response(context.response), 'renditions')
assert isinstance(renditions, dict), 'expected dict for image renditions'
assert crop_name not in renditions, 'expected crop not in renditions'
@then('item "{item_id}" is unlocked')
def then_item_is_unlocked(context, item_id):
assert_200(context.response)
data = json.loads(context.response.get_data())
assert data.get('lock_user', None) is None, 'item is locked by user #{0}'.format(data.get('lock_user'))
@then('item "{item_id}" is locked')
def then_item_is_locked(context, item_id):
assert_200(context.response)
resp = parse_json_response(context.response)
assert resp['lock_user'] is not None
@then('item "{item_id}" is assigned')
def then_item_is_assigned(context, item_id):
resp = parse_json_response(context.response)
assert resp['task'].get('user', None) is not None, 'item is not assigned'
@then('we get rendition "{name}" with mimetype "{mimetype}"')
def step_impl_then_get_rendition_with_mimetype(context, name, mimetype):
expect_json_contains(context.response, 'renditions')
renditions = apply_path(parse_json_response(context.response), 'renditions')
assert isinstance(renditions, dict), 'expected dict for image renditions'
desc = renditions[name]
assert isinstance(desc, dict), 'expected dict for rendition description'
assert 'href' in desc, 'expected href in rendition description'
we_can_fetch_a_file(context, desc['href'], mimetype)
set_placeholder(context, "rendition.{}.href".format(name), desc['href'])
@when('we get updated media from archive')
def get_updated_media_from_archive(context):
url = 'archive/%s' % context._id
when_we_get_url(context, url)
assert_200(context.response)
@then('baseImage rendition is updated')
def check_base_image_rendition(context):
check_rendition(context, 'baseImage')
@then('original rendition is updated with link to file having mimetype "{mimetype}"')
def check_original_rendition(context, mimetype):
rv = parse_json_response(context.response)
link_to_file = rv['renditions']['original']['href']
assert link_to_file
we_can_fetch_a_file(context, link_to_file, mimetype)
@then('thumbnail rendition is updated')
def check_thumbnail_rendition(context):
check_rendition(context, 'thumbnail')
def check_rendition(context, rendition_name):
rv = parse_json_response(context.response)
assert rv['renditions'][rendition_name] != context.renditions[rendition_name], rv['renditions']
@then('we get "{key}"')
def step_impl_then_get_key(context, key):
assert_200(context.response)
expect_json_contains(context.response, key)
item = json.loads(context.response.get_data())
set_placeholder(context, '%s' % key, item[key])
@then('we store "{key}" with value "{value}" to context')
def step_impl_then_we_store_key_value_to_context(context, key, value):
set_placeholder(context, key, apply_placeholders(context, value))
@then('we get action in user activity')
def step_impl_then_get_action(context):
response = context.client.get(get_prefixed_url(context.app, '/activity'), headers=context.headers)
expect_json_contains(response, '_items')
@then('we get a file reference')
def step_impl_then_get_file(context):
assert_200(context.response)
expect_json_contains(context.response, 'renditions')
data = get_json_data(context.response)
url = '/upload/%s' % data['_id']
headers = [('Accept', 'application/json')]
headers = unique_headers(headers, context.headers)
response = context.client.get(get_prefixed_url(context.app, url), headers=headers)
assert_200(response)
assert len(response.get_data()), response
assert response.mimetype == 'application/json', response.mimetype
expect_json_contains(response, 'renditions')
expect_json_contains(response, {'mimetype': 'image/jpeg'})
fetched_data = get_json_data(context.response)
context.fetched_data = fetched_data
@then('we get cropped data smaller than "{max_size}"')
def step_impl_then_get_cropped_file(context, max_size):
assert int(get_filemeta(context.fetched_data, 'length')) < int(max_size), 'was expecting smaller image'
@then('we can fetch a data_uri')
def step_impl_we_fetch_data_uri(context):
we_can_fetch_a_file(context, context.fetched_data['renditions']['original']['href'], 'image/jpeg')
@then('we fetch a file "{url}"')
def step_impl_we_cannot_fetch_file(context, url):
url = apply_placeholders(context, url)
headers = [('Accept', 'application/json')]
headers = unique_headers(headers, context.headers)
context.response = context.client.get(get_prefixed_url(context.app, url), headers=headers)
def we_can_fetch_a_file(context, url, mimetype):
headers = [('Accept', 'application/json')]
headers = unique_headers(headers, context.headers)
response = context.client.get(get_prefixed_url(context.app, url), headers=headers)
assert_200(response)
assert len(response.get_data()), response
assert response.mimetype == mimetype, response.mimetype
@then('we can delete that file')
def step_impl_we_delete_file(context):
url = '/upload/%s' % context.fetched_data['_id']
context.headers.append(('Accept', 'application/json'))
headers = if_match(context, context.fetched_data.get('_etag'))
response = context.client.delete(get_prefixed_url(context.app, url), headers=headers)
assert_200(response)
response = context.client.get(get_prefixed_url(context.app, url), headers=headers)
assert_404(response)
@then('we get a picture url')
def step_impl_then_get_picture(context):
assert_ok(context.response)
expect_json_contains(context.response, 'picture_url')
@then('we get aggregations "{keys}"')
def step_impl_then_get_aggs(context, keys):
assert_200(context.response)
expect_json_contains(context.response, '_aggregations')
data = get_json_data(context.response)
aggs = data['_aggregations']
for key in keys.split(','):
assert_in(key, aggs)
@then('the file is stored localy')
def step_impl_then_file(context):
assert_200(context.response)
folder = context.app.config['UPLOAD_FOLDER']
assert os.path.exists(os.path.join(folder, context.filename))
@then('we get version {version}')
def step_impl_then_get_version(context, version):
assert_200(context.response)
expect_json_contains(context.response, {'_current_version': int(version)})
@then('the field "{field}" value is "{value}"')
def step_impl_then_get_field_value(context, field, value):
assert_200(context.response)
expect_json_contains(context.response, {field: value})
@then('we get etag matching "{url}"')
def step_impl_then_get_etag(context, url):
if context.app.config['IF_MATCH']:
assert_200(context.response)
expect_json_contains(context.response, '_etag')
etag = get_json_data(context.response).get('_etag')
response = context.client.get(get_prefixed_url(context.app, url), headers=context.headers)
expect_json_contains(response, {'_etag': etag})
@then('we get not modified response')
def step_impl_then_not_modified(context):
expect_status(context.response, 304)
@then('we get "{header}" header')
def step_impl_then_get_header(context, header):
expect_headers_contain(context.response, header)
@then('we get "{header}" header with "{type}" type')
def step_impl_then_get_header_with_type(context, header, type):
expect_headers_contain(context.response, header, type)
@then('we get link to "{resource}"')
def then_we_get_link_to_resource(context, resource):
doc = get_json_data(context.response)
self_link = doc.get('_links').get('self')
assert resource in self_link['href'], 'expect link to "%s", got %s' % (resource, self_link)
@then('we get deleted response')
def then_we_get_deleted_response(context):
assert_200(context.response)
@when('we post to reset_password we get email with token')
def we_post_to_reset_password(context):
data = {'email': '[email protected]'}
headers = [('Content-Type', 'multipart/form-data')]
headers = unique_headers(headers, context.headers)
with context.app.mail.record_messages() as outbox:
context.response = context.client.post(get_prefixed_url(context.app, '/reset_user_password'),
data=data, headers=headers)
expect_status_in(context.response, (200, 201))
assert len(outbox) == 1
assert outbox[0].subject == "Reset password"
email_text = outbox[0].body
assert "24" in email_text
words = email_text.split()
url = urlparse(words[words.index("link") + 1])
token = url.fragment.split('token=')[-1]
assert token
context.token = token
@then('we can check if token is valid')
def we_can_check_token_is_valid(context):
data = {'token': context.token}
headers = [('Content-Type', 'multipart/form-data')]
headers = unique_headers(headers, context.headers)
context.response = context.client.post(get_prefixed_url(context.app, '/reset_user_password'),
data=data, headers=headers)
expect_status_in(context.response, (200, 201))
@then('we update token to be expired')
def we_update_token_to_expired(context):
with context.app.test_request_context(context.app.config['URL_PREFIX']):
expiry = utc.utcnow() - timedelta(days=2)
reset_request = get_resource_service('reset_user_password').find_one(req=None, token=context.token)
reset_request['expire_time'] = expiry
id = reset_request.pop('_id')
get_resource_service('reset_user_password').patch(id, reset_request)
@then('token is invalid')
def check_token_invalid(context):
data = {'token': context.token}
headers = [('Content-Type', 'multipart/form-data')]
headers = unique_headers(headers, context.headers)
context.response = context.client.post(get_prefixed_url(context.app, '/reset_user_password'),
data=data, headers=headers)
expect_status_in(context.response, (403, 401))
@when('we post to reset_password we do not get email with token')
def we_post_to_reset_password_it_fails(context):
data = {'email': '[email protected]'}
headers = [('Content-Type', 'multipart/form-data')]
headers = unique_headers(headers, context.headers)
with context.app.mail.record_messages() as outbox:
context.response = context.client.post(get_prefixed_url(context.app, '/reset_user_password'),
data=data, headers=headers)
expect_status_in(context.response, (200, 201))
assert len(outbox) == 0
def start_reset_password_for_user(context):
data = {'token': context.token, 'password': 'test_pass'}
headers = [('Content-Type', 'multipart/form-data')]
headers = unique_headers(headers, context.headers)
context.response = context.client.post(get_prefixed_url(context.app, '/reset_user_password'),
data=data, headers=headers)
@then('we fail to reset password for user')
def we_fail_to_reset_password_for_user(context):
start_reset_password_for_user(context)
step_impl_then_get_error(context, 403)
@then('we reset password for user')
def we_reset_password_for_user(context):
start_reset_password_for_user(context)
expect_status_in(context.response, (200, 201))
auth_data = {'username': 'foo', 'password': 'test_pass'}
headers = [('Content-Type', 'multipart/form-data')]
headers = unique_headers(headers, context.headers)
context.response = context.client.post(get_prefixed_url(context.app, '/auth_db'), data=auth_data, headers=headers)
expect_status_in(context.response, (200, 201))
@when('we switch user')
def when_we_switch_user(context):
user = {'username': 'test-user-2', 'password': 'pwd', 'is_active': True,
'needs_activation': False, 'sign_off': 'foo'}
tests.setup_auth_user(context, user)
set_placeholder(context, 'USERS_ID', str(context.user['_id']))
@when('we setup test user')
def when_we_setup_test_user(context):
tests.setup_auth_user(context, tests.test_user)
@when('we get my "{url}"')
def when_we_get_my_url(context, url):
user_id = str(context.user.get('_id'))
my_url = '{0}?where={1}'.format(url, json.dumps({'user': user_id}))
return when_we_get_url(context, my_url)
@when('we get user "{resource}"')
def when_we_get_user_resource(context, resource):
url = '/users/{0}/{1}'.format(str(context.user.get('_id')), resource)
return when_we_get_url(context, url)
@then('we get embedded items')
def we_get_embedded_items(context):
response_data = json.loads(context.response.get_data())
href = get_self_href(response_data, context)
url = href + '/?embedded={"items": 1}'
context.response = context.client.get(get_prefixed_url(context.app, url), headers=context.headers)
assert_200(context.response)
context.response_data = json.loads(context.response.get_data())
assert len(context.response_data['items']['view_items']) == 2
@when('we reset notifications')
def step_when_we_reset_notifications(context):
context.app.notification_client.reset()
@then('we get notifications')
def then_we_get_notifications(context):
assert hasattr(context.app.notification_client, 'messages'), 'no messages'
notifications = context.app.notification_client.messages
notifications_data = [json.loads(notification) for notification in notifications]
context_data = json.loads(apply_placeholders(context, context.text))
assert_equal(json_match(context_data, notifications_data), True,
msg=str(context_data) + '\n != \n' + str(notifications_data))
@then('we get default preferences')
def get_default_prefs(context):
response_data = json.loads(context.response.get_data())
assert_equal(response_data['user_preferences'], default_user_preferences)
@when('we spike "{item_id}"')
def step_impl_when_spike_url(context, item_id):
item_id = apply_placeholders(context, item_id)
res = get_res('/archive/' + item_id, context)
headers = if_match(context, res.get('_etag'))
context.response = context.client.patch(get_prefixed_url(context.app, '/archive/spike/' + item_id),
data='{"state": "spiked"}', headers=headers)
@when('we spike fetched item')
def step_impl_when_spike_fetched_item(context):
data = json.loads(apply_placeholders(context, context.text))
item_id = data["_id"]
res = get_res('/archive/' + item_id, context)
headers = if_match(context, res.get('_etag'))
context.response = context.client.patch(get_prefixed_url(context.app, '/archive/spike/' + item_id),
data='{"state": "spiked"}', headers=headers)
@when('we unspike "{item_id}"')
def step_impl_when_unspike_url(context, item_id):
item_id = apply_placeholders(context, item_id)
res = get_res('/archive/' + item_id, context)
headers = if_match(context, res.get('_etag'))
context.response = context.client.patch(get_prefixed_url(context.app, '/archive/unspike/' + item_id),
data=apply_placeholders(context, context.text or '{}'), headers=headers)
@then('we get spiked content "{item_id}"')
def get_spiked_content(context, item_id):
item_id = apply_placeholders(context, item_id)
url = 'archive/{0}'.format(item_id)
when_we_get_url(context, url)
assert_200(context.response)
response_data = json.loads(context.response.get_data())
assert_equal(response_data['state'], 'spiked')
assert_equal(response_data['operation'], 'spike')
@then('we get unspiked content "{id}"')
def get_unspiked_content(context, id):
text = context.text
context.text = ''
url = 'archive/{0}'.format(id)
when_we_get_url(context, url)
assert_200(context.response)
response_data = json.loads(context.response.get_data())
assert_equal(response_data['state'], 'draft')
assert_equal(response_data['operation'], 'unspike')
# Tolga Akin (05/11/14)
# Expiry value doesn't get set to None properly in Elastic.
# Discussed with Petr so we'll look into this later
# assert_equal(response_data['expiry'], None)
if text:
assert json_match(json.loads(apply_placeholders(context, text)), response_data)
@then('we get global content expiry')
def get_global_content_expiry(context):
validate_expired_content(context, context.app.config['CONTENT_EXPIRY_MINUTES'], utcnow())
@then('we get content expiry {minutes}')
def get_content_expiry(context, minutes):
validate_expired_content(context, minutes, utcnow())
@then('we get expiry for schedule and embargo content {minutes} minutes after "{future_date}"')
def get_content_expiry_schedule(context, minutes, future_date):
future_date = parse_date(apply_placeholders(context, future_date))
validate_expired_content(context, minutes, future_date)
@then('we get desk spike expiry after "{test_minutes}"')
def get_desk_spike_expiry(context, test_minutes):
validate_expired_content(context, test_minutes, utcnow())
def validate_expired_content(context, minutes, start_datetime):
response_data = json.loads(context.response.get_data())
assert response_data['expiry']
response_expiry = parse_date(response_data['expiry'])
expiry = start_datetime + timedelta(minutes=int(minutes))
assert response_expiry <= expiry
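# Worked example for validate_expired_content (values are illustrative assumptions):
#   start_datetime = 2017-01-01 10:00 UTC and minutes = 30
#   -> the latest acceptable expiry is 2017-01-01 10:30 UTC
# The step passes only when the response carries an 'expiry' and parse_date(expiry) is on or
# before that upper bound.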
@when('we mention user in comment for "{url}"')
def we_mention_user_in_comment(context, url):
with context.app.mail.record_messages() as outbox:
step_impl_when_post_url(context, url)
assert len(outbox) == 1
assert_equal(outbox[0].subject, "You were mentioned in a comment by test_user")
email_text = outbox[0].body
assert email_text
@when('we change user status to "{status}" using "{url}"')
def we_change_user_status(context, status, url):
with context.app.mail.record_messages() as outbox:
step_impl_when_patch_url(context, url)
assert len(outbox) == 1
assert_equal(outbox[0].subject, "Your Superdesk account is " + status)
assert outbox[0].body
@when('we get the default incoming stage')
def we_get_default_incoming_stage(context):
data = json.loads(context.response.get_data())
incoming_stage = data['_items'][0]['incoming_stage'] if '_items' in data else data['incoming_stage']
assert incoming_stage
url = 'stages/{0}'.format(incoming_stage)
when_we_get_url(context, url)
assert_200(context.response)
data = json.loads(context.response.get_data())
assert data['default_incoming'] is True
assert data['name'] == 'Incoming Stage'
@then('we get stage filled in to default_incoming')
def we_get_stage_filled_in(context):
data = json.loads(context.response.get_data())
assert data['task']['stage']
@given('we have sessions "{url}"')
def we_have_sessions_get_id(context, url):
when_we_get_url(context, url)
item = json.loads(context.response.get_data())
context.session_id = item['_items'][0]['_id']
context.data = item
set_placeholder(context, 'SESSION_ID', item['_items'][0]['_id'])
setattr(context, 'users', item['_items'][0]['user'])
@then('we get session by id')
def we_get_session_by_id(context):
url = 'sessions/' + context.session_id
when_we_get_url(context, url)
item = json.loads(context.response.get_data())
returned_id = item["_id"]
assert context.session_id == returned_id
@then('we delete session by id')
def we_delete_session_by_id(context):
url = 'sessions/' + context.session_id
step_impl_when_delete_url(context, url)
assert_200(context.response)
@when('we create a new user')
def step_create_a_user(context):
data = apply_placeholders(context, context.text)
with context.app.mail.record_messages() as outbox:
context.response = context.client.post(get_prefixed_url(context.app, '/users'),
data=data, headers=context.headers)
expect_status_in(context.response, (200, 201))
assert len(outbox) == 1
context.email = outbox[0]
@then('we get activation email')
def step_get_activation_email(context):
assert context.email.subject == 'Superdesk account created'
email_text = context.email.body
words = email_text.split()
url = urlparse(words[words.index("to") + 1])
token = url.fragment.split('token=')[-1]
assert token
@then('we set elastic limit')
def step_set_limit(context):
context.app.settings['MAX_SEARCH_DEPTH'] = 1
@then('we get emails')
def step_we_get_email(context):
data = json.loads(context.text)
for email in data:
assert check_if_email_sent(context, email)
@then('we get {count} emails')
def step_we_get_no_email(context, count):
assert len(context.outbox) == int(count)
if context.text:
step_we_get_email(context)
def check_if_email_sent(context, spec):
if context.outbox:
for key in spec:
found = False
values = [getattr(email, key) for email in context.outbox]
for value in values:
if spec[key] in value:
found = True
if not found:
print('%s:%s not found in %s' % (key, spec[key], json.dumps(values, indent=2)))
return False
return True
print('no email sent')
return False
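# Hedged usage sketch for check_if_email_sent: the step text is parsed as JSON, e.g.
#   [{"subject": "Reset password", "body": "token"}]
# and each key/value pair passes when the value occurs as a substring of that attribute on at
# least one email in context.outbox. The payload above is an assumed example, not fixture data.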
@then('we get activity')
def then_we_get_activity(context):
url = apply_placeholders(context, '/activity?where={"name": {"$in": ["notify", "user:mention" , "desk:mention"]}}')
context.response = context.client.get(get_prefixed_url(context.app, url), headers=context.headers)
if context.response.status_code == 200:
expect_json_length(context.response, 1, path='_items')
item = json.loads(context.response.get_data())
item = item['_items'][0]
if item.get('_id'):
setattr(context, 'activity', item)
set_placeholder(context, 'USERS_ID', item['user'])
def login_as(context, username, password, user_type):
user = {'username': username, 'password': password, 'is_active': True,
'is_enabled': True, 'needs_activation': False, user_type: user_type}
if context.text:
user.update(json.loads(context.text))
tests.setup_auth_user(context, user)
@given('we login as user "{username}" with password "{password}" and user type "{user_type}"')
def given_we_login_as_user(context, username, password, user_type):
login_as(context, username, password, user_type)
@when('we login as user "{username}" with password "{password}" and user type "{user_type}"')
def when_we_login_as_user(context, username, password, user_type):
login_as(context, username, password, user_type)
def is_user_resource(resource):
return resource in ('users', '/users')
@then('we get {no_of_stages} invisible stages')
def when_we_get_invisible_stages(context, no_of_stages):
with context.app.test_request_context(context.app.config['URL_PREFIX']):
stages = get_resource_service('stages').get_stages_by_visibility(is_visible=False)
assert len(stages) == int(no_of_stages)
@then('we get {no_of_stages} visible stages')
def when_we_get_visible_stages(context, no_of_stages):
with context.app.test_request_context(context.app.config['URL_PREFIX']):
stages = get_resource_service('stages').get_stages_by_visibility(is_visible=True)
assert len(stages) == int(no_of_stages)
@then('we get {no_of_stages} invisible stages for user')
def when_we_get_invisible_stages_for_user(context, no_of_stages):
data = json.loads(apply_placeholders(context, context.text))
with context.app.test_request_context(context.app.config['URL_PREFIX']):
stages = get_resource_service('users').get_invisible_stages(data['user'])
assert len(stages) == int(no_of_stages)
@then('we get "{field_name}" populated')
def then_field_is_populated(context, field_name):
resp = parse_json_response(context.response)
assert resp[field_name].get('user', None) is not None, 'item is not populated'
@then('we get "{field_name}" not populated')
def then_field_is_not_populated(context, field_name):
resp = parse_json_response(context.response)
assert resp[field_name] is None, 'item is populated'
@then('the field "{field_name}" value is not "{field_value}"')
def then_field_value_is_not_same(context, field_name, field_value):
resp = parse_json_response(context.response)
assert resp[field_name] != field_value, 'values are the same'
@then('we get "{field_name}" not populated in results')
def then_field_is_not_populated_in_results(context, field_name):
resps = parse_json_response(context.response)
for resp in resps['_items']:
assert resp[field_name] is None, 'item is populated'
@when('we delete content filter "{name}"')
def step_delete_content_filter(context, name):
with context.app.test_request_context(context.app.config['URL_PREFIX']):
filter = get_resource_service('content_filters').find_one(req=None, name=name)
url = '/content_filters/{}'.format(filter['_id'])
headers = if_match(context, filter.get('_etag'))
context.response = context.client.delete(get_prefixed_url(context.app, url), headers=headers)
@when('we rewrite "{item_id}"')
def step_impl_when_rewrite(context, item_id):
context_data = {}
_id = apply_placeholders(context, item_id)
if context.text:
context_data.update(json.loads(apply_placeholders(context, context.text)))
data = json.dumps(context_data)
context.response = context.client.post(
get_prefixed_url(context.app, '/archive/{}/rewrite'.format(_id)),
data=data, headers=context.headers)
if context.response.status_code == 400:
return
resp = parse_json_response(context.response)
set_placeholder(context, 'REWRITE_OF', _id)
set_placeholder(context, 'REWRITE_ID', resp['_id'])
@then('we get "{field_name}" does not exist')
def then_field_does_not_exist(context, field_name):
resps = parse_json_response(context.response)
if '_items' in resps:
for resp in resps['_items']:
assert field_name not in resp, 'field exists'
else:
assert field_name not in resps, 'field exists'
@then('we get "{field_name}" does exist')
def then_field_does_exist(context, field_name):
resps = parse_json_response(context.response)
for resp in resps['_items']:
assert field_name in resp, 'field does not exist'
@when('we publish "{item_id}" with "{pub_type}" type and "{state}" state')
def step_impl_when_publish_url(context, item_id, pub_type, state):
item_id = apply_placeholders(context, item_id)
res = get_res('/archive/' + item_id, context)
headers = if_match(context, res.get('_etag'))
context_data = {"state": state}
if context.text:
data = apply_placeholders(context, context.text)
context_data.update(json.loads(data))
data = json.dumps(context_data)
context.response = context.client.patch(get_prefixed_url(context.app, '/archive/{}/{}'.format(pub_type, item_id)),
data=data, headers=headers)
store_placeholder(context, 'archive_{}'.format(pub_type))
@then('the ingest item is routed based on routing scheme and rule "{rule_name}"')
def then_ingest_item_is_routed_based_on_routing_scheme(context, rule_name):
with context.app.test_request_context(context.app.config['URL_PREFIX']):
validate_routed_item(context, rule_name, True)
@then('the ingest item is routed and transformed based on routing scheme and rule "{rule_name}"')
def then_ingest_item_is_routed_transformed_based_on_routing_scheme(context, rule_name):
with context.app.test_request_context(context.app.config['URL_PREFIX']):
validate_routed_item(context, rule_name, True, True)
@then('the ingest item is not routed based on routing scheme and rule "{rule_name}"')
def then_ingest_item_is_not_routed_based_on_routing_scheme(context, rule_name):
with context.app.test_request_context(context.app.config['URL_PREFIX']):
validate_routed_item(context, rule_name, False)
def validate_routed_item(context, rule_name, is_routed, is_transformed=False):
data = json.loads(apply_placeholders(context, context.text))
def validate_rule(action, state):
for destination in rule.get('actions', {}).get(action, []):
query = {
'and': [
{'term': {'ingest_id': str(data['ingest'])}},
{'term': {'task.desk': str(destination['desk'])}},
{'term': {'task.stage': str(destination['stage'])}},
{'term': {'state': state}}
]
}
item = get_archive_items(query) + get_published_items(query)
if is_routed:
assert len(item) > 0, 'No routed items found for criteria: ' + str(query)
assert item[0]['ingest_id'] == data['ingest']
assert item[0]['task']['desk'] == str(destination['desk'])
assert item[0]['task']['stage'] == str(destination['stage'])
assert item[0]['state'] == state
if is_transformed:
assert item[0]['abstract'] == 'Abstract has been updated'
assert_items_in_package(item[0], state, str(destination['desk']), str(destination['stage']))
else:
assert len(item) == 0
scheme = get_resource_service('routing_schemes').find_one(_id=data['routing_scheme'], req=None)
rule = next((rule for rule in scheme['rules'] if rule['name'].lower() == rule_name.lower()), {})
validate_rule('fetch', 'routed')
validate_rule('publish', 'published')
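# Assumed shape of a routing rule consumed by validate_rule above (illustrative only):
#   rule = {
#       'name': 'sports rule',
#       'actions': {
#           'fetch':   [{'desk': '<desk_id>', 'stage': '<stage_id>'}],   # routed items end up in state 'routed'
#           'publish': [{'desk': '<desk_id>', 'stage': '<stage_id>'}],   # routed items end up in state 'published'
#       }
#   }
# For every destination the helper builds an elastic 'and' filter on ingest_id, task.desk,
# task.stage and state, then asserts that a matching item does (or does not) exist.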
@when('we schedule the routing scheme "{scheme_id}"')
def when_we_schedule_the_routing_scheme(context, scheme_id):
with context.app.test_request_context(context.app.config['URL_PREFIX']):
scheme_id = apply_placeholders(context, scheme_id)
url = apply_placeholders(context, 'routing_schemes/%s' % scheme_id)
res = get_res(url, context)
href = get_self_href(res, context)
headers = if_match(context, res.get('_etag'))
rule = res.get('rules')[0]
now = utcnow()
from apps.rules.routing_rules import Weekdays
rule['schedule'] = {
'day_of_week': [
Weekdays.dayname(now + timedelta(days=1)),
Weekdays.dayname(now + timedelta(days=2))
],
'hour_of_day_from': '16:00:00',
'hour_of_day_to': '20:00:00'
}
if len(res.get('rules')) > 1:
rule = res.get('rules')[1]
rule['schedule'] = {
'day_of_week': [Weekdays.dayname(now)]
}
context.response = context.client.patch(get_prefixed_url(context.app, href),
data=json.dumps({'rules': res.get('rules', [])}),
headers=headers)
assert_200(context.response)
def get_archive_items(query):
req = ParsedRequest()
req.max_results = 100
req.args = {'filter': json.dumps(query)}
return list(get_resource_service('archive').get(lookup=None, req=req))
def get_published_items(query):
req = ParsedRequest()
req.max_results = 100
req.args = {'filter': json.dumps(query)}
return list(get_resource_service('published').get(lookup=None, req=req))
def assert_items_in_package(item, state, desk, stage):
if item.get('groups'):
terms = [{'term': {'_id': ref.get('residRef')}}
for ref in [ref for group in item.get('groups', [])
for ref in group.get('refs', []) if 'residRef' in ref]]
query = {'or': terms}
items = get_archive_items(query)
assert len(items) == len(terms)
for item in items:
assert item.get('state') == state
assert item.get('task', {}).get('desk') == desk
assert item.get('task', {}).get('stage') == stage
@given('I logout')
def logout(context):
we_have_sessions_get_id(context, '/sessions')
step_impl_when_delete_url(context, '/auth_db/{}'.format(context.session_id))
assert_200(context.response)
@then('we get "{url}" and match')
def we_get_and_match(context, url):
url = apply_placeholders(context, url)
response_data = get_res(url, context)
context_data = json.loads(apply_placeholders(context, context.text))
assert_equal(json_match(context_data, response_data), True,
msg=str(context_data) + '\n != \n' + str(response_data))
@then('there is no "{key}" in response')
def there_is_no_key_in_response(context, key):
data = get_json_data(context.response)
assert key not in data, 'key "%s" is in %s' % (key, data)
@then('there is no "{key}" in task')
def there_is_no_key_in_preferences(context, key):
data = get_json_data(context.response)['task']
assert key not in data, 'key "%s" is in task' % key
@then('there is no "{key}" in data')
def there_is_no_profile_in_data(context, key):
data = get_json_data(context.response)['_items'][0]['data']
assert key not in data, 'key "%s" is in data' % key
@then('broadcast "{key}" has value "{value}"')
def broadcast_key_has_value(context, key, value):
data = get_json_data(context.response).get('broadcast', {})
value = apply_placeholders(context, value)
if value.lower() == 'none':
assert data[key] is None, 'key "%s" is not none and has value "%s"' % (key, data[key])
else:
assert data[key] == value, 'key "%s" does not have valid value "%s"' % (key, data[key])
@then('there is no "{key}" preference')
def there_is_no_preference(context, key):
data = get_json_data(context.response)
assert key not in data['user_preferences'], '%s is in %s' % (key, data['user_preferences'].keys())
@then('there is no "{key}" in "{namespace}" preferences')
def there_is_no_key_in_namespace_preferences(context, key, namespace):
data = get_json_data(context.response)['user_preferences']
assert key not in data[namespace], 'key "%s" is in %s' % (key, data[namespace])
@then('we check if article has Embargo')
def step_impl_then_check_embargo(context):
assert_200(context.response)
try:
response_data = json.loads(context.response.get_data())
except Exception:
fail_and_print_body(context.response, 'response is not valid json')
if response_data.get('_meta') and response_data.get('_items'):
for item in response_data.get('_items'):
assert_embargo(context, item)
else:
assert_embargo(context, response_data)
def assert_embargo(context, item):
if not item.get('embargo'):
fail_and_print_body(context, context.response, 'Embargo not found')
@when('embargo lapses for "{item_id}"')
def embargo_lapses(context, item_id):
item_id = apply_placeholders(context, item_id)
item = get_res("/archive/%s" % item_id, context)
updates = {'embargo': (utcnow() - timedelta(minutes=10)),
'schedule_settings': {'utc_embargo': (utcnow() - timedelta(minutes=10))}}
with context.app.test_request_context(context.app.config['URL_PREFIX']):
get_resource_service('archive').system_update(id=item['_id'], original=item, updates=updates)
@then('we validate the published item expiry to be after publish expiry set in desk settings {publish_expiry_in_desk}')
def validate_published_item_expiry(context, publish_expiry_in_desk):
assert_200(context.response)
try:
response_data = json.loads(context.response.get_data())
except Exception:
fail_and_print_body(context.response, 'response is not valid json')
if response_data.get('_meta') and response_data.get('_items'):
for item in response_data.get('_items'):
assert_expiry(item, publish_expiry_in_desk)
else:
assert_expiry(response_data, publish_expiry_in_desk)
@then('we get updated timestamp "{field}"')
def step_we_get_updated_timestamp(context, field):
data = get_json_data(context.response)
timestamp = arrow.get(data[field])
now = utcnow()
assert timestamp + timedelta(seconds=5) > now, 'timestamp < now (%s, %s)' % (timestamp, now) # 5s tolerance
def assert_expiry(item, publish_expiry_in_desk):
embargo = item.get('embargo')
actual = parse_date(item.get('expiry'))
error_message = 'Published Item Expiry validation fails'
publish_expiry_in_desk = int(publish_expiry_in_desk)
if embargo:
expected = get_expiry_date(minutes=publish_expiry_in_desk,
offset=datetime.strptime(embargo, '%Y-%m-%dT%H:%M:%S%z'))
if actual != expected:
raise WooperAssertionError("{}. Expected: {}, Actual: {}".format(error_message, expected, actual))
else:
expected = get_expiry_date(minutes=publish_expiry_in_desk)
if expected < actual:
raise WooperAssertionError("{}. Expected: {}, Actual: {}".format(error_message, expected, actual))
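# Worked example for assert_expiry (dates are illustrative assumptions):
#   publish_expiry_in_desk = 60 and embargo = '2017-05-01T10:00:00+0000'
#   -> the expiry must equal embargo + 60 minutes, i.e. 2017-05-01T11:00:00+0000
# Without an embargo the expected value is utcnow() + 60 minutes and the actual expiry may not
# exceed it; any mismatch raises WooperAssertionError.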
@when('run import legal publish queue')
def run_import_legal_publish_queue(context):
with context.app.test_request_context(context.app.config['URL_PREFIX']):
from apps.legal_archive import ImportLegalPublishQueueCommand
ImportLegalPublishQueueCommand().run()
@when('we expire items')
def expire_content(context):
with context.app.test_request_context(context.app.config['URL_PREFIX']):
ids = json.loads(apply_placeholders(context, context.text))
expiry = utcnow() - timedelta(minutes=5)
for item_id in ids:
original = get_resource_service('archive').find_one(req=None, _id=item_id)
get_resource_service('archive').system_update(item_id, {'expiry': expiry}, original)
get_resource_service('published').update_published_items(item_id, 'expiry', expiry)
from apps.archive.commands import RemoveExpiredContent
RemoveExpiredContent().run()
@when('the publish schedule lapses')
def run_overdue_schedule_jobs(context):
with context.app.test_request_context(context.app.config['URL_PREFIX']):
ids = json.loads(apply_placeholders(context, context.text))
lapse_time = utcnow() - timedelta(minutes=5)
updates = {
'publish_schedule': lapse_time,
'schedule_settings': {
'utc_publish_schedule': lapse_time,
'time_zone': None
}
}
for item_id in ids:
original = get_resource_service('archive').find_one(req=None, _id=item_id)
get_resource_service('archive').system_update(item_id, updates, original)
get_resource_service('published').update_published_items(item_id, 'publish_schedule', lapse_time)
get_resource_service('published').update_published_items(item_id, 'schedule_settings.utc_publish_schedule',
lapse_time)
@when('we transmit items')
def transmit_items(context):
with context.app.test_request_context(context.app.config['URL_PREFIX']):
from superdesk.publish.publish_content import PublishContent
PublishContent().run()
@when('we remove item "{_id}" from mongo')
def remove_item_from_mongo(context, _id):
with context.app.app_context():
context.app.data.mongo.remove('archive', {'_id': _id})
@then('we get text "{text}" in response field "{field}"')
def we_get_text_in_field(context, text, field):
with context.app.test_request_context(context.app.config['URL_PREFIX']):
resp = parse_json_response(context.response)
assert field in resp, 'Field {} not found in response.'.format(field)
assert isinstance(resp.get(field), str), 'Invalid type'
assert text in resp.get(field, ''), '{} contains text: {}. Text To find: {}'.format(field,
resp.get(field, ''),
text)
@then('we reset priority flag for updated articles')
def we_get_reset_default_priority_for_updated_articles(context):
context.app.config['RESET_PRIORITY_VALUE_FOR_UPDATE_ARTICLES'] = True
@then('we mark the items not moved to legal')
def we_mark_the_items_not_moved_to_legal(context):
with context.app.test_request_context(context.app.config['URL_PREFIX']):
ids = json.loads(apply_placeholders(context, context.text))
for item_id in ids:
get_resource_service('published').update_published_items(item_id, 'moved_to_legal', False)
@when('we run import legal archive command')
def we_run_import_legal_archive_command(context):
with context.app.test_request_context(context.app.config['URL_PREFIX']):
from apps.legal_archive.commands import ImportLegalArchiveCommand
ImportLegalArchiveCommand().run()
@then('we find no reference of package "{reference}" in item')
def we_find_no_reference_of_package_in_item(context, reference):
with context.app.test_request_context(context.app.config['URL_PREFIX']):
reference = apply_placeholders(context, reference)
resp = parse_json_response(context.response)
linked_in_packages = resp.get('linked_in_packages', [])
assert reference not in [p.get('package') for p in linked_in_packages], \
'Package reference {} found in item'.format(reference)
@then('we set spike exipry "{expiry}"')
def we_set_spike_exipry(context, expiry):
context.app.settings['SPIKE_EXPIRY_MINUTES'] = int(expiry)
@then('we set published item expiry {expiry}')
def we_set_published_item_expiry(context, expiry):
context.app.settings['PUBLISHED_CONTENT_EXPIRY_MINUTES'] = int(expiry)
@then('we set copy metadata from parent flag')
def we_set_copy_metadata_from_parent(context):
context.app.settings['COPY_METADATA_FROM_PARENT'] = True
@then('we assert the content api item "{item_id}" is published to subscriber "{subscriber}"')
def we_assert_content_api_item_is_published_to_subscriber(context, item_id, subscriber):
with context.app.test_request_context(context.app.config['URL_PREFIX']):
item_id = apply_placeholders(context, item_id)
subscriber = apply_placeholders(context, subscriber)
req = ParsedRequest()
req.projection = json.dumps({'subscribers': 1})
cursor = get_resource_service('items').get_from_mongo(req, {'_id': item_id})
assert cursor.count() > 0, 'Item not found'
item = cursor[0]
subscriber = apply_placeholders(context, subscriber)
assert len(item.get('subscribers', [])) > 0, 'No subscribers found.'
assert subscriber in item.get('subscribers', []), 'Subscriber with Id: {} not found.'.format(subscriber)
@then('we assert the content api item "{item_id}" is not published to subscriber "{subscriber}"')
def we_assert_content_api_item_is_not_published_to_subscriber(context, item_id, subscriber):
with context.app.test_request_context(context.app.config['URL_PREFIX']):
item_id = apply_placeholders(context, item_id)
subscriber = apply_placeholders(context, subscriber)
req = ParsedRequest()
req.projection = json.dumps({'subscribers': 1})
cursor = get_resource_service('items').get_from_mongo(req, {'_id': item_id})
assert cursor.count() > 0, 'Item not found'
item = cursor[0]
subscriber = apply_placeholders(context, subscriber)
assert subscriber not in item.get('subscribers', []), \
'Subscriber with Id: {} found for the item. '.format(subscriber)
@then('we assert the content api item "{item_id}" is not published to any subscribers')
def we_assert_content_api_item_is_not_published(context, item_id):
with context.app.test_request_context(context.app.config['URL_PREFIX']):
item_id = apply_placeholders(context, item_id)
req = ParsedRequest()
req.projection = json.dumps({'subscribers': 1})
cursor = get_resource_service('items').get_from_mongo(req, {'_id': item_id})
assert cursor.count() > 0, 'Item not found'
item = cursor[0]
assert len(item.get('subscribers', [])) == 0, \
'Item published to subscribers {}.'.format(item.get('subscribers', []))
@then('we ensure that archived schema extra fields are not present in duplicated item')
def we_ensure_that_archived_schema_extra_fields_are_not_present(context):
with context.app.test_request_context(context.app.config['URL_PREFIX']):
eve_keys = set([config.ID_FIELD, config.LAST_UPDATED, config.DATE_CREATED, config.VERSION, config.ETAG])
archived_schema_keys = set(context.app.config['DOMAIN']['archived']['schema'].keys())
archived_schema_keys.update(eve_keys)
archive_schema_keys = set(context.app.config['DOMAIN']['archive']['schema'].keys())
archive_schema_keys.update(eve_keys)
extra_fields = [key for key in archived_schema_keys if key not in archive_schema_keys]
duplicate_item = json.loads(context.response.get_data())
for field in extra_fields:
assert field not in duplicate_item, 'Field {} found in the duplicate item'.format(field)
@then('we assert content api item "{item_id}" with associated item "{embedded_id}" is published to "{subscriber}"')
def we_assert_that_associated_item_for_subscriber(context, item_id, embedded_id, subscriber):
with context.app.test_request_context(context.app.config['URL_PREFIX']):
item_id = apply_placeholders(context, item_id)
subscriber = apply_placeholders(context, subscriber)
embedded_id = apply_placeholders(context, embedded_id)
req = ParsedRequest()
cursor = get_resource_service('items').get_from_mongo(req, {'_id': item_id})
assert cursor.count() > 0, 'Item not found'
item = cursor[0]
assert embedded_id in (item.get('associations') or {}), '{} association not found.'.format(embedded_id)
assert subscriber in (item['associations'][embedded_id] or {}).get('subscribers', []), \
'{} subscriber not found in associations {}'.format(subscriber, embedded_id)
@then('we assert content api item "{item_id}" with associated item "{embedded_id}" is not published to "{subscriber}"')
def we_assert_that_associated_item_not_for_subscriber(context, item_id, embedded_id, subscriber):
with context.app.test_request_context(context.app.config['URL_PREFIX']):
item_id = apply_placeholders(context, item_id)
subscriber = apply_placeholders(context, subscriber)
embedded_id = apply_placeholders(context, embedded_id)
req = ParsedRequest()
cursor = get_resource_service('items').get_from_mongo(req, {'_id': item_id})
assert cursor.count() > 0, 'Item not found'
item = cursor[0]
assert embedded_id in (item.get('associations') or {}), '{} association not found.'.format(embedded_id)
assert subscriber not in (item['associations'][embedded_id] or {}).get('subscribers', []), \
'{} subscriber found in associations {}'.format(subscriber, embedded_id)
@then('file exists "{path}"')
def then_file_exists(context, path):
assert os.path.isfile(path), '{} is not a file'.format(path)
| agpl-3.0 | -117,397,680,062,153,810 | 38.317652 | 120 | 0.64768 | false |
ESS-LLP/erpnext-healthcare | erpnext/hr/doctype/payroll_entry/payroll_entry.py | 1 | 20575 | # -*- coding: utf-8 -*-
# Copyright (c) 2017, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe.model.document import Document
from dateutil.relativedelta import relativedelta
from frappe.utils import cint, flt, nowdate, add_days, getdate, fmt_money, add_to_date, DATE_FORMAT, date_diff
from frappe import _
from erpnext.accounts.utils import get_fiscal_year
from erpnext.hr.doctype.employee.employee import get_holiday_list_for_employee
class PayrollEntry(Document):
def on_submit(self):
self.create_salary_slips()
def before_submit(self):
if self.validate_attendance:
if self.validate_employee_attendance():
frappe.throw(_("Cannot Submit, Employees left to mark attendance"))
def get_emp_list(self):
"""
Returns list of active employees based on selected criteria
and for which salary structure exists
"""
cond = self.get_filter_condition()
cond += self.get_joining_relieving_condition()
condition = ''
if self.payroll_frequency:
condition = """and payroll_frequency = '%(payroll_frequency)s'"""% {"payroll_frequency": self.payroll_frequency}
sal_struct = frappe.db.sql_list("""
select
name from `tabSalary Structure`
where
docstatus = 1 and
is_active = 'Yes'
and company = %(company)s and
ifnull(salary_slip_based_on_timesheet,0) = %(salary_slip_based_on_timesheet)s
{condition}""".format(condition=condition),
{"company": self.company, "salary_slip_based_on_timesheet":self.salary_slip_based_on_timesheet})
if sal_struct:
cond += "and t2.salary_structure IN %(sal_struct)s "
cond += "and %(from_date)s >= t2.from_date"
emp_list = frappe.db.sql("""
select
distinct t1.name as employee, t1.employee_name, t1.department, t1.designation
from
`tabEmployee` t1, `tabSalary Structure Assignment` t2
where
t1.name = t2.employee
and t2.docstatus = 1
%s order by t2.from_date desc
""" % cond, {"sal_struct": tuple(sal_struct), "from_date": self.end_date}, as_dict=True)
return emp_list
def fill_employee_details(self):
self.set('employees', [])
employees = self.get_emp_list()
if not employees:
frappe.throw(_("No employees for the mentioned criteria"))
for d in employees:
self.append('employees', d)
self.number_of_employees = len(employees)
if self.validate_attendance:
return self.validate_employee_attendance()
def get_filter_condition(self):
self.check_mandatory()
cond = ''
for f in ['company', 'branch', 'department', 'designation']:
if self.get(f):
cond += " and t1." + f + " = '" + self.get(f).replace("'", "\'") + "'"
return cond
def get_joining_relieving_condition(self):
cond = """
and ifnull(t1.date_of_joining, '0000-00-00') <= '%(end_date)s'
and ifnull(t1.relieving_date, '2199-12-31') >= '%(start_date)s'
""" % {"start_date": self.start_date, "end_date": self.end_date}
return cond
def check_mandatory(self):
for fieldname in ['company', 'start_date', 'end_date']:
if not self.get(fieldname):
frappe.throw(_("Please set {0}").format(self.meta.get_label(fieldname)))
def create_salary_slips(self):
"""
Creates salary slip for selected employees if already not created
"""
self.check_permission('write')
self.created = 1
emp_list = [d.employee for d in self.get_emp_list()]
if emp_list:
args = frappe._dict({
"salary_slip_based_on_timesheet": self.salary_slip_based_on_timesheet,
"payroll_frequency": self.payroll_frequency,
"start_date": self.start_date,
"end_date": self.end_date,
"company": self.company,
"posting_date": self.posting_date,
"deduct_tax_for_unclaimed_employee_benefits": self.deduct_tax_for_unclaimed_employee_benefits,
"deduct_tax_for_unsubmitted_tax_exemption_proof": self.deduct_tax_for_unsubmitted_tax_exemption_proof,
"payroll_entry": self.name
})
if len(emp_list) > 30:
frappe.enqueue(create_salary_slips_for_employees, timeout=600, employees=emp_list, args=args)
else:
create_salary_slips_for_employees(emp_list, args, publish_progress=False)
def get_sal_slip_list(self, ss_status, as_dict=False):
"""
Returns list of salary slips based on selected criteria
"""
cond = self.get_filter_condition()
ss_list = frappe.db.sql("""
select t1.name, t1.salary_structure from `tabSalary Slip` t1
where t1.docstatus = %s and t1.start_date >= %s and t1.end_date <= %s
and (t1.journal_entry is null or t1.journal_entry = "") and ifnull(salary_slip_based_on_timesheet,0) = %s %s
""" % ('%s', '%s', '%s','%s', cond), (ss_status, self.start_date, self.end_date, self.salary_slip_based_on_timesheet), as_dict=as_dict)
return ss_list
def submit_salary_slips(self):
self.check_permission('write')
ss_list = self.get_sal_slip_list(ss_status=0)
if len(ss_list) > 30:
frappe.enqueue(submit_salary_slips_for_employees, timeout=600, payroll_entry=self, salary_slips=ss_list)
else:
submit_salary_slips_for_employees(self, ss_list, publish_progress=False)
def email_salary_slip(self, submitted_ss):
if frappe.db.get_single_value("HR Settings", "email_salary_slip_to_employee"):
for ss in submitted_ss:
ss.email_salary_slip()
def get_loan_details(self):
"""
Get loan details from submitted salary slip based on selected criteria
"""
cond = self.get_filter_condition()
return frappe.db.sql(""" select eld.loan_account, eld.loan,
eld.interest_income_account, eld.principal_amount, eld.interest_amount, eld.total_payment
from
`tabSalary Slip` t1, `tabSalary Slip Loan` eld
where
t1.docstatus = 1 and t1.name = eld.parent and start_date >= %s and end_date <= %s %s
""" % ('%s', '%s', cond), (self.start_date, self.end_date), as_dict=True) or []
def get_salary_component_account(self, salary_component):
account = frappe.db.get_value("Salary Component Account",
{"parent": salary_component, "company": self.company}, "default_account")
if not account:
frappe.throw(_("Please set default account in Salary Component {0}")
.format(salary_component))
return account
def get_salary_components(self, component_type):
salary_slips = self.get_sal_slip_list(ss_status = 1, as_dict = True)
if salary_slips:
salary_components = frappe.db.sql("""select salary_component, amount, parentfield
from `tabSalary Detail` where parentfield = '%s' and parent in (%s)""" %
(component_type, ', '.join(['%s']*len(salary_slips))), tuple([d.name for d in salary_slips]), as_dict=True)
return salary_components
def get_salary_component_total(self, component_type = None):
salary_components = self.get_salary_components(component_type)
if salary_components:
component_dict = {}
for item in salary_components:
add_component_to_accrual_jv_entry = True
if component_type == "earnings":
is_flexible_benefit, only_tax_impact = frappe.db.get_value("Salary Component", item['salary_component'], ['is_flexible_benefit', 'only_tax_impact'])
if is_flexible_benefit == 1 and only_tax_impact ==1:
add_component_to_accrual_jv_entry = False
if add_component_to_accrual_jv_entry:
component_dict[item['salary_component']] = component_dict.get(item['salary_component'], 0) + item['amount']
account_details = self.get_account(component_dict = component_dict)
return account_details
def get_account(self, component_dict = None):
account_dict = {}
for s, a in component_dict.items():
account = self.get_salary_component_account(s)
account_dict[account] = account_dict.get(account, 0) + a
return account_dict
def get_default_payroll_payable_account(self):
payroll_payable_account = frappe.get_cached_value('Company',
{"company_name": self.company}, "default_payroll_payable_account")
if not payroll_payable_account:
frappe.throw(_("Please set Default Payroll Payable Account in Company {0}")
.format(self.company))
return payroll_payable_account
def make_accrual_jv_entry(self):
self.check_permission('write')
earnings = self.get_salary_component_total(component_type = "earnings") or {}
deductions = self.get_salary_component_total(component_type = "deductions") or {}
default_payroll_payable_account = self.get_default_payroll_payable_account()
loan_details = self.get_loan_details()
jv_name = ""
precision = frappe.get_precision("Journal Entry Account", "debit_in_account_currency")
if earnings or deductions:
journal_entry = frappe.new_doc('Journal Entry')
journal_entry.voucher_type = 'Journal Entry'
journal_entry.user_remark = _('Accrual Journal Entry for salaries from {0} to {1}')\
.format(self.start_date, self.end_date)
journal_entry.company = self.company
journal_entry.posting_date = self.posting_date
accounts = []
payable_amount = 0
# Earnings
for acc, amount in earnings.items():
payable_amount += flt(amount, precision)
accounts.append({
"account": acc,
"debit_in_account_currency": flt(amount, precision),
"cost_center": self.cost_center,
"project": self.project
})
# Deductions
for acc, amount in deductions.items():
payable_amount -= flt(amount, precision)
accounts.append({
"account": acc,
"credit_in_account_currency": flt(amount, precision),
"cost_center": self.cost_center,
"project": self.project
})
# Loan
for data in loan_details:
accounts.append({
"account": data.loan_account,
"credit_in_account_currency": data.principal_amount
})
if data.interest_amount and not data.interest_income_account:
frappe.throw(_("Select interest income account in loan {0}").format(data.loan))
if data.interest_income_account and data.interest_amount:
accounts.append({
"account": data.interest_income_account,
"credit_in_account_currency": data.interest_amount,
"cost_center": self.cost_center,
"project": self.project
})
payable_amount -= flt(data.total_payment, precision)
# Payable amount
accounts.append({
"account": default_payroll_payable_account,
"credit_in_account_currency": flt(payable_amount, precision)
})
journal_entry.set("accounts", accounts)
journal_entry.title = default_payroll_payable_account
journal_entry.save()
try:
journal_entry.submit()
jv_name = journal_entry.name
self.update_salary_slip_status(jv_name = jv_name)
except Exception as e:
frappe.msgprint(e)
return jv_name
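# Illustrative balance check for the accrual entry built above (numbers are assumptions, not ledger data):
#   earnings accounts debited          : 1200
#   deduction accounts credited        :  300
#   loan principal + interest credited :  100   (total_payment also reduces the payable)
#   -> payable_amount = 1200 - 300 - 100 = 800 credited to the default payroll payable account,
#      so total debits equal total credits and the Journal Entry can be submitted.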
def make_payment_entry(self):
self.check_permission('write')
cond = self.get_filter_condition()
salary_slip_name_list = frappe.db.sql(""" select t1.name from `tabSalary Slip` t1
where t1.docstatus = 1 and start_date >= %s and end_date <= %s %s
""" % ('%s', '%s', cond), (self.start_date, self.end_date), as_list = True)
if salary_slip_name_list and len(salary_slip_name_list) > 0:
salary_slip_total = 0
for salary_slip_name in salary_slip_name_list:
salary_slip = frappe.get_doc("Salary Slip", salary_slip_name[0])
for sal_detail in salary_slip.earnings:
is_flexible_benefit, only_tax_impact, create_separate_je, statistical_component = frappe.db.get_value("Salary Component", sal_detail.salary_component,
['is_flexible_benefit', 'only_tax_impact', 'create_separate_payment_entry_against_benefit_claim', 'statistical_component'])
if only_tax_impact != 1 and statistical_component != 1:
if is_flexible_benefit == 1 and create_separate_je == 1:
self.create_journal_entry(sal_detail.amount, sal_detail.salary_component)
else:
salary_slip_total += sal_detail.amount
for sal_detail in salary_slip.deductions:
statistical_component = frappe.db.get_value("Salary Component", sal_detail.salary_component, 'statistical_component')
if statistical_component != 1:
salary_slip_total -= sal_detail.amount
if salary_slip_total > 0:
self.create_journal_entry(salary_slip_total, "salary")
def create_journal_entry(self, je_payment_amount, user_remark):
default_payroll_payable_account = self.get_default_payroll_payable_account()
precision = frappe.get_precision("Journal Entry Account", "debit_in_account_currency")
journal_entry = frappe.new_doc('Journal Entry')
journal_entry.voucher_type = 'Bank Entry'
journal_entry.user_remark = _('Payment of {0} from {1} to {2}')\
.format(user_remark, self.start_date, self.end_date)
journal_entry.company = self.company
journal_entry.posting_date = self.posting_date
payment_amount = flt(je_payment_amount, precision)
journal_entry.set("accounts", [
{
"account": self.payment_account,
"credit_in_account_currency": payment_amount
},
{
"account": default_payroll_payable_account,
"debit_in_account_currency": payment_amount,
"reference_type": self.doctype,
"reference_name": self.name
}
])
journal_entry.save(ignore_permissions = True)
def update_salary_slip_status(self, jv_name = None):
ss_list = self.get_sal_slip_list(ss_status=1)
for ss in ss_list:
ss_obj = frappe.get_doc("Salary Slip",ss[0])
frappe.db.set_value("Salary Slip", ss_obj.name, "journal_entry", jv_name)
def set_start_end_dates(self):
self.update(get_start_end_dates(self.payroll_frequency,
self.start_date or self.posting_date, self.company))
def validate_employee_attendance(self):
employees_to_mark_attendance = []
days_in_payroll, days_holiday, days_attendance_marked = 0, 0, 0
for employee_detail in self.employees:
days_holiday = self.get_count_holidays_of_employee(employee_detail.employee)
days_attendance_marked = self.get_count_employee_attendance(employee_detail.employee)
days_in_payroll = date_diff(self.end_date, self.start_date) + 1
if days_in_payroll > days_holiday + days_attendance_marked:
employees_to_mark_attendance.append({
"employee": employee_detail.employee,
"employee_name": employee_detail.employee_name
})
return employees_to_mark_attendance
def get_count_holidays_of_employee(self, employee):
holiday_list = get_holiday_list_for_employee(employee)
holidays = 0
if holiday_list:
days = frappe.db.sql("""select count(*) from tabHoliday where
parent=%s and holiday_date between %s and %s""", (holiday_list,
self.start_date, self.end_date))
if days and days[0][0]:
holidays = days[0][0]
return holidays
def get_count_employee_attendance(self, employee):
marked_days = 0
attendances = frappe.db.sql("""select count(*) from tabAttendance where
employee=%s and docstatus=1 and attendance_date between %s and %s""",
(employee, self.start_date, self.end_date))
if attendances and attendances[0][0]:
marked_days = attendances[0][0]
return marked_days
@frappe.whitelist()
def get_start_end_dates(payroll_frequency, start_date=None, company=None):
'''Returns dict of start and end dates for given payroll frequency based on start_date'''
if payroll_frequency == "Monthly" or payroll_frequency == "Bimonthly" or payroll_frequency == "":
fiscal_year = get_fiscal_year(start_date, company=company)[0]
month = "%02d" % getdate(start_date).month
m = get_month_details(fiscal_year, month)
if payroll_frequency == "Bimonthly":
if getdate(start_date).day <= 15:
start_date = m['month_start_date']
end_date = m['month_mid_end_date']
else:
start_date = m['month_mid_start_date']
end_date = m['month_end_date']
else:
start_date = m['month_start_date']
end_date = m['month_end_date']
if payroll_frequency == "Weekly":
end_date = add_days(start_date, 6)
if payroll_frequency == "Fortnightly":
end_date = add_days(start_date, 13)
if payroll_frequency == "Daily":
end_date = start_date
return frappe._dict({
'start_date': start_date, 'end_date': end_date
})
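# Hedged examples for get_start_end_dates (dates assume a Fiscal Year record that covers them):
#   ('Monthly',     start 2017-03-20) -> 2017-03-01 .. 2017-03-31
#   ('Bimonthly',   start 2017-03-10) -> 2017-03-01 .. 2017-03-15   (first half of the month)
#   ('Bimonthly',   start 2017-03-20) -> 2017-03-16 .. 2017-03-31   (second half of the month)
#   ('Weekly',      start 2017-03-20) -> 2017-03-20 .. 2017-03-26
#   ('Fortnightly', start 2017-03-20) -> 2017-03-20 .. 2017-04-02
#   ('Daily',       start 2017-03-20) -> 2017-03-20 .. 2017-03-20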
def get_frequency_kwargs(frequency_name):
frequency_dict = {
'monthly': {'months': 1},
'fortnightly': {'days': 14},
'weekly': {'days': 7},
'daily': {'days': 1}
}
return frequency_dict.get(frequency_name)
@frappe.whitelist()
def get_end_date(start_date, frequency):
start_date = getdate(start_date)
frequency = frequency.lower() if frequency else 'monthly'
kwargs = get_frequency_kwargs(frequency) if frequency != 'bimonthly' else get_frequency_kwargs('monthly')
# weekly, fortnightly and daily intervals have fixed days so no problems
end_date = add_to_date(start_date, **kwargs) - relativedelta(days=1)
if frequency != 'bimonthly':
return dict(end_date=end_date.strftime(DATE_FORMAT))
else:
return dict(end_date='')
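# Rough usage sketch (illustrative values, DATE_FORMAT assumed to be '%Y-%m-%d'):
#   get_end_date('2017-01-01', 'Monthly')   -> {'end_date': '2017-01-31'}
#   get_end_date('2017-01-01', 'Weekly')    -> {'end_date': '2017-01-07'}
#   get_end_date('2017-01-01', 'Bimonthly') -> {'end_date': ''}   # the caller resolves the half itself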
def get_month_details(year, month):
ysd = frappe.db.get_value("Fiscal Year", year, "year_start_date")
if ysd:
import calendar, datetime
diff_mnt = cint(month)-cint(ysd.month)
if diff_mnt<0:
diff_mnt = 12-int(ysd.month)+cint(month)
msd = ysd + relativedelta(months=diff_mnt) # month start date
month_days = cint(calendar.monthrange(cint(msd.year) ,cint(month))[1]) # days in month
mid_start = datetime.date(msd.year, cint(month), 16) # month mid start date
mid_end = datetime.date(msd.year, cint(month), 15) # month mid end date
med = datetime.date(msd.year, cint(month), month_days) # month end date
return frappe._dict({
'year': msd.year,
'month_start_date': msd,
'month_end_date': med,
'month_mid_start_date': mid_start,
'month_mid_end_date': mid_end,
'month_days': month_days
})
else:
frappe.throw(_("Fiscal Year {0} not found").format(year))
def get_payroll_entry_bank_entries(payroll_entry_name):
journal_entries = frappe.db.sql(
'select name from `tabJournal Entry Account` '
'where reference_type="Payroll Entry" '
'and reference_name=%s and docstatus=1',
payroll_entry_name,
as_dict=1
)
return journal_entries
@frappe.whitelist()
def payroll_entry_has_bank_entries(name):
response = {}
bank_entries = get_payroll_entry_bank_entries(name)
response['submitted'] = 1 if bank_entries else 0
return response
def create_salary_slips_for_employees(employees, args, publish_progress=True):
salary_slips_exists_for = get_existing_salary_slips(employees, args)
count=0
for emp in employees:
if emp not in salary_slips_exists_for:
args.update({
"doctype": "Salary Slip",
"employee": emp
})
ss = frappe.get_doc(args)
ss.insert()
count+=1
if publish_progress:
frappe.publish_progress(count*100/len(set(employees) - set(salary_slips_exists_for)),
title = _("Creating Salary Slips..."))
payroll_entry = frappe.get_doc("Payroll Entry", args.payroll_entry)
payroll_entry.db_set("salary_slips_created", 1)
payroll_entry.notify_update()
def get_existing_salary_slips(employees, args):
return frappe.db.sql_list("""
select distinct employee from `tabSalary Slip`
where docstatus!= 2 and company = %s
and start_date >= %s and end_date <= %s
and employee in (%s)
""" % ('%s', '%s', '%s', ', '.join(['%s']*len(employees))),
[args.company, args.start_date, args.end_date] + employees)
def submit_salary_slips_for_employees(payroll_entry, salary_slips, publish_progress=True):
submitted_ss = []
not_submitted_ss = []
frappe.flags.via_payroll_entry = True
count = 0
for ss in salary_slips:
ss_obj = frappe.get_doc("Salary Slip",ss[0])
if ss_obj.net_pay<0:
not_submitted_ss.append(ss[0])
else:
try:
ss_obj.submit()
submitted_ss.append(ss_obj)
except frappe.ValidationError:
not_submitted_ss.append(ss[0])
count += 1
if publish_progress:
frappe.publish_progress(count*100/len(salary_slips), title = _("Submitting Salary Slips..."))
if submitted_ss:
payroll_entry.make_accrual_jv_entry()
frappe.msgprint(_("Salary Slip submitted for period from {0} to {1}")
.format(ss_obj.start_date, ss_obj.end_date))
payroll_entry.email_salary_slip(submitted_ss)
payroll_entry.db_set("salary_slips_submitted", 1)
payroll_entry.notify_update()
if not submitted_ss and not not_submitted_ss:
frappe.msgprint(_("No salary slip found to submit for the above selected criteria OR salary slip already submitted"))
if not_submitted_ss:
frappe.msgprint(_("Could not submit some Salary Slips"))
def get_payroll_entries_for_jv(doctype, txt, searchfield, start, page_len, filters):
return frappe.db.sql("""
select name from `tabPayroll Entry`
where `{key}` LIKE %(txt)s
and name not in
(select reference_name from `tabJournal Entry Account`
where reference_type="Payroll Entry")
order by name limit %(start)s, %(page_len)s"""
.format(key=searchfield), {
'txt': "%%%s%%" % frappe.db.escape(txt),
'start': start, 'page_len': page_len
})
| gpl-3.0 | 4,264,990,217,364,493,000 | 35.675579 | 154 | 0.693026 | false |
alvin777/excelsior | sort/benchmark.py | 1 | 2909 | #!/usr/bin/python
import time
from simple_sorts import *
from shell_sort import *
from quick_sort import *
from external_merge_sort import *
from radix_sort import *
from merge_sort import *
from heap_sort import *
from intro_sort import *
from timsort import *
from list_generators import *
result = {}
def run_until(sort_func, max_duration = 1.0, generator = random_generator):
print sort_func
duration = 0
list_size = 100
while duration < max_duration:
randomList = [x for x in generator(list_size)]
time_start = time.time()
try:
sort_func(randomList)
except RuntimeError:
print 'failed on list size: %5d' % list_size
return
duration = time.time() - time_start
print 'list size: %7d, duration: %0.3f' % (list_size, duration)
if not generator in result:
result[generator] = {}
if not list_size in result[generator]:
result[generator][list_size] = {}
result[generator][list_size][sort_func] = duration
list_size *= 2
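# Illustrative note: run_until doubles the list size until a single sort run exceeds max_duration,
# recording each timing in the module-level dict as
#   result[generator][list_size][sort_func] = duration
# e.g. result[random_generator][1600][quick_sort] -> 0.012   (numbers made up for illustration)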
def test_run_benchmarks():
generators_list = [random_generator, almost_sorted_generator, reverse_sorted_generator, few_uniq_generator]
# generators_list = [random_generator, reverse_sorted_generator]
# generators_list = [few_uniq_generator]
# sort_func_list = [bubble_sort, insertion_sort, insertion_sort2]
sort_func_list = [bubble_sort, insertion_sort, insertion_sort2, selection_sort, shell_sort, \
merge_sort, quick_sort, lambda x: quick_sort(x, splitByMedian), heap_sort,
lambda x: radix_sort(x, 1000), intro_sort, timsort]
# sort_func_list = [quick_sort, \
# lambda x: quick_sort(x, partition_func=splitByMiddleElement), \
# lambda x: quick_sort(x, partition_func=splitByMedian), \
# lambda x: quick_sort(x, leaf_sort_func=leaf_insertion_sort)]
# sort_func_list = [radix_sort, \
# lambda x: radix_sort(x, 2), \
# lambda x: radix_sort(x, 100),
# lambda x: radix_sort(x, 1000),
# lambda x: radix_sort(x, 10000)
# ]
for generator in generators_list:
print generator
for sort_func in sort_func_list:
run_until(sort_func, 0.5, generator)
for generator in generators_list:
print generator
for list_size in sorted(result[generator]):
sys.stdout.write(str(list_size) + "\t")
for sort_func in sort_func_list:
if sort_func in result[generator][list_size]:
sys.stdout.write("{:.3f}\t".format(result[generator][list_size][sort_func]))
else:
sys.stdout.write("\t")
sys.stdout.write("\n")
test_run_benchmarks() | gpl-2.0 | -2,014,888,064,060,402,700 | 34.487805 | 111 | 0.584393 | false |
amitdhiman000/MyOffers | myadmin/views.py | 1 | 7396 | from myadmin.backenddb import (insert_default_areas, insert_custom_areas, insert_default_categories)
from offer.models import CategoryModel
from locus.models import (CountryModel ,StateModel, CityModel, AreaModel)
from mail.models import (PublicMessageModel)
from myadmin.preload_data import (gCountries, gCategories)
from base.apputil import (App_AdminRequired, App_Render)
# Create your views here.
@App_AdminRequired
def home(request):
data = {'title': 'MyAdmin'}
return App_Render(request, 'admin/admin_home_1.html', data)
@App_AdminRequired
def locus_area_view(request, country, state, city, area):
print(area)
areas = AreaModel.fetch_by_name(area, city, state, country)
data = {'title': 'MyAdmin', 'country': country, 'state': state, 'city': city, 'area': area, 'areas': areas}
return App_Render(request, 'admin/admin_locus_area_1.html', data)
@App_AdminRequired
def locus_city_view(request, country, state, city):
print(city)
filter = {'fk_city__name': city, 'fk_state__name': state, 'fk_country__name': country}
areas = AreaModel.fetch(filter)
data = {'title': 'MyAdmin', 'country': country, 'state': state, 'city': city, 'areas': areas}
return App_Render(request, 'admin/admin_locus_city_1.html', data)
@App_AdminRequired
def locus_state_view(request, country, state):
print(state)
filter = {'fk_state__name': state, 'fk_country__name': country}
cities = CityModel.fetch(filter)
data = {'title': 'MyAdmin', 'country': country, 'state': state, 'cities': cities}
return App_Render(request, 'admin/admin_locus_state_1.html', data)
@App_AdminRequired
def locus_country_view(request, country):
print(country)
states = StateModel.fetch({'fk_country__name': country})
data = {'title': 'MyAdmin', 'country': country, 'states': states}
return App_Render(request, 'admin/admin_locus_country_1.html', data)
@App_AdminRequired
def locus_view0(request):
countries = CountryModel.fetch_all()
states = StateModel.fetch({'fk_country__name': 'India'})
data = {'title': 'MyAdmin', 'countries': countries, 'states': states}
return App_Render(request, 'admin/admin_locus_view_1.html', data)
@App_AdminRequired
def locus_view(request, query=''):
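    # Dispatch to the country/state/city/area view depending on how many path
    # segments are present in the query.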
print('query : '+query)
params = query.rstrip('/').split('/')
length = len(params)
print(params)
print('length : '+str(length))
if length == 1 and params[0] != '':
return locus_country_view(request, params[0])
elif length == 2:
return locus_state_view(request, params[0], params[1])
elif length == 3:
return locus_city_view(request, params[0], params[1], params[2])
elif length == 4:
return locus_area_view(request, params[0], params[1], params[2], params[3])
return locus_view0(request)
@App_AdminRequired
def locus_country_add_view(request, country):
states = {}
if country in gCountries:
states = gCountries[country]
data = {'title': 'MyAdmin', 'country': country, 'states': states}
return App_Render(request, 'admin/admin_locus_country_add_1.html', data)
@App_AdminRequired
def locus_add_view0(request):
countries = list(gCountries.keys())
data = {'title': 'MyAdmin', 'countries': countries}
return App_Render(request, 'admin/admin_locus_add_1.html', data)
@App_AdminRequired
def locus_add_view(request, query=''):
print('query : '+query)
params = query.rstrip('/').split('/')
length = len(params)
print(params)
print('length : '+str(length))
if length == 1 and params[0] != '':
return locus_country_add_view(request, params[0])
elif length == 2:
return locus_state_add_view(request, params[0], params[1])
elif length == 3:
return locus_city_add_view(request, params[0], params[1], params[2])
elif length == 4:
return locus_area_add_view(request, params[0], params[1], params[2], params[3])
return locus_add_view0(request)
@App_AdminRequired
def locus_auth(request, query=''):
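    # Expects query as country/state/city; inserts that city's areas into the
    # database on first access before rendering them.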
print('query : '+query)
params = query.rstrip('/').split('/')
length = len(params)
print(params)
print('length : '+str(length))
if length < 3:
return None
country = params[0]
state = params[1]
city = params[2]
print(country, state, city)
if CityModel.fetch_by_name(city_name=city, state_name=state, country_name=country) is None:
insert_custom_areas(city, state, country)
areas = AreaModel.fetch_by_city(city)
data = {'title': 'Location', 'country': country, 'state': state, 'city': city, 'areas': areas}
return App_Render(request, 'admin/admin_locus_added_1.html', data)
@App_AdminRequired
def category_view(request, query=''):
print('query : '+query)
params = query.rstrip('/').split('/')
length = len(params)
print(params)
print('length : '+str(length))
name = "All"
if length > 0 and params[0] != '':
name = params[length - 1]
categories = CategoryModel.fetch_children(name)
data = {'title': 'MyAdmin', 'categories': categories}
return App_Render(request, 'admin/admin_category_1.html', data)
@App_AdminRequired
def category_add_view0(request):
base_cat = gCategories[0]['sub']
print(len(base_cat))
data = {'title': 'MyAdmin', 'categories': base_cat}
return App_Render(request, 'admin/admin_category_add_1.html', data)
@App_AdminRequired
def category_add_view1(request, params, length):
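    # Walk the gCategories tree along the path given in params, building
    # breadcrumb nav links and the list of sub-categories (or the single
    # category) to render.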
print(request)
index = 0
cat_list = gCategories
while index < length:
for cat in cat_list:
if cat['name'] == params[index]:
if 'sub' in cat:
cat_list = cat['sub']
else:
print('No more subcategories, jump to root')
cat_list = cat
index = length
break
index = index + 1
nav_links = []
url = '/myadmin/category-add/'
for param in params:
print('param : '+param)
url += param + "/"
nav_links.append({'text': param, 'href': url})
data = {}
if type(cat_list) is list:
categories = []
desired_attrs = ['name', 'desc']
for cat in cat_list:
categories.append({ key: value for key,value in cat.items() if key in desired_attrs })
print(len(categories))
print(categories)
data.update({'categories': categories})
else:
data.update({'category': cat_list})
data.update({'title': 'Add Category | MyAdmin', 'nav_links': nav_links, })
return App_Render(request, 'admin/admin_category_add_1.html', data)
@App_AdminRequired
def category_add(request, params):
insert_default_categories()
@App_AdminRequired
def category_add_view(request, query):
print('query : '+query)
params = query.rstrip('/').split('/')
length = len(params)
print(params)
print('length : '+str(length))
command = request.GET.get('command', '')
if command == 'Add':
category_add(request, params)
if params[0] == '':
        params[0] = 'All'
return category_add_view1(request, params, length)
@App_AdminRequired
def messages_view(request):
print('chaum executing this')
messages = PublicMessageModel.fetch_all()
data = {'title': 'Messages', 'messages': messages}
return App_Render(request, 'admin/admin_message_1.html', data)
| apache-2.0 | -4,578,698,669,415,442,000 | 32.165919 | 111 | 0.636966 | false |
uqyge/combustionML | FPV_ANN_pureResNet/data_reader_2.py | 1 | 5981 | import numpy as np
import pandas as pd
from sklearn.preprocessing import MaxAbsScaler, MinMaxScaler, StandardScaler
class data_scaler(object):
def __init__(self):
self.norm = None
self.norm_1 = None
self.std = None
self.case = None
self.scale = 1
self.bias = 1e-20
# self.bias = 1
self.switcher = {
'min_std': 'min_std',
'std2': 'std2',
'std_min': 'std_min',
'min': 'min',
'no': 'no',
'log': 'log',
'log_min': 'log_min',
'log2': 'log2',
'tan': 'tan'
}
def fit_transform(self, input_data, case):
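        # Fit the scaler(s) selected by `case` (e.g. 'min_std', 'std2', 'log',
        # 'tan') on input_data and return the transformed array.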
self.case = case
if self.switcher.get(self.case) == 'min_std':
self.norm = MinMaxScaler()
self.std = StandardScaler()
out = self.norm.fit_transform(input_data)
out = self.std.fit_transform(out)
if self.switcher.get(self.case) == 'std2':
self.std = StandardScaler()
out = self.std.fit_transform(input_data)
if self.switcher.get(self.case) == 'std_min':
self.norm = MinMaxScaler()
self.std = StandardScaler()
out = self.std.fit_transform(input_data)
out = self.norm.fit_transform(out)
if self.switcher.get(self.case) == 'min':
self.norm = MinMaxScaler()
out = self.norm.fit_transform(input_data)
if self.switcher.get(self.case) == 'no':
self.norm = MinMaxScaler()
self.std = StandardScaler()
out = input_data
if self.switcher.get(self.case) == 'log':
out = - np.log(np.asarray(input_data / self.scale) + self.bias)
self.std = StandardScaler()
out = self.std.fit_transform(out)
if self.switcher.get(self.case) == 'log_min':
out = - np.log(np.asarray(input_data / self.scale) + self.bias)
self.norm = MinMaxScaler()
out = self.norm.fit_transform(out)
if self.switcher.get(self.case) == 'log2':
self.norm = MinMaxScaler()
self.norm_1 = MinMaxScaler()
out = self.norm.fit_transform(input_data)
out = np.log(np.asarray(out) + self.bias)
out = self.norm_1.fit_transform(out)
if self.switcher.get(self.case) == 'tan':
self.norm = MaxAbsScaler()
self.std = StandardScaler()
out = self.std.fit_transform(input_data)
out = self.norm.fit_transform(out)
out = np.tan(out / (2 * np.pi + self.bias))
return out
def transform(self, input_data):
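        # Apply the already-fitted scaler(s) for the stored case to new data.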
if self.switcher.get(self.case) == 'min_std':
out = self.norm.transform(input_data)
out = self.std.transform(out)
if self.switcher.get(self.case) == 'std2':
out = self.std.transform(input_data)
if self.switcher.get(self.case) == 'std_min':
out = self.std.transform(input_data)
out = self.norm.transform(out)
if self.switcher.get(self.case) == 'min':
out = self.norm.transform(input_data)
if self.switcher.get(self.case) == 'no':
out = input_data
if self.switcher.get(self.case) == 'log':
out = - np.log(np.asarray(input_data / self.scale) + self.bias)
out = self.std.transform(out)
if self.switcher.get(self.case) == 'log_min':
out = - np.log(np.asarray(input_data / self.scale) + self.bias)
out = self.norm.transform(out)
if self.switcher.get(self.case) == 'log2':
out = self.norm.transform(input_data)
out = np.log(np.asarray(out) + self.bias)
out = self.norm_1.transform(out)
if self.switcher.get(self.case) == 'tan':
out = self.std.transform(input_data)
out = self.norm.transform(out)
out = np.tan(out / (2 * np.pi + self.bias))
return out
def inverse_transform(self, input_data):
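        # Undo the scaling applied in fit_transform/transform, in reverse order.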
if self.switcher.get(self.case) == 'min_std':
out = self.std.inverse_transform(input_data)
out = self.norm.inverse_transform(out)
if self.switcher.get(self.case) == 'std2':
out = self.std.inverse_transform(input_data)
if self.switcher.get(self.case) == 'std_min':
out = self.norm.inverse_transform(input_data)
out = self.std.inverse_transform(out)
if self.switcher.get(self.case) == 'min':
out = self.norm.inverse_transform(input_data)
if self.switcher.get(self.case) == 'no':
out = input_data
if self.switcher.get(self.case) == 'log':
out = self.std.inverse_transform(input_data)
out = (np.exp(-out) - self.bias) * self.scale
if self.switcher.get(self.case) == 'log_min':
out = self.norm.inverse_transform(input_data)
out = (np.exp(-out) - self.bias) * self.scale
if self.switcher.get(self.case) == 'log2':
out = self.norm_1.inverse_transform(input_data)
out = np.exp(out) - self.bias
out = self.norm.inverse_transform(out)
if self.switcher.get(self.case) == 'tan':
out = (2 * np.pi + self.bias) * np.arctan(input_data)
out = self.norm.inverse_transform(out)
out = self.std.inverse_transform(out)
return out
def read_h5_data(fileName, input_features, labels):
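    # Read the HDF5 frame, keep rows where column 'f' < 0.45, scale inputs and
    # labels, and return them together with the raw frame and both fitted scalers.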
df = pd.read_hdf(fileName)
df = df[df['f'] < 0.45]
input_df = df[input_features]
in_scaler = data_scaler()
input_np = in_scaler.fit_transform(input_df.values, 'no')
label_df = df[labels].clip(0)
# if 'PVs' in labels:
# label_df['PVs']=np.log(label_df['PVs']+1)
out_scaler = data_scaler()
label_np = out_scaler.fit_transform(label_df.values, 'std2')
return input_np, label_np, df, in_scaler, out_scaler | mit | 5,411,136,813,735,696,000 | 33.578035 | 75 | 0.546397 | false |
BaluDontu/docker-volume-vsphere | esx_service/vsan_policy_test.py | 1 | 2572 | # Copyright 2016 VMware, Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import os, os.path
import vsan_policy
import vmdk_utils
import volume_kv
import vsan_info
class TestVsanPolicy(unittest.TestCase):
""" Test VSAN Policy code """
@unittest.skipIf(not vsan_info.get_vsan_datastore(),
"VSAN is not found - skipping vsan_info tests")
def setUp(self):
self.policy_path = os.path.join(vsan_info.get_vsan_dockvols_path(),
'policies/test_policy')
self.name = 'test_policy'
self.content = ('(("proportionalCapacity" i50) '
'("hostFailuresToTolerate" i0))')
def tearDown(self):
try:
os.remove(self.policy_path)
except:
pass
def assertPoliciesEqual(self):
with open(self.policy_path) as f:
content = f.read()
# Remove the added newline
self.assertEqual(content[:-1], self.content)
def test_create(self):
self.assertEqual(None, vsan_policy.create(self.name, self.content))
self.assertPoliciesEqual()
def test_double_create_fails(self):
self.assertEqual(None, vsan_policy.create(self.name, self.content))
self.assertNotEqual(None, vsan_policy.create(self.name, self.content))
self.assertPoliciesEqual()
def test_create_delete(self):
self.assertEqual(None, vsan_policy.create(self.name, self.content))
self.assertPoliciesEqual()
self.assertEqual(None, vsan_policy.delete(self.name))
self.assertFalse(os.path.isfile(self.policy_path))
def test_delete_nonexistent_policy_fails(self):
self.assertNotEqual(None, vsan_policy.delete(self.name))
def test_create_list(self):
self.assertEqual(None, vsan_policy.create(self.name, self.content))
policies = vsan_policy.get_policies()
self.assertTrue(self.content + '\n', policies[self.name])
if __name__ == '__main__':
volume_kv.init()
unittest.main()
| apache-2.0 | 6,275,258,985,350,038,000 | 33.756757 | 78 | 0.661353 | false |
informatik-mannheim/Moduro-CC3D | Simulation/Logger/ArrangementFitnessSteppable.py | 1 | 5298 | # Copyright 2016 the original author or authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__author__ = "Angelo Torelli, Markus Gumbel"
__copyright__ = "The authors"
__license__ = "Apache 2"
__email__ = "[email protected]"
__status__ = "Production"
from Logger.TissueFitnessSteppable import TissueFitnessSteppable
class ArrangementFitnessSteppable(TissueFitnessSteppable):
def __init__(self, simulator, model, _frequency=1):
TissueFitnessSteppable.__init__(self, simulator, model,
"FitnessArrangement.dat", _frequency)
# step is overwritten
# TODO sizes do not scale yet!
def step(self, mcs):
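        # Sample pixel columns across the lattice, reconstruct each column's
        # ordered cell layers, and score how well they match the expected
        # arrangement (stem/basal bottom, intermediate middle, umbrella top,
        # 3-7 layers in total); the mean column score is logged as fitness.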
if self.execConfig.interuptMCS(mcs):
deltaXPx = self.execConfig.calcPixelFromMuMeterMin1(20) # 20 mu m.
deltaZPx = deltaXPx
sumFitness_a = []
avgStemCellDiameterPx = \
self.execConfig.calcPixelFromMuMeterMin1(self.model.cellTypes[2].getAvgDiameter())
zRange = [0] if self.execConfig.zDimension == 1 else range(0, self.execConfig.zDimension, deltaZPx)
for z in zRange:
for x in xrange(1, self.execConfig.xDimension, deltaXPx):
cells_in_order = []
for y in xrange(3, self.execConfig.yDimension, int(avgStemCellDiameterPx / 2)):
# Gives the mode of a cell ID in a 3x3x3 pixels cube if 3D otherwise 3x3 rectangle
mode_of_cellIDs = []
for width in xrange(0, 2, 1):
for height in xrange(0, 2, 1):
depthRange = [0] if zRange.__len__() == 1 else range(0, 2, 1)
for depth in depthRange:
if self.cellField[x + width, y + height, z + depth] is not None:
mode_of_cellIDs.append(self.cellField[x + width, y + height, z + depth].id)
# If mode ID exists and in not already in cell_in_order list it will be added
if len(mode_of_cellIDs) > 0:
cellToCheck = self.attemptFetchingCellById(self.mode(mode_of_cellIDs))
exist = False
for cell in cells_in_order:
if cellToCheck.id == cell.id:
exist = True
if not exist:
cells_in_order.append(cellToCheck)
layers = len(cells_in_order)
if layers == 0:
fitness_a = 0
else:
optimumLayers = 1 if layers <= 7 and layers >= 3 else 0
if cells_in_order[layers - 1].type == self.UMBRELLA:
lastLayer = 1
layers -= 1
else:
lastLayer = 0
if cells_in_order[0].type == self.STEM or cells_in_order[0].type == self.BASAL:
firstLayer = 1
layers -= 1
else:
firstLayer = 0
layersInBetween = layers
                        for idx in range(firstLayer, len(cells_in_order) - 1 - lastLayer, 1):
                            if cells_in_order[idx].type != self.INTERMEDIATE:
                                layersInBetween -= 1
lib = 0 if layers == 0 else (layers - layersInBetween) / layers
fitness_a = 1.0 - (
(1.0 - float(firstLayer)) +
(1.0 - float(lastLayer)) +
lib +
(1.0 - float(optimumLayers))) / 4.0
sumFitness_a.append(fitness_a)
# print "!!!!!!!!!!!!!!!!! x: ", x, " steps: ", int(ratio * self.execConfig.xDimension), " fitness_a: ", fitness_a
fitness_a = sum(sumFitness_a) / len(sumFitness_a)
self._addLine(mcs, fitness_a)
def mode(self, IDs):
"""
Returns the mode of the IDs in a certain region.
:param IDs:
:return:
"""
corresponding = {}
occurances = []
for i in IDs:
count = IDs.count(i)
corresponding.update({i: count})
for i in corresponding:
freq = corresponding[i]
occurances.append(freq)
maxFreq = max(occurances)
keys = corresponding.keys()
values = corresponding.values()
index_v = values.index(maxFreq)
mode = keys[index_v]
return mode
| apache-2.0 | -6,240,534,123,263,231,000 | 45.473684 | 134 | 0.503398 | false |