Dataset schema (each record below is pipe-separated in this order):
repo_name: string (5-100 chars) | path: string (4-299 chars) | copies: string (990 distinct values) | size: string (4-7 chars) | content: string (666-1.03M chars) | license: string (15 distinct values) | hash: int64 | line_mean: float64 (3.17-100) | line_max: int64 (7-1k) | alpha_frac: float64 (0.25-0.98) | autogenerated: bool
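A minimal sketch of how records with this schema could be filtered programmatically (assuming the dump is published as a Hugging Face dataset; the dataset id and split name below are placeholders, not real identifiers):

from datasets import load_dataset

# "example/github-code-dump" is a hypothetical dataset id used only for illustration.
ds = load_dataset("example/github-code-dump", split="train")

# Keep hand-written, permissively licensed Python files; drop anything flagged as autogenerated.
subset = ds.filter(
    lambda row: row["license"] in ("mit", "bsd-3-clause", "apache-2.0")
    and not row["autogenerated"]
    and row["path"].endswith(".py")
)

for row in subset.select(range(3)):
    print(row["repo_name"], row["path"], row["size"], row["alpha_frac"])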
victorzhao/miniblink49 | v8_5_1/tools/testrunner/local/pool_unittest.py | 27 | 1252 | #!/usr/bin/env python
# Copyright 2014 the V8 project authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import unittest
from pool import Pool
def Run(x):
if x == 10:
raise Exception("Expected exception triggered by test.")
return x
class PoolTest(unittest.TestCase):
def testNormal(self):
results = set()
pool = Pool(3)
for result in pool.imap_unordered(Run, [[x] for x in range(0, 10)]):
results.add(result.value)
self.assertEquals(set(range(0, 10)), results)
def testException(self):
results = set()
pool = Pool(3)
for result in pool.imap_unordered(Run, [[x] for x in range(0, 12)]):
# Item 10 will not appear in results due to an internal exception.
results.add(result.value)
expect = set(range(0, 12))
expect.remove(10)
self.assertEquals(expect, results)
def testAdd(self):
results = set()
pool = Pool(3)
for result in pool.imap_unordered(Run, [[x] for x in range(0, 10)]):
results.add(result.value)
if result.value < 30:
pool.add([result.value + 20])
self.assertEquals(set(range(0, 10) + range(20, 30) + range(40, 50)),
results)
| gpl-3.0 | -8,542,137,178,266,970,000 | 29.536585 | 72 | 0.646166 | false |
stevenmizuno/QGIS | tests/src/python/test_layer_dependencies.py | 22 | 11160 | # -*- coding: utf-8 -*-
"""QGIS Unit tests for QgsSnappingUtils (complement to C++-based tests)
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
__author__ = 'Hugo Mercier'
__date__ = '12/07/2016'
__copyright__ = 'Copyright 2016, The QGIS Project'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
import qgis # NOQA
import os
from qgis.core import (QgsProject,
QgsVectorLayer,
QgsMapSettings,
QgsSnappingUtils,
QgsSnappingConfig,
QgsTolerance,
QgsRectangle,
QgsPointXY,
QgsFeature,
QgsGeometry,
QgsLayerDefinition,
QgsMapLayerDependency
)
from qgis.testing import start_app, unittest
from qgis.PyQt.QtCore import QSize, QPoint
import tempfile
from qgis.utils import spatialite_connect
# Convenience instances in case you may need them
start_app()
class TestLayerDependencies(unittest.TestCase):
@classmethod
def setUpClass(cls):
"""Run before all tests"""
# create a temp SpatiaLite db with a trigger
fo = tempfile.NamedTemporaryFile()
fn = fo.name
fo.close()
cls.fn = fn
con = spatialite_connect(fn)
cur = con.cursor()
cur.execute("SELECT InitSpatialMetadata(1)")
cur.execute("create table node(id integer primary key autoincrement);")
cur.execute("select AddGeometryColumn('node', 'geom', 4326, 'POINT');")
cur.execute("create table section(id integer primary key autoincrement, node1 integer, node2 integer);")
cur.execute("select AddGeometryColumn('section', 'geom', 4326, 'LINESTRING');")
cur.execute("create trigger add_nodes after insert on section begin insert into node (geom) values (st_startpoint(NEW.geom)); insert into node (geom) values (st_endpoint(NEW.geom)); end;")
cur.execute("insert into node (geom) values (geomfromtext('point(0 0)', 4326));")
cur.execute("insert into node (geom) values (geomfromtext('point(1 0)', 4326));")
cur.execute("create table node2(id integer primary key autoincrement);")
cur.execute("select AddGeometryColumn('node2', 'geom', 4326, 'POINT');")
cur.execute("create trigger add_nodes2 after insert on node begin insert into node2 (geom) values (st_translate(NEW.geom, 0.2, 0, 0)); end;")
con.commit()
con.close()
cls.pointsLayer = QgsVectorLayer("dbname='%s' table=\"node\" (geom) sql=" % fn, "points", "spatialite")
assert (cls.pointsLayer.isValid())
cls.linesLayer = QgsVectorLayer("dbname='%s' table=\"section\" (geom) sql=" % fn, "lines", "spatialite")
assert (cls.linesLayer.isValid())
cls.pointsLayer2 = QgsVectorLayer("dbname='%s' table=\"node2\" (geom) sql=" % fn, "_points2", "spatialite")
assert (cls.pointsLayer2.isValid())
QgsProject.instance().addMapLayers([cls.pointsLayer, cls.linesLayer, cls.pointsLayer2])
# save the project file
fo = tempfile.NamedTemporaryFile()
fn = fo.name
fo.close()
cls.projectFile = fn
QgsProject.instance().setFileName(cls.projectFile)
QgsProject.instance().write()
@classmethod
def tearDownClass(cls):
"""Run after all tests"""
pass
def setUp(self):
"""Run before each test."""
pass
def tearDown(self):
"""Run after each test."""
pass
def test_resetSnappingIndex(self):
self.pointsLayer.setDependencies([])
self.linesLayer.setDependencies([])
self.pointsLayer2.setDependencies([])
ms = QgsMapSettings()
ms.setOutputSize(QSize(100, 100))
ms.setExtent(QgsRectangle(0, 0, 1, 1))
self.assertTrue(ms.hasValidSettings())
u = QgsSnappingUtils()
u.setMapSettings(ms)
cfg = u.config()
cfg.setEnabled(True)
cfg.setMode(QgsSnappingConfig.AdvancedConfiguration)
cfg.setIndividualLayerSettings(self.pointsLayer,
QgsSnappingConfig.IndividualLayerSettings(True,
QgsSnappingConfig.Vertex, 20, QgsTolerance.Pixels))
u.setConfig(cfg)
m = u.snapToMap(QPoint(95, 100))
self.assertTrue(m.isValid())
self.assertTrue(m.hasVertex())
self.assertEqual(m.point(), QgsPointXY(1, 0))
f = QgsFeature(self.linesLayer.fields())
f.setId(1)
geom = QgsGeometry.fromWkt("LINESTRING(0 0,1 1)")
f.setGeometry(geom)
self.linesLayer.startEditing()
self.linesLayer.addFeatures([f])
self.linesLayer.commitChanges()
l1 = len([f for f in self.pointsLayer.getFeatures()])
self.assertEqual(l1, 4)
m = u.snapToMap(QPoint(95, 0))
# snapping not updated
self.pointsLayer.setDependencies([])
self.assertEqual(m.isValid(), False)
# set layer dependencies
self.pointsLayer.setDependencies([QgsMapLayerDependency(self.linesLayer.id())])
# add another line
f = QgsFeature(self.linesLayer.fields())
f.setId(2)
geom = QgsGeometry.fromWkt("LINESTRING(0 0,0.5 0.5)")
f.setGeometry(geom)
self.linesLayer.startEditing()
self.linesLayer.addFeatures([f])
self.linesLayer.commitChanges()
# check the snapped point is OK
m = u.snapToMap(QPoint(45, 50))
self.assertTrue(m.isValid())
self.assertTrue(m.hasVertex())
self.assertEqual(m.point(), QgsPointXY(0.5, 0.5))
self.pointsLayer.setDependencies([])
# test chained layer dependencies A -> B -> C
cfg.setIndividualLayerSettings(self.pointsLayer2,
QgsSnappingConfig.IndividualLayerSettings(True,
QgsSnappingConfig.Vertex, 20, QgsTolerance.Pixels))
u.setConfig(cfg)
self.pointsLayer.setDependencies([QgsMapLayerDependency(self.linesLayer.id())])
self.pointsLayer2.setDependencies([QgsMapLayerDependency(self.pointsLayer.id())])
# add another line
f = QgsFeature(self.linesLayer.fields())
f.setId(3)
geom = QgsGeometry.fromWkt("LINESTRING(0 0.2,0.5 0.8)")
f.setGeometry(geom)
self.linesLayer.startEditing()
self.linesLayer.addFeatures([f])
self.linesLayer.commitChanges()
# check the second snapped point is OK
m = u.snapToMap(QPoint(75, 100 - 80))
self.assertTrue(m.isValid())
self.assertTrue(m.hasVertex())
self.assertEqual(m.point(), QgsPointXY(0.7, 0.8))
self.pointsLayer.setDependencies([])
self.pointsLayer2.setDependencies([])
def test_cycleDetection(self):
self.assertTrue(self.pointsLayer.setDependencies([QgsMapLayerDependency(self.linesLayer.id())]))
self.assertFalse(self.linesLayer.setDependencies([QgsMapLayerDependency(self.pointsLayer.id())]))
self.pointsLayer.setDependencies([])
self.linesLayer.setDependencies([])
def test_layerDefinitionRewriteId(self):
tmpfile = os.path.join(tempfile.tempdir, "test.qlr")
ltr = QgsProject.instance().layerTreeRoot()
self.pointsLayer.setDependencies([QgsMapLayerDependency(self.linesLayer.id())])
QgsLayerDefinition.exportLayerDefinition(tmpfile, [ltr])
grp = ltr.addGroup("imported")
QgsLayerDefinition.loadLayerDefinition(tmpfile, QgsProject.instance(), grp)
newPointsLayer = None
newLinesLayer = None
for l in grp.findLayers():
if l.layerId().startswith('points'):
newPointsLayer = l.layer()
elif l.layerId().startswith('lines'):
newLinesLayer = l.layer()
self.assertIsNotNone(newPointsLayer)
self.assertIsNotNone(newLinesLayer)
self.assertTrue(newLinesLayer.id() in [dep.layerId() for dep in newPointsLayer.dependencies()])
self.pointsLayer.setDependencies([])
def test_signalConnection(self):
# remove all layers
QgsProject.instance().removeAllMapLayers()
# set dependencies and add back layers
self.pointsLayer = QgsVectorLayer("dbname='%s' table=\"node\" (geom) sql=" % self.fn, "points", "spatialite")
assert (self.pointsLayer.isValid())
self.linesLayer = QgsVectorLayer("dbname='%s' table=\"section\" (geom) sql=" % self.fn, "lines", "spatialite")
assert (self.linesLayer.isValid())
self.pointsLayer2 = QgsVectorLayer("dbname='%s' table=\"node2\" (geom) sql=" % self.fn, "_points2", "spatialite")
assert (self.pointsLayer2.isValid())
self.pointsLayer.setDependencies([QgsMapLayerDependency(self.linesLayer.id())])
self.pointsLayer2.setDependencies([QgsMapLayerDependency(self.pointsLayer.id())])
# this should update connections between layers
QgsProject.instance().addMapLayers([self.pointsLayer])
QgsProject.instance().addMapLayers([self.linesLayer])
QgsProject.instance().addMapLayers([self.pointsLayer2])
ms = QgsMapSettings()
ms.setOutputSize(QSize(100, 100))
ms.setExtent(QgsRectangle(0, 0, 1, 1))
self.assertTrue(ms.hasValidSettings())
u = QgsSnappingUtils()
u.setMapSettings(ms)
cfg = u.config()
cfg.setEnabled(True)
cfg.setMode(QgsSnappingConfig.AdvancedConfiguration)
cfg.setIndividualLayerSettings(self.pointsLayer,
QgsSnappingConfig.IndividualLayerSettings(True,
QgsSnappingConfig.Vertex, 20, QgsTolerance.Pixels))
cfg.setIndividualLayerSettings(self.pointsLayer2,
QgsSnappingConfig.IndividualLayerSettings(True,
QgsSnappingConfig.Vertex, 20, QgsTolerance.Pixels))
u.setConfig(cfg)
# add another line
f = QgsFeature(self.linesLayer.fields())
f.setId(4)
geom = QgsGeometry.fromWkt("LINESTRING(0.5 0.2,0.6 0)")
f.setGeometry(geom)
self.linesLayer.startEditing()
self.linesLayer.addFeatures([f])
self.linesLayer.commitChanges()
# check the second snapped point is OK
m = u.snapToMap(QPoint(75, 100 - 0))
self.assertTrue(m.isValid())
self.assertTrue(m.hasVertex())
self.assertEqual(m.point(), QgsPointXY(0.8, 0.0))
self.pointsLayer.setDependencies([])
self.pointsLayer2.setDependencies([])
if __name__ == '__main__':
unittest.main()
| gpl-2.0 | 743,857,760,091,974,800 | 41.272727 | 196 | 0.616398 | false |
sunze/py_flask | venv/lib/python3.4/site-packages/celery/concurrency/asynpool.py | 6 | 46754 | # -*- coding: utf-8 -*-
"""
celery.concurrency.asynpool
~~~~~~~~~~~~~~~~~~~~~~~~~~~
.. note::
This module will be moved soon, so don't use it directly.
Non-blocking version of :class:`multiprocessing.Pool`.
This code deals with three major challenges:
1) Starting up child processes and keeping them running.
2) Sending jobs to the processes and receiving results back.
3) Safely shutting down this system.
"""
from __future__ import absolute_import
import errno
import os
import select
import socket
import struct
import sys
import time
from collections import deque, namedtuple
from io import BytesIO
from pickle import HIGHEST_PROTOCOL
from time import sleep
from weakref import WeakValueDictionary, ref
from amqp.utils import promise
from billiard.pool import RUN, TERMINATE, ACK, NACK, WorkersJoined
from billiard import pool as _pool
from billiard.compat import buf_t, setblocking, isblocking
from billiard.einfo import ExceptionInfo
from billiard.queues import _SimpleQueue
from kombu.async import READ, WRITE, ERR
from kombu.serialization import pickle as _pickle
from kombu.utils import fxrange
from kombu.utils.compat import get_errno
from kombu.utils.eventio import SELECT_BAD_FD
from celery.five import Counter, items, string_t, text_t, values
from celery.utils.log import get_logger
from celery.utils.text import truncate
from celery.worker import state as worker_state
try:
from _billiard import read as __read__
from struct import unpack_from as _unpack_from
memoryview = memoryview
readcanbuf = True
if sys.version_info[0] == 2 and sys.version_info < (2, 7, 6):
def unpack_from(fmt, view, _unpack_from=_unpack_from): # noqa
return _unpack_from(fmt, view.tobytes()) # <- memoryview
else:
# unpack_from supports memoryview in 2.7.6 and 3.3+
unpack_from = _unpack_from # noqa
except (ImportError, NameError): # pragma: no cover
def __read__(fd, buf, size, read=os.read): # noqa
chunk = read(fd, size)
n = len(chunk)
if n != 0:
buf.write(chunk)
return n
readcanbuf = False # noqa
def unpack_from(fmt, iobuf, unpack=struct.unpack): # noqa
return unpack(fmt, iobuf.getvalue()) # <-- BytesIO
logger = get_logger(__name__)
error, debug = logger.error, logger.debug
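#: Errno values that mean the call was interrupted or would block and should simply be retried.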
UNAVAIL = frozenset([errno.EAGAIN, errno.EINTR])
#: Constant sent by child process when started (ready to accept work)
WORKER_UP = 15
#: A process must have started before this timeout (in secs.) expires.
PROC_ALIVE_TIMEOUT = 4.0
SCHED_STRATEGY_PREFETCH = 1
SCHED_STRATEGY_FAIR = 4
SCHED_STRATEGIES = {
None: SCHED_STRATEGY_PREFETCH,
'fair': SCHED_STRATEGY_FAIR,
}
RESULT_MAXLEN = 128
Ack = namedtuple('Ack', ('id', 'fd', 'payload'))
def gen_not_started(gen):
# gi_frame is None when generator stopped.
return gen.gi_frame and gen.gi_frame.f_lasti == -1
def _get_job_writer(job):
try:
writer = job._writer
except AttributeError:
pass
else:
return writer() # is a weakref
def _select(readers=None, writers=None, err=None, timeout=0):
"""Simple wrapper to :class:`~select.select`.
:param readers: Set of reader fds to test if readable.
:param writers: Set of writer fds to test if writable.
:param err: Set of fds to test for error condition.
All fd sets passed must be mutable as this function
    will remove non-working fds from them; this also means
the caller must make sure there are still fds in the sets
before calling us again.
:returns: tuple of ``(readable, writable, again)``, where
``readable`` is a set of fds that have data available for read,
``writable`` is a set of fds that is ready to be written to
and ``again`` is a flag that if set means the caller must
throw away the result and call us again.
"""
readers = set() if readers is None else readers
writers = set() if writers is None else writers
err = set() if err is None else err
try:
r, w, e = select.select(readers, writers, err, timeout)
if e:
r = list(set(r) | set(e))
return r, w, 0
except (select.error, socket.error) as exc:
if get_errno(exc) == errno.EINTR:
return [], [], 1
elif get_errno(exc) in SELECT_BAD_FD:
for fd in readers | writers | err:
try:
select.select([fd], [], [], 0)
except (select.error, socket.error) as exc:
if get_errno(exc) not in SELECT_BAD_FD:
raise
readers.discard(fd)
writers.discard(fd)
err.discard(fd)
return [], [], 1
else:
raise
def _repr_result(obj):
try:
return repr(obj)
except Exception as orig_exc:
try:
return text_t(obj)
except UnicodeDecodeError:
if isinstance(obj, string_t):
try:
return obj.decode('utf-8', errors='replace')
except Exception:
pass
return '<Unrepresentable: {0!r} (o.__repr__ returns unicode?)>'.format(
orig_exc,
)
class Worker(_pool.Worker):
"""Pool worker process."""
dead = False
def on_loop_start(self, pid):
# our version sends a WORKER_UP message when the process is ready
# to accept work, this will tell the parent that the inqueue fd
# is writable.
self.outq.put((WORKER_UP, (pid, )))
def prepare_result(self, result, maxlen=RESULT_MAXLEN, truncate=truncate):
if not isinstance(result, ExceptionInfo):
return truncate(_repr_result(result), maxlen)
return result
class ResultHandler(_pool.ResultHandler):
"""Handles messages from the pool processes."""
def __init__(self, *args, **kwargs):
self.fileno_to_outq = kwargs.pop('fileno_to_outq')
self.on_process_alive = kwargs.pop('on_process_alive')
super(ResultHandler, self).__init__(*args, **kwargs)
# add our custom message handler
self.state_handlers[WORKER_UP] = self.on_process_alive
def _recv_message(self, add_reader, fd, callback,
__read__=__read__, readcanbuf=readcanbuf,
BytesIO=BytesIO, unpack_from=unpack_from,
load=_pickle.load):
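        # Hr / Br track how many of the 4 header bytes / body_size body bytes have been read so far.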
Hr = Br = 0
if readcanbuf:
buf = bytearray(4)
bufv = memoryview(buf)
else:
buf = bufv = BytesIO()
# header
while Hr < 4:
try:
n = __read__(
fd, bufv[Hr:] if readcanbuf else bufv, 4 - Hr,
)
except OSError as exc:
if get_errno(exc) not in UNAVAIL:
raise
yield
else:
if n == 0:
raise (OSError('End of file during message') if Hr
else EOFError())
Hr += n
body_size, = unpack_from('>i', bufv)
if readcanbuf:
buf = bytearray(body_size)
bufv = memoryview(buf)
else:
buf = bufv = BytesIO()
while Br < body_size:
try:
n = __read__(
fd, bufv[Br:] if readcanbuf else bufv, body_size - Br,
)
except OSError as exc:
if get_errno(exc) not in UNAVAIL:
raise
yield
else:
if n == 0:
raise (OSError('End of file during message') if Br
else EOFError())
Br += n
add_reader(fd, self.handle_event, fd)
if readcanbuf:
message = load(BytesIO(bufv))
else:
bufv.seek(0)
message = load(bufv)
if message:
callback(message)
def _make_process_result(self, hub):
"""Coroutine that reads messages from the pool processes
and calls the appropriate handler."""
fileno_to_outq = self.fileno_to_outq
on_state_change = self.on_state_change
add_reader = hub.add_reader
remove_reader = hub.remove_reader
recv_message = self._recv_message
def on_result_readable(fileno):
try:
fileno_to_outq[fileno]
except KeyError: # process gone
return remove_reader(fileno)
it = recv_message(add_reader, fileno, on_state_change)
try:
next(it)
except StopIteration:
pass
except (IOError, OSError, EOFError):
remove_reader(fileno)
else:
add_reader(fileno, it)
return on_result_readable
def register_with_event_loop(self, hub):
self.handle_event = self._make_process_result(hub)
def handle_event(self, fileno):
raise RuntimeError('Not registered with event loop')
def on_stop_not_started(self):
"""This method is always used to stop when the helper thread is not
started."""
cache = self.cache
check_timeouts = self.check_timeouts
fileno_to_outq = self.fileno_to_outq
on_state_change = self.on_state_change
join_exited_workers = self.join_exited_workers
# flush the processes outqueues until they have all terminated.
outqueues = set(fileno_to_outq)
while cache and outqueues and self._state != TERMINATE:
if check_timeouts is not None:
# make sure tasks with a time limit will time out.
check_timeouts()
# cannot iterate and remove at the same time
pending_remove_fd = set()
for fd in outqueues:
self._flush_outqueue(
fd, pending_remove_fd.discard, fileno_to_outq,
on_state_change,
)
try:
join_exited_workers(shutdown=True)
except WorkersJoined:
return debug('result handler: all workers terminated')
outqueues.difference_update(pending_remove_fd)
def _flush_outqueue(self, fd, remove, process_index, on_state_change):
try:
proc = process_index[fd]
except KeyError:
# process already found terminated
# which means its outqueue has already been processed
# by the worker lost handler.
return remove(fd)
reader = proc.outq._reader
try:
setblocking(reader, 1)
except (OSError, IOError):
return remove(fd)
try:
if reader.poll(0):
task = reader.recv()
else:
task = None
sleep(0.5)
except (IOError, EOFError):
return remove(fd)
else:
if task:
on_state_change(task)
finally:
try:
setblocking(reader, 0)
except (OSError, IOError):
return remove(fd)
class AsynPool(_pool.Pool):
"""Pool version that uses AIO instead of helper threads."""
ResultHandler = ResultHandler
Worker = Worker
def __init__(self, processes=None, synack=False,
sched_strategy=None, *args, **kwargs):
self.sched_strategy = SCHED_STRATEGIES.get(sched_strategy,
sched_strategy)
processes = self.cpu_count() if processes is None else processes
self.synack = synack
# create queue-pairs for all our processes in advance.
self._queues = dict((self.create_process_queues(), None)
for _ in range(processes))
# inqueue fileno -> process mapping
self._fileno_to_inq = {}
# outqueue fileno -> process mapping
self._fileno_to_outq = {}
# synqueue fileno -> process mapping
self._fileno_to_synq = {}
# We keep track of processes that have not yet
# sent a WORKER_UP message. If a process fails to send
# this message within proc_up_timeout we terminate it
# and hope the next process will recover.
self._proc_alive_timeout = PROC_ALIVE_TIMEOUT
self._waiting_to_start = set()
# denormalized set of all inqueues.
self._all_inqueues = set()
# Set of fds being written to (busy)
self._active_writes = set()
# Set of active co-routines currently writing jobs.
self._active_writers = set()
# Set of fds that are busy (executing task)
self._busy_workers = set()
self._mark_worker_as_available = self._busy_workers.discard
# Holds jobs waiting to be written to child processes.
self.outbound_buffer = deque()
self.write_stats = Counter()
super(AsynPool, self).__init__(processes, *args, **kwargs)
for proc in self._pool:
# create initial mappings, these will be updated
# as processes are recycled, or found lost elsewhere.
self._fileno_to_outq[proc.outqR_fd] = proc
self._fileno_to_synq[proc.synqW_fd] = proc
self.on_soft_timeout = self._timeout_handler.on_soft_timeout
self.on_hard_timeout = self._timeout_handler.on_hard_timeout
def _event_process_exit(self, hub, fd):
# This method is called whenever the process sentinel is readable.
hub.remove(fd)
self.maintain_pool()
def register_with_event_loop(self, hub):
"""Registers the async pool with the current event loop."""
self._result_handler.register_with_event_loop(hub)
self.handle_result_event = self._result_handler.handle_event
self._create_timelimit_handlers(hub)
self._create_process_handlers(hub)
self._create_write_handlers(hub)
# Add handler for when a process exits (calls maintain_pool)
[hub.add_reader(fd, self._event_process_exit, hub, fd)
for fd in self.process_sentinels]
# Handle_result_event is called whenever one of the
# result queues are readable.
[hub.add_reader(fd, self.handle_result_event, fd)
for fd in self._fileno_to_outq]
# Timers include calling maintain_pool at a regular interval
# to be certain processes are restarted.
for handler, interval in items(self.timers):
hub.call_repeatedly(interval, handler)
hub.on_tick.add(self.on_poll_start)
def _create_timelimit_handlers(self, hub, now=time.time):
"""For async pool this sets up the handlers used
to implement time limits."""
call_later = hub.call_later
trefs = self._tref_for_id = WeakValueDictionary()
def on_timeout_set(R, soft, hard):
if soft:
trefs[R._job] = call_later(
soft, self._on_soft_timeout, R._job, soft, hard, hub,
)
elif hard:
trefs[R._job] = call_later(
hard, self._on_hard_timeout, R._job,
)
self.on_timeout_set = on_timeout_set
def _discard_tref(job):
try:
tref = trefs.pop(job)
tref.cancel()
del(tref)
except (KeyError, AttributeError):
pass # out of scope
self._discard_tref = _discard_tref
def on_timeout_cancel(R):
_discard_tref(R._job)
self.on_timeout_cancel = on_timeout_cancel
def _on_soft_timeout(self, job, soft, hard, hub, now=time.time):
# only used by async pool.
if hard:
self._tref_for_id[job] = hub.call_at(
now() + (hard - soft), self._on_hard_timeout, job,
)
try:
result = self._cache[job]
except KeyError:
pass # job ready
else:
self.on_soft_timeout(result)
finally:
if not hard:
# remove tref
self._discard_tref(job)
def _on_hard_timeout(self, job):
# only used by async pool.
try:
result = self._cache[job]
except KeyError:
pass # job ready
else:
self.on_hard_timeout(result)
finally:
# remove tref
self._discard_tref(job)
def on_job_ready(self, job, i, obj, inqW_fd):
self._mark_worker_as_available(inqW_fd)
def _create_process_handlers(self, hub, READ=READ, ERR=ERR):
"""For async pool this will create the handlers called
        when a process comes up or goes down, etc."""
add_reader, remove_reader, remove_writer = (
hub.add_reader, hub.remove_reader, hub.remove_writer,
)
cache = self._cache
all_inqueues = self._all_inqueues
fileno_to_inq = self._fileno_to_inq
fileno_to_outq = self._fileno_to_outq
fileno_to_synq = self._fileno_to_synq
busy_workers = self._busy_workers
event_process_exit = self._event_process_exit
handle_result_event = self.handle_result_event
process_flush_queues = self.process_flush_queues
waiting_to_start = self._waiting_to_start
def verify_process_alive(proc):
if proc._is_alive() and proc in waiting_to_start:
assert proc.outqR_fd in fileno_to_outq
assert fileno_to_outq[proc.outqR_fd] is proc
assert proc.outqR_fd in hub.readers
error('Timed out waiting for UP message from %r', proc)
os.kill(proc.pid, 9)
def on_process_up(proc):
"""Called when a process has started."""
# If we got the same fd as a previous process then we will also
# receive jobs in the old buffer, so we need to reset the
# job._write_to and job._scheduled_for attributes used to recover
# message boundaries when processes exit.
infd = proc.inqW_fd
for job in values(cache):
if job._write_to and job._write_to.inqW_fd == infd:
job._write_to = proc
if job._scheduled_for and job._scheduled_for.inqW_fd == infd:
job._scheduled_for = proc
fileno_to_outq[proc.outqR_fd] = proc
# maintain_pool is called whenever a process exits.
add_reader(
proc.sentinel, event_process_exit, hub, proc.sentinel,
)
assert not isblocking(proc.outq._reader)
# handle_result_event is called when the processes outqueue is
# readable.
add_reader(proc.outqR_fd, handle_result_event, proc.outqR_fd)
waiting_to_start.add(proc)
hub.call_later(
self._proc_alive_timeout, verify_process_alive, proc,
)
self.on_process_up = on_process_up
def _remove_from_index(obj, proc, index, remove_fun, callback=None):
            # this removes the file descriptors for a process from
            # the indices. we have to make sure we don't overwrite
            # another process's fds, as the fds may be reused.
try:
fd = obj.fileno()
except (IOError, OSError):
return
try:
if index[fd] is proc:
# fd has not been reused so we can remove it from index.
index.pop(fd, None)
except KeyError:
pass
else:
remove_fun(fd)
if callback is not None:
callback(fd)
return fd
def on_process_down(proc):
"""Called when a worker process exits."""
if proc.dead:
return
process_flush_queues(proc)
_remove_from_index(
proc.outq._reader, proc, fileno_to_outq, remove_reader,
)
if proc.synq:
_remove_from_index(
proc.synq._writer, proc, fileno_to_synq, remove_writer,
)
inq = _remove_from_index(
proc.inq._writer, proc, fileno_to_inq, remove_writer,
callback=all_inqueues.discard,
)
if inq:
busy_workers.discard(inq)
remove_reader(proc.sentinel)
waiting_to_start.discard(proc)
self._active_writes.discard(proc.inqW_fd)
remove_writer(proc.inqW_fd)
remove_reader(proc.outqR_fd)
if proc.synqR_fd:
remove_reader(proc.synqR_fd)
if proc.synqW_fd:
self._active_writes.discard(proc.synqW_fd)
remove_reader(proc.synqW_fd)
self.on_process_down = on_process_down
def _create_write_handlers(self, hub,
pack=struct.pack, dumps=_pickle.dumps,
protocol=HIGHEST_PROTOCOL):
"""For async pool this creates the handlers used to write data to
child processes."""
fileno_to_inq = self._fileno_to_inq
fileno_to_synq = self._fileno_to_synq
outbound = self.outbound_buffer
pop_message = outbound.popleft
append_message = outbound.append
put_back_message = outbound.appendleft
all_inqueues = self._all_inqueues
active_writes = self._active_writes
active_writers = self._active_writers
busy_workers = self._busy_workers
diff = all_inqueues.difference
add_writer = hub.add_writer
hub_add, hub_remove = hub.add, hub.remove
mark_write_fd_as_active = active_writes.add
mark_write_gen_as_active = active_writers.add
mark_worker_as_busy = busy_workers.add
write_generator_done = active_writers.discard
get_job = self._cache.__getitem__
write_stats = self.write_stats
is_fair_strategy = self.sched_strategy == SCHED_STRATEGY_FAIR
revoked_tasks = worker_state.revoked
getpid = os.getpid
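        # Pre-serialize the ACK/NACK payloads once; send_ack reuses them for every synack reply.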
precalc = {ACK: self._create_payload(ACK, (0, )),
NACK: self._create_payload(NACK, (0, ))}
def _put_back(job, _time=time.time):
# puts back at the end of the queue
if job._terminated is not None or \
job.correlation_id in revoked_tasks:
if not job._accepted:
job._ack(None, _time(), getpid(), None)
job._set_terminated(job._terminated)
else:
# XXX linear lookup, should find a better way,
# but this happens rarely and is here to protect against races.
if job not in outbound:
outbound.appendleft(job)
self._put_back = _put_back
# called for every event loop iteration, and if there
# are messages pending this will schedule writing one message
# by registering the 'schedule_writes' function for all currently
# inactive inqueues (not already being written to)
# consolidate means the event loop will merge them
# and call the callback once with the list writable fds as
# argument. Using this means we minimize the risk of having
# the same fd receive every task if the pipe read buffer is not
# full.
if is_fair_strategy:
def on_poll_start():
if outbound and len(busy_workers) < len(all_inqueues):
inactive = diff(active_writes)
[hub_add(fd, None, WRITE | ERR, consolidate=True)
for fd in inactive]
else:
[hub_remove(fd) for fd in diff(active_writes)]
else:
def on_poll_start(): # noqa
if outbound:
[hub_add(fd, None, WRITE | ERR, consolidate=True)
for fd in diff(active_writes)]
else:
[hub_remove(fd) for fd in diff(active_writes)]
self.on_poll_start = on_poll_start
def on_inqueue_close(fd, proc):
# Makes sure the fd is removed from tracking when
            # the connection is closed; this is essential as fds may be reused.
busy_workers.discard(fd)
try:
if fileno_to_inq[fd] is proc:
fileno_to_inq.pop(fd, None)
active_writes.discard(fd)
all_inqueues.discard(fd)
hub_remove(fd)
except KeyError:
pass
self.on_inqueue_close = on_inqueue_close
def schedule_writes(ready_fds, curindex=[0]):
# Schedule write operation to ready file descriptor.
# The file descriptor is writeable, but that does not
# mean the process is currently reading from the socket.
# The socket is buffered so writeable simply means that
# the buffer can accept at least 1 byte of data.
# This means we have to cycle between the ready fds.
# the first version used shuffle, but using i % total
# is about 30% faster with many processes. The latter
# also shows more fairness in write stats when used with
# many processes [XXX On OS X, this may vary depending
# on event loop implementation (i.e select vs epoll), so
# have to test further]
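            # curindex is a mutable default argument on purpose: it keeps the
            # round-robin cursor alive between calls to schedule_writes.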
total = len(ready_fds)
for i in range(total):
ready_fd = ready_fds[curindex[0] % total]
if ready_fd in active_writes:
# already writing to this fd
curindex[0] += 1
continue
if is_fair_strategy and ready_fd in busy_workers:
# worker is already busy with another task
curindex[0] += 1
continue
if ready_fd not in all_inqueues:
hub_remove(ready_fd)
curindex[0] += 1
continue
try:
job = pop_message()
except IndexError:
# no more messages, remove all inactive fds from the hub.
# this is important since the fds are always writeable
# as long as there's 1 byte left in the buffer, and so
# this may create a spinloop where the event loop
# always wakes up.
for inqfd in diff(active_writes):
hub_remove(inqfd)
break
else:
if not job._accepted: # job not accepted by another worker
try:
# keep track of what process the write operation
# was scheduled for.
proc = job._scheduled_for = fileno_to_inq[ready_fd]
except KeyError:
# write was scheduled for this fd but the process
# has since exited and the message must be sent to
# another process.
put_back_message(job)
curindex[0] += 1
continue
cor = _write_job(proc, ready_fd, job)
job._writer = ref(cor)
mark_write_gen_as_active(cor)
mark_write_fd_as_active(ready_fd)
mark_worker_as_busy(ready_fd)
# Try to write immediately, in case there's an error.
try:
next(cor)
except StopIteration:
pass
except OSError as exc:
if get_errno(exc) != errno.EBADF:
raise
else:
add_writer(ready_fd, cor)
curindex[0] += 1
hub.consolidate_callback = schedule_writes
def send_job(tup):
# Schedule writing job request for when one of the process
# inqueues are writable.
body = dumps(tup, protocol=protocol)
body_size = len(body)
header = pack('>I', body_size)
# index 1,0 is the job ID.
job = get_job(tup[1][0])
job._payload = buf_t(header), buf_t(body), body_size
append_message(job)
self._quick_put = send_job
def on_not_recovering(proc, fd, job):
error('Process inqueue damaged: %r %r' % (proc, proc.exitcode))
if proc._is_alive():
proc.terminate()
hub.remove(fd)
self._put_back(job)
def _write_job(proc, fd, job):
# writes job to the worker process.
# Operation must complete if more than one byte of data
# was written. If the broker connection is lost
# and no data was written the operation shall be cancelled.
header, body, body_size = job._payload
errors = 0
try:
# job result keeps track of what process the job is sent to.
job._write_to = proc
send = proc.send_job_offset
Hw = Bw = 0
# write header
while Hw < 4:
try:
Hw += send(header, Hw)
except Exception as exc:
if get_errno(exc) not in UNAVAIL:
raise
# suspend until more data
errors += 1
if errors > 100:
on_not_recovering(proc, fd, job)
raise StopIteration()
yield
else:
errors = 0
# write body
while Bw < body_size:
try:
Bw += send(body, Bw)
except Exception as exc:
if get_errno(exc) not in UNAVAIL:
raise
# suspend until more data
errors += 1
if errors > 100:
on_not_recovering(proc, fd, job)
raise StopIteration()
yield
else:
errors = 0
finally:
hub_remove(fd)
write_stats[proc.index] += 1
# message written, so this fd is now available
active_writes.discard(fd)
write_generator_done(job._writer()) # is a weakref
def send_ack(response, pid, job, fd, WRITE=WRITE, ERR=ERR):
# Only used when synack is enabled.
# Schedule writing ack response for when the fd is writeable.
msg = Ack(job, fd, precalc[response])
callback = promise(write_generator_done)
cor = _write_ack(fd, msg, callback=callback)
mark_write_gen_as_active(cor)
mark_write_fd_as_active(fd)
callback.args = (cor, )
add_writer(fd, cor)
self.send_ack = send_ack
def _write_ack(fd, ack, callback=None):
# writes ack back to the worker if synack enabled.
# this operation *MUST* complete, otherwise
# the worker process will hang waiting for the ack.
header, body, body_size = ack[2]
try:
try:
proc = fileno_to_synq[fd]
except KeyError:
# process died, we can safely discard the ack at this
# point.
raise StopIteration()
send = proc.send_syn_offset
Hw = Bw = 0
# write header
while Hw < 4:
try:
Hw += send(header, Hw)
except Exception as exc:
if get_errno(exc) not in UNAVAIL:
raise
yield
# write body
while Bw < body_size:
try:
Bw += send(body, Bw)
except Exception as exc:
if get_errno(exc) not in UNAVAIL:
raise
# suspend until more data
yield
finally:
if callback:
callback()
# message written, so this fd is now available
active_writes.discard(fd)
def flush(self):
if self._state == TERMINATE:
return
# cancel all tasks that have not been accepted so that NACK is sent.
for job in values(self._cache):
if not job._accepted:
job._cancel()
# clear the outgoing buffer as the tasks will be redelivered by
# the broker anyway.
if self.outbound_buffer:
self.outbound_buffer.clear()
self.maintain_pool()
try:
# ...but we must continue writing the payloads we already started
# to keep message boundaries.
# The messages may be NACK'ed later if synack is enabled.
if self._state == RUN:
# flush outgoing buffers
intervals = fxrange(0.01, 0.1, 0.01, repeatlast=True)
owned_by = {}
for job in values(self._cache):
writer = _get_job_writer(job)
if writer is not None:
owned_by[writer] = job
while self._active_writers:
writers = list(self._active_writers)
for gen in writers:
if (gen.__name__ == '_write_job' and
gen_not_started(gen)):
# has not started writing the job so can
# discard the task, but we must also remove
# it from the Pool._cache.
try:
job = owned_by[gen]
except KeyError:
pass
else:
# removes from Pool._cache
job.discard()
self._active_writers.discard(gen)
else:
try:
job = owned_by[gen]
except KeyError:
pass
else:
job_proc = job._write_to
if job_proc._is_alive():
self._flush_writer(job_proc, gen)
# workers may have exited in the meantime.
self.maintain_pool()
sleep(next(intervals)) # don't busyloop
finally:
self.outbound_buffer.clear()
self._active_writers.clear()
self._active_writes.clear()
self._busy_workers.clear()
def _flush_writer(self, proc, writer):
fds = set([proc.inq._writer])
try:
while fds:
if not proc._is_alive():
break # process exited
readable, writable, again = _select(
writers=fds, err=fds, timeout=0.5,
)
if not again and (writable or readable):
try:
next(writer)
except (StopIteration, OSError, IOError, EOFError):
break
finally:
self._active_writers.discard(writer)
def get_process_queues(self):
"""Get queues for a new process.
Here we will find an unused slot, as there should always
be one available when we start a new process.
"""
return next(q for q, owner in items(self._queues)
if owner is None)
def on_grow(self, n):
"""Grow the pool by ``n`` proceses."""
diff = max(self._processes - len(self._queues), 0)
if diff:
self._queues.update(
dict((self.create_process_queues(), None) for _ in range(diff))
)
def on_shrink(self, n):
"""Shrink the pool by ``n`` processes."""
pass
def create_process_queues(self):
"""Creates new in, out (and optionally syn) queues,
returned as a tuple."""
# NOTE: Pipes must be set O_NONBLOCK at creation time (the original
# fd), otherwise it will not be possible to change the flags until
# there is an actual reader/writer on the other side.
inq = _SimpleQueue(wnonblock=True)
outq = _SimpleQueue(rnonblock=True)
synq = None
assert isblocking(inq._reader)
assert not isblocking(inq._writer)
assert not isblocking(outq._reader)
assert isblocking(outq._writer)
if self.synack:
synq = _SimpleQueue(wnonblock=True)
assert isblocking(synq._reader)
assert not isblocking(synq._writer)
return inq, outq, synq
def on_process_alive(self, pid):
"""Handler called when the :const:`WORKER_UP` message is received
from a child process, which marks the process as ready
to receive work."""
try:
proc = next(w for w in self._pool if w.pid == pid)
except StopIteration:
return logger.warning('process with pid=%s already exited', pid)
assert proc.inqW_fd not in self._fileno_to_inq
assert proc.inqW_fd not in self._all_inqueues
self._waiting_to_start.discard(proc)
self._fileno_to_inq[proc.inqW_fd] = proc
self._fileno_to_synq[proc.synqW_fd] = proc
self._all_inqueues.add(proc.inqW_fd)
def on_job_process_down(self, job, pid_gone):
"""Handler called for each job when the process it was assigned to
exits."""
if job._write_to and not job._write_to._is_alive():
# job was partially written
self.on_partial_read(job, job._write_to)
elif job._scheduled_for and not job._scheduled_for._is_alive():
# job was only scheduled to be written to this process,
# but no data was sent so put it back on the outbound_buffer.
self._put_back(job)
def on_job_process_lost(self, job, pid, exitcode):
"""Handler called for each *started* job when the process it
was assigned to exited by mysterious means (error exitcodes and
signals)"""
self.mark_as_worker_lost(job, exitcode)
def human_write_stats(self):
if self.write_stats is None:
return 'N/A'
vals = list(values(self.write_stats))
total = sum(vals)
def per(v, total):
return '{0:.2f}%'.format((float(v) / total) * 100.0 if v else 0)
return {
'total': total,
'avg': per(total / len(self.write_stats) if total else 0, total),
'all': ', '.join(per(v, total) for v in vals),
'raw': ', '.join(map(str, vals)),
'inqueues': {
'total': len(self._all_inqueues),
'active': len(self._active_writes),
}
}
def _process_cleanup_queues(self, proc):
"""Handler called to clean up a processes queues after process
exit."""
if not proc.dead:
try:
self._queues[self._find_worker_queues(proc)] = None
except (KeyError, ValueError):
pass
@staticmethod
def _stop_task_handler(task_handler):
"""Called at shutdown to tell processes that we are shutting down."""
for proc in task_handler.pool:
try:
setblocking(proc.inq._writer, 1)
except (OSError, IOError):
pass
else:
try:
proc.inq.put(None)
except OSError as exc:
if get_errno(exc) != errno.EBADF:
raise
def create_result_handler(self):
return super(AsynPool, self).create_result_handler(
fileno_to_outq=self._fileno_to_outq,
on_process_alive=self.on_process_alive,
)
def _process_register_queues(self, proc, queues):
"""Marks new ownership for ``queues`` so that the fileno indices are
updated."""
assert queues in self._queues
b = len(self._queues)
self._queues[queues] = proc
assert b == len(self._queues)
def _find_worker_queues(self, proc):
"""Find the queues owned by ``proc``."""
try:
return next(q for q, owner in items(self._queues)
if owner == proc)
except StopIteration:
raise ValueError(proc)
def _setup_queues(self):
# this is only used by the original pool which uses a shared
# queue for all processes.
        # these attributes make no sense for us, but we will still
# have to initialize them.
self._inqueue = self._outqueue = \
self._quick_put = self._quick_get = self._poll_result = None
def process_flush_queues(self, proc):
"""Flushes all queues, including the outbound buffer, so that
all tasks that have not been started will be discarded.
In Celery this is called whenever the transport connection is lost
(consumer restart).
"""
resq = proc.outq._reader
on_state_change = self._result_handler.on_state_change
fds = set([resq])
while fds and not resq.closed and self._state != TERMINATE:
readable, _, again = _select(fds, None, fds, timeout=0.01)
if readable:
try:
task = resq.recv()
except (OSError, IOError, EOFError) as exc:
if get_errno(exc) == errno.EINTR:
continue
elif get_errno(exc) == errno.EAGAIN:
break
else:
debug('got %r while flushing process %r',
exc, proc, exc_info=1)
if get_errno(exc) not in UNAVAIL:
debug('got %r while flushing process %r',
exc, proc, exc_info=1)
break
else:
if task is None:
debug('got sentinel while flushing process %r', proc)
break
else:
on_state_change(task)
else:
break
def on_partial_read(self, job, proc):
"""Called when a job was only partially written to a child process
and it exited."""
# worker terminated by signal:
# we cannot reuse the sockets again, because we don't know if
        # the process wrote/read anything from them, and if so we cannot
# restore the message boundaries.
if not job._accepted:
# job was not acked, so find another worker to send it to.
self._put_back(job)
writer = _get_job_writer(job)
if writer:
self._active_writers.discard(writer)
del(writer)
if not proc.dead:
proc.dead = True
# Replace queues to avoid reuse
before = len(self._queues)
try:
queues = self._find_worker_queues(proc)
if self.destroy_queues(queues, proc):
self._queues[self.create_process_queues()] = None
except ValueError:
pass
assert len(self._queues) == before
def destroy_queues(self, queues, proc):
"""Destroy queues that can no longer be used, so that they
be replaced by new sockets."""
assert not proc._is_alive()
self._waiting_to_start.discard(proc)
removed = 1
try:
self._queues.pop(queues)
except KeyError:
removed = 0
try:
self.on_inqueue_close(queues[0]._writer.fileno(), proc)
except IOError:
pass
for queue in queues:
if queue:
for sock in (queue._reader, queue._writer):
if not sock.closed:
try:
sock.close()
except (IOError, OSError):
pass
return removed
def _create_payload(self, type_, args,
dumps=_pickle.dumps, pack=struct.pack,
protocol=HIGHEST_PROTOCOL):
body = dumps((type_, args), protocol=protocol)
size = len(body)
header = pack('>I', size)
return header, body, size
@classmethod
def _set_result_sentinel(cls, _outqueue, _pool):
# unused
pass
def _help_stuff_finish_args(self):
# Pool._help_stuff_finished is a classmethod so we have to use this
# trick to modify the arguments passed to it.
return (self._pool, )
@classmethod
def _help_stuff_finish(cls, pool):
debug(
'removing tasks from inqueue until task handler finished',
)
fileno_to_proc = {}
inqR = set()
for w in pool:
try:
fd = w.inq._reader.fileno()
inqR.add(fd)
fileno_to_proc[fd] = w
except IOError:
pass
while inqR:
readable, _, again = _select(inqR, timeout=0.5)
if again:
continue
if not readable:
break
for fd in readable:
fileno_to_proc[fd].inq._reader.recv()
sleep(0)
@property
def timers(self):
return {self.maintain_pool: 5.0}
| mit | -3,831,370,166,860,871,700 | 36.523274 | 79 | 0.52573 | false |
JieweiWei/googletest | test/gtest_catch_exceptions_test.py | 2139 | 9901 | #!/usr/bin/env python
#
# Copyright 2010 Google Inc. All Rights Reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Tests Google Test's exception catching behavior.
This script invokes gtest_catch_exceptions_test_ and
gtest_catch_exceptions_ex_test_ (programs written with
Google Test) and verifies their output.
"""
__author__ = '[email protected] (Vlad Losev)'
import os
import gtest_test_utils
# Constants.
FLAG_PREFIX = '--gtest_'
LIST_TESTS_FLAG = FLAG_PREFIX + 'list_tests'
NO_CATCH_EXCEPTIONS_FLAG = FLAG_PREFIX + 'catch_exceptions=0'
FILTER_FLAG = FLAG_PREFIX + 'filter'
# Path to the gtest_catch_exceptions_ex_test_ binary, compiled with
# exceptions enabled.
EX_EXE_PATH = gtest_test_utils.GetTestExecutablePath(
'gtest_catch_exceptions_ex_test_')
# Path to the gtest_catch_exceptions_test_ binary, compiled with
# exceptions disabled.
EXE_PATH = gtest_test_utils.GetTestExecutablePath(
'gtest_catch_exceptions_no_ex_test_')
environ = gtest_test_utils.environ
SetEnvVar = gtest_test_utils.SetEnvVar
# Tests in this file run a Google-Test-based test program and expect it
# to terminate prematurely. Therefore they are incompatible with
# the premature-exit-file protocol by design. Unset the
# premature-exit filepath to prevent Google Test from creating
# the file.
SetEnvVar(gtest_test_utils.PREMATURE_EXIT_FILE_ENV_VAR, None)
TEST_LIST = gtest_test_utils.Subprocess(
[EXE_PATH, LIST_TESTS_FLAG], env=environ).output
SUPPORTS_SEH_EXCEPTIONS = 'ThrowsSehException' in TEST_LIST
if SUPPORTS_SEH_EXCEPTIONS:
BINARY_OUTPUT = gtest_test_utils.Subprocess([EXE_PATH], env=environ).output
EX_BINARY_OUTPUT = gtest_test_utils.Subprocess(
[EX_EXE_PATH], env=environ).output
# The tests.
if SUPPORTS_SEH_EXCEPTIONS:
# pylint:disable-msg=C6302
class CatchSehExceptionsTest(gtest_test_utils.TestCase):
"""Tests exception-catching behavior."""
def TestSehExceptions(self, test_output):
self.assert_('SEH exception with code 0x2a thrown '
'in the test fixture\'s constructor'
in test_output)
self.assert_('SEH exception with code 0x2a thrown '
'in the test fixture\'s destructor'
in test_output)
self.assert_('SEH exception with code 0x2a thrown in SetUpTestCase()'
in test_output)
self.assert_('SEH exception with code 0x2a thrown in TearDownTestCase()'
in test_output)
self.assert_('SEH exception with code 0x2a thrown in SetUp()'
in test_output)
self.assert_('SEH exception with code 0x2a thrown in TearDown()'
in test_output)
self.assert_('SEH exception with code 0x2a thrown in the test body'
in test_output)
def testCatchesSehExceptionsWithCxxExceptionsEnabled(self):
self.TestSehExceptions(EX_BINARY_OUTPUT)
def testCatchesSehExceptionsWithCxxExceptionsDisabled(self):
self.TestSehExceptions(BINARY_OUTPUT)
class CatchCxxExceptionsTest(gtest_test_utils.TestCase):
"""Tests C++ exception-catching behavior.
Tests in this test case verify that:
* C++ exceptions are caught and logged as C++ (not SEH) exceptions
    * Exceptions thrown affect the remainder of the test work flow in the
expected manner.
"""
def testCatchesCxxExceptionsInFixtureConstructor(self):
self.assert_('C++ exception with description '
'"Standard C++ exception" thrown '
'in the test fixture\'s constructor'
in EX_BINARY_OUTPUT)
self.assert_('unexpected' not in EX_BINARY_OUTPUT,
'This failure belongs in this test only if '
'"CxxExceptionInConstructorTest" (no quotes) '
'appears on the same line as words "called unexpectedly"')
if ('CxxExceptionInDestructorTest.ThrowsExceptionInDestructor' in
EX_BINARY_OUTPUT):
def testCatchesCxxExceptionsInFixtureDestructor(self):
self.assert_('C++ exception with description '
'"Standard C++ exception" thrown '
'in the test fixture\'s destructor'
in EX_BINARY_OUTPUT)
self.assert_('CxxExceptionInDestructorTest::TearDownTestCase() '
'called as expected.'
in EX_BINARY_OUTPUT)
def testCatchesCxxExceptionsInSetUpTestCase(self):
self.assert_('C++ exception with description "Standard C++ exception"'
' thrown in SetUpTestCase()'
in EX_BINARY_OUTPUT)
self.assert_('CxxExceptionInConstructorTest::TearDownTestCase() '
'called as expected.'
in EX_BINARY_OUTPUT)
self.assert_('CxxExceptionInSetUpTestCaseTest constructor '
'called as expected.'
in EX_BINARY_OUTPUT)
self.assert_('CxxExceptionInSetUpTestCaseTest destructor '
'called as expected.'
in EX_BINARY_OUTPUT)
self.assert_('CxxExceptionInSetUpTestCaseTest::SetUp() '
'called as expected.'
in EX_BINARY_OUTPUT)
self.assert_('CxxExceptionInSetUpTestCaseTest::TearDown() '
'called as expected.'
in EX_BINARY_OUTPUT)
self.assert_('CxxExceptionInSetUpTestCaseTest test body '
'called as expected.'
in EX_BINARY_OUTPUT)
def testCatchesCxxExceptionsInTearDownTestCase(self):
self.assert_('C++ exception with description "Standard C++ exception"'
' thrown in TearDownTestCase()'
in EX_BINARY_OUTPUT)
def testCatchesCxxExceptionsInSetUp(self):
self.assert_('C++ exception with description "Standard C++ exception"'
' thrown in SetUp()'
in EX_BINARY_OUTPUT)
self.assert_('CxxExceptionInSetUpTest::TearDownTestCase() '
'called as expected.'
in EX_BINARY_OUTPUT)
self.assert_('CxxExceptionInSetUpTest destructor '
'called as expected.'
in EX_BINARY_OUTPUT)
self.assert_('CxxExceptionInSetUpTest::TearDown() '
'called as expected.'
in EX_BINARY_OUTPUT)
self.assert_('unexpected' not in EX_BINARY_OUTPUT,
'This failure belongs in this test only if '
'"CxxExceptionInSetUpTest" (no quotes) '
'appears on the same line as words "called unexpectedly"')
def testCatchesCxxExceptionsInTearDown(self):
self.assert_('C++ exception with description "Standard C++ exception"'
' thrown in TearDown()'
in EX_BINARY_OUTPUT)
self.assert_('CxxExceptionInTearDownTest::TearDownTestCase() '
'called as expected.'
in EX_BINARY_OUTPUT)
self.assert_('CxxExceptionInTearDownTest destructor '
'called as expected.'
in EX_BINARY_OUTPUT)
def testCatchesCxxExceptionsInTestBody(self):
self.assert_('C++ exception with description "Standard C++ exception"'
' thrown in the test body'
in EX_BINARY_OUTPUT)
self.assert_('CxxExceptionInTestBodyTest::TearDownTestCase() '
'called as expected.'
in EX_BINARY_OUTPUT)
self.assert_('CxxExceptionInTestBodyTest destructor '
'called as expected.'
in EX_BINARY_OUTPUT)
self.assert_('CxxExceptionInTestBodyTest::TearDown() '
'called as expected.'
in EX_BINARY_OUTPUT)
def testCatchesNonStdCxxExceptions(self):
self.assert_('Unknown C++ exception thrown in the test body'
in EX_BINARY_OUTPUT)
def testUnhandledCxxExceptionsAbortTheProgram(self):
# Filters out SEH exception tests on Windows. Unhandled SEH exceptions
# cause tests to show pop-up windows there.
FITLER_OUT_SEH_TESTS_FLAG = FILTER_FLAG + '=-*Seh*'
# By default, Google Test doesn't catch the exceptions.
uncaught_exceptions_ex_binary_output = gtest_test_utils.Subprocess(
[EX_EXE_PATH,
NO_CATCH_EXCEPTIONS_FLAG,
FITLER_OUT_SEH_TESTS_FLAG],
env=environ).output
self.assert_('Unhandled C++ exception terminating the program'
in uncaught_exceptions_ex_binary_output)
self.assert_('unexpected' not in uncaught_exceptions_ex_binary_output)
if __name__ == '__main__':
gtest_test_utils.Main()
| bsd-3-clause | 8,499,911,459,083,694,000 | 40.776371 | 78 | 0.669326 | false |
vmturbo/nova | nova/tests/functional/regressions/test_bug_1522536.py | 4 | 2538 | # Copyright 2016 HPE, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import nova.scheduler.utils
import nova.servicegroup
from nova import test
from nova.tests import fixtures as nova_fixtures
from nova.tests.functional.api import client
from nova.tests.unit import cast_as_call
import nova.tests.unit.image.fake
from nova.tests.unit import policy_fixture
class TestServerGet(test.TestCase):
REQUIRES_LOCKING = True
def setUp(self):
super(TestServerGet, self).setUp()
self.useFixture(policy_fixture.RealPolicyFixture())
self.useFixture(nova_fixtures.NeutronFixture(self))
api_fixture = self.useFixture(nova_fixtures.OSAPIFixture(
api_version='v2.1'))
self.api = api_fixture.api
# the image fake backend needed for image discovery
nova.tests.unit.image.fake.stub_out_image_service(self)
self.start_service('conductor')
self.flags(driver='chance_scheduler', group='scheduler')
self.start_service('scheduler')
self.compute = self.start_service('compute')
self.useFixture(cast_as_call.CastAsCall(self.stubs))
self.addCleanup(nova.tests.unit.image.fake.FakeImageService_reset)
self.image_id = self.api.get_images()[0]['id']
self.flavor_id = self.api.get_flavors()[0]['id']
def test_id_overlap(self):
"""Regression test for bug #1522536.
Before fixing this bug, getting a numeric id caused a 500
error because it treated the numeric value as the db index,
fetched the server, but then processing of extensions blew up.
Since we have fixed this bug it returns a 404, which is
expected. In future a 400 might be more appropriate.
"""
server = dict(name='server1',
imageRef=self.image_id,
flavorRef=self.flavor_id)
self.api.post_server({'server': server})
self.assertRaises(client.OpenStackApiNotFoundException,
self.api.get_server, 1)
| apache-2.0 | 8,155,192,952,797,554,000 | 37.454545 | 75 | 0.687155 | false |
akretion/odoo | addons/website_hr_recruitment/models/hr_recruitment.py | 10 | 2093 | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from werkzeug import urls
from odoo import api, fields, models
from odoo.tools.translate import html_translate
class RecruitmentSource(models.Model):
_inherit = 'hr.recruitment.source'
url = fields.Char(compute='_compute_url', string='Url Parameters')
@api.one
@api.depends('source_id', 'source_id.name', 'job_id')
def _compute_url(self):
base_url = self.env['ir.config_parameter'].sudo().get_param('web.base.url')
for source in self:
source.url = urls.url_join(base_url, "%s?%s" % (source.job_id.website_url,
urls.url_encode({
'utm_campaign': self.env.ref('hr_recruitment.utm_campaign_job').name,
'utm_medium': self.env.ref('utm.utm_medium_website').name,
'utm_source': source.source_id.name
})
))
class Applicant(models.Model):
_inherit = 'hr.applicant'
def website_form_input_filter(self, request, values):
if 'partner_name' in values:
values.setdefault('name', '%s\'s Application' % values['partner_name'])
return values
class Job(models.Model):
_name = 'hr.job'
_inherit = ['hr.job', 'website.seo.metadata', 'website.published.multi.mixin']
def _get_default_website_description(self):
default_description = self.env["ir.model.data"].xmlid_to_object("website_hr_recruitment.default_website_description")
return (default_description.render() if default_description else "")
website_description = fields.Html('Website description', translate=html_translate, sanitize_attributes=False, default=_get_default_website_description, prefetch=False)
@api.multi
def _compute_website_url(self):
super(Job, self)._compute_website_url()
for job in self:
job.website_url = "/jobs/detail/%s" % job.id
@api.multi
def set_open(self):
self.write({'website_published': False})
return super(Job, self).set_open()
| agpl-3.0 | 318,444,764,145,407,700 | 34.474576 | 171 | 0.636407 | false |
gregbuehler/ansible-modules-extras | packaging/pacman.py | 22 | 7083 | #!/usr/bin/python -tt
# -*- coding: utf-8 -*-
# (c) 2012, Afterburn <http://github.com/afterburn>
# (c) 2013, Aaron Bull Schaefer <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: pacman
short_description: Manage packages with I(pacman)
description:
- Manage packages with the I(pacman) package manager, which is used by
Arch Linux and its variants.
version_added: "1.0"
author: Afterburn
notes: []
requirements: []
options:
name:
description:
- Name of the package to install, upgrade, or remove.
required: false
default: null
state:
description:
- Desired state of the package.
required: false
default: "present"
choices: ["present", "absent"]
recurse:
description:
- When removing a package, also remove its dependencies, provided
that they are not required by other packages and were not
explicitly installed by a user.
required: false
default: "no"
choices: ["yes", "no"]
version_added: "1.3"
update_cache:
description:
- Whether or not to refresh the master package lists. This can be
run as part of a package installation or as a separate step.
required: false
default: "no"
choices: ["yes", "no"]
'''
EXAMPLES = '''
# Install package foo
- pacman: name=foo state=present
# Remove packages foo and bar
- pacman: name=foo,bar state=absent
# Recursively remove package baz
- pacman: name=baz state=absent recurse=yes
# Run the equivalent of "pacman -Syy" as a separate step
- pacman: update_cache=yes
'''
import json
import shlex
import os
import re
import sys
PACMAN_PATH = "/usr/bin/pacman"
def query_package(module, name, state="present"):
# pacman -Q returns 0 if the package is installed,
# 1 if it is not installed
if state == "present":
cmd = "pacman -Q %s" % (name)
rc, stdout, stderr = module.run_command(cmd, check_rc=False)
if rc == 0:
return True
return False
def update_package_db(module):
cmd = "pacman -Syy"
rc, stdout, stderr = module.run_command(cmd, check_rc=False)
if rc == 0:
return True
else:
module.fail_json(msg="could not update package db")
def remove_packages(module, packages):
if module.params["recurse"]:
args = "Rs"
else:
args = "R"
remove_c = 0
    # Using a for loop in case of error, so we can report the package that failed
for package in packages:
# Query the package first, to see if we even need to remove
if not query_package(module, package):
continue
cmd = "pacman -%s %s --noconfirm" % (args, package)
rc, stdout, stderr = module.run_command(cmd, check_rc=False)
if rc != 0:
module.fail_json(msg="failed to remove %s" % (package))
remove_c += 1
if remove_c > 0:
module.exit_json(changed=True, msg="removed %s package(s)" % remove_c)
module.exit_json(changed=False, msg="package(s) already absent")
def install_packages(module, packages, package_files):
install_c = 0
for i, package in enumerate(packages):
if query_package(module, package):
continue
if package_files[i]:
params = '-U %s' % package_files[i]
else:
params = '-S %s' % package
cmd = "pacman %s --noconfirm" % (params)
rc, stdout, stderr = module.run_command(cmd, check_rc=False)
if rc != 0:
module.fail_json(msg="failed to install %s" % (package))
install_c += 1
if install_c > 0:
module.exit_json(changed=True, msg="installed %s package(s)" % (install_c))
module.exit_json(changed=False, msg="package(s) already installed")
def check_packages(module, packages, state):
would_be_changed = []
for package in packages:
installed = query_package(module, package)
if ((state == "present" and not installed) or
(state == "absent" and installed)):
would_be_changed.append(package)
if would_be_changed:
if state == "absent":
state = "removed"
module.exit_json(changed=True, msg="%s package(s) would be %s" % (
len(would_be_changed), state))
else:
        module.exit_json(changed=False, msg="package(s) already %s" % state)
def main():
module = AnsibleModule(
argument_spec = dict(
name = dict(aliases=['pkg']),
state = dict(default='present', choices=['present', 'installed', 'absent', 'removed']),
recurse = dict(default='no', choices=BOOLEANS, type='bool'),
update_cache = dict(default='no', aliases=['update-cache'], choices=BOOLEANS, type='bool')),
required_one_of = [['name', 'update_cache']],
supports_check_mode = True)
if not os.path.exists(PACMAN_PATH):
module.fail_json(msg="cannot find pacman, looking for %s" % (PACMAN_PATH))
p = module.params
# normalize the state parameter
if p['state'] in ['present', 'installed']:
p['state'] = 'present'
elif p['state'] in ['absent', 'removed']:
p['state'] = 'absent'
if p["update_cache"] and not module.check_mode:
update_package_db(module)
if not p['name']:
module.exit_json(changed=True, msg='updated the package master lists')
if p['update_cache'] and module.check_mode and not p['name']:
module.exit_json(changed=True, msg='Would have updated the package cache')
if p['name']:
pkgs = p['name'].split(',')
pkg_files = []
for i, pkg in enumerate(pkgs):
if pkg.endswith('.pkg.tar.xz'):
# The package given is a filename, extract the raw pkg name from
# it and store the filename
pkg_files.append(pkg)
pkgs[i] = re.sub('-[0-9].*$', '', pkgs[i].split('/')[-1])
else:
pkg_files.append(None)
if module.check_mode:
check_packages(module, pkgs, p['state'])
if p['state'] == 'present':
install_packages(module, pkgs, pkg_files)
elif p['state'] == 'absent':
remove_packages(module, pkgs)
# import module snippets
from ansible.module_utils.basic import *
main()
| gpl-3.0 | 952,118,532,664,916,500 | 29.269231 | 106 | 0.602711 | false |
JazzeYoung/VeryDeepAutoEncoder | pylearn2/train_extensions/window_flip.py | 41 | 7218 | """ TrainExtensions for doing random spatial windowing and flipping of an
image dataset on every epoch. TODO: fill out properly."""
import warnings
import numpy
from . import TrainExtension
from pylearn2.datasets.preprocessing import CentralWindow
from pylearn2.utils.exc import reraise_as
from pylearn2.utils.rng import make_np_rng
from pylearn2.utils import py_integer_types
try:
from ..utils._window_flip import random_window_and_flip_c01b
from ..utils._window_flip import random_window_and_flip_b01c
except ImportError:
reraise_as(ImportError("Import of Cython module failed. Please make sure "
"you have run 'python setup.py develop' in the "
"pylearn2 directory"))
__authors__ = "David Warde-Farley"
__copyright__ = "Copyright 2010-2012, Universite de Montreal"
__credits__ = ["David Warde-Farley"]
__license__ = "3-clause BSD"
__maintainer__ = "David Warde-Farley"
__email__ = "wardefar@iro"
def _zero_pad(array, amount, axes=(1, 2)):
"""
Returns a copy of <array> with zero-filled padding around the margins.
The new array has the same dimensions as the input array, except for
the dimensions given by <axes>, which are increased by 2*<amount>.
Parameters
----------
array: numpy.ndarray
The array to zero-pad.
amount: int
The number of zeros to append to the beginning and end of each dimension
in <axes>. (That axis will grow by 2*<amount>).
axes: tuple
The dimensions to pad. These are indices, not axis names like the 0, 1
in ('b', 0, 1, 'c').
"""
if amount == 0:
return array
new_shape = []
slices = []
for i, s in enumerate(array.shape):
if i in axes:
new_shape.append(s + 2 * amount)
slices.append(slice(amount, -amount))
else:
new_shape.append(s)
slices.append(slice(None))
new_shape = tuple(new_shape)
slices = tuple(slices)
new_array = numpy.zeros(new_shape, dtype=array.dtype)
new_array[slices] = array
return new_array
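# Illustrative example (comment only, not in the original module): for a
# ('b', 0, 1, 'c') batch of shape (128, 32, 32, 3), _zero_pad(batch, 4) returns
# an array of shape (128, 40, 40, 3) with each image centred and zero margins
# added along axes 1 and 2.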
class WindowAndFlip(TrainExtension):
"""
An extension that allows an image dataset to be flipped and
windowed after each epoch of training.
Parameters
----------
window_shape : WRITEME
randomize : list, optional
If specified, a list of Datasets to randomly window and
flip at each epoch.
randomize_once : list, optional
If specified, a list of Datasets to randomly window and
flip once at the start of training.
center : list, optional
If specified, a list of Datasets to centrally window
once at the start of training.
rng : numpy.random.RandomState object or seed, optional
A random number generator or seed used to create one.
Seeded deterministically by default.
pad_randomized : int, optional
Amount of padding to add to each side of the images
in `randomize` and `randomize_once`. Useful if you
want to do zero-padded windowing with `window_shape`
the actual size of the dataset, and validate/test on
full-size images instead of central patches. Default
is 0.
flip : bool, optional
Reflect images on the horizontal axis with probability
0.5. `True` by default.
"""
def __init__(self,
window_shape,
randomize=None,
randomize_once=None,
center=None,
rng=(2013, 2, 20),
pad_randomized=0,
flip=True):
self._window_shape = tuple(window_shape)
# Defined in setup(). A dict that maps Datasets in self._randomize and
# self._randomize_once to zero-padded versions of their topological
# views.
self._original = None
self._randomize = randomize if randomize else []
self._randomize_once = randomize_once if randomize_once else []
self._center = center if center else []
self._pad_randomized = pad_randomized
self._flip = flip
assert isinstance(self._randomize, list), (
"The 'randomize' parameter of WindowAndFlip should be a list")
assert isinstance(self._randomize_once, list), (
"The 'randomize_once' parameter of WindowAndFlip should be a list")
assert isinstance(self._center, list), (
"The 'center' parameter of WindowAndFlip should be a list")
assert isinstance(self._pad_randomized, py_integer_types), (
"The 'pad_randomized' parameter of WindowAndFlip should be an int")
if randomize is None and randomize_once is None and center is None:
warnings.warn(self.__class__.__name__ + " instantiated without "
"any dataset arguments, and therefore does nothing",
stacklevel=2)
self._rng = make_np_rng(rng, which_method="random_integers")
def setup(self, model, dataset, algorithm):
"""
.. todo::
WRITEME
Notes
-----
`dataset` argument is ignored
"""
dataset = None
# Central windowing of auxiliary datasets (e.g. validation sets)
preprocessor = CentralWindow(self._window_shape)
for data in self._center:
preprocessor.apply(data)
#
# Do the initial random windowing
#
randomize_now = self._randomize + self._randomize_once
# maps each dataset in randomize_now to a zero-padded topological view
# of its data.
self._original = dict((data, _zero_pad(
data.get_topological_view().astype('float32'),
self._pad_randomized))
for data in randomize_now)
# For each dataset, for each image, extract a randomly positioned and
# potentially horizontal-flipped window
self.randomize_datasets(randomize_now)
def randomize_datasets(self, datasets):
"""
Applies random translations and flips to the selected datasets.
Parameters
----------
datasets : WRITEME
"""
for dataset in datasets:
if tuple(dataset.view_converter.axes) == ('c', 0, 1, 'b'):
wf_func = random_window_and_flip_c01b
elif tuple(dataset.view_converter.axes) == ('b', 0, 1, 'c'):
wf_func = random_window_and_flip_b01c
else:
raise ValueError("Axes of dataset is not supported: %s" %
(str(dataset.view_converter.axes)))
arr = wf_func(self._original[dataset],
self._window_shape,
rng=self._rng, flip=self._flip)
dataset.set_topological_view(arr, axes=dataset.view_converter.axes)
def on_monitor(self, model, dataset, algorithm):
"""
.. todo::
WRITEME
Notes
-----
All arguments are ignored.
"""
model = None
dataset = None
algorithm = None
self.randomize_datasets(self._randomize)
| bsd-3-clause | -5,384,234,551,923,231,000 | 34.55665 | 79 | 0.598781 | false |
s7v7nislands/flask | tests/test_instance_config.py | 157 | 4365 | # -*- coding: utf-8 -*-
"""
tests.test_instance
~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2015 by the Flask Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
import os
import sys
import pytest
import flask
from flask._compat import PY2
def test_explicit_instance_paths(modules_tmpdir):
with pytest.raises(ValueError) as excinfo:
flask.Flask(__name__, instance_path='instance')
assert 'must be absolute' in str(excinfo.value)
app = flask.Flask(__name__, instance_path=str(modules_tmpdir))
assert app.instance_path == str(modules_tmpdir)
def test_main_module_paths(modules_tmpdir, purge_module):
app = modules_tmpdir.join('main_app.py')
app.write('import flask\n\napp = flask.Flask("__main__")')
purge_module('main_app')
from main_app import app
here = os.path.abspath(os.getcwd())
assert app.instance_path == os.path.join(here, 'instance')
def test_uninstalled_module_paths(modules_tmpdir, purge_module):
app = modules_tmpdir.join('config_module_app.py').write(
'import os\n'
'import flask\n'
'here = os.path.abspath(os.path.dirname(__file__))\n'
'app = flask.Flask(__name__)\n'
)
purge_module('config_module_app')
from config_module_app import app
assert app.instance_path == str(modules_tmpdir.join('instance'))
def test_uninstalled_package_paths(modules_tmpdir, purge_module):
app = modules_tmpdir.mkdir('config_package_app')
init = app.join('__init__.py')
init.write(
'import os\n'
'import flask\n'
'here = os.path.abspath(os.path.dirname(__file__))\n'
'app = flask.Flask(__name__)\n'
)
purge_module('config_package_app')
from config_package_app import app
assert app.instance_path == str(modules_tmpdir.join('instance'))
def test_installed_module_paths(modules_tmpdir, modules_tmpdir_prefix,
purge_module, site_packages, limit_loader):
site_packages.join('site_app.py').write(
'import flask\n'
'app = flask.Flask(__name__)\n'
)
purge_module('site_app')
from site_app import app
assert app.instance_path == \
modules_tmpdir.join('var').join('site_app-instance')
def test_installed_package_paths(limit_loader, modules_tmpdir,
modules_tmpdir_prefix, purge_module,
monkeypatch):
installed_path = modules_tmpdir.mkdir('path')
monkeypatch.syspath_prepend(installed_path)
app = installed_path.mkdir('installed_package')
init = app.join('__init__.py')
init.write('import flask\napp = flask.Flask(__name__)')
purge_module('installed_package')
from installed_package import app
assert app.instance_path == \
modules_tmpdir.join('var').join('installed_package-instance')
def test_prefix_package_paths(limit_loader, modules_tmpdir,
modules_tmpdir_prefix, purge_module,
site_packages):
app = site_packages.mkdir('site_package')
init = app.join('__init__.py')
init.write('import flask\napp = flask.Flask(__name__)')
purge_module('site_package')
import site_package
assert site_package.app.instance_path == \
modules_tmpdir.join('var').join('site_package-instance')
def test_egg_installed_paths(install_egg, modules_tmpdir,
modules_tmpdir_prefix):
modules_tmpdir.mkdir('site_egg').join('__init__.py').write(
'import flask\n\napp = flask.Flask(__name__)'
)
install_egg('site_egg')
try:
import site_egg
assert site_egg.app.instance_path == \
str(modules_tmpdir.join('var/').join('site_egg-instance'))
finally:
if 'site_egg' in sys.modules:
del sys.modules['site_egg']
@pytest.mark.skipif(not PY2, reason='This only works under Python 2.')
def test_meta_path_loader_without_is_package(request, modules_tmpdir):
app = modules_tmpdir.join('unimportable.py')
app.write('import flask\napp = flask.Flask(__name__)')
class Loader(object):
def find_module(self, name, path=None):
return self
sys.meta_path.append(Loader())
request.addfinalizer(sys.meta_path.pop)
with pytest.raises(AttributeError):
import unimportable
| bsd-3-clause | 28,225,350,566,165,856 | 31.574627 | 75 | 0.629095 | false |
canwe/NewsBlur | apps/feed_import/views.py | 5 | 12057 | import datetime
import pickle
import base64
import httplib2
from utils import log as logging
from oauth2client.client import OAuth2WebServerFlow, FlowExchangeError
from bson.errors import InvalidStringData
import uuid
from django.contrib.sites.models import Site
from django.contrib.auth.models import User
# from django.db import IntegrityError
from django.http import HttpResponse, HttpResponseRedirect
from django.conf import settings
from django.core.urlresolvers import reverse
from django.template import RequestContext
from django.contrib.auth import login as login_user
from django.shortcuts import render_to_response
from apps.reader.forms import SignupForm
from apps.reader.models import UserSubscription
from apps.feed_import.models import OAuthToken, GoogleReaderImporter
from apps.feed_import.models import OPMLImporter, OPMLExporter, UploadedOPML
from apps.feed_import.tasks import ProcessOPML, ProcessReaderImport, ProcessReaderStarredImport
from utils import json_functions as json
from utils.user_functions import ajax_login_required, get_user
from utils.feed_functions import TimeoutError
@ajax_login_required
def opml_upload(request):
xml_opml = None
message = "OK"
code = 1
payload = {}
if request.method == 'POST':
if 'file' in request.FILES:
logging.user(request, "~FR~SBOPML upload starting...")
file = request.FILES['file']
xml_opml = str(file.read().decode('utf-8', 'ignore'))
try:
UploadedOPML.objects.create(user_id=request.user.pk, opml_file=xml_opml)
except (UnicodeDecodeError, InvalidStringData):
folders = None
code = -1
message = "There was a Unicode decode error when reading your OPML file."
opml_importer = OPMLImporter(xml_opml, request.user)
try:
folders = opml_importer.try_processing()
except TimeoutError:
folders = None
ProcessOPML.delay(request.user.pk)
feed_count = opml_importer.count_feeds_in_opml()
logging.user(request, "~FR~SBOPML upload took too long, found %s feeds. Tasking..." % feed_count)
payload = dict(folders=folders, delayed=True, feed_count=feed_count)
code = 2
message = ""
except AttributeError:
code = -1
message = "OPML import failed. Couldn't parse XML file."
folders = None
if folders:
code = 1
feeds = UserSubscription.objects.filter(user=request.user).values()
payload = dict(folders=folders, feeds=feeds)
logging.user(request, "~FR~SBOPML Upload: ~SK%s~SN~SB~FR feeds" % (len(feeds)))
request.session['import_from_google_reader'] = False
else:
message = "Attach an .opml file."
code = -1
return HttpResponse(json.encode(dict(message=message, code=code, payload=payload)),
mimetype='text/html')
def opml_export(request):
user = get_user(request)
now = datetime.datetime.now()
if request.REQUEST.get('user_id') and user.is_staff:
user = User.objects.get(pk=request.REQUEST['user_id'])
exporter = OPMLExporter(user)
opml = exporter.process()
response = HttpResponse(opml, mimetype='text/xml')
response['Content-Disposition'] = 'attachment; filename=NewsBlur Subscriptions - %s - %s' % (
user.username,
now.strftime('%Y-%m-%d')
)
return response
def reader_authorize(request):
ip = request.META.get('HTTP_X_FORWARDED_FOR', None) or request.META.get('REMOTE_ADDR', "")
reader_importer = GoogleReaderImporter(request.user)
if reader_importer.test():
logging.user(request, "~BB~FW~SBSkipping Google Reader import, already tokened")
return render_to_response('social/social_connect.xhtml', {
}, context_instance=RequestContext(request))
domain = Site.objects.get_current().domain
STEP2_URI = "http://%s%s" % (
(domain + '.com') if not domain.endswith('.com') else domain,
reverse('google-reader-callback'),
)
FLOW = OAuth2WebServerFlow(
client_id=settings.GOOGLE_OAUTH2_CLIENTID,
client_secret=settings.GOOGLE_OAUTH2_SECRET,
scope="http://www.google.com/reader/api",
redirect_uri=STEP2_URI,
user_agent='NewsBlur Pro, www.newsblur.com',
approval_prompt="force",
)
logging.user(request, "~BB~FW~SBAuthorize Google Reader import - %s" % (
request.META.get('HTTP_X_FORWARDED_FOR', None) or request.META.get('REMOTE_ADDR', ""),
))
authorize_url = FLOW.step1_get_authorize_url(redirect_uri=STEP2_URI)
response = render_to_response('social/social_connect.xhtml', {
'next': authorize_url,
}, context_instance=RequestContext(request))
# Save request token and delete old tokens
auth_token_dict = dict()
if request.user.is_authenticated():
OAuthToken.objects.filter(user=request.user).delete()
auth_token_dict['user'] = request.user
else:
OAuthToken.objects.filter(session_id=request.session.session_key).delete()
OAuthToken.objects.filter(remote_ip=ip).delete()
auth_token_dict['uuid'] = str(uuid.uuid4())
auth_token_dict['session_id'] = request.session.session_key
auth_token_dict['remote_ip'] = ip
OAuthToken.objects.create(**auth_token_dict)
response.set_cookie('newsblur_reader_uuid', str(uuid.uuid4()))
return response
def reader_callback(request):
ip = request.META.get('HTTP_X_FORWARDED_FOR', None) or request.META.get('REMOTE_ADDR', "")
domain = Site.objects.get_current().domain
STEP2_URI = "http://%s%s" % (
(domain + '.com') if not domain.endswith('.com') else domain,
reverse('google-reader-callback'),
)
FLOW = OAuth2WebServerFlow(
client_id=settings.GOOGLE_OAUTH2_CLIENTID,
client_secret=settings.GOOGLE_OAUTH2_SECRET,
scope="http://www.google.com/reader/api",
redirect_uri=STEP2_URI,
user_agent='NewsBlur Pro, www.newsblur.com',
)
FLOW.redirect_uri = STEP2_URI
http = httplib2.Http()
http.disable_ssl_certificate_validation = True
try:
credential = FLOW.step2_exchange(request.REQUEST)
except FlowExchangeError:
logging.info(" ***> [%s] Bad token from Google Reader." % (request.user,))
return render_to_response('social/social_connect.xhtml', {
'error': 'There was an error trying to import from Google Reader. Trying again will probably fix the issue.'
}, context_instance=RequestContext(request))
user_token = None
if request.user.is_authenticated():
user_token = OAuthToken.objects.filter(user=request.user).order_by('-created_date')
if not user_token:
user_uuid = request.COOKIES.get('newsblur_reader_uuid')
if user_uuid:
user_token = OAuthToken.objects.filter(uuid=user_uuid).order_by('-created_date')
if not user_token:
session = request.session
if session.session_key:
user_token = OAuthToken.objects.filter(session_id=request.session.session_key).order_by('-created_date')
if not user_token:
user_token = OAuthToken.objects.filter(remote_ip=ip).order_by('-created_date')
if user_token:
user_token = user_token[0]
user_token.credential = base64.b64encode(pickle.dumps(credential))
user_token.session_id = request.session.session_key
user_token.save()
# Fetch imported feeds on next page load
request.session['import_from_google_reader'] = True
logging.user(request, "~BB~FW~SBFinishing Google Reader import - %s" % ip)
if request.user.is_authenticated():
return render_to_response('social/social_connect.xhtml', {}, context_instance=RequestContext(request))
return HttpResponseRedirect(reverse('import-signup'))
@json.json_view
def import_from_google_reader(request):
code = 0
feed_count = 0
starred_count = 0
delayed = False
if request.user.is_authenticated():
reader_importer = GoogleReaderImporter(request.user)
auto_active = bool(request.REQUEST.get('auto_active') or False)
try:
code = reader_importer.try_import_feeds(auto_active=auto_active)
except TimeoutError:
ProcessReaderImport.delay(request.user.pk, auto_active=auto_active)
feed_count = UserSubscription.objects.filter(user=request.user).count()
logging.user(request, "~FR~SBGoogle Reader import took too long, found %s feeds. Tasking..." % feed_count)
delayed = True
code = 2
if 'import_from_google_reader' in request.session:
del request.session['import_from_google_reader']
feed_count = UserSubscription.objects.filter(user=request.user).count()
return dict(code=code, delayed=delayed, feed_count=feed_count, starred_count=starred_count)
@json.json_view
def import_starred_stories_from_google_reader(request):
code = 0
feed_count = 0
starred_count = 0
delayed = False
if request.user.is_authenticated():
reader_importer = GoogleReaderImporter(request.user)
try:
starred_count = reader_importer.try_import_starred_stories()
except TimeoutError:
ProcessReaderStarredImport.delay(request.user.pk)
feed_count = UserSubscription.objects.filter(user=request.user).count()
logging.user(request, "~FR~SBGoogle Reader starred stories import took too long, found %s feeds, %s stories. Tasking..." % (feed_count, starred_count))
delayed = True
code = 2
feed_count = UserSubscription.objects.filter(user=request.user).count()
return dict(code=code, delayed=delayed, feed_count=feed_count, starred_count=starred_count)
def import_signup(request):
ip = request.META.get('HTTP_X_FORWARDED_FOR', None) or request.META.get('REMOTE_ADDR', "")
if request.method == "POST":
signup_form = SignupForm(prefix='signup', data=request.POST)
if signup_form.is_valid():
new_user = signup_form.save()
user_token = OAuthToken.objects.filter(user=new_user)
if not user_token:
user_uuid = request.COOKIES.get('newsblur_reader_uuid')
if user_uuid:
user_token = OAuthToken.objects.filter(uuid=user_uuid).order_by('-created_date')
if not user_token:
if request.session.session_key:
user_token = OAuthToken.objects.filter(session_id=request.session.session_key).order_by('-created_date')
if not user_token:
user_token = OAuthToken.objects.filter(remote_ip=ip).order_by('-created_date')
if user_token:
user_token = user_token[0]
user_token.session_id = request.session.session_key
user_token.user = new_user
user_token.save()
login_user(request, new_user)
if request.user.profile.is_premium:
return HttpResponseRedirect(reverse('index'))
url = "https://%s%s" % (Site.objects.get_current().domain,
reverse('stripe-form'))
return HttpResponseRedirect(url)
else:
logging.user(request, "~BR~FW ***> Can't find user token during import/signup. Re-authenticating...")
return HttpResponseRedirect(reverse('google-reader-authorize'))
else:
signup_form = SignupForm(prefix='signup')
return render_to_response('import/signup.xhtml', {
'signup_form': signup_form,
}, context_instance=RequestContext(request)) | mit | 8,419,470,520,517,388,000 | 41.457746 | 163 | 0.63855 | false |
kzvyahin/cfme_tests | utils/units.py | 1 | 3393 | # -*- coding: utf-8 -*-
import math
import re
# TODO: Split the 1000 and 1024 factor out. Now it is not an issue as it is used FOR COMPARISON ONLY
FACTOR = 1024
PREFIXES = ['', 'K', 'M', 'G', 'T', 'P']
FACTORS = {prefix: int(math.pow(FACTOR, i)) for i, prefix in enumerate(PREFIXES)}
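# e.g. FACTORS == {'': 1, 'K': 1024, 'M': 1024 ** 2, 'G': 1024 ** 3, 'T': 1024 ** 4, 'P': 1024 ** 5}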
UNITS = ['Byte', 'Bytes', 'B', 'b', 'Hz']
EQUAL_UNITS = {
'B': ('Byte', 'Bytes')
}
# Sanity check
for target_unit, units in EQUAL_UNITS.iteritems():
assert target_unit in UNITS
for unit in units:
assert unit in UNITS
REGEXP = re.compile(
r'^\s*(\d+(?:\.\d+)?)\s*({})?({})\s*$'.format('|'.join(PREFIXES), '|'.join(UNITS)))
class Unit(object):
"""This class serves for simple comparison of numbers that have units.
Imagine you pull a text value from the UI. 2 GB. By doing ``Unit.parse('2 GB')`` you get an
instance of :py:class:`Unit`, which is comparable.
You can compare two :py:class:`Unit` instances or you can compare :py:class:`Unit` with
:py:class:`int`, :py:class:`float` or any :py:class:`str` as long as it can go through the
:py:method:`Unit.parse`.
If you compare :py:class:`Unit` only (or a string that gets subsequently parsed), it also takes
the kind of the unit it is, you cannot compare bytes with hertzes. It then calculates the
absolute value in the base units and that gets compared.
    If you compare with a number, the number is treated as an absolute value in the base unit
    of the same kind, so e.g. ``Unit.parse('2 GB') == 2 * 1024 * 1024 * 1024`` is True.
"""
__slots__ = ['number', 'prefix', 'unit_type']
@classmethod
def parse(cls, s):
s = str(s)
match = REGEXP.match(s)
if match is None:
raise ValueError('{} is not a proper value to be parsed!'.format(repr(s)))
number, prefix, unit_type = match.groups()
        # Check if it isn't just another name for another unit.
for target_unit, units in EQUAL_UNITS.iteritems():
if unit_type in units:
unit_type = target_unit
return cls(float(number), prefix, unit_type)
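    # Illustrative usage (comment only):
    #   Unit.parse('2 GB') > Unit.parse('512 MB')   -> True
    #   Unit.parse('2 GB') == 2 * 1024 ** 3         -> True
    #   comparing '2 GB' with '2 GHz' raises TypeError (different unit kinds)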
def __init__(self, number, prefix, unit_type):
self.number = float(number)
self.prefix = prefix
self.unit_type = unit_type
@property
def absolute(self):
return self.number * FACTORS[self.prefix]
def _as_same_unit(self, int_or_float):
return type(self)(int_or_float, PREFIXES[0], self.unit_type)
def __cmp__(self, other):
if isinstance(other, basestring):
other = self.parse(other)
elif isinstance(other, (int, float)):
other = self._as_same_unit(other)
elif not isinstance(other, Unit):
raise TypeError('Incomparable types {} and {}'.format(type(self), type(other)))
# other is instance of this class too now
if self.unit_type != other.unit_type:
raise TypeError('Incomparable units {} and {}'.format(self.unit_type, other.unit_type))
return cmp(self.absolute, other.absolute)
def __float__(self):
return self.absolute
def __int__(self):
return int(self.absolute)
def __repr__(self):
return '{}({}, {}, {})'.format(
type(self).__name__, repr(self.number), repr(self.prefix), repr(self.unit_type))
def __str__(self):
return '{} {}{}'.format(self.number, self.prefix, self.unit_type)
| gpl-2.0 | -2,956,546,172,373,863,000 | 35.095745 | 100 | 0.609195 | false |
ptitjes/quodlibet | quodlibet/ext/editing/resub.py | 2 | 1625 | # This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
import re
from gi.repository import Gtk, GObject
from quodlibet import _
from quodlibet.plugins.editing import RenameFilesPlugin, TagsFromPathPlugin
from quodlibet.util import connect_obj
from quodlibet.qltk import Icons
class RegExpSub(Gtk.HBox, RenameFilesPlugin, TagsFromPathPlugin):
PLUGIN_ID = "Regex Substitution"
PLUGIN_NAME = _("Regex Substitution")
PLUGIN_DESC = _("Allows arbitrary regex substitutions (s///) when "
"tagging or renaming files.")
PLUGIN_ICON = Icons.EDIT_FIND_REPLACE
__gsignals__ = {
"changed": (GObject.SignalFlags.RUN_LAST, None, ())
}
active = True
def __init__(self):
super(RegExpSub, self).__init__()
self._from = Gtk.Entry()
self._to = Gtk.Entry()
self.pack_start(Gtk.Label("s/"), True, True, 0)
self.pack_start(self._from, True, True, 0)
self.pack_start(Gtk.Label("/"), True, True, 0)
self.pack_start(self._to, True, True, 0)
self.pack_start(Gtk.Label("/"), True, True, 0)
connect_obj(self._from, 'changed', self.emit, 'changed')
connect_obj(self._to, 'changed', self.emit, 'changed')
def filter(self, orig_or_tag, value):
fr = self._from.get_text()
to = self._to.get_text()
try:
return re.sub(fr, to, value)
except:
return value
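    # Illustrative example: with the "from" entry set to r'\s+' and the "to"
    # entry set to '_', a value such as 'My Song Title' becomes 'My_Song_Title'.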
| gpl-2.0 | 6,530,016,615,934,299,000 | 33.574468 | 75 | 0.634462 | false |
pku9104038/edx-platform | common/djangoapps/student/migrations/0020_add_test_center_user.py | 188 | 15924 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'TestCenterUser'
db.create_table('student_testcenteruser', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('user', self.gf('django.db.models.fields.related.ForeignKey')(default=None, to=orm['auth.User'], unique=True)),
('created_at', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, db_index=True, blank=True)),
('updated_at', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, db_index=True, blank=True)),
('user_updated_at', self.gf('django.db.models.fields.DateTimeField')(db_index=True)),
('candidate_id', self.gf('django.db.models.fields.IntegerField')(null=True, db_index=True)),
('client_candidate_id', self.gf('django.db.models.fields.CharField')(max_length=50, db_index=True)),
('first_name', self.gf('django.db.models.fields.CharField')(max_length=30, db_index=True)),
('last_name', self.gf('django.db.models.fields.CharField')(max_length=50, db_index=True)),
('middle_name', self.gf('django.db.models.fields.CharField')(max_length=30, blank=True)),
('suffix', self.gf('django.db.models.fields.CharField')(max_length=255, blank=True)),
('salutation', self.gf('django.db.models.fields.CharField')(max_length=50, blank=True)),
('address_1', self.gf('django.db.models.fields.CharField')(max_length=40)),
('address_2', self.gf('django.db.models.fields.CharField')(max_length=40, blank=True)),
('address_3', self.gf('django.db.models.fields.CharField')(max_length=40, blank=True)),
('city', self.gf('django.db.models.fields.CharField')(max_length=32, db_index=True)),
('state', self.gf('django.db.models.fields.CharField')(db_index=True, max_length=20, blank=True)),
('postal_code', self.gf('django.db.models.fields.CharField')(db_index=True, max_length=16, blank=True)),
('country', self.gf('django.db.models.fields.CharField')(max_length=3, db_index=True)),
('phone', self.gf('django.db.models.fields.CharField')(max_length=35)),
('extension', self.gf('django.db.models.fields.CharField')(db_index=True, max_length=8, blank=True)),
('phone_country_code', self.gf('django.db.models.fields.CharField')(max_length=3, db_index=True)),
('fax', self.gf('django.db.models.fields.CharField')(max_length=35, blank=True)),
('fax_country_code', self.gf('django.db.models.fields.CharField')(max_length=3, blank=True)),
('company_name', self.gf('django.db.models.fields.CharField')(max_length=50, blank=True)),
))
db.send_create_signal('student', ['TestCenterUser'])
def backwards(self, orm):
# Deleting model 'TestCenterUser'
db.delete_table('student_testcenteruser')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'about': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'avatar_type': ('django.db.models.fields.CharField', [], {'default': "'n'", 'max_length': '1'}),
'bronze': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'consecutive_days_visit_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'country': ('django_countries.fields.CountryField', [], {'max_length': '2', 'blank': 'True'}),
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'date_of_birth': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'display_tag_filter_strategy': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'email_isvalid': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'email_key': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True'}),
'email_tag_filter_strategy': ('django.db.models.fields.SmallIntegerField', [], {'default': '1'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'gold': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'gravatar': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ignored_tags': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'interesting_tags': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'location': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'new_response_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'questions_per_page': ('django.db.models.fields.SmallIntegerField', [], {'default': '10'}),
'real_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'reputation': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
'seen_response_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'show_country': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'silver': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'status': ('django.db.models.fields.CharField', [], {'default': "'w'", 'max_length': '2'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}),
'website': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'student.courseenrollment': {
'Meta': {'unique_together': "(('user', 'course_id'),)", 'object_name': 'CourseEnrollment'},
'course_id': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'student.pendingemailchange': {
'Meta': {'object_name': 'PendingEmailChange'},
'activation_key': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'new_email': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '255', 'blank': 'True'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.User']", 'unique': 'True'})
},
'student.pendingnamechange': {
'Meta': {'object_name': 'PendingNameChange'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'new_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'rationale': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'blank': 'True'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.User']", 'unique': 'True'})
},
'student.registration': {
'Meta': {'object_name': 'Registration', 'db_table': "'auth_registration'"},
'activation_key': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'unique': 'True'})
},
'student.testcenteruser': {
'Meta': {'object_name': 'TestCenterUser'},
'address_1': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'address_2': ('django.db.models.fields.CharField', [], {'max_length': '40', 'blank': 'True'}),
'address_3': ('django.db.models.fields.CharField', [], {'max_length': '40', 'blank': 'True'}),
'candidate_id': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'db_index': 'True'}),
'city': ('django.db.models.fields.CharField', [], {'max_length': '32', 'db_index': 'True'}),
'client_candidate_id': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'}),
'company_name': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
'country': ('django.db.models.fields.CharField', [], {'max_length': '3', 'db_index': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'db_index': 'True', 'blank': 'True'}),
'extension': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '8', 'blank': 'True'}),
'fax': ('django.db.models.fields.CharField', [], {'max_length': '35', 'blank': 'True'}),
'fax_country_code': ('django.db.models.fields.CharField', [], {'max_length': '3', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'}),
'middle_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'phone': ('django.db.models.fields.CharField', [], {'max_length': '35'}),
'phone_country_code': ('django.db.models.fields.CharField', [], {'max_length': '3', 'db_index': 'True'}),
'postal_code': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '16', 'blank': 'True'}),
'salutation': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
'state': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '20', 'blank': 'True'}),
'suffix': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'db_index': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['auth.User']", 'unique': 'True'}),
'user_updated_at': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'})
},
'student.userprofile': {
'Meta': {'object_name': 'UserProfile', 'db_table': "'auth_userprofile'"},
'courseware': ('django.db.models.fields.CharField', [], {'default': "'course.xml'", 'max_length': '255', 'blank': 'True'}),
'gender': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '6', 'null': 'True', 'blank': 'True'}),
'goals': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '255', 'blank': 'True'}),
'level_of_education': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '6', 'null': 'True', 'blank': 'True'}),
'location': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '255', 'blank': 'True'}),
'mailing_address': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'meta': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '255', 'blank': 'True'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'profile'", 'unique': 'True', 'to': "orm['auth.User']"}),
'year_of_birth': ('django.db.models.fields.IntegerField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'})
},
'student.usertestgroup': {
'Meta': {'object_name': 'UserTestGroup'},
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '32', 'db_index': 'True'}),
'users': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.User']", 'db_index': 'True', 'symmetrical': 'False'})
}
}
complete_apps = ['student']
| agpl-3.0 | -2,089,642,465,708,827,100 | 83.702128 | 182 | 0.563301 | false |
rickmendes/ansible-modules-extras | cloud/amazon/sns_topic.py | 33 | 13805 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# This is a free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This Ansible library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this library. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = """
module: sns_topic
short_description: Manages AWS SNS topics and subscriptions
description:
- The M(sns_topic) module allows you to create, delete, and manage subscriptions for AWS SNS topics.
version_added: 2.0
author:
- "Joel Thompson (@joelthompson)"
- "Fernando Jose Pando (@nand0p)"
options:
name:
description:
- The name or ARN of the SNS topic to converge
required: True
state:
description:
- Whether to create or destroy an SNS topic
required: False
default: present
choices: ["absent", "present"]
display_name:
description:
- Display name of the topic
required: False
default: None
policy:
description:
- Policy to apply to the SNS topic
required: False
default: None
delivery_policy:
description:
- Delivery policy to apply to the SNS topic
required: False
default: None
subscriptions:
description:
- List of subscriptions to apply to the topic. Note that AWS requires
subscriptions to be confirmed, so you will need to confirm any new
subscriptions.
required: False
default: []
purge_subscriptions:
description:
- "Whether to purge any subscriptions not listed here. NOTE: AWS does not
allow you to purge any PendingConfirmation subscriptions, so if any
exist and would be purged, they are silently skipped. This means that
somebody could come back later and confirm the subscription. Sorry.
Blame Amazon."
required: False
default: True
extends_documentation_fragment: aws
requirements: [ "boto" ]
"""
EXAMPLES = """
- name: Create alarm SNS topic
sns_topic:
name: "alarms"
state: present
display_name: "alarm SNS topic"
delivery_policy:
http:
defaultHealthyRetryPolicy:
minDelayTarget: 2
maxDelayTarget: 4
numRetries: 3
numMaxDelayRetries: 5
backoffFunction: "<linear|arithmetic|geometric|exponential>"
disableSubscriptionOverrides: True
defaultThrottlePolicy:
maxReceivesPerSecond: 10
subscriptions:
- endpoint: "[email protected]"
protocol: "email"
- endpoint: "my_mobile_number"
protocol: "sms"
"""
RETURN = '''
sns_arn:
description: The ARN of the topic you are modifying
type: string
sample: "arn:aws:sns:us-east-1:123456789012:my_topic_name"
sns_topic:
description: Dict of sns topic details
type: dict
sample:
name: sns-topic-name
state: present
display_name: default
policy: {}
delivery_policy: {}
subscriptions_new: []
subscriptions_existing: []
subscriptions_deleted: []
subscriptions_added: []
        subscriptions_purge: false
check_mode: false
topic_created: false
topic_deleted: false
attributes_set: []
'''
import sys
import time
import json
import re
try:
import boto.sns
from boto.exception import BotoServerError
HAS_BOTO = True
except ImportError:
HAS_BOTO = False
class SnsTopicManager(object):
""" Handles SNS Topic creation and destruction """
def __init__(self,
module,
name,
state,
display_name,
policy,
delivery_policy,
subscriptions,
purge_subscriptions,
check_mode,
region,
**aws_connect_params):
self.region = region
self.aws_connect_params = aws_connect_params
self.connection = self._get_boto_connection()
self.changed = False
self.module = module
self.name = name
self.state = state
self.display_name = display_name
self.policy = policy
self.delivery_policy = delivery_policy
self.subscriptions = subscriptions
self.subscriptions_existing = []
self.subscriptions_deleted = []
self.subscriptions_added = []
self.purge_subscriptions = purge_subscriptions
self.check_mode = check_mode
self.topic_created = False
self.topic_deleted = False
self.arn_topic = None
self.attributes_set = []
def _get_boto_connection(self):
try:
return connect_to_aws(boto.sns, self.region,
**self.aws_connect_params)
except BotoServerError, err:
self.module.fail_json(msg=err.message)
def _get_all_topics(self):
next_token = None
topics = []
while True:
try:
response = self.connection.get_all_topics(next_token)
except BotoServerError, err:
                self.module.fail_json(msg=err.message)
topics.extend(response['ListTopicsResponse']['ListTopicsResult']['Topics'])
next_token = response['ListTopicsResponse']['ListTopicsResult']['NextToken']
if not next_token:
break
return [t['TopicArn'] for t in topics]
def _arn_topic_lookup(self):
# topic names cannot have colons, so this captures the full topic name
all_topics = self._get_all_topics()
lookup_topic = ':%s' % self.name
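        # e.g. 'arn:aws:sns:us-east-1:123456789012:alarms' ends with ':alarms'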
for topic in all_topics:
if topic.endswith(lookup_topic):
return topic
def _create_topic(self):
self.changed = True
self.topic_created = True
if not self.check_mode:
self.connection.create_topic(self.name)
self.arn_topic = self._arn_topic_lookup()
while not self.arn_topic:
time.sleep(3)
self.arn_topic = self._arn_topic_lookup()
def _set_topic_attrs(self):
topic_attributes = self.connection.get_topic_attributes(self.arn_topic) \
['GetTopicAttributesResponse'] ['GetTopicAttributesResult'] \
['Attributes']
if self.display_name and self.display_name != topic_attributes['DisplayName']:
self.changed = True
self.attributes_set.append('display_name')
if not self.check_mode:
self.connection.set_topic_attributes(self.arn_topic, 'DisplayName',
self.display_name)
if self.policy and self.policy != json.loads(topic_attributes['Policy']):
self.changed = True
self.attributes_set.append('policy')
if not self.check_mode:
self.connection.set_topic_attributes(self.arn_topic, 'Policy',
json.dumps(self.policy))
if self.delivery_policy and ('DeliveryPolicy' not in topic_attributes or \
self.delivery_policy != json.loads(topic_attributes['DeliveryPolicy'])):
self.changed = True
self.attributes_set.append('delivery_policy')
if not self.check_mode:
self.connection.set_topic_attributes(self.arn_topic, 'DeliveryPolicy',
json.dumps(self.delivery_policy))
def _canonicalize_endpoint(self, protocol, endpoint):
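        # SMS endpoints are compared as bare digit strings, so strip everything
        # else; e.g. '+1 (555) 010-9999' becomes '15550109999'.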
if protocol == 'sms':
return re.sub('[^0-9]*', '', endpoint)
return endpoint
def _get_topic_subs(self):
next_token = None
while True:
response = self.connection.get_all_subscriptions_by_topic(self.arn_topic, next_token)
self.subscriptions_existing.extend(response['ListSubscriptionsByTopicResponse'] \
['ListSubscriptionsByTopicResult']['Subscriptions'])
next_token = response['ListSubscriptionsByTopicResponse'] \
['ListSubscriptionsByTopicResult']['NextToken']
if not next_token:
break
def _set_topic_subs(self):
subscriptions_existing_list = []
desired_subscriptions = [(sub['protocol'],
self._canonicalize_endpoint(sub['protocol'], sub['endpoint'])) for sub in
self.subscriptions]
if self.subscriptions_existing:
for sub in self.subscriptions_existing:
sub_key = (sub['Protocol'], sub['Endpoint'])
subscriptions_existing_list.append(sub_key)
if self.purge_subscriptions and sub_key not in desired_subscriptions and \
sub['SubscriptionArn'] != 'PendingConfirmation':
self.changed = True
self.subscriptions_deleted.append(sub_key)
if not self.check_mode:
self.connection.unsubscribe(sub['SubscriptionArn'])
for (protocol, endpoint) in desired_subscriptions:
if (protocol, endpoint) not in subscriptions_existing_list:
self.changed = True
                self.subscriptions_added.append((protocol, endpoint))
if not self.check_mode:
self.connection.subscribe(self.arn_topic, protocol, endpoint)
def _delete_subscriptions(self):
# NOTE: subscriptions in 'PendingConfirmation' timeout in 3 days
# https://forums.aws.amazon.com/thread.jspa?threadID=85993
for sub in self.subscriptions_existing:
if sub['SubscriptionArn'] != 'PendingConfirmation':
self.subscriptions_deleted.append(sub['SubscriptionArn'])
self.changed = True
if not self.check_mode:
self.connection.unsubscribe(sub['SubscriptionArn'])
def _delete_topic(self):
self.topic_deleted = True
self.changed = True
if not self.check_mode:
self.connection.delete_topic(self.arn_topic)
def ensure_ok(self):
self.arn_topic = self._arn_topic_lookup()
if not self.arn_topic:
self._create_topic()
self._set_topic_attrs()
self._get_topic_subs()
self._set_topic_subs()
def ensure_gone(self):
self.arn_topic = self._arn_topic_lookup()
if self.arn_topic:
self._get_topic_subs()
if self.subscriptions_existing:
self._delete_subscriptions()
self._delete_topic()
def get_info(self):
info = {
'name': self.name,
'state': self.state,
'display_name': self.display_name,
'policy': self.policy,
'delivery_policy': self.delivery_policy,
'subscriptions_new': self.subscriptions,
'subscriptions_existing': self.subscriptions_existing,
'subscriptions_deleted': self.subscriptions_deleted,
'subscriptions_added': self.subscriptions_added,
'subscriptions_purge': self.purge_subscriptions,
'check_mode': self.check_mode,
'topic_created': self.topic_created,
'topic_deleted': self.topic_deleted,
'attributes_set': self.attributes_set
}
return info
def main():
argument_spec = ec2_argument_spec()
argument_spec.update(
dict(
name=dict(type='str', required=True),
state=dict(type='str', default='present', choices=['present',
'absent']),
display_name=dict(type='str', required=False),
policy=dict(type='dict', required=False),
delivery_policy=dict(type='dict', required=False),
subscriptions=dict(default=[], type='list', required=False),
purge_subscriptions=dict(type='bool', default=True),
)
)
module = AnsibleModule(argument_spec=argument_spec,
supports_check_mode=True)
if not HAS_BOTO:
module.fail_json(msg='boto required for this module')
name = module.params.get('name')
state = module.params.get('state')
display_name = module.params.get('display_name')
policy = module.params.get('policy')
delivery_policy = module.params.get('delivery_policy')
subscriptions = module.params.get('subscriptions')
purge_subscriptions = module.params.get('purge_subscriptions')
check_mode = module.check_mode
region, ec2_url, aws_connect_params = get_aws_connection_info(module)
if not region:
module.fail_json(msg="region must be specified")
sns_topic = SnsTopicManager(module,
name,
state,
display_name,
policy,
delivery_policy,
subscriptions,
purge_subscriptions,
check_mode,
region,
**aws_connect_params)
if state == 'present':
sns_topic.ensure_ok()
elif state == 'absent':
sns_topic.ensure_gone()
sns_facts = dict(changed=sns_topic.changed,
sns_arn=sns_topic.arn_topic,
sns_topic=sns_topic.get_info())
module.exit_json(**sns_facts)
from ansible.module_utils.basic import *
from ansible.module_utils.ec2 import *
if __name__ == '__main__':
main()
| gpl-3.0 | 3,116,885,137,244,525,600 | 32.918919 | 104 | 0.596523 | false |
georchestra/cadastrapp | addons/cadastrapp/js/external/openlayers2/openlayers/tools/jsmin.py | 513 | 7471 | #!/usr/bin/python
# This code is original from jsmin by Douglas Crockford, it was translated to
# Python by Baruch Even. The original code had the following copyright and
# license.
#
# /* jsmin.c
# 2007-01-08
#
# Copyright (c) 2002 Douglas Crockford (www.crockford.com)
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of
# this software and associated documentation files (the "Software"), to deal in
# the Software without restriction, including without limitation the rights to
# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
# of the Software, and to permit persons to whom the Software is furnished to do
# so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# The Software shall be used for Good, not Evil.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# */
from StringIO import StringIO
def jsmin(js):
ins = StringIO(js)
outs = StringIO()
JavascriptMinify().minify(ins, outs)
str = outs.getvalue()
if len(str) > 0 and str[0] == '\n':
str = str[1:]
return str
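# Illustrative usage of the helper above (a sketch; the exact output depends
# on the minification rules implemented below and is not quoted verbatim):
#
#   source = "var x = 1;\n// a comment\nvar y = x + 2;\n"
#   minified = jsmin(source)  # comments and most whitespace removed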
def isAlphanum(c):
"""return true if the character is a letter, digit, underscore,
dollar sign, or non-ASCII character.
"""
return ((c >= 'a' and c <= 'z') or (c >= '0' and c <= '9') or
(c >= 'A' and c <= 'Z') or c == '_' or c == '$' or c == '\\' or (c is not None and ord(c) > 126));
class UnterminatedComment(Exception):
pass
class UnterminatedStringLiteral(Exception):
pass
class UnterminatedRegularExpression(Exception):
pass
class JavascriptMinify(object):
def _outA(self):
self.outstream.write(self.theA)
def _outB(self):
self.outstream.write(self.theB)
def _get(self):
"""return the next character from stdin. Watch out for lookahead. If
the character is a control character, translate it to a space or
linefeed.
"""
c = self.theLookahead
self.theLookahead = None
if c == None:
c = self.instream.read(1)
if c >= ' ' or c == '\n':
return c
if c == '': # EOF
return '\000'
if c == '\r':
return '\n'
return ' '
def _peek(self):
self.theLookahead = self._get()
return self.theLookahead
def _next(self):
"""get the next character, excluding comments. peek() is used to see
if a '/' is followed by a '/' or '*'.
"""
c = self._get()
if c == '/':
p = self._peek()
if p == '/':
c = self._get()
while c > '\n':
c = self._get()
return c
if p == '*':
c = self._get()
while 1:
c = self._get()
if c == '*':
if self._peek() == '/':
self._get()
return ' '
if c == '\000':
raise UnterminatedComment()
return c
def _action(self, action):
"""do something! What you do is determined by the argument:
1 Output A. Copy B to A. Get the next B.
2 Copy B to A. Get the next B. (Delete A).
3 Get the next B. (Delete B).
action treats a string as a single character. Wow!
action recognizes a regular expression if it is preceded by ( or , or =.
"""
if action <= 1:
self._outA()
if action <= 2:
self.theA = self.theB
if self.theA == "'" or self.theA == '"':
while 1:
self._outA()
self.theA = self._get()
if self.theA == self.theB:
break
if self.theA <= '\n':
raise UnterminatedStringLiteral()
if self.theA == '\\':
self._outA()
self.theA = self._get()
if action <= 3:
self.theB = self._next()
if self.theB == '/' and (self.theA == '(' or self.theA == ',' or
self.theA == '=' or self.theA == ':' or
self.theA == '[' or self.theA == '?' or
self.theA == '!' or self.theA == '&' or
self.theA == '|'):
self._outA()
self._outB()
while 1:
self.theA = self._get()
if self.theA == '/':
break
elif self.theA == '\\':
self._outA()
self.theA = self._get()
elif self.theA <= '\n':
raise UnterminatedRegularExpression()
self._outA()
self.theB = self._next()
def _jsmin(self):
"""Copy the input to the output, deleting the characters which are
insignificant to JavaScript. Comments will be removed. Tabs will be
replaced with spaces. Carriage returns will be replaced with linefeeds.
Most spaces and linefeeds will be removed.
"""
self.theA = '\n'
self._action(3)
while self.theA != '\000':
if self.theA == ' ':
if isAlphanum(self.theB):
self._action(1)
else:
self._action(2)
elif self.theA == '\n':
if self.theB in ['{', '[', '(', '+', '-']:
self._action(1)
elif self.theB == ' ':
self._action(3)
else:
if isAlphanum(self.theB):
self._action(1)
else:
self._action(2)
else:
if self.theB == ' ':
if isAlphanum(self.theA):
self._action(1)
else:
self._action(3)
elif self.theB == '\n':
if self.theA in ['}', ']', ')', '+', '-', '"', '\'']:
self._action(1)
else:
if isAlphanum(self.theA):
self._action(1)
else:
self._action(3)
else:
self._action(1)
def minify(self, instream, outstream):
self.instream = instream
self.outstream = outstream
self.theA = None
        self.theB = None
self.theLookahead = None
self._jsmin()
self.instream.close()
if __name__ == '__main__':
import sys
jsm = JavascriptMinify()
jsm.minify(sys.stdin, sys.stdout)
| gpl-3.0 | 7,599,308,147,446,390,000 | 33.587963 | 110 | 0.478517 | false |
Venturi/cms | env/lib/python2.7/site-packages/django/contrib/postgres/fields/array.py | 1 | 8579 | import json
from django.contrib.postgres import lookups
from django.contrib.postgres.forms import SimpleArrayField
from django.contrib.postgres.validators import ArrayMaxLengthValidator
from django.core import checks, exceptions
from django.db.models import Field, IntegerField, Transform
from django.db.models.lookups import Exact
from django.utils import six
from django.utils.translation import string_concat, ugettext_lazy as _
from .utils import AttributeSetter
__all__ = ['ArrayField']
class ArrayField(Field):
empty_strings_allowed = False
default_error_messages = {
'item_invalid': _('Item %(nth)s in the array did not validate: '),
'nested_array_mismatch': _('Nested arrays must have the same length.'),
}
def __init__(self, base_field, size=None, **kwargs):
self.base_field = base_field
self.size = size
if self.size:
self.default_validators = self.default_validators[:]
self.default_validators.append(ArrayMaxLengthValidator(self.size))
super(ArrayField, self).__init__(**kwargs)
def check(self, **kwargs):
errors = super(ArrayField, self).check(**kwargs)
if self.base_field.rel:
errors.append(
checks.Error(
'Base field for array cannot be a related field.',
hint=None,
obj=self,
id='postgres.E002'
)
)
else:
# Remove the field name checks as they are not needed here.
base_errors = self.base_field.check()
if base_errors:
messages = '\n '.join('%s (%s)' % (error.msg, error.id) for error in base_errors)
errors.append(
checks.Error(
'Base field for array has errors:\n %s' % messages,
hint=None,
obj=self,
id='postgres.E001'
)
)
return errors
def set_attributes_from_name(self, name):
super(ArrayField, self).set_attributes_from_name(name)
self.base_field.set_attributes_from_name(name)
@property
def description(self):
return 'Array of %s' % self.base_field.description
def db_type(self, connection):
size = self.size or ''
return '%s[%s]' % (self.base_field.db_type(connection), size)
def get_db_prep_value(self, value, connection, prepared=False):
if isinstance(value, list) or isinstance(value, tuple):
return [self.base_field.get_db_prep_value(i, connection, prepared) for i in value]
return value
def deconstruct(self):
name, path, args, kwargs = super(ArrayField, self).deconstruct()
if path == 'django.contrib.postgres.fields.array.ArrayField':
path = 'django.contrib.postgres.fields.ArrayField'
kwargs.update({
'base_field': self.base_field,
'size': self.size,
})
return name, path, args, kwargs
def to_python(self, value):
if isinstance(value, six.string_types):
# Assume we're deserializing
vals = json.loads(value)
value = [self.base_field.to_python(val) for val in vals]
return value
def value_to_string(self, obj):
values = []
vals = self._get_val_from_obj(obj)
base_field = self.base_field
for val in vals:
obj = AttributeSetter(base_field.attname, val)
values.append(base_field.value_to_string(obj))
return json.dumps(values)
def get_transform(self, name):
transform = super(ArrayField, self).get_transform(name)
if transform:
return transform
try:
index = int(name)
except ValueError:
pass
else:
index += 1 # postgres uses 1-indexing
return IndexTransformFactory(index, self.base_field)
try:
start, end = name.split('_')
start = int(start) + 1
end = int(end) # don't add one here because postgres slices are weird
except ValueError:
pass
else:
return SliceTransformFactory(start, end)
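    # Illustrative ORM usage of the index/slice transforms above (a sketch;
    # it assumes a hypothetical model with ``tags = ArrayField(CharField(max_length=32))``,
    # which is not part of this module):
    #
    #   SomeModel.objects.filter(tags__0='django')      # IndexTransform, 1-indexed in SQL
    #   SomeModel.objects.filter(tags__0_1=['django'])  # SliceTransform -> tags[1:1]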
def validate(self, value, model_instance):
super(ArrayField, self).validate(value, model_instance)
for i, part in enumerate(value):
try:
self.base_field.validate(part, model_instance)
except exceptions.ValidationError as e:
raise exceptions.ValidationError(
string_concat(self.error_messages['item_invalid'], e.message),
code='item_invalid',
params={'nth': i},
)
if isinstance(self.base_field, ArrayField):
if len({len(i) for i in value}) > 1:
raise exceptions.ValidationError(
self.error_messages['nested_array_mismatch'],
code='nested_array_mismatch',
)
def run_validators(self, value):
super(ArrayField, self).run_validators(value)
for i, part in enumerate(value):
try:
self.base_field.run_validators(part)
except exceptions.ValidationError as e:
raise exceptions.ValidationError(
string_concat(self.error_messages['item_invalid'], ' '.join(e.messages)),
code='item_invalid',
params={'nth': i},
)
def formfield(self, **kwargs):
defaults = {
'form_class': SimpleArrayField,
'base_field': self.base_field.formfield(),
'max_length': self.size,
}
defaults.update(kwargs)
return super(ArrayField, self).formfield(**defaults)
@ArrayField.register_lookup
class ArrayContains(lookups.DataContains):
def as_sql(self, qn, connection):
sql, params = super(ArrayContains, self).as_sql(qn, connection)
sql = '%s::%s' % (sql, self.lhs.output_field.db_type(connection))
return sql, params
@ArrayField.register_lookup
class ArrayContainedBy(lookups.ContainedBy):
def as_sql(self, qn, connection):
sql, params = super(ArrayContainedBy, self).as_sql(qn, connection)
sql = '%s::%s' % (sql, self.lhs.output_field.db_type(connection))
return sql, params
@ArrayField.register_lookup
class ArrayExact(Exact):
def as_sql(self, qn, connection):
sql, params = super(ArrayExact, self).as_sql(qn, connection)
sql = '%s::%s' % (sql, self.lhs.output_field.db_type(connection))
return sql, params
@ArrayField.register_lookup
class ArrayOverlap(lookups.Overlap):
def as_sql(self, qn, connection):
sql, params = super(ArrayOverlap, self).as_sql(qn, connection)
sql = '%s::%s' % (sql, self.lhs.output_field.db_type(connection))
return sql, params
@ArrayField.register_lookup
class ArrayLenTransform(Transform):
lookup_name = 'len'
output_field = IntegerField()
def as_sql(self, compiler, connection):
lhs, params = compiler.compile(self.lhs)
return 'array_length(%s, 1)' % lhs, params
class IndexTransform(Transform):
def __init__(self, index, base_field, *args, **kwargs):
super(IndexTransform, self).__init__(*args, **kwargs)
self.index = index
self.base_field = base_field
def as_sql(self, compiler, connection):
lhs, params = compiler.compile(self.lhs)
return '%s[%s]' % (lhs, self.index), params
@property
def output_field(self):
return self.base_field
class IndexTransformFactory(object):
def __init__(self, index, base_field):
self.index = index
self.base_field = base_field
def __call__(self, *args, **kwargs):
return IndexTransform(self.index, self.base_field, *args, **kwargs)
class SliceTransform(Transform):
def __init__(self, start, end, *args, **kwargs):
super(SliceTransform, self).__init__(*args, **kwargs)
self.start = start
self.end = end
def as_sql(self, compiler, connection):
lhs, params = compiler.compile(self.lhs)
return '%s[%s:%s]' % (lhs, self.start, self.end), params
class SliceTransformFactory(object):
def __init__(self, start, end):
self.start = start
self.end = end
def __call__(self, *args, **kwargs):
return SliceTransform(self.start, self.end, *args, **kwargs)
| gpl-2.0 | -6,522,329,163,716,691,000 | 33.453815 | 100 | 0.589696 | false |
lord63-forks/flask | examples/persona/persona.py | 159 | 1442 | from flask import Flask, render_template, session, request, abort, g
import requests
app = Flask(__name__)
app.config.update(
DEBUG=True,
SECRET_KEY='my development key',
PERSONA_JS='https://login.persona.org/include.js',
PERSONA_VERIFIER='https://verifier.login.persona.org/verify',
)
app.config.from_envvar('PERSONA_SETTINGS', silent=True)
@app.before_request
def get_current_user():
g.user = None
email = session.get('email')
if email is not None:
g.user = email
@app.route('/')
def index():
"""Just a generic index page to show."""
return render_template('index.html')
@app.route('/_auth/login', methods=['GET', 'POST'])
def login_handler():
"""This is used by the persona.js file to kick off the
verification securely from the server side. If all is okay
the email address is remembered on the server.
"""
resp = requests.post(app.config['PERSONA_VERIFIER'], data={
'assertion': request.form['assertion'],
'audience': request.host_url,
}, verify=True)
if resp.ok:
verification_data = resp.json()
if verification_data['status'] == 'okay':
session['email'] = verification_data['email']
return 'OK'
abort(400)
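# For reference, a successful verifier response is JSON of roughly the form
# below; field names other than 'status' and 'email' are assumptions about
# the Persona verifier API rather than something taken from this file:
#
#   {"status": "okay", "email": "user@example.com", "audience": "http://localhost:5000/"}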
@app.route('/_auth/logout', methods=['POST'])
def logout_handler():
"""This is what persona.js will call to sign the user
out again.
"""
session.clear()
return 'OK'
| bsd-3-clause | -1,153,104,284,159,957,200 | 25.218182 | 68 | 0.637309 | false |
yaroslavvb/tensorflow | tensorflow/contrib/slim/python/slim/nets/inception_v1_test.py | 112 | 8960 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for nets.inception_v1."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.contrib.framework.python.ops import arg_scope
from tensorflow.contrib.framework.python.ops import variables as variables_lib
from tensorflow.contrib.slim.python.slim import model_analyzer
from tensorflow.contrib.slim.python.slim.nets import inception_v1
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import random_ops
from tensorflow.python.ops import variables
from tensorflow.python.platform import test
class InceptionV1Test(test.TestCase):
def testBuildClassificationNetwork(self):
batch_size = 5
height, width = 224, 224
num_classes = 1000
inputs = random_ops.random_uniform((batch_size, height, width, 3))
logits, end_points = inception_v1.inception_v1(inputs, num_classes)
self.assertTrue(logits.op.name.startswith('InceptionV1/Logits'))
self.assertListEqual(logits.get_shape().as_list(),
[batch_size, num_classes])
self.assertTrue('Predictions' in end_points)
self.assertListEqual(end_points['Predictions'].get_shape().as_list(),
[batch_size, num_classes])
def testBuildBaseNetwork(self):
batch_size = 5
height, width = 224, 224
inputs = random_ops.random_uniform((batch_size, height, width, 3))
mixed_6c, end_points = inception_v1.inception_v1_base(inputs)
self.assertTrue(mixed_6c.op.name.startswith('InceptionV1/Mixed_5c'))
self.assertListEqual(mixed_6c.get_shape().as_list(),
[batch_size, 7, 7, 1024])
expected_endpoints = [
'Conv2d_1a_7x7', 'MaxPool_2a_3x3', 'Conv2d_2b_1x1', 'Conv2d_2c_3x3',
'MaxPool_3a_3x3', 'Mixed_3b', 'Mixed_3c', 'MaxPool_4a_3x3', 'Mixed_4b',
'Mixed_4c', 'Mixed_4d', 'Mixed_4e', 'Mixed_4f', 'MaxPool_5a_2x2',
'Mixed_5b', 'Mixed_5c'
]
self.assertItemsEqual(end_points.keys(), expected_endpoints)
def testBuildOnlyUptoFinalEndpoint(self):
batch_size = 5
height, width = 224, 224
endpoints = [
'Conv2d_1a_7x7', 'MaxPool_2a_3x3', 'Conv2d_2b_1x1', 'Conv2d_2c_3x3',
'MaxPool_3a_3x3', 'Mixed_3b', 'Mixed_3c', 'MaxPool_4a_3x3', 'Mixed_4b',
'Mixed_4c', 'Mixed_4d', 'Mixed_4e', 'Mixed_4f', 'MaxPool_5a_2x2',
'Mixed_5b', 'Mixed_5c'
]
for index, endpoint in enumerate(endpoints):
with ops.Graph().as_default():
inputs = random_ops.random_uniform((batch_size, height, width, 3))
out_tensor, end_points = inception_v1.inception_v1_base(
inputs, final_endpoint=endpoint)
self.assertTrue(
out_tensor.op.name.startswith('InceptionV1/' + endpoint))
self.assertItemsEqual(endpoints[:index + 1], end_points)
def testBuildAndCheckAllEndPointsUptoMixed5c(self):
batch_size = 5
height, width = 224, 224
inputs = random_ops.random_uniform((batch_size, height, width, 3))
_, end_points = inception_v1.inception_v1_base(
inputs, final_endpoint='Mixed_5c')
endpoints_shapes = {
'Conv2d_1a_7x7': [5, 112, 112, 64],
'MaxPool_2a_3x3': [5, 56, 56, 64],
'Conv2d_2b_1x1': [5, 56, 56, 64],
'Conv2d_2c_3x3': [5, 56, 56, 192],
'MaxPool_3a_3x3': [5, 28, 28, 192],
'Mixed_3b': [5, 28, 28, 256],
'Mixed_3c': [5, 28, 28, 480],
'MaxPool_4a_3x3': [5, 14, 14, 480],
'Mixed_4b': [5, 14, 14, 512],
'Mixed_4c': [5, 14, 14, 512],
'Mixed_4d': [5, 14, 14, 512],
'Mixed_4e': [5, 14, 14, 528],
'Mixed_4f': [5, 14, 14, 832],
'MaxPool_5a_2x2': [5, 7, 7, 832],
'Mixed_5b': [5, 7, 7, 832],
'Mixed_5c': [5, 7, 7, 1024]
}
self.assertItemsEqual(endpoints_shapes.keys(), end_points.keys())
for endpoint_name in endpoints_shapes:
expected_shape = endpoints_shapes[endpoint_name]
self.assertTrue(endpoint_name in end_points)
self.assertListEqual(end_points[endpoint_name].get_shape().as_list(),
expected_shape)
def testModelHasExpectedNumberOfParameters(self):
batch_size = 5
height, width = 224, 224
inputs = random_ops.random_uniform((batch_size, height, width, 3))
with arg_scope(inception_v1.inception_v1_arg_scope()):
inception_v1.inception_v1_base(inputs)
total_params, _ = model_analyzer.analyze_vars(
variables_lib.get_model_variables())
self.assertAlmostEqual(5607184, total_params)
def testHalfSizeImages(self):
batch_size = 5
height, width = 112, 112
inputs = random_ops.random_uniform((batch_size, height, width, 3))
mixed_5c, _ = inception_v1.inception_v1_base(inputs)
self.assertTrue(mixed_5c.op.name.startswith('InceptionV1/Mixed_5c'))
self.assertListEqual(mixed_5c.get_shape().as_list(),
[batch_size, 4, 4, 1024])
def testUnknownImageShape(self):
ops.reset_default_graph()
batch_size = 2
height, width = 224, 224
num_classes = 1000
input_np = np.random.uniform(0, 1, (batch_size, height, width, 3))
with self.test_session() as sess:
inputs = array_ops.placeholder(
dtypes.float32, shape=(batch_size, None, None, 3))
logits, end_points = inception_v1.inception_v1(inputs, num_classes)
self.assertTrue(logits.op.name.startswith('InceptionV1/Logits'))
self.assertListEqual(logits.get_shape().as_list(),
[batch_size, num_classes])
pre_pool = end_points['Mixed_5c']
feed_dict = {inputs: input_np}
variables.global_variables_initializer().run()
pre_pool_out = sess.run(pre_pool, feed_dict=feed_dict)
self.assertListEqual(list(pre_pool_out.shape), [batch_size, 7, 7, 1024])
def testUnknownBatchSize(self):
batch_size = 1
height, width = 224, 224
num_classes = 1000
inputs = array_ops.placeholder(dtypes.float32, (None, height, width, 3))
logits, _ = inception_v1.inception_v1(inputs, num_classes)
self.assertTrue(logits.op.name.startswith('InceptionV1/Logits'))
self.assertListEqual(logits.get_shape().as_list(), [None, num_classes])
images = random_ops.random_uniform((batch_size, height, width, 3))
with self.test_session() as sess:
sess.run(variables.global_variables_initializer())
output = sess.run(logits, {inputs: images.eval()})
self.assertEquals(output.shape, (batch_size, num_classes))
def testEvaluation(self):
batch_size = 2
height, width = 224, 224
num_classes = 1000
eval_inputs = random_ops.random_uniform((batch_size, height, width, 3))
logits, _ = inception_v1.inception_v1(
eval_inputs, num_classes, is_training=False)
predictions = math_ops.argmax(logits, 1)
with self.test_session() as sess:
sess.run(variables.global_variables_initializer())
output = sess.run(predictions)
self.assertEquals(output.shape, (batch_size,))
def testTrainEvalWithReuse(self):
train_batch_size = 5
eval_batch_size = 2
height, width = 224, 224
num_classes = 1000
train_inputs = random_ops.random_uniform(
(train_batch_size, height, width, 3))
inception_v1.inception_v1(train_inputs, num_classes)
eval_inputs = random_ops.random_uniform((eval_batch_size, height, width, 3))
logits, _ = inception_v1.inception_v1(eval_inputs, num_classes, reuse=True)
predictions = math_ops.argmax(logits, 1)
with self.test_session() as sess:
sess.run(variables.global_variables_initializer())
output = sess.run(predictions)
self.assertEquals(output.shape, (eval_batch_size,))
def testLogitsNotSqueezed(self):
num_classes = 25
images = random_ops.random_uniform([1, 224, 224, 3])
logits, _ = inception_v1.inception_v1(
images, num_classes=num_classes, spatial_squeeze=False)
with self.test_session() as sess:
variables.global_variables_initializer().run()
logits_out = sess.run(logits)
self.assertListEqual(list(logits_out.shape), [1, 1, 1, num_classes])
if __name__ == '__main__':
test.main()
| apache-2.0 | -580,618,712,173,807,600 | 39.542986 | 80 | 0.653683 | false |
Garmelon/itbot | script.py | 1 | 20301 | # Rewrite of the original script; it worked, but not well enough...
# still ugly tho
CONFIGFILE = "config.txt"
import re
import json
import time
import requests
import datetime
import configparser
import imgurpython
# overwrite print() to only print ascii
import builtins
def asciify(text):
return ''.join([i if ord(i) < 128 else '?' for i in text])
def print(*args, **kwargs):
newargs = []
for text in args:
newargs.append(asciify(text))
builtins.print(*newargs, **kwargs)
class Client:
"""
Imgur API and config+authentication
"""
def __init__(self, config):
self.config = config
if not self.config.has_section("auth"):
self.config.modified = True
self.config["auth"] = {}
if ( not self.config.has_option("auth", "client_id")
and not self.config.has_option("auth", "client_secret")):
self.prompt_client_info()
self.connect()
self.account = self.client.get_account("me")
def prompt_client_info(self):
print("No client info found. If you haven't yet, visit")
print("https://api.imgur.com/oauth2/addclient and register an application.")
print("Pick 'OAuth 2 authorization without a callback URL'.")
print("If you have already registered an application, visit")
print("https://imgur.com/account/settings/apps and generate a new secret.")
print("Then, fill in the client id and secret below.")
self.config["auth"]["client_id"] = input("Client ID: ").strip()
self.config["auth"]["client_secret"] = input("Client Secret: ").strip()
self.config.modified = True
print("")
def prompt_pin(self):
"""
prompt_pin() -> pin
Assumes that there is already a client connected to Imgur.
"""
authorization_url = self.client.get_auth_url("pin")
print("Please visit {}".format(authorization_url))
print("and enter the PIN code displayed on the site.")
return input("PIN code: ").strip()
def connect(self):
"""
Creates and connects self.client.
"""
if self.config.has_option("auth", "refresh_token"):
self.client = imgurpython.ImgurClient(self.config["auth"]["client_id"],
self.config["auth"]["client_secret"],
refresh_token=self.config["auth"]["refresh_token"])
else:
self.client = imgurpython.ImgurClient(self.config["auth"]["client_id"],
self.config["auth"]["client_secret"])
credentials = self.client.authorize(self.prompt_pin(), "pin")
self.config["auth"]["refresh_token"] = credentials["refresh_token"]
self.config.modified = True
self.client.set_user_auth(credentials["access_token"], credentials["refresh_token"])
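	# For reference, the [auth] section this class reads and writes looks
	# roughly like the sketch below (values are placeholders, not real
	# credentials):
	#
	#   [auth]
	#   client_id = abc123def456
	#   client_secret = 0123456789abcdef
	#   refresh_token = <issued by Imgur after PIN authorization>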
class Subscribers:
"""
Manages subscribers and subscribing/unsubscribing
"""
subregex = re.compile(r"^<?subscribe>?.?$",
flags=re.IGNORECASE)
unsubregex = re.compile(r"^<?unsubscribe>?\.?$",
flags=re.IGNORECASE)
askregex = re.compile(r"subscri|\bsign.*\b(up|in|on)\b|\b(join|tag|includ)|<.*>|\bdot|\b(leav|cancel)$",
flags=re.IGNORECASE)
def __init__(self, subsfile):
self.subsfile = subsfile
self.subs = {}
self.modified = False
self.load()
def load(self):
try:
with open(self.subsfile) as f:
for line in f:
self.load_line(line)
except FileNotFoundError:
print("File not found: {}".format(repr(self.subsfile)))
print("If you already have a subscribers file, you can set it in the config file.")
print("A new file will be created.")
def load_line(self, line):
if line[0] == "#":
return
parts = line[:-1].split(" ")
parts = [item for item in parts if item] # remove empty strings
if not parts:
return
status = parts[0]
nick = parts[1].lower()
datetime = int(parts[2])
self.subs[nick] = {"status": status, "dt": datetime}
def save(self):
with open(self.subsfile, "w") as f:
for sub, info in sorted(self.subs.items()):
f.write("{} {} {}\n".format(info["status"], sub, info["dt"]))
def add(self, nick, datetime=None):
print("Adding {}.".format(nick))
nick = nick.lower()
if nick in self.subs:
self.subs[nick] = {"status": "s", "dt": max(datetime or 0, self.subs[nick]["dt"])}
else:
self.subs[nick] = {"status": "s", "dt": datetime or 0}
self.modified = True
def remove(self, nick, datetime=None):
print("Removing {}.".format(nick))
nick = nick.lower()
if nick in self.subs:
self.subs[nick] = {"status": "u", "dt": max(datetime or 0, self.subs[nick]["dt"])}
else:
self.subs[nick] = {"status": "u", "dt": datetime or 0}
self.modified = True
def subscribed(self):
return {sub: info for sub, info in self.subs.items() if info["status"] == "s"}
def clean_up(self):
self.subs = self.subscribed()
self.modified = True
def count(self):
return len(self.subscribed())
def to_comments(self):
comments = []
comment = ""
for sub in self.subscribed():
sub = "@" + sub
if comment:
if len(comment) + len(sub) + 1 <= 140: #character limit
comment += " " + sub
continue
else:
comments.append(comment)
comment = sub
if comment:
comments.append(comment)
return comments
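	# Illustrative result of the packing above (names are made up): with
	# subscribers alice and bob this yields ['@alice @bob']; once the joined
	# mentions would exceed the 140-character limit, the list is split into
	# several comment strings instead.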
def check_comment(self, nick, comment, datetime):
"""
Returns True when comment is to be added to the ignore list.
"""
nick = nick.lower()
if nick in self.subs and self.subs[nick]["dt"] >= datetime:
return
if self.subregex.search(comment):
self.add(nick, datetime=datetime)
elif self.unsubregex.search(comment):
self.remove(nick, datetime=datetime)
elif self.askregex.search(comment):
action = self.ask_user_about_comment(comment)
if action == "add":
self.add(nick, datetime=datetime)
elif action == "remove":
self.remove(nick, datetime=datetime)
else:
return True
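	# Examples of how comments are classified above (illustrative):
	#   "Subscribe!"   -> subregex   -> added automatically
	#   "unsubscribe." -> unsubregex -> removed automatically
	#   "sign me up"   -> askregex   -> shown to the user for a decision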
def ask_user_about_comment(self, comment):
print("\nWhat is the following comment?")
print(comment)
print("[s] subscribe | [d] unsubscribe | [anything else] neither")
		action = input("[s/d] ").strip().lower()
print("")
if action == "s":
return "add"
elif action == "d":
return "remove"
class Albums:
"""
Manages added albums and keeps track of comments with uninteresting content
"""
def __init__(self, albumsfile):
self.albumsfile = albumsfile
self.albums = {}
self.modified = False
self.load()
def load(self):
try:
with open(self.albumsfile) as f:
for line in f:
self.load_line(line)
except FileNotFoundError:
print("File not found: {}".format(repr(self.albumsfile)))
print("If you already have an albums file, you can set it in the config file.")
print("A new file will be created.")
def load_line(self, line):
if line[0] == "#":
return
parts = line[:-1].split(" ", 1)
if len(parts) < 2:
return
album = parts[0]
comments = json.loads(parts[1])
if album in self.albums:
for comment in comments:
if not comment in self.albums[album]:
self.albums[album].append(comment)
else:
self.albums[album] = comments
def save(self):
with open(self.albumsfile, "w") as f:
for album, comments in sorted(self.albums.items()):
f.write("{} {}\n".format(album, json.dumps(comments)))
def add(self, album):
print ("Adding album {}".format(album))
if not album in self.albums:
self.albums[album] = []
self.modified = True
def remove(self, album):
print ("Removing album {}".format(album))
if album in self.albums:
del self.albums[album]
self.modified = True
def add_comment(self, album, comment):
print ("Adding comment {} to album {} ignore list".format(comment, album))
if not comment in self.albums[album]:
self.albums[album].append(comment)
self.modified = True
def in_album(self, album, comment):
return comment in self.albums[album]
class ITBot:
"""
Manage the input and resources
"""
def __init__(self, configfile="config.txt"):
"""
Load the config and connect to imgur.
"""
self.configfile = configfile
self.config = configparser.ConfigParser()
self.config.read(self.configfile)
self.config.modified = False
if not self.config.has_section("misc"):
self.config["misc"] = {}
self.config.modified = True
if not self.config.has_option("misc", "delay"):
self.config["misc"]["delay"] = "10"
self.config.modified = True
if not self.config.has_option("misc", "retry_delay"):
self.config["misc"]["retry_delay"] = "60"
self.config.modified = True
if not self.config.has_option("misc", "branches_per_node"):
self.config["misc"]["branches_per_node"] = "10"
self.config.modified = True
if not self.config.has_option("misc", "subsfile"):
self.config["misc"]["subsfile"] = "subscribers.txt"
self.config.modified = True
if not self.config.has_option("misc", "albumsfile"):
self.config["misc"]["albumsfile"] = "albums.txt"
self.config.modified = True
self.client = Client(self.config)
self.subs = Subscribers(self.config["misc"]["subsfile"])
self.albums = Albums(self.config["misc"]["albumsfile"])
self._commands = {}
self._add_command("quit", self.command_quit, "Quit.",
("It's just quitting. Why would you call help on that?\n"
"Ctrl+D (EOF) or Ctrl+C (KeyboardInterrupt) work too."))
		self._add_command("q", self.command_quit, "Short for 'quit'.",
		                  ("You seem desperate... There really is nothing new here."))
self._add_command("help", self.command_help, "Show th- Oh, you already figured it out...",
("I believe there is nothing more I could tell you about this command.\n"
"Go and try out the other commands instead of doing - well, this :P"))
self._add_command("comment", self.command_comment, "Comment on an image with all your subs.",
("comment <image_id>\n"
"Posts a top-level comment and then replies with the full list of your subs."))
self._add_command("scan", self.command_scan, "Scan your albums' comments for (un)subscribers.",
("Scans through the comments below your albums and processes any obvious '(un)subscribe's.\n"
"In difficult cases, presents the comment to you and lets you decide."))
self._add_command("add", self.command_add, "Add subscribers.",
("add <nick> [<nick> [...]]\n"
"List all the nicks after the command and they'll be added to your\n"
"subs in the subscribers file."))
self._add_command("remove", self.command_remove, "Remove subscribers.",
("remove <nick> [<nick> [...]]\n"
"Works the same way as add, but in reverse :P"))
self._add_command("reg", self.command_reg, "Register albums.",
("reg <album_id> [<album_id> [...]]\n"
"Register albums to be scanned by the scan command."))
self._add_command("dereg", self.command_dereg, "Deregister albums.",
("dereg <album_id> [<album_id> [...]]\n"
"The albums will no longer be included in further calls to the scan command.\n"
"WARNING: This also deletes all info about messages from those albums which were\n"
"marked as \"ignore\" (neither a subscribe nor an unsubscribe)."))
self._add_command("count", self.command_count, "Boost ego.",
("Lean back and relax"))
		self._add_command("cleanup", self.command_cleanup, "Removes all unsubscribed nicks from the subsfile.",
("Don't do this unless your subsfile is too large.\n"
"Normally, it is not necessary to clean up at all."))
def _add_command(self, command, function, shorthelp, longhelp):
"""
Helps organising commands
"""
self._commands[command] = {
"function": function,
"shorthelp": shorthelp,
"longhelp": longhelp
}
def fancy_intro(self):
"""
Nothing important...
"""
logo = [" ___________________",
" .' '.",
" / _ \\",
"| (_)_ __ __ _ _ _ _ _ |",
"| | | ' \/ _` | || | '_| |",
"| |_|_|_|_\__, |\_,_|_| |",
" \\ |___/ /",
" '.___________________.'"]
for line in logo:
print(line)
time.sleep(0.1)
def fancy_outtro(self):
"""
Nothing important...
"""
logo = [" ________________",
" .' '.",
" / ____ _ \\",
"| | __ ) _ _ ___| | |",
"| | _ \| | | |/ _ \ | |",
"| | |_) | |_| | __/_| |",
"| |____/ \__, |\___(_) |",
" \\ |___/ /",
" '.________________.'"]
for line in logo:
print(line)
time.sleep(0.1)
def command_help(self, args):
if args:
if args[0] in self._commands:
print(self._commands[args[0]]["longhelp"])
else:
print("No help found for {}. You might want to check 'help'.".format(args[0]))
else:
print("Use 'help <command>' for a more detailed help text.\n")
for command, info in sorted(self._commands.items()):
print(" {} - {}".format(command.ljust(10), info["shorthelp"]))
def command_quit(self, args):
return True
def command_add(self, args):
if not args:
print("No names found, check the 'help subadd' or just enter some names...")
return
for arg in args:
self.subs.add(arg)
def command_remove(self, args):
if not args:
print("No names found, check the 'help subremove' or just enter some names...")
return
for arg in args:
self.subs.remove(arg)
def command_count(self, args):
print("You currently have {} subscribers.".format(self.subs.count()))
print("\\(^o^)/")
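	def command_cleanup(self, args):
		# Wiring for the registered "cleanup" command (an assumption: it is
		# meant to call Subscribers.clean_up(), which drops unsubscribed nicks).
		self.subs.clean_up()
		print("Removed all unsubscribed nicks from the subscribers file.")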
def command_comment(self, args):
try:
image_id = args[0]
except IndexError:
print("Image ID missing. Maybe check the 'help comment'?")
return
comments = self.subs.to_comments()
print("{} subscribers in {} comments.".format(self.subs.count(), len(comments)))
top_comment = input("Top-level comment: ").strip()
if not top_comment:
print("Comment can't be empty.")
return
if len(top_comment) > 140:
print("Too many characters (>140), aborting!")
return
print("\nYou entered the following:")
print("Image ID:", repr(image_id))
print("Top-level comment:", repr(top_comment))
if input("Do you want to continue? [Y/n] ").lower() != "y":
return
# use tree of comments to lower the lag on mobile
comment_count = len(comments)
print("\nBuilding tree")
tree = self.build_comment_tree(comments)
print("Posting top-level comment")
root_comment = self.client.client.post_comment(image_id, top_comment)
print("Posting rest of comments")
print("This may take a few hours.")
print("The number of branches per node can be adjusted in the config file.")
self.post_comment_tree(image_id, tree, root_comment["id"], comment_count)
# old comment posting code
"""
print("\nPosting top-level comment")
root_comment = self.client.client.post_comment(image_id, top_comment)
for index, comment in enumerate(comments):
print("Posting comment {} of {}".format(index+1, len(comments)))
while(True):
time.sleep(self.config.getint("misc", "delay"))
try:
self.client.client.post_comment_reply(root_comment["id"], image_id, comment)
except imgurpython.helpers.error.ImgurClientError:
print("An error occurred while sending this comment. Retrying...")
except imgurpython.helpers.error.ImgurClientRateLimitError:
print("Rate limit hit. Retrying...")
except requests.exceptions.ConnectionError:
delay = self.config.getint("misc", "retry_delay")
print("Connection problems, retrying in {}s...".format(delay))
time.sleep(delay)
else:
break
"""
def traverse_level(self, tree, level):
if level == 0:
yield from tree.values()
else:
for _, branch in tree.items():
yield from self.traverse_level(branch, level - 1)
def build_comment_tree(self, comments):
tree = {"root":{}}
level = 0
while True:
for branch in self.traverse_level(tree, level):
for i in range(self.config.getint("misc", "branches_per_node")):
if comments:
branch[comments.pop()] = {}
else:
return tree["root"]
level += 1
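	# Illustrative shape of the result above, assuming branches_per_node = 2
	# and four queued comments c1..c4 (c4 is popped first):
	#
	#   {c4: {c2: {}, c1: {}}, c3: {}}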
def post_comment_tree(self, image_id, tree, root_comment_id, comment_count):
for comment, branch in tree.items():
time.sleep(self.config.getint("misc", "delay"))
while(True):
try:
comment_id = self.client.client.post_comment_reply(root_comment_id, image_id, comment)["id"]
except imgurpython.helpers.error.ImgurClientError as e:
print("An error occurred while sending this comment ({}: {}). Retrying...".format(e.status_code, e.error_message))
except imgurpython.helpers.error.ImgurClientRateLimitError:
print("Rate limit hit. Retrying...")
except requests.exceptions.ConnectionError:
print("Connection problems. Retrying...")
else:
time_per_comment = self.config.getint("misc", "delay") + 1
delta = datetime.timedelta(seconds=time_per_comment*comment_count)
print("{} comments left; estimated time: {}".format(comment_count, delta))
comment_count -= 1
break
time.sleep(self.config.getint("misc", "retry_delay")) # something went wrong, so we wait...
comment_count = self.post_comment_tree(image_id, branch, comment_id, comment_count)
return comment_count
def command_scan(self, args):
for album in self.albums.albums:
print("Scanning album {}...".format(album))
try:
comments = self.client.client.gallery_item_comments(album, sort="new")
except imgurpython.helpers.error.ImgurClientError:
print("Error while loading comments. You might want to double-check your albums file.")
else:
for comment in self.flatten_comments(comments):
if comment.author_id != self.client.account.id \
and not self.albums.in_album(album, comment.id) \
and self.subs.check_comment(comment.author, comment.comment, comment.datetime):
self.albums.add_comment(album, comment.id)
def command_reg(self, args):
if not args:
print("Album IDs missing. Maybe check the 'help reg'?")
for album in args:
self.albums.add(album)
def command_dereg(self, args):
if not args:
print("Album IDs missing. Maybe check the 'help dereg'?")
for album in args:
self.albums.remove(album)
def flatten_comments(self, comments):
for comment in comments:
yield comment
if comment.children:
yield from self.flatten_comments(comment.children)
def parse_command(self, inputstr):
"""
		parse_command(inputstr) -> command, [args]
In case command parsing will need to be improved in the future.
"""
args = inputstr.split(" ")
args = [arg for arg in args if arg] # remove empty strings
if not args: # no command found
return "", []
command = args[0]
args = args[1:]
return command, args
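	# Example (illustrative): parse_command("add  alice bob") returns
	# ("add", ["alice", "bob"]); repeated spaces are dropped.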
def prompt_command(self):
"""
prompt_command() -> exit
Takes a command and calls the respective functions.
Returns True if user exited.
"""
inputstr = input("\n>>> ")
command, args = self.parse_command(inputstr)
if not command:
return
if command in self._commands:
return self._commands[command]["function"](args)
else:
print("Invalid command. Type 'help' for a list of available commands.")
def interactive(self):
"""
Start the interactive mode (entering commands)
"""
self.fancy_intro()
print("\nWelcome to TITsBot v.2 *dial-up noises in background*")
print("('help' for a list of commands)")
try:
while(True):
if self.prompt_command():
break
except (EOFError, KeyboardInterrupt):
print("")
if self.config.modified:
print("Saving config.")
with open(self.configfile, "w") as f:
self.config.write(f)
if self.subs.modified:
print("Saving subs.")
self.subs.save()
if self.albums.modified:
print("Saving albums.")
self.albums.save()
self.fancy_outtro()
print("\nGoodbye! *beeping noise, then bluescreen*")
if __name__ == "__main__":
bot = ITBot(CONFIGFILE)
bot.interactive()
| mit | -8,835,114,608,182,748,000 | 29.52782 | 119 | 0.619231 | false |
amitsela/incubator-beam | sdks/python/apache_beam/pipeline.py | 3 | 21582 | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Pipeline, the top-level Dataflow object.
A pipeline holds a DAG of data transforms. Conceptually the nodes of the DAG
are transforms (PTransform objects) and the edges are values (mostly PCollection
objects). The transforms take as inputs one or more PValues and output one or
more PValues.
The pipeline offers functionality to traverse the graph. The actual operation
to be executed for each node visited is specified through a runner object.
Typical usage:
# Create a pipeline object using a local runner for execution.
p = beam.Pipeline('DirectRunner')
# Add to the pipeline a "Create" transform. When executed this
# transform will produce a PCollection object with the specified values.
pcoll = p | 'create' >> beam.Create([1, 2, 3])
# Another transform could be applied to pcoll, e.g., writing to a text file.
# For other transforms, refer to transforms/ directory.
pcoll | 'write' >> beam.io.WriteToText('./output')
# run() will execute the DAG stored in the pipeline. The execution of the
# nodes visited is done using the specified local runner.
p.run()
"""
from __future__ import absolute_import
import collections
import logging
import os
import shutil
import tempfile
from google.protobuf import wrappers_pb2
from apache_beam import pvalue
from apache_beam import typehints
from apache_beam.internal import pickler
from apache_beam.runners import create_runner
from apache_beam.runners import PipelineRunner
from apache_beam.transforms import ptransform
from apache_beam.typehints import TypeCheckError
from apache_beam.utils import proto_utils
from apache_beam.utils import urns
from apache_beam.utils.pipeline_options import PipelineOptions
from apache_beam.utils.pipeline_options import SetupOptions
from apache_beam.utils.pipeline_options import StandardOptions
from apache_beam.utils.pipeline_options import TypeOptions
from apache_beam.utils.pipeline_options_validator import PipelineOptionsValidator
class Pipeline(object):
"""A pipeline object that manages a DAG of PValues and their PTransforms.
Conceptually the PValues are the DAG's nodes and the PTransforms computing
the PValues are the edges.
All the transforms applied to the pipeline must have distinct full labels.
  If the same transform instance needs to be applied, then a clone should be created
with a new label (e.g., transform.clone('new label')).
"""
def __init__(self, runner=None, options=None, argv=None):
"""Initialize a pipeline object.
Args:
runner: An object of type 'PipelineRunner' that will be used to execute
the pipeline. For registered runners, the runner name can be specified,
otherwise a runner object must be supplied.
options: A configured 'PipelineOptions' object containing arguments
that should be used for running the Dataflow job.
argv: a list of arguments (such as sys.argv) to be used for building a
'PipelineOptions' object. This will only be used if argument 'options'
is None.
Raises:
ValueError: if either the runner or options argument is not of the
expected type.
"""
if options is not None:
if isinstance(options, PipelineOptions):
self.options = options
else:
raise ValueError(
'Parameter options, if specified, must be of type PipelineOptions. '
            'Received : %r' % options)
elif argv is not None:
if isinstance(argv, list):
self.options = PipelineOptions(argv)
else:
raise ValueError(
            'Parameter argv, if specified, must be a list. Received : %r' % argv)
else:
self.options = PipelineOptions([])
if runner is None:
runner = self.options.view_as(StandardOptions).runner
if runner is None:
runner = StandardOptions.DEFAULT_RUNNER
logging.info(('Missing pipeline option (runner). Executing pipeline '
'using the default runner: %s.'), runner)
if isinstance(runner, str):
runner = create_runner(runner)
elif not isinstance(runner, PipelineRunner):
raise TypeError('Runner must be a PipelineRunner object or the '
'name of a registered runner.')
# Validate pipeline options
errors = PipelineOptionsValidator(self.options, runner).validate()
if errors:
raise ValueError(
'Pipeline has validations errors: \n' + '\n'.join(errors))
# Default runner to be used.
self.runner = runner
# Stack of transforms generated by nested apply() calls. The stack will
# contain a root node as an enclosing (parent) node for top transforms.
self.transforms_stack = [AppliedPTransform(None, None, '', None)]
# Set of transform labels (full labels) applied to the pipeline.
# If a transform is applied and the full label is already in the set
# then the transform will have to be cloned with a new label.
self.applied_labels = set()
def _current_transform(self):
"""Returns the transform currently on the top of the stack."""
return self.transforms_stack[-1]
def _root_transform(self):
"""Returns the root transform of the transform stack."""
return self.transforms_stack[0]
def run(self, test_runner_api=True):
"""Runs the pipeline. Returns whatever our runner returns after running."""
# When possible, invoke a round trip through the runner API.
if test_runner_api and self._verify_runner_api_compatible():
return Pipeline.from_runner_api(
self.to_runner_api(), self.runner, self.options).run(False)
if self.options.view_as(SetupOptions).save_main_session:
# If this option is chosen, verify we can pickle the main session early.
tmpdir = tempfile.mkdtemp()
try:
pickler.dump_session(os.path.join(tmpdir, 'main_session.pickle'))
finally:
shutil.rmtree(tmpdir)
return self.runner.run(self)
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_tb):
if not exc_type:
self.run().wait_until_finish()
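  # Illustrative context-manager usage of the methods above (a sketch; the
  # transform chain is elided):
  #
  #   with Pipeline(runner, options) as p:
  #     p | 'create' >> ...   # run() and wait_until_finish() happen on exit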
def visit(self, visitor):
"""Visits depth-first every node of a pipeline's DAG.
Args:
visitor: PipelineVisitor object whose callbacks will be called for each
node visited. See PipelineVisitor comments.
Raises:
TypeError: if node is specified and is not a PValue.
pipeline.PipelineError: if node is specified and does not belong to this
pipeline instance.
"""
visited = set()
self._root_transform().visit(visitor, self, visited)
def apply(self, transform, pvalueish=None, label=None):
"""Applies a custom transform using the pvalueish specified.
Args:
transform: the PTranform to apply.
pvalueish: the input for the PTransform (typically a PCollection).
label: label of the PTransform.
Raises:
TypeError: if the transform object extracted from the argument list is
not a PTransform.
RuntimeError: if the transform object was already applied to this pipeline
and needs to be cloned in order to apply again.
"""
if isinstance(transform, ptransform._NamedPTransform):
return self.apply(transform.transform, pvalueish,
label or transform.label)
if not isinstance(transform, ptransform.PTransform):
raise TypeError("Expected a PTransform object, got %s" % transform)
if label:
# Fix self.label as it is inspected by some PTransform operations
# (e.g. to produce error messages for type hint violations).
try:
old_label, transform.label = transform.label, label
return self.apply(transform, pvalueish)
finally:
transform.label = old_label
full_label = '/'.join([self._current_transform().full_label,
label or transform.label]).lstrip('/')
if full_label in self.applied_labels:
raise RuntimeError(
'Transform "%s" does not have a stable unique label. '
'This will prevent updating of pipelines. '
'To apply a transform with a specified label write '
'pvalue | "label" >> transform'
% full_label)
self.applied_labels.add(full_label)
pvalueish, inputs = transform._extract_input_pvalues(pvalueish)
try:
inputs = tuple(inputs)
for leaf_input in inputs:
if not isinstance(leaf_input, pvalue.PValue):
raise TypeError
except TypeError:
raise NotImplementedError(
'Unable to extract PValue inputs from %s; either %s does not accept '
'inputs of this format, or it does not properly override '
'_extract_input_pvalues' % (pvalueish, transform))
current = AppliedPTransform(
self._current_transform(), transform, full_label, inputs)
self._current_transform().add_part(current)
self.transforms_stack.append(current)
type_options = self.options.view_as(TypeOptions)
if type_options.pipeline_type_check:
transform.type_check_inputs(pvalueish)
pvalueish_result = self.runner.apply(transform, pvalueish)
if type_options is not None and type_options.pipeline_type_check:
transform.type_check_outputs(pvalueish_result)
for result in ptransform.GetPValues().visit(pvalueish_result):
assert isinstance(result, (pvalue.PValue, pvalue.DoOutputsTuple))
# Make sure we set the producer only for a leaf node in the transform DAG.
# This way we preserve the last transform of a composite transform as
# being the real producer of the result.
if result.producer is None:
result.producer = current
# TODO(robertwb): Multi-input, multi-output inference.
# TODO(robertwb): Ideally we'd do intersection here.
if (type_options is not None and type_options.pipeline_type_check
and isinstance(result, pvalue.PCollection)
and not result.element_type):
input_element_type = (
inputs[0].element_type
if len(inputs) == 1
else typehints.Any)
type_hints = transform.get_type_hints()
declared_output_type = type_hints.simple_output_type(transform.label)
if declared_output_type:
input_types = type_hints.input_types
if input_types and input_types[0]:
declared_input_type = input_types[0][0]
result.element_type = typehints.bind_type_variables(
declared_output_type,
typehints.match_type_variables(declared_input_type,
input_element_type))
else:
result.element_type = declared_output_type
else:
result.element_type = transform.infer_output_type(input_element_type)
assert isinstance(result.producer.inputs, tuple)
current.add_output(result)
if (type_options is not None and
type_options.type_check_strictness == 'ALL_REQUIRED' and
transform.get_type_hints().output_types is None):
ptransform_name = '%s(%s)' % (transform.__class__.__name__, full_label)
raise TypeCheckError('Pipeline type checking is enabled, however no '
'output type-hint was found for the '
'PTransform %s' % ptransform_name)
current.update_input_refcounts()
self.transforms_stack.pop()
return pvalueish_result
def _verify_runner_api_compatible(self):
class Visitor(PipelineVisitor): # pylint: disable=used-before-assignment
ok = True # Really a nonlocal.
def visit_transform(self, transform_node):
if transform_node.side_inputs:
# No side inputs (yet).
Visitor.ok = False
try:
# Transforms must be picklable.
pickler.loads(pickler.dumps(transform_node.transform))
except Exception:
Visitor.ok = False
def visit_value(self, value, _):
if isinstance(value, pvalue.PDone):
Visitor.ok = False
self.visit(Visitor())
return Visitor.ok
def to_runner_api(self):
from apache_beam.runners import pipeline_context
from apache_beam.runners.api import beam_runner_api_pb2
context = pipeline_context.PipelineContext()
# Mutates context; placing inline would force dependence on
# argument evaluation order.
root_transform_id = context.transforms.get_id(self._root_transform())
proto = beam_runner_api_pb2.Pipeline(
root_transform_id=root_transform_id,
components=context.to_runner_api())
return proto
@staticmethod
def from_runner_api(proto, runner, options):
p = Pipeline(runner=runner, options=options)
from apache_beam.runners import pipeline_context
context = pipeline_context.PipelineContext(proto.components)
p.transforms_stack = [
context.transforms.get_by_id(proto.root_transform_id)]
# TODO(robertwb): These are only needed to continue construction. Omit?
p.applied_labels = set([
t.unique_name for t in proto.components.transforms.values()])
for id in proto.components.pcollections:
context.pcollections.get_by_id(id).pipeline = p
return p
class PipelineVisitor(object):
"""Visitor pattern class used to traverse a DAG of transforms.
This is an internal class used for bookkeeping by a Pipeline.
"""
def visit_value(self, value, producer_node):
"""Callback for visiting a PValue in the pipeline DAG.
Args:
value: PValue visited (typically a PCollection instance).
producer_node: AppliedPTransform object whose transform produced the
pvalue.
"""
pass
def visit_transform(self, transform_node):
"""Callback for visiting a transform node in the pipeline DAG."""
pass
def enter_composite_transform(self, transform_node):
"""Callback for entering traversal of a composite transform node."""
pass
def leave_composite_transform(self, transform_node):
"""Callback for leaving traversal of a composite transform node."""
pass
class AppliedPTransform(object):
"""A transform node representing an instance of applying a PTransform.
This is an internal class used for bookkeeping by a Pipeline.
"""
def __init__(self, parent, transform, full_label, inputs):
self.parent = parent
self.transform = transform
# Note that we want the PipelineVisitor classes to use the full_label,
# inputs, side_inputs, and outputs fields from this instance instead of the
# ones of the PTransform instance associated with it. Doing this permits
# reusing PTransform instances in different contexts (apply() calls) without
# any interference. This is particularly useful for composite transforms.
self.full_label = full_label
self.inputs = inputs or ()
self.side_inputs = () if transform is None else tuple(transform.side_inputs)
self.outputs = {}
self.parts = []
# Per tag refcount dictionary for PValues for which this node is a
# root producer.
self.refcounts = collections.defaultdict(int)
def __repr__(self):
return "%s(%s, %s)" % (self.__class__.__name__, self.full_label,
type(self.transform).__name__)
def update_input_refcounts(self):
"""Increment refcounts for all transforms providing inputs."""
def real_producer(pv):
real = pv.producer
while real.parts:
real = real.parts[-1]
return real
if not self.is_composite():
for main_input in self.inputs:
if not isinstance(main_input, pvalue.PBegin):
real_producer(main_input).refcounts[main_input.tag] += 1
for side_input in self.side_inputs:
real_producer(side_input.pvalue).refcounts[side_input.pvalue.tag] += 1
def add_output(self, output, tag=None):
if isinstance(output, pvalue.DoOutputsTuple):
self.add_output(output[output._main_tag])
elif isinstance(output, pvalue.PValue):
# TODO(BEAM-1833): Require tags when calling this method.
if tag is None and None in self.outputs:
tag = len(self.outputs)
assert tag not in self.outputs
self.outputs[tag] = output
else:
raise TypeError("Unexpected output type: %s" % output)
def add_part(self, part):
assert isinstance(part, AppliedPTransform)
self.parts.append(part)
def is_composite(self):
"""Returns whether this is a composite transform.
A composite transform has parts (inner transforms) or isn't the
producer for any of its outputs. (An example of a transform that
is not a producer is one that returns its inputs instead.)
"""
return bool(self.parts) or all(
pval.producer is not self for pval in self.outputs.values())
def visit(self, visitor, pipeline, visited):
"""Visits all nodes reachable from the current node."""
for pval in self.inputs:
if pval not in visited and not isinstance(pval, pvalue.PBegin):
assert pval.producer is not None
pval.producer.visit(visitor, pipeline, visited)
# The value should be visited now since we visit outputs too.
assert pval in visited, pval
# Visit side inputs.
for pval in self.side_inputs:
if isinstance(pval, pvalue.AsSideInput) and pval.pvalue not in visited:
pval = pval.pvalue # Unpack marker-object-wrapped pvalue.
assert pval.producer is not None
pval.producer.visit(visitor, pipeline, visited)
# The value should be visited now since we visit outputs too.
assert pval in visited
# TODO(silviuc): Is there a way to signal that we are visiting a side
# value? The issue is that the same PValue can be reachable through
# multiple paths and therefore it is not guaranteed that the value
# will be visited as a side value.
# Visit a composite or primitive transform.
if self.is_composite():
visitor.enter_composite_transform(self)
for part in self.parts:
part.visit(visitor, pipeline, visited)
visitor.leave_composite_transform(self)
else:
visitor.visit_transform(self)
# Visit the outputs (one or more). It is essential to mark as visited the
# tagged PCollections of the DoOutputsTuple object. A tagged PCollection is
# connected directly with its producer (a multi-output ParDo), but the
# output of such a transform is the containing DoOutputsTuple, not the
# PCollection inside it. Without the code below a tagged PCollection will
# not be marked as visited while visiting its producer.
for pval in self.outputs.values():
if isinstance(pval, pvalue.DoOutputsTuple):
pvals = (v for v in pval)
else:
pvals = (pval,)
for v in pvals:
if v not in visited:
visited.add(v)
visitor.visit_value(v, self)
def named_inputs(self):
# TODO(BEAM-1833): Push names up into the sdk construction.
return {str(ix): input for ix, input in enumerate(self.inputs)
if isinstance(input, pvalue.PCollection)}
def named_outputs(self):
return {str(tag): output for tag, output in self.outputs.items()
if isinstance(output, pvalue.PCollection)}
def to_runner_api(self, context):
from apache_beam.runners.api import beam_runner_api_pb2
return beam_runner_api_pb2.PTransform(
unique_name=self.full_label,
spec=beam_runner_api_pb2.FunctionSpec(
urn=urns.PICKLED_TRANSFORM,
parameter=proto_utils.pack_Any(
wrappers_pb2.BytesValue(value=pickler.dumps(self.transform)))),
subtransforms=[context.transforms.get_id(part) for part in self.parts],
# TODO(BEAM-115): Side inputs.
inputs={tag: context.pcollections.get_id(pc)
for tag, pc in self.named_inputs().items()},
outputs={str(tag): context.pcollections.get_id(out)
for tag, out in self.named_outputs().items()},
# TODO(BEAM-115): display_data
display_data=None)
@staticmethod
def from_runner_api(proto, context):
result = AppliedPTransform(
parent=None,
transform=pickler.loads(
proto_utils.unpack_Any(proto.spec.parameter,
wrappers_pb2.BytesValue).value),
full_label=proto.unique_name,
inputs=[
context.pcollections.get_by_id(id) for id in proto.inputs.values()])
result.parts = [
context.transforms.get_by_id(id) for id in proto.subtransforms]
result.outputs = {
None if tag == 'None' else tag: context.pcollections.get_by_id(id)
for tag, id in proto.outputs.items()}
if not result.parts:
for tag, pc in result.outputs.items():
if pc not in result.inputs:
pc.producer = result
pc.tag = tag
result.update_input_refcounts()
return result
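# --- Illustrative sketch (not part of the original module) ------------------
# AppliedPTransform.visit() above drives a visitor object through the four
# callbacks it invokes: enter_composite_transform, leave_composite_transform,
# visit_transform and visit_value. A minimal, hypothetical visitor that merely
# counts primitive transforms could look like this; the class name and the
# driving call shown in the trailing comment are assumptions for illustration.
class _CountingVisitor(object):
  """Example visitor: counts primitive (non-composite) transforms."""
  def __init__(self):
    self.primitive_transforms = 0
  def enter_composite_transform(self, applied_ptransform):
    pass  # called before a composite's parts are visited
  def leave_composite_transform(self, applied_ptransform):
    pass  # called after a composite's parts have been visited
  def visit_transform(self, applied_ptransform):
    self.primitive_transforms += 1  # a leaf (primitive) transform reached
  def visit_value(self, value, producer):
    pass  # called once for every PValue produced by a visited transform
# Driving it (hypothetical): root.visit(_CountingVisitor(), pipeline, set())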
| apache-2.0 | -1,065,219,102,471,002,500 | 38.24 | 81 | 0.68066 | false |
aarchiba/numpy | numpy/ma/tests/test_extras.py | 41 | 44785 | # pylint: disable-msg=W0611, W0612, W0511
"""Tests suite for MaskedArray.
Adapted from the original test_ma by Pierre Gerard-Marchant
:author: Pierre Gerard-Marchant
:contact: pierregm_at_uga_dot_edu
:version: $Id: test_extras.py 3473 2007-10-29 15:18:13Z jarrod.millman $
"""
from __future__ import division, absolute_import, print_function
import warnings
import numpy as np
from numpy.testing import (
TestCase, run_module_suite, assert_warns, clear_and_catch_warnings
)
from numpy.ma.testutils import (
assert_, assert_array_equal, assert_equal, assert_almost_equal
)
from numpy.ma.core import (
array, arange, masked, MaskedArray, masked_array, getmaskarray, shape,
nomask, ones, zeros, count
)
from numpy.ma.extras import (
atleast_2d, mr_, dot, polyfit, cov, corrcoef, median, average, unique,
setxor1d, setdiff1d, union1d, intersect1d, in1d, ediff1d,
apply_over_axes, apply_along_axis, compress_nd, compress_rowcols,
mask_rowcols, clump_masked, clump_unmasked, flatnotmasked_contiguous,
notmasked_contiguous, notmasked_edges, masked_all, masked_all_like
)
import numpy.ma.extras as mae
class TestGeneric(TestCase):
#
def test_masked_all(self):
# Tests masked_all
# Standard dtype
test = masked_all((2,), dtype=float)
control = array([1, 1], mask=[1, 1], dtype=float)
assert_equal(test, control)
# Flexible dtype
dt = np.dtype({'names': ['a', 'b'], 'formats': ['f', 'f']})
test = masked_all((2,), dtype=dt)
control = array([(0, 0), (0, 0)], mask=[(1, 1), (1, 1)], dtype=dt)
assert_equal(test, control)
test = masked_all((2, 2), dtype=dt)
control = array([[(0, 0), (0, 0)], [(0, 0), (0, 0)]],
mask=[[(1, 1), (1, 1)], [(1, 1), (1, 1)]],
dtype=dt)
assert_equal(test, control)
# Nested dtype
dt = np.dtype([('a', 'f'), ('b', [('ba', 'f'), ('bb', 'f')])])
test = masked_all((2,), dtype=dt)
control = array([(1, (1, 1)), (1, (1, 1))],
mask=[(1, (1, 1)), (1, (1, 1))], dtype=dt)
assert_equal(test, control)
test = masked_all((2,), dtype=dt)
control = array([(1, (1, 1)), (1, (1, 1))],
mask=[(1, (1, 1)), (1, (1, 1))], dtype=dt)
assert_equal(test, control)
test = masked_all((1, 1), dtype=dt)
control = array([[(1, (1, 1))]], mask=[[(1, (1, 1))]], dtype=dt)
assert_equal(test, control)
def test_masked_all_like(self):
# Tests masked_all
# Standard dtype
base = array([1, 2], dtype=float)
test = masked_all_like(base)
control = array([1, 1], mask=[1, 1], dtype=float)
assert_equal(test, control)
# Flexible dtype
dt = np.dtype({'names': ['a', 'b'], 'formats': ['f', 'f']})
base = array([(0, 0), (0, 0)], mask=[(1, 1), (1, 1)], dtype=dt)
test = masked_all_like(base)
control = array([(10, 10), (10, 10)], mask=[(1, 1), (1, 1)], dtype=dt)
assert_equal(test, control)
# Nested dtype
dt = np.dtype([('a', 'f'), ('b', [('ba', 'f'), ('bb', 'f')])])
control = array([(1, (1, 1)), (1, (1, 1))],
mask=[(1, (1, 1)), (1, (1, 1))], dtype=dt)
test = masked_all_like(control)
assert_equal(test, control)
def test_clump_masked(self):
# Test clump_masked
a = masked_array(np.arange(10))
a[[0, 1, 2, 6, 8, 9]] = masked
#
test = clump_masked(a)
control = [slice(0, 3), slice(6, 7), slice(8, 10)]
assert_equal(test, control)
def test_clump_unmasked(self):
# Test clump_unmasked
a = masked_array(np.arange(10))
a[[0, 1, 2, 6, 8, 9]] = masked
test = clump_unmasked(a)
control = [slice(3, 6), slice(7, 8), ]
assert_equal(test, control)
def test_flatnotmasked_contiguous(self):
# Test flatnotmasked_contiguous
a = arange(10)
# No mask
test = flatnotmasked_contiguous(a)
assert_equal(test, slice(0, a.size))
# Some mask
a[(a < 3) | (a > 8) | (a == 5)] = masked
test = flatnotmasked_contiguous(a)
assert_equal(test, [slice(3, 5), slice(6, 9)])
#
a[:] = masked
test = flatnotmasked_contiguous(a)
assert_equal(test, None)
class TestAverage(TestCase):
    # Several tests of average. Why so many? Good point...
def test_testAverage1(self):
# Test of average.
ott = array([0., 1., 2., 3.], mask=[True, False, False, False])
assert_equal(2.0, average(ott, axis=0))
assert_equal(2.0, average(ott, weights=[1., 1., 2., 1.]))
result, wts = average(ott, weights=[1., 1., 2., 1.], returned=1)
assert_equal(2.0, result)
self.assertTrue(wts == 4.0)
ott[:] = masked
assert_equal(average(ott, axis=0).mask, [True])
ott = array([0., 1., 2., 3.], mask=[True, False, False, False])
ott = ott.reshape(2, 2)
ott[:, 1] = masked
assert_equal(average(ott, axis=0), [2.0, 0.0])
assert_equal(average(ott, axis=1).mask[0], [True])
assert_equal([2., 0.], average(ott, axis=0))
result, wts = average(ott, axis=0, returned=1)
assert_equal(wts, [1., 0.])
def test_testAverage2(self):
# More tests of average.
w1 = [0, 1, 1, 1, 1, 0]
w2 = [[0, 1, 1, 1, 1, 0], [1, 0, 0, 0, 0, 1]]
x = arange(6, dtype=np.float_)
assert_equal(average(x, axis=0), 2.5)
assert_equal(average(x, axis=0, weights=w1), 2.5)
y = array([arange(6, dtype=np.float_), 2.0 * arange(6)])
assert_equal(average(y, None), np.add.reduce(np.arange(6)) * 3. / 12.)
assert_equal(average(y, axis=0), np.arange(6) * 3. / 2.)
assert_equal(average(y, axis=1),
[average(x, axis=0), average(x, axis=0) * 2.0])
assert_equal(average(y, None, weights=w2), 20. / 6.)
assert_equal(average(y, axis=0, weights=w2),
[0., 1., 2., 3., 4., 10.])
assert_equal(average(y, axis=1),
[average(x, axis=0), average(x, axis=0) * 2.0])
m1 = zeros(6)
m2 = [0, 0, 1, 1, 0, 0]
m3 = [[0, 0, 1, 1, 0, 0], [0, 1, 1, 1, 1, 0]]
m4 = ones(6)
m5 = [0, 1, 1, 1, 1, 1]
assert_equal(average(masked_array(x, m1), axis=0), 2.5)
assert_equal(average(masked_array(x, m2), axis=0), 2.5)
assert_equal(average(masked_array(x, m4), axis=0).mask, [True])
assert_equal(average(masked_array(x, m5), axis=0), 0.0)
assert_equal(count(average(masked_array(x, m4), axis=0)), 0)
z = masked_array(y, m3)
assert_equal(average(z, None), 20. / 6.)
assert_equal(average(z, axis=0), [0., 1., 99., 99., 4.0, 7.5])
assert_equal(average(z, axis=1), [2.5, 5.0])
assert_equal(average(z, axis=0, weights=w2),
[0., 1., 99., 99., 4.0, 10.0])
def test_testAverage3(self):
# Yet more tests of average!
a = arange(6)
b = arange(6) * 3
r1, w1 = average([[a, b], [b, a]], axis=1, returned=1)
assert_equal(shape(r1), shape(w1))
assert_equal(r1.shape, w1.shape)
r2, w2 = average(ones((2, 2, 3)), axis=0, weights=[3, 1], returned=1)
assert_equal(shape(w2), shape(r2))
r2, w2 = average(ones((2, 2, 3)), returned=1)
assert_equal(shape(w2), shape(r2))
r2, w2 = average(ones((2, 2, 3)), weights=ones((2, 2, 3)), returned=1)
assert_equal(shape(w2), shape(r2))
a2d = array([[1, 2], [0, 4]], float)
a2dm = masked_array(a2d, [[False, False], [True, False]])
a2da = average(a2d, axis=0)
assert_equal(a2da, [0.5, 3.0])
a2dma = average(a2dm, axis=0)
assert_equal(a2dma, [1.0, 3.0])
a2dma = average(a2dm, axis=None)
assert_equal(a2dma, 7. / 3.)
a2dma = average(a2dm, axis=1)
assert_equal(a2dma, [1.5, 4.0])
def test_onintegers_with_mask(self):
# Test average on integers with mask
a = average(array([1, 2]))
assert_equal(a, 1.5)
a = average(array([1, 2, 3, 4], mask=[False, False, True, True]))
assert_equal(a, 1.5)
def test_complex(self):
# Test with complex data.
# (Regression test for https://github.com/numpy/numpy/issues/2684)
mask = np.array([[0, 0, 0, 1, 0],
[0, 1, 0, 0, 0]], dtype=bool)
a = masked_array([[0, 1+2j, 3+4j, 5+6j, 7+8j],
[9j, 0+1j, 2+3j, 4+5j, 7+7j]],
mask=mask)
av = average(a)
expected = np.average(a.compressed())
assert_almost_equal(av.real, expected.real)
assert_almost_equal(av.imag, expected.imag)
av0 = average(a, axis=0)
expected0 = average(a.real, axis=0) + average(a.imag, axis=0)*1j
assert_almost_equal(av0.real, expected0.real)
assert_almost_equal(av0.imag, expected0.imag)
av1 = average(a, axis=1)
expected1 = average(a.real, axis=1) + average(a.imag, axis=1)*1j
assert_almost_equal(av1.real, expected1.real)
assert_almost_equal(av1.imag, expected1.imag)
# Test with the 'weights' argument.
wts = np.array([[0.5, 1.0, 2.0, 1.0, 0.5],
[1.0, 1.0, 1.0, 1.0, 1.0]])
wav = average(a, weights=wts)
expected = np.average(a.compressed(), weights=wts[~mask])
assert_almost_equal(wav.real, expected.real)
assert_almost_equal(wav.imag, expected.imag)
wav0 = average(a, weights=wts, axis=0)
expected0 = (average(a.real, weights=wts, axis=0) +
average(a.imag, weights=wts, axis=0)*1j)
assert_almost_equal(wav0.real, expected0.real)
assert_almost_equal(wav0.imag, expected0.imag)
wav1 = average(a, weights=wts, axis=1)
expected1 = (average(a.real, weights=wts, axis=1) +
average(a.imag, weights=wts, axis=1)*1j)
assert_almost_equal(wav1.real, expected1.real)
assert_almost_equal(wav1.imag, expected1.imag)
class TestConcatenator(TestCase):
# Tests for mr_, the equivalent of r_ for masked arrays.
def test_1d(self):
# Tests mr_ on 1D arrays.
assert_array_equal(mr_[1, 2, 3, 4, 5, 6], array([1, 2, 3, 4, 5, 6]))
b = ones(5)
m = [1, 0, 0, 0, 0]
d = masked_array(b, mask=m)
c = mr_[d, 0, 0, d]
self.assertTrue(isinstance(c, MaskedArray))
assert_array_equal(c, [1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1])
assert_array_equal(c.mask, mr_[m, 0, 0, m])
def test_2d(self):
# Tests mr_ on 2D arrays.
a_1 = np.random.rand(5, 5)
a_2 = np.random.rand(5, 5)
m_1 = np.round_(np.random.rand(5, 5), 0)
m_2 = np.round_(np.random.rand(5, 5), 0)
b_1 = masked_array(a_1, mask=m_1)
b_2 = masked_array(a_2, mask=m_2)
# append columns
d = mr_['1', b_1, b_2]
self.assertTrue(d.shape == (5, 10))
assert_array_equal(d[:, :5], b_1)
assert_array_equal(d[:, 5:], b_2)
assert_array_equal(d.mask, np.r_['1', m_1, m_2])
d = mr_[b_1, b_2]
self.assertTrue(d.shape == (10, 5))
assert_array_equal(d[:5,:], b_1)
assert_array_equal(d[5:,:], b_2)
assert_array_equal(d.mask, np.r_[m_1, m_2])
class TestNotMasked(TestCase):
# Tests notmasked_edges and notmasked_contiguous.
def test_edges(self):
        # Tests notmasked_edges
data = masked_array(np.arange(25).reshape(5, 5),
mask=[[0, 0, 1, 0, 0],
[0, 0, 0, 1, 1],
[1, 1, 0, 0, 0],
[0, 0, 0, 0, 0],
[1, 1, 1, 0, 0]],)
test = notmasked_edges(data, None)
assert_equal(test, [0, 24])
test = notmasked_edges(data, 0)
assert_equal(test[0], [(0, 0, 1, 0, 0), (0, 1, 2, 3, 4)])
assert_equal(test[1], [(3, 3, 3, 4, 4), (0, 1, 2, 3, 4)])
test = notmasked_edges(data, 1)
assert_equal(test[0], [(0, 1, 2, 3, 4), (0, 0, 2, 0, 3)])
assert_equal(test[1], [(0, 1, 2, 3, 4), (4, 2, 4, 4, 4)])
#
test = notmasked_edges(data.data, None)
assert_equal(test, [0, 24])
test = notmasked_edges(data.data, 0)
assert_equal(test[0], [(0, 0, 0, 0, 0), (0, 1, 2, 3, 4)])
assert_equal(test[1], [(4, 4, 4, 4, 4), (0, 1, 2, 3, 4)])
test = notmasked_edges(data.data, -1)
assert_equal(test[0], [(0, 1, 2, 3, 4), (0, 0, 0, 0, 0)])
assert_equal(test[1], [(0, 1, 2, 3, 4), (4, 4, 4, 4, 4)])
#
data[-2] = masked
test = notmasked_edges(data, 0)
assert_equal(test[0], [(0, 0, 1, 0, 0), (0, 1, 2, 3, 4)])
assert_equal(test[1], [(1, 1, 2, 4, 4), (0, 1, 2, 3, 4)])
test = notmasked_edges(data, -1)
assert_equal(test[0], [(0, 1, 2, 4), (0, 0, 2, 3)])
assert_equal(test[1], [(0, 1, 2, 4), (4, 2, 4, 4)])
def test_contiguous(self):
# Tests notmasked_contiguous
a = masked_array(np.arange(24).reshape(3, 8),
mask=[[0, 0, 0, 0, 1, 1, 1, 1],
[1, 1, 1, 1, 1, 1, 1, 1],
[0, 0, 0, 0, 0, 0, 1, 0], ])
tmp = notmasked_contiguous(a, None)
assert_equal(tmp[-1], slice(23, 24, None))
assert_equal(tmp[-2], slice(16, 22, None))
assert_equal(tmp[-3], slice(0, 4, None))
#
tmp = notmasked_contiguous(a, 0)
self.assertTrue(len(tmp[-1]) == 1)
self.assertTrue(tmp[-2] is None)
assert_equal(tmp[-3], tmp[-1])
self.assertTrue(len(tmp[0]) == 2)
#
tmp = notmasked_contiguous(a, 1)
assert_equal(tmp[0][-1], slice(0, 4, None))
self.assertTrue(tmp[1] is None)
assert_equal(tmp[2][-1], slice(7, 8, None))
assert_equal(tmp[2][-2], slice(0, 6, None))
class TestCompressFunctions(TestCase):
def test_compress_nd(self):
# Tests compress_nd
x = np.array(list(range(3*4*5))).reshape(3, 4, 5)
m = np.zeros((3,4,5)).astype(bool)
m[1,1,1] = True
x = array(x, mask=m)
# axis=None
a = compress_nd(x)
assert_equal(a, [[[ 0, 2, 3, 4],
[10, 12, 13, 14],
[15, 17, 18, 19]],
[[40, 42, 43, 44],
[50, 52, 53, 54],
[55, 57, 58, 59]]])
# axis=0
a = compress_nd(x, 0)
assert_equal(a, [[[ 0, 1, 2, 3, 4],
[ 5, 6, 7, 8, 9],
[10, 11, 12, 13, 14],
[15, 16, 17, 18, 19]],
[[40, 41, 42, 43, 44],
[45, 46, 47, 48, 49],
[50, 51, 52, 53, 54],
[55, 56, 57, 58, 59]]])
# axis=1
a = compress_nd(x, 1)
assert_equal(a, [[[ 0, 1, 2, 3, 4],
[10, 11, 12, 13, 14],
[15, 16, 17, 18, 19]],
[[20, 21, 22, 23, 24],
[30, 31, 32, 33, 34],
[35, 36, 37, 38, 39]],
[[40, 41, 42, 43, 44],
[50, 51, 52, 53, 54],
[55, 56, 57, 58, 59]]])
a2 = compress_nd(x, (1,))
a3 = compress_nd(x, -2)
a4 = compress_nd(x, (-2,))
assert_equal(a, a2)
assert_equal(a, a3)
assert_equal(a, a4)
# axis=2
a = compress_nd(x, 2)
assert_equal(a, [[[ 0, 2, 3, 4],
[ 5, 7, 8, 9],
[10, 12, 13, 14],
[15, 17, 18, 19]],
[[20, 22, 23, 24],
[25, 27, 28, 29],
[30, 32, 33, 34],
[35, 37, 38, 39]],
[[40, 42, 43, 44],
[45, 47, 48, 49],
[50, 52, 53, 54],
[55, 57, 58, 59]]])
a2 = compress_nd(x, (2,))
a3 = compress_nd(x, -1)
a4 = compress_nd(x, (-1,))
assert_equal(a, a2)
assert_equal(a, a3)
assert_equal(a, a4)
# axis=(0, 1)
a = compress_nd(x, (0, 1))
assert_equal(a, [[[ 0, 1, 2, 3, 4],
[10, 11, 12, 13, 14],
[15, 16, 17, 18, 19]],
[[40, 41, 42, 43, 44],
[50, 51, 52, 53, 54],
[55, 56, 57, 58, 59]]])
a2 = compress_nd(x, (0, -2))
assert_equal(a, a2)
# axis=(1, 2)
a = compress_nd(x, (1, 2))
assert_equal(a, [[[ 0, 2, 3, 4],
[10, 12, 13, 14],
[15, 17, 18, 19]],
[[20, 22, 23, 24],
[30, 32, 33, 34],
[35, 37, 38, 39]],
[[40, 42, 43, 44],
[50, 52, 53, 54],
[55, 57, 58, 59]]])
a2 = compress_nd(x, (-2, 2))
a3 = compress_nd(x, (1, -1))
a4 = compress_nd(x, (-2, -1))
assert_equal(a, a2)
assert_equal(a, a3)
assert_equal(a, a4)
# axis=(0, 2)
a = compress_nd(x, (0, 2))
assert_equal(a, [[[ 0, 2, 3, 4],
[ 5, 7, 8, 9],
[10, 12, 13, 14],
[15, 17, 18, 19]],
[[40, 42, 43, 44],
[45, 47, 48, 49],
[50, 52, 53, 54],
[55, 57, 58, 59]]])
a2 = compress_nd(x, (0, -1))
assert_equal(a, a2)
def test_compress_rowcols(self):
# Tests compress_rowcols
x = array(np.arange(9).reshape(3, 3),
mask=[[1, 0, 0], [0, 0, 0], [0, 0, 0]])
assert_equal(compress_rowcols(x), [[4, 5], [7, 8]])
assert_equal(compress_rowcols(x, 0), [[3, 4, 5], [6, 7, 8]])
assert_equal(compress_rowcols(x, 1), [[1, 2], [4, 5], [7, 8]])
x = array(x._data, mask=[[0, 0, 0], [0, 1, 0], [0, 0, 0]])
assert_equal(compress_rowcols(x), [[0, 2], [6, 8]])
assert_equal(compress_rowcols(x, 0), [[0, 1, 2], [6, 7, 8]])
assert_equal(compress_rowcols(x, 1), [[0, 2], [3, 5], [6, 8]])
x = array(x._data, mask=[[1, 0, 0], [0, 1, 0], [0, 0, 0]])
assert_equal(compress_rowcols(x), [[8]])
assert_equal(compress_rowcols(x, 0), [[6, 7, 8]])
assert_equal(compress_rowcols(x, 1,), [[2], [5], [8]])
x = array(x._data, mask=[[1, 0, 0], [0, 1, 0], [0, 0, 1]])
assert_equal(compress_rowcols(x).size, 0)
assert_equal(compress_rowcols(x, 0).size, 0)
assert_equal(compress_rowcols(x, 1).size, 0)
def test_mask_rowcols(self):
# Tests mask_rowcols.
x = array(np.arange(9).reshape(3, 3),
mask=[[1, 0, 0], [0, 0, 0], [0, 0, 0]])
assert_equal(mask_rowcols(x).mask,
[[1, 1, 1], [1, 0, 0], [1, 0, 0]])
assert_equal(mask_rowcols(x, 0).mask,
[[1, 1, 1], [0, 0, 0], [0, 0, 0]])
assert_equal(mask_rowcols(x, 1).mask,
[[1, 0, 0], [1, 0, 0], [1, 0, 0]])
x = array(x._data, mask=[[0, 0, 0], [0, 1, 0], [0, 0, 0]])
assert_equal(mask_rowcols(x).mask,
[[0, 1, 0], [1, 1, 1], [0, 1, 0]])
assert_equal(mask_rowcols(x, 0).mask,
[[0, 0, 0], [1, 1, 1], [0, 0, 0]])
assert_equal(mask_rowcols(x, 1).mask,
[[0, 1, 0], [0, 1, 0], [0, 1, 0]])
x = array(x._data, mask=[[1, 0, 0], [0, 1, 0], [0, 0, 0]])
assert_equal(mask_rowcols(x).mask,
[[1, 1, 1], [1, 1, 1], [1, 1, 0]])
assert_equal(mask_rowcols(x, 0).mask,
[[1, 1, 1], [1, 1, 1], [0, 0, 0]])
assert_equal(mask_rowcols(x, 1,).mask,
[[1, 1, 0], [1, 1, 0], [1, 1, 0]])
x = array(x._data, mask=[[1, 0, 0], [0, 1, 0], [0, 0, 1]])
self.assertTrue(mask_rowcols(x).all() is masked)
self.assertTrue(mask_rowcols(x, 0).all() is masked)
self.assertTrue(mask_rowcols(x, 1).all() is masked)
self.assertTrue(mask_rowcols(x).mask.all())
self.assertTrue(mask_rowcols(x, 0).mask.all())
self.assertTrue(mask_rowcols(x, 1).mask.all())
def test_dot(self):
# Tests dot product
n = np.arange(1, 7)
#
m = [1, 0, 0, 0, 0, 0]
a = masked_array(n, mask=m).reshape(2, 3)
b = masked_array(n, mask=m).reshape(3, 2)
c = dot(a, b, True)
assert_equal(c.mask, [[1, 1], [1, 0]])
c = dot(b, a, True)
assert_equal(c.mask, [[1, 1, 1], [1, 0, 0], [1, 0, 0]])
c = dot(a, b, False)
assert_equal(c, np.dot(a.filled(0), b.filled(0)))
c = dot(b, a, False)
assert_equal(c, np.dot(b.filled(0), a.filled(0)))
#
m = [0, 0, 0, 0, 0, 1]
a = masked_array(n, mask=m).reshape(2, 3)
b = masked_array(n, mask=m).reshape(3, 2)
c = dot(a, b, True)
assert_equal(c.mask, [[0, 1], [1, 1]])
c = dot(b, a, True)
assert_equal(c.mask, [[0, 0, 1], [0, 0, 1], [1, 1, 1]])
c = dot(a, b, False)
assert_equal(c, np.dot(a.filled(0), b.filled(0)))
assert_equal(c, dot(a, b))
c = dot(b, a, False)
assert_equal(c, np.dot(b.filled(0), a.filled(0)))
#
m = [0, 0, 0, 0, 0, 0]
a = masked_array(n, mask=m).reshape(2, 3)
b = masked_array(n, mask=m).reshape(3, 2)
c = dot(a, b)
assert_equal(c.mask, nomask)
c = dot(b, a)
assert_equal(c.mask, nomask)
#
a = masked_array(n, mask=[1, 0, 0, 0, 0, 0]).reshape(2, 3)
b = masked_array(n, mask=[0, 0, 0, 0, 0, 0]).reshape(3, 2)
c = dot(a, b, True)
assert_equal(c.mask, [[1, 1], [0, 0]])
c = dot(a, b, False)
assert_equal(c, np.dot(a.filled(0), b.filled(0)))
c = dot(b, a, True)
assert_equal(c.mask, [[1, 0, 0], [1, 0, 0], [1, 0, 0]])
c = dot(b, a, False)
assert_equal(c, np.dot(b.filled(0), a.filled(0)))
#
a = masked_array(n, mask=[0, 0, 0, 0, 0, 1]).reshape(2, 3)
b = masked_array(n, mask=[0, 0, 0, 0, 0, 0]).reshape(3, 2)
c = dot(a, b, True)
assert_equal(c.mask, [[0, 0], [1, 1]])
c = dot(a, b)
assert_equal(c, np.dot(a.filled(0), b.filled(0)))
c = dot(b, a, True)
assert_equal(c.mask, [[0, 0, 1], [0, 0, 1], [0, 0, 1]])
c = dot(b, a, False)
assert_equal(c, np.dot(b.filled(0), a.filled(0)))
#
a = masked_array(n, mask=[0, 0, 0, 0, 0, 1]).reshape(2, 3)
b = masked_array(n, mask=[0, 0, 1, 0, 0, 0]).reshape(3, 2)
c = dot(a, b, True)
assert_equal(c.mask, [[1, 0], [1, 1]])
c = dot(a, b, False)
assert_equal(c, np.dot(a.filled(0), b.filled(0)))
c = dot(b, a, True)
assert_equal(c.mask, [[0, 0, 1], [1, 1, 1], [0, 0, 1]])
c = dot(b, a, False)
assert_equal(c, np.dot(b.filled(0), a.filled(0)))
class TestApplyAlongAxis(TestCase):
# Tests 2D functions
def test_3d(self):
a = arange(12.).reshape(2, 2, 3)
def myfunc(b):
return b[1]
xa = apply_along_axis(myfunc, 2, a)
assert_equal(xa, [[1, 4], [7, 10]])
# Tests kwargs functions
def test_3d_kwargs(self):
a = arange(12).reshape(2, 2, 3)
def myfunc(b, offset=0):
return b[1+offset]
xa = apply_along_axis(myfunc, 2, a, offset=1)
assert_equal(xa, [[2, 5], [8, 11]])
class TestApplyOverAxes(TestCase):
# Tests apply_over_axes
def test_basic(self):
a = arange(24).reshape(2, 3, 4)
test = apply_over_axes(np.sum, a, [0, 2])
ctrl = np.array([[[60], [92], [124]]])
assert_equal(test, ctrl)
a[(a % 2).astype(np.bool)] = masked
test = apply_over_axes(np.sum, a, [0, 2])
ctrl = np.array([[[28], [44], [60]]])
assert_equal(test, ctrl)
class TestMedian(TestCase):
def test_pytype(self):
r = np.ma.median([[np.inf, np.inf], [np.inf, np.inf]], axis=-1)
assert_equal(r, np.inf)
def test_non_masked(self):
assert_equal(np.ma.median(np.arange(9)), 4.)
assert_equal(np.ma.median(range(9)), 4)
def test_2d(self):
# Tests median w/ 2D
(n, p) = (101, 30)
x = masked_array(np.linspace(-1., 1., n),)
x[:10] = x[-10:] = masked
z = masked_array(np.empty((n, p), dtype=float))
z[:, 0] = x[:]
idx = np.arange(len(x))
for i in range(1, p):
np.random.shuffle(idx)
z[:, i] = x[idx]
assert_equal(median(z[:, 0]), 0)
assert_equal(median(z), 0)
assert_equal(median(z, axis=0), np.zeros(p))
assert_equal(median(z.T, axis=1), np.zeros(p))
def test_2d_waxis(self):
# Tests median w/ 2D arrays and different axis.
x = masked_array(np.arange(30).reshape(10, 3))
x[:3] = x[-3:] = masked
assert_equal(median(x), 14.5)
assert_equal(median(x, axis=0), [13.5, 14.5, 15.5])
assert_equal(median(x, axis=1), [0, 0, 0, 10, 13, 16, 19, 0, 0, 0])
assert_equal(median(x, axis=1).mask, [1, 1, 1, 0, 0, 0, 0, 1, 1, 1])
def test_3d(self):
# Tests median w/ 3D
x = np.ma.arange(24).reshape(3, 4, 2)
x[x % 3 == 0] = masked
assert_equal(median(x, 0), [[12, 9], [6, 15], [12, 9], [18, 15]])
x.shape = (4, 3, 2)
assert_equal(median(x, 0), [[99, 10], [11, 99], [13, 14]])
x = np.ma.arange(24).reshape(4, 3, 2)
x[x % 5 == 0] = masked
assert_equal(median(x, 0), [[12, 10], [8, 9], [16, 17]])
def test_neg_axis(self):
x = masked_array(np.arange(30).reshape(10, 3))
x[:3] = x[-3:] = masked
assert_equal(median(x, axis=-1), median(x, axis=1))
def test_out(self):
x = masked_array(np.arange(30).reshape(10, 3))
x[:3] = x[-3:] = masked
out = masked_array(np.ones(10))
r = median(x, axis=1, out=out)
assert_equal(r, out)
assert_(type(r) == MaskedArray)
class TestCov(TestCase):
def setUp(self):
self.data = array(np.random.rand(12))
def test_1d_wo_missing(self):
# Test cov on 1D variable w/o missing values
x = self.data
assert_almost_equal(np.cov(x), cov(x))
assert_almost_equal(np.cov(x, rowvar=False), cov(x, rowvar=False))
assert_almost_equal(np.cov(x, rowvar=False, bias=True),
cov(x, rowvar=False, bias=True))
def test_2d_wo_missing(self):
# Test cov on 1 2D variable w/o missing values
x = self.data.reshape(3, 4)
assert_almost_equal(np.cov(x), cov(x))
assert_almost_equal(np.cov(x, rowvar=False), cov(x, rowvar=False))
assert_almost_equal(np.cov(x, rowvar=False, bias=True),
cov(x, rowvar=False, bias=True))
def test_1d_w_missing(self):
        # Test cov on one 1D variable w/ missing values
x = self.data
x[-1] = masked
x -= x.mean()
nx = x.compressed()
assert_almost_equal(np.cov(nx), cov(x))
assert_almost_equal(np.cov(nx, rowvar=False), cov(x, rowvar=False))
assert_almost_equal(np.cov(nx, rowvar=False, bias=True),
cov(x, rowvar=False, bias=True))
#
try:
cov(x, allow_masked=False)
except ValueError:
pass
#
# 2 1D variables w/ missing values
nx = x[1:-1]
assert_almost_equal(np.cov(nx, nx[::-1]), cov(x, x[::-1]))
assert_almost_equal(np.cov(nx, nx[::-1], rowvar=False),
cov(x, x[::-1], rowvar=False))
assert_almost_equal(np.cov(nx, nx[::-1], rowvar=False, bias=True),
cov(x, x[::-1], rowvar=False, bias=True))
def test_2d_w_missing(self):
# Test cov on 2D variable w/ missing value
x = self.data
x[-1] = masked
x = x.reshape(3, 4)
valid = np.logical_not(getmaskarray(x)).astype(int)
frac = np.dot(valid, valid.T)
xf = (x - x.mean(1)[:, None]).filled(0)
assert_almost_equal(cov(x),
np.cov(xf) * (x.shape[1] - 1) / (frac - 1.))
assert_almost_equal(cov(x, bias=True),
np.cov(xf, bias=True) * x.shape[1] / frac)
frac = np.dot(valid.T, valid)
xf = (x - x.mean(0)).filled(0)
assert_almost_equal(cov(x, rowvar=False),
(np.cov(xf, rowvar=False) *
(x.shape[0] - 1) / (frac - 1.)))
assert_almost_equal(cov(x, rowvar=False, bias=True),
(np.cov(xf, rowvar=False, bias=True) *
x.shape[0] / frac))
class catch_warn_mae(clear_and_catch_warnings):
""" Context manager to catch, reset warnings in ma.extras module
"""
class_modules = (mae,)
class TestCorrcoef(TestCase):
def setUp(self):
self.data = array(np.random.rand(12))
self.data2 = array(np.random.rand(12))
def test_ddof(self):
# ddof raises DeprecationWarning
x, y = self.data, self.data2
expected = np.corrcoef(x)
expected2 = np.corrcoef(x, y)
with catch_warn_mae():
warnings.simplefilter("always")
assert_warns(DeprecationWarning, corrcoef, x, ddof=-1)
warnings.simplefilter("ignore")
# ddof has no or negligible effect on the function
assert_almost_equal(np.corrcoef(x, ddof=0), corrcoef(x, ddof=0))
assert_almost_equal(corrcoef(x, ddof=-1), expected)
assert_almost_equal(corrcoef(x, y, ddof=-1), expected2)
assert_almost_equal(corrcoef(x, ddof=3), expected)
assert_almost_equal(corrcoef(x, y, ddof=3), expected2)
def test_bias(self):
x, y = self.data, self.data2
expected = np.corrcoef(x)
# bias raises DeprecationWarning
with catch_warn_mae():
warnings.simplefilter("always")
assert_warns(DeprecationWarning, corrcoef, x, y, True, False)
assert_warns(DeprecationWarning, corrcoef, x, y, True, True)
assert_warns(DeprecationWarning, corrcoef, x, bias=False)
warnings.simplefilter("ignore")
# bias has no or negligible effect on the function
assert_almost_equal(corrcoef(x, bias=1), expected)
def test_1d_wo_missing(self):
# Test cov on 1D variable w/o missing values
x = self.data
assert_almost_equal(np.corrcoef(x), corrcoef(x))
assert_almost_equal(np.corrcoef(x, rowvar=False),
corrcoef(x, rowvar=False))
with catch_warn_mae():
warnings.simplefilter("ignore")
assert_almost_equal(np.corrcoef(x, rowvar=False, bias=True),
corrcoef(x, rowvar=False, bias=True))
def test_2d_wo_missing(self):
# Test corrcoef on 1 2D variable w/o missing values
x = self.data.reshape(3, 4)
assert_almost_equal(np.corrcoef(x), corrcoef(x))
assert_almost_equal(np.corrcoef(x, rowvar=False),
corrcoef(x, rowvar=False))
with catch_warn_mae():
warnings.simplefilter("ignore")
assert_almost_equal(np.corrcoef(x, rowvar=False, bias=True),
corrcoef(x, rowvar=False, bias=True))
def test_1d_w_missing(self):
        # Test corrcoef on one 1D variable w/ missing values
x = self.data
x[-1] = masked
x -= x.mean()
nx = x.compressed()
assert_almost_equal(np.corrcoef(nx), corrcoef(x))
assert_almost_equal(np.corrcoef(nx, rowvar=False),
corrcoef(x, rowvar=False))
with catch_warn_mae():
warnings.simplefilter("ignore")
assert_almost_equal(np.corrcoef(nx, rowvar=False, bias=True),
corrcoef(x, rowvar=False, bias=True))
try:
corrcoef(x, allow_masked=False)
except ValueError:
pass
# 2 1D variables w/ missing values
nx = x[1:-1]
assert_almost_equal(np.corrcoef(nx, nx[::-1]), corrcoef(x, x[::-1]))
assert_almost_equal(np.corrcoef(nx, nx[::-1], rowvar=False),
corrcoef(x, x[::-1], rowvar=False))
with catch_warn_mae():
warnings.simplefilter("ignore")
# ddof and bias have no or negligible effect on the function
assert_almost_equal(np.corrcoef(nx, nx[::-1]),
corrcoef(x, x[::-1], bias=1))
assert_almost_equal(np.corrcoef(nx, nx[::-1]),
corrcoef(x, x[::-1], ddof=2))
def test_2d_w_missing(self):
# Test corrcoef on 2D variable w/ missing value
x = self.data
x[-1] = masked
x = x.reshape(3, 4)
test = corrcoef(x)
control = np.corrcoef(x)
assert_almost_equal(test[:-1, :-1], control[:-1, :-1])
with catch_warn_mae():
warnings.simplefilter("ignore")
# ddof and bias have no or negligible effect on the function
assert_almost_equal(corrcoef(x, ddof=-2)[:-1, :-1],
control[:-1, :-1])
assert_almost_equal(corrcoef(x, ddof=3)[:-1, :-1],
control[:-1, :-1])
assert_almost_equal(corrcoef(x, bias=1)[:-1, :-1],
control[:-1, :-1])
class TestPolynomial(TestCase):
#
def test_polyfit(self):
# Tests polyfit
# On ndarrays
x = np.random.rand(10)
y = np.random.rand(20).reshape(-1, 2)
assert_almost_equal(polyfit(x, y, 3), np.polyfit(x, y, 3))
# ON 1D maskedarrays
x = x.view(MaskedArray)
x[0] = masked
y = y.view(MaskedArray)
y[0, 0] = y[-1, -1] = masked
#
(C, R, K, S, D) = polyfit(x, y[:, 0], 3, full=True)
(c, r, k, s, d) = np.polyfit(x[1:], y[1:, 0].compressed(), 3,
full=True)
for (a, a_) in zip((C, R, K, S, D), (c, r, k, s, d)):
assert_almost_equal(a, a_)
#
(C, R, K, S, D) = polyfit(x, y[:, -1], 3, full=True)
(c, r, k, s, d) = np.polyfit(x[1:-1], y[1:-1, -1], 3, full=True)
for (a, a_) in zip((C, R, K, S, D), (c, r, k, s, d)):
assert_almost_equal(a, a_)
#
(C, R, K, S, D) = polyfit(x, y, 3, full=True)
(c, r, k, s, d) = np.polyfit(x[1:-1], y[1:-1,:], 3, full=True)
for (a, a_) in zip((C, R, K, S, D), (c, r, k, s, d)):
assert_almost_equal(a, a_)
#
w = np.random.rand(10) + 1
wo = w.copy()
xs = x[1:-1]
ys = y[1:-1]
ws = w[1:-1]
(C, R, K, S, D) = polyfit(x, y, 3, full=True, w=w)
(c, r, k, s, d) = np.polyfit(xs, ys, 3, full=True, w=ws)
assert_equal(w, wo)
for (a, a_) in zip((C, R, K, S, D), (c, r, k, s, d)):
assert_almost_equal(a, a_)
def test_polyfit_with_masked_NaNs(self):
x = np.random.rand(10)
y = np.random.rand(20).reshape(-1, 2)
x[0] = np.nan
y[-1,-1] = np.nan
x = x.view(MaskedArray)
y = y.view(MaskedArray)
x[0] = masked
y[-1,-1] = masked
(C, R, K, S, D) = polyfit(x, y, 3, full=True)
(c, r, k, s, d) = np.polyfit(x[1:-1], y[1:-1,:], 3, full=True)
for (a, a_) in zip((C, R, K, S, D), (c, r, k, s, d)):
assert_almost_equal(a, a_)
class TestArraySetOps(TestCase):
def test_unique_onlist(self):
# Test unique on list
data = [1, 1, 1, 2, 2, 3]
test = unique(data, return_index=True, return_inverse=True)
self.assertTrue(isinstance(test[0], MaskedArray))
assert_equal(test[0], masked_array([1, 2, 3], mask=[0, 0, 0]))
assert_equal(test[1], [0, 3, 5])
assert_equal(test[2], [0, 0, 0, 1, 1, 2])
def test_unique_onmaskedarray(self):
# Test unique on masked data w/use_mask=True
data = masked_array([1, 1, 1, 2, 2, 3], mask=[0, 0, 1, 0, 1, 0])
test = unique(data, return_index=True, return_inverse=True)
assert_equal(test[0], masked_array([1, 2, 3, -1], mask=[0, 0, 0, 1]))
assert_equal(test[1], [0, 3, 5, 2])
assert_equal(test[2], [0, 0, 3, 1, 3, 2])
#
data.fill_value = 3
data = masked_array(data=[1, 1, 1, 2, 2, 3],
mask=[0, 0, 1, 0, 1, 0], fill_value=3)
test = unique(data, return_index=True, return_inverse=True)
assert_equal(test[0], masked_array([1, 2, 3, -1], mask=[0, 0, 0, 1]))
assert_equal(test[1], [0, 3, 5, 2])
assert_equal(test[2], [0, 0, 3, 1, 3, 2])
def test_unique_allmasked(self):
# Test all masked
data = masked_array([1, 1, 1], mask=True)
test = unique(data, return_index=True, return_inverse=True)
assert_equal(test[0], masked_array([1, ], mask=[True]))
assert_equal(test[1], [0])
assert_equal(test[2], [0, 0, 0])
#
# Test masked
data = masked
test = unique(data, return_index=True, return_inverse=True)
assert_equal(test[0], masked_array(masked))
assert_equal(test[1], [0])
assert_equal(test[2], [0])
def test_ediff1d(self):
        # Tests ediff1d
x = masked_array(np.arange(5), mask=[1, 0, 0, 0, 1])
control = array([1, 1, 1, 4], mask=[1, 0, 0, 1])
test = ediff1d(x)
assert_equal(test, control)
assert_equal(test.data, control.data)
assert_equal(test.mask, control.mask)
def test_ediff1d_tobegin(self):
# Test ediff1d w/ to_begin
x = masked_array(np.arange(5), mask=[1, 0, 0, 0, 1])
test = ediff1d(x, to_begin=masked)
control = array([0, 1, 1, 1, 4], mask=[1, 1, 0, 0, 1])
assert_equal(test, control)
assert_equal(test.data, control.data)
assert_equal(test.mask, control.mask)
#
test = ediff1d(x, to_begin=[1, 2, 3])
control = array([1, 2, 3, 1, 1, 1, 4], mask=[0, 0, 0, 1, 0, 0, 1])
assert_equal(test, control)
assert_equal(test.data, control.data)
assert_equal(test.mask, control.mask)
def test_ediff1d_toend(self):
# Test ediff1d w/ to_end
x = masked_array(np.arange(5), mask=[1, 0, 0, 0, 1])
test = ediff1d(x, to_end=masked)
control = array([1, 1, 1, 4, 0], mask=[1, 0, 0, 1, 1])
assert_equal(test, control)
assert_equal(test.data, control.data)
assert_equal(test.mask, control.mask)
#
test = ediff1d(x, to_end=[1, 2, 3])
control = array([1, 1, 1, 4, 1, 2, 3], mask=[1, 0, 0, 1, 0, 0, 0])
assert_equal(test, control)
assert_equal(test.data, control.data)
assert_equal(test.mask, control.mask)
def test_ediff1d_tobegin_toend(self):
# Test ediff1d w/ to_begin and to_end
x = masked_array(np.arange(5), mask=[1, 0, 0, 0, 1])
test = ediff1d(x, to_end=masked, to_begin=masked)
control = array([0, 1, 1, 1, 4, 0], mask=[1, 1, 0, 0, 1, 1])
assert_equal(test, control)
assert_equal(test.data, control.data)
assert_equal(test.mask, control.mask)
#
test = ediff1d(x, to_end=[1, 2, 3], to_begin=masked)
control = array([0, 1, 1, 1, 4, 1, 2, 3],
mask=[1, 1, 0, 0, 1, 0, 0, 0])
assert_equal(test, control)
assert_equal(test.data, control.data)
assert_equal(test.mask, control.mask)
def test_ediff1d_ndarray(self):
# Test ediff1d w/ a ndarray
x = np.arange(5)
test = ediff1d(x)
control = array([1, 1, 1, 1], mask=[0, 0, 0, 0])
assert_equal(test, control)
self.assertTrue(isinstance(test, MaskedArray))
assert_equal(test.data, control.data)
assert_equal(test.mask, control.mask)
#
test = ediff1d(x, to_end=masked, to_begin=masked)
control = array([0, 1, 1, 1, 1, 0], mask=[1, 0, 0, 0, 0, 1])
self.assertTrue(isinstance(test, MaskedArray))
assert_equal(test.data, control.data)
assert_equal(test.mask, control.mask)
def test_intersect1d(self):
# Test intersect1d
x = array([1, 3, 3, 3], mask=[0, 0, 0, 1])
y = array([3, 1, 1, 1], mask=[0, 0, 0, 1])
test = intersect1d(x, y)
control = array([1, 3, -1], mask=[0, 0, 1])
assert_equal(test, control)
def test_setxor1d(self):
# Test setxor1d
a = array([1, 2, 5, 7, -1], mask=[0, 0, 0, 0, 1])
b = array([1, 2, 3, 4, 5, -1], mask=[0, 0, 0, 0, 0, 1])
test = setxor1d(a, b)
assert_equal(test, array([3, 4, 7]))
#
a = array([1, 2, 5, 7, -1], mask=[0, 0, 0, 0, 1])
b = [1, 2, 3, 4, 5]
test = setxor1d(a, b)
assert_equal(test, array([3, 4, 7, -1], mask=[0, 0, 0, 1]))
#
a = array([1, 2, 3])
b = array([6, 5, 4])
test = setxor1d(a, b)
assert_(isinstance(test, MaskedArray))
assert_equal(test, [1, 2, 3, 4, 5, 6])
#
a = array([1, 8, 2, 3], mask=[0, 1, 0, 0])
b = array([6, 5, 4, 8], mask=[0, 0, 0, 1])
test = setxor1d(a, b)
assert_(isinstance(test, MaskedArray))
assert_equal(test, [1, 2, 3, 4, 5, 6])
#
assert_array_equal([], setxor1d([], []))
def test_in1d(self):
# Test in1d
a = array([1, 2, 5, 7, -1], mask=[0, 0, 0, 0, 1])
b = array([1, 2, 3, 4, 5, -1], mask=[0, 0, 0, 0, 0, 1])
test = in1d(a, b)
assert_equal(test, [True, True, True, False, True])
#
a = array([5, 5, 2, 1, -1], mask=[0, 0, 0, 0, 1])
b = array([1, 5, -1], mask=[0, 0, 1])
test = in1d(a, b)
assert_equal(test, [True, True, False, True, True])
#
assert_array_equal([], in1d([], []))
def test_in1d_invert(self):
# Test in1d's invert parameter
a = array([1, 2, 5, 7, -1], mask=[0, 0, 0, 0, 1])
b = array([1, 2, 3, 4, 5, -1], mask=[0, 0, 0, 0, 0, 1])
assert_equal(np.invert(in1d(a, b)), in1d(a, b, invert=True))
a = array([5, 5, 2, 1, -1], mask=[0, 0, 0, 0, 1])
b = array([1, 5, -1], mask=[0, 0, 1])
assert_equal(np.invert(in1d(a, b)), in1d(a, b, invert=True))
assert_array_equal([], in1d([], [], invert=True))
def test_union1d(self):
# Test union1d
a = array([1, 2, 5, 7, 5, -1], mask=[0, 0, 0, 0, 0, 1])
b = array([1, 2, 3, 4, 5, -1], mask=[0, 0, 0, 0, 0, 1])
test = union1d(a, b)
control = array([1, 2, 3, 4, 5, 7, -1], mask=[0, 0, 0, 0, 0, 0, 1])
assert_equal(test, control)
#
assert_array_equal([], union1d([], []))
def test_setdiff1d(self):
# Test setdiff1d
a = array([6, 5, 4, 7, 7, 1, 2, 1], mask=[0, 0, 0, 0, 0, 0, 0, 1])
b = array([2, 4, 3, 3, 2, 1, 5])
test = setdiff1d(a, b)
assert_equal(test, array([6, 7, -1], mask=[0, 0, 1]))
#
a = arange(10)
b = arange(8)
assert_equal(setdiff1d(a, b), array([8, 9]))
a = array([], np.uint32, mask=[])
assert_equal(setdiff1d(a, []).dtype, np.uint32)
def test_setdiff1d_char_array(self):
        # Test setdiff1d on character arrays
a = np.array(['a', 'b', 'c'])
b = np.array(['a', 'b', 's'])
assert_array_equal(setdiff1d(a, b), np.array(['c']))
class TestShapeBase(TestCase):
def test_atleast2d(self):
# Test atleast_2d
a = masked_array([0, 1, 2], mask=[0, 1, 0])
b = atleast_2d(a)
assert_equal(b.shape, (1, 3))
assert_equal(b.mask.shape, b.data.shape)
assert_equal(a.shape, (3,))
assert_equal(a.mask.shape, a.data.shape)
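# --- Illustrative sketch (not part of the original test module) -------------
# A compact illustration of the clump behaviour asserted in TestGeneric above;
# the helper name is an assumption for illustration only.
def _clump_example():
    a = masked_array(np.arange(10))
    a[[0, 1, 2, 6, 8, 9]] = masked
    assert clump_masked(a) == [slice(0, 3), slice(6, 7), slice(8, 10)]
    assert clump_unmasked(a) == [slice(3, 6), slice(7, 8)]
    return a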
if __name__ == "__main__":
run_module_suite()
| bsd-3-clause | 967,578,434,001,526,700 | 38.562721 | 78 | 0.482952 | false |
frankiecjunle/yunblog | venv/lib/python2.7/site-packages/pip/_vendor/requests/packages/urllib3/util/response.py | 515 | 2165 | from __future__ import absolute_import
from ..packages.six.moves import http_client as httplib
from ..exceptions import HeaderParsingError
def is_fp_closed(obj):
"""
Checks whether a given file-like object is closed.
:param obj:
The file-like object to check.
"""
try:
# Check via the official file-like-object way.
return obj.closed
except AttributeError:
pass
try:
# Check if the object is a container for another file-like object that
# gets released on exhaustion (e.g. HTTPResponse).
return obj.fp is None
except AttributeError:
pass
raise ValueError("Unable to determine whether fp is closed.")
def assert_header_parsing(headers):
"""
Asserts whether all headers have been successfully parsed.
Extracts encountered errors from the result of parsing headers.
Only works on Python 3.
:param headers: Headers to verify.
:type headers: `httplib.HTTPMessage`.
:raises urllib3.exceptions.HeaderParsingError:
If parsing errors are found.
"""
# This will fail silently if we pass in the wrong kind of parameter.
# To make debugging easier add an explicit check.
if not isinstance(headers, httplib.HTTPMessage):
raise TypeError('expected httplib.Message, got {0}.'.format(
type(headers)))
defects = getattr(headers, 'defects', None)
get_payload = getattr(headers, 'get_payload', None)
unparsed_data = None
if get_payload: # Platform-specific: Python 3.
unparsed_data = get_payload()
if defects or unparsed_data:
raise HeaderParsingError(defects=defects, unparsed_data=unparsed_data)
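# --- Illustrative sketch (not part of the original module) ------------------
# A hypothetical caller-side helper showing how assert_header_parsing() is
# typically used: feed it the raw message object of an httplib response and
# downgrade parsing problems to a warning. The helper name, the ``.msg``
# attribute access and the logging choice are assumptions for illustration.
def _warn_on_header_parsing_errors(httplib_response):
    import logging
    try:
        assert_header_parsing(httplib_response.msg)
    except HeaderParsingError as hpe:  # see the Python-3-only note above
        logging.getLogger(__name__).warning('Failed to parse headers: %s', hpe)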
def is_response_to_head(response):
"""
Checks whether the request of a response has been a HEAD-request.
Handles the quirks of AppEngine.
    :param response:
        The response to check.
    :type response: :class:`httplib.HTTPResponse`
"""
# FIXME: Can we do this somehow without accessing private httplib _method?
method = response._method
if isinstance(method, int): # Platform-specific: Appengine
return method == 3
return method.upper() == 'HEAD'
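# --- Illustrative sketch (not part of the original module) ------------------
# Minimal, self-contained demonstrations of the two predicates above; the
# helper and the stand-in classes are assumptions for illustration only.
def _demo_response_predicates():
    import io
    fp = io.BytesIO(b"payload")
    assert not is_fp_closed(fp)   # open object: the official .closed is False
    fp.close()
    assert is_fp_closed(fp)       # now .closed is True
    class _Exhausted(object):
        # Mimics a container such as httplib.HTTPResponse, whose ``fp``
        # attribute is set to None once the body has been released.
        fp = None
    assert is_fp_closed(_Exhausted())
    class _FakeHeadResponse(object):
        # is_response_to_head() only inspects the private ``_method`` field.
        _method = 'HEAD'
    assert is_response_to_head(_FakeHeadResponse())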
| mit | 3,664,133,512,059,769,300 | 28.256757 | 78 | 0.674827 | false |
cloudendpoints/endpoints-management-python | test/test_timestamp.py | 3 | 4311 | # Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
import datetime
import unittest2
from expects import be_below_or_equal, expect, equal, raise_error
from endpoints_management.control import timestamp
class TestToRfc3339(unittest2.TestCase):
A_LONG_TIME_AGO = datetime.datetime(1971, 12, 31, 21, 0, 20, 21000)
TESTS = [
(A_LONG_TIME_AGO, u'1971-12-31T21:00:20.021Z'),
(A_LONG_TIME_AGO - datetime.datetime(1970, 1, 1),
u'1971-12-31T21:00:20.021Z')
]
def test_should_converts_correctly(self):
for t in self.TESTS:
expect(timestamp.to_rfc3339(t[0])).to(equal(t[1]))
def test_should_fail_on_invalid_input(self):
testf = lambda: timestamp.to_rfc3339(u'this will not work')
expect(testf).to(raise_error(ValueError))
class TestFromRfc3339(unittest2.TestCase):
TOLERANCE = 10000 # 1e-5 * 1e9
TESTS = [
# Simple
(u'1971-12-31T21:00:20.021Z',
datetime.datetime(1971, 12, 31, 21, 0, 20, 21000)),
# different timezone
(u'1996-12-19T16:39:57-08:00',
datetime.datetime(1996, 12, 20, 0, 39, 57, 0)),
# microseconds
(u'1996-12-19T16:39:57.123456-08:00',
datetime.datetime(1996, 12, 20, 0, 39, 57, 123456)),
# Beyond 2038
(u'2100-01-01T00:00:00Z',
datetime.datetime(2100, 1, 1, 0, 0, 0, 0))
]
NANO_TESTS = [
# Simple
(u'1971-12-31T21:00:20.021Z',
(datetime.datetime(1971, 12, 31, 21, 0, 20, 21000), 21000000)),
# different timezone
(u'1996-12-19T16:39:57-08:00',
(datetime.datetime(1996, 12, 20, 0, 39, 57, 0), 0)),
# microseconds
(u'1996-12-19T16:39:57.123456789-08:00',
(datetime.datetime(1996, 12, 20, 0, 39, 57, 123457), 123456789)),
]
def test_should_convert_correctly_without_nanos(self):
for t in self.TESTS:
expect(timestamp.from_rfc3339(t[0])).to(equal(t[1]))
def test_should_convert_correctly_with_nanos(self):
for t in self.NANO_TESTS:
dt, nanos = timestamp.from_rfc3339(t[0], with_nanos=True)
expect(dt).to(equal(t[1][0]))
epsilon = abs(nanos - t[1][1])
# expect(epsilon).to(equal(0))
expect(epsilon).to(be_below_or_equal(self.TOLERANCE))
class TestCompare(unittest2.TestCase):
TESTS = [
# Strings
(u'1971-10-31T21:00:20.021Z', u'1971-11-30T21:00:20.021Z', -1),
(u'1971-11-30T21:00:20.021Z', u'1971-10-30T21:00:20.021Z', 1),
(u'1971-11-30T21:00:20Z', u'1971-11-30T21:00:20Z', 0),
(u'1971-11-30T21:00:20.021Z', u'1971-11-30T21:00:20.041Z', -1),
(u'1971-11-30T21:00:20.021Z', u'1971-11-30T21:00:20.001Z', 1),
# Datetimes
(datetime.datetime(1996, 10, 20, 0, 39, 57, 0),
datetime.datetime(1996, 11, 20, 0, 39, 57, 0),
-1),
(datetime.datetime(1996, 10, 20, 0, 39, 57, 0),
datetime.datetime(1996, 10, 20, 0, 39, 57, 0),
0),
(datetime.datetime(1996, 11, 20, 0, 39, 57, 0),
datetime.datetime(1996, 10, 20, 0, 39, 57, 0),
1)
]
def test_should_compare_correctly(self):
for t in self.TESTS:
a, b, want = t
expect(timestamp.compare(a, b)).to(equal(want))
def test_should_fail_if_inputs_do_not_have_the_same_type(self):
testf = lambda: timestamp.compare(self.TESTS[0][0],
datetime.datetime.utcnow())
expect(testf).to(raise_error(ValueError))
testf = lambda: timestamp.compare(self.TESTS[0],
datetime.datetime.utcnow())
expect(testf).to(raise_error(ValueError))
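# --- Illustrative sketch (not part of the original test module) -------------
# The conversions exercised above compose into a simple round trip; the helper
# name below is an assumption for illustration, not an existing API.
def _roundtrip_example():
    a_long_time_ago = datetime.datetime(1971, 12, 31, 21, 0, 20, 21000)
    as_text = timestamp.to_rfc3339(a_long_time_ago)  # u'1971-12-31T21:00:20.021Z'
    back = timestamp.from_rfc3339(as_text)           # same naive UTC datetime
    assert back == a_long_time_ago
    assert timestamp.compare(a_long_time_ago, back) == 0
    # with_nanos=True also returns the sub-second part expressed in nanoseconds.
    _, nanos = timestamp.from_rfc3339(as_text, with_nanos=True)
    assert abs(nanos - 21000000) <= 10000             # same tolerance as above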
| apache-2.0 | -3,452,052,248,995,342,000 | 36.486957 | 74 | 0.599397 | false |
ville-k/tensorflow | tensorflow/compiler/tests/pooling_ops_3d_test.py | 85 | 12646 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Functional tests for 3d pooling operations."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.compiler.tests.xla_test import XLATestCase
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import gen_nn_ops
from tensorflow.python.ops import nn_ops
from tensorflow.python.platform import test
# Wrapper around AvgPoolGrad that ignores extra arguments needed by
# MaxPoolGrad.
def _AvgPoolGrad(inputs, outputs, output_gradients, ksize, strides, padding):
del outputs # Unused by average-pooling gradients.
return gen_nn_ops._avg_pool3d_grad(
inputs.get_shape().as_list(),
output_gradients,
ksize=ksize,
strides=strides,
padding=padding)
class Pooling3DTest(XLATestCase):
def _VerifyValues(self, pool_func, input_sizes, window, strides, padding,
expected):
"""Verifies the output values of the pooling function.
Args:
pool_func: Function to be called: co.MaxPool, co.AvgPool.
input_sizes: Input tensor dimensions.
window: Tuple of kernel dims: planes, rows, cols.
strides: Tuple of strides for dims: planes, rows, cols.
padding: Padding type.
expected: An array containing the expected operation outputs.
"""
total_size = 1
for s in input_sizes:
total_size *= s
# Initializes the input tensor with array containing incrementing
# numbers from 1.
x = np.arange(1.0, total_size + 1, dtype=np.float32)
x = x.reshape(input_sizes)
with self.test_session() as sess, self.test_scope():
inputs = array_ops.placeholder(dtypes.float32)
t = pool_func(
inputs,
ksize=[1] + window + [1],
strides=[1] + strides + [1],
padding=padding)
vals = sess.run(t, {inputs: x})
# Verifies values.
actual = vals.flatten()
self.assertAllClose(expected, actual)
def testAvgPool3dValidPadding(self):
expected_output = [20.5, 21.5, 22.5]
self._VerifyValues(
nn_ops.avg_pool3d,
input_sizes=[1, 3, 3, 3, 3],
window=[2, 2, 2],
strides=[2, 2, 2],
padding="VALID",
expected=expected_output)
def testAvgPool3dSamePadding(self):
expected_output = [20.5, 21.5, 22.5, 26.5, 27.5, 28.5]
self._VerifyValues(
nn_ops.avg_pool3d,
input_sizes=[1, 2, 2, 4, 3],
window=[2, 2, 2],
strides=[2, 2, 2],
padding="SAME",
expected=expected_output)
def testAvgPool3dSamePaddingDifferentStrides(self):
expected_output = [1.5, 4.5, 7.5, 17.5, 20.5, 23.5, 33.5, 36.5, 39.5]
self._VerifyValues(
nn_ops.avg_pool3d,
input_sizes=[1, 5, 8, 1, 1],
window=[1, 2, 3],
strides=[2, 3, 1],
padding="SAME",
expected=expected_output)
def testMaxPool3dValidPadding(self):
expected_output = [40.0, 41.0, 42.0]
self._VerifyValues(
nn_ops.max_pool3d,
input_sizes=[1, 3, 3, 3, 3],
window=[2, 2, 2],
strides=[2, 2, 2],
padding="VALID",
expected=expected_output)
def testMaxPool3dSamePadding(self):
expected_output = [31., 32., 33., 34., 35., 36.]
self._VerifyValues(
nn_ops.max_pool3d,
input_sizes=[1, 2, 2, 3, 3],
window=[2, 2, 2],
strides=[2, 2, 2],
padding="SAME",
expected=expected_output)
def testMaxPool3dSamePaddingDifferentStrides(self):
expected_output = [2., 5., 8., 18., 21., 24., 34., 37., 40.]
self._VerifyValues(
nn_ops.max_pool3d,
input_sizes=[1, 5, 8, 1, 1],
window=[1, 2, 3],
strides=[2, 3, 1],
padding="SAME",
expected=expected_output)
# Test pooling on a larger input, with different stride and kernel
# size for the 'z' dimension.
# Simulate max pooling in numpy to get the expected output.
input_data = np.arange(1, 5 * 27 * 27 * 64 + 1).reshape((5, 27, 27, 64))
input_data = np.pad(input_data, [[0, 0], [0, 1], [0, 1], [0, 0]],
mode="constant")
expected_output = input_data[:, 1::2, 1::2, :]
expected_output[:, -1, :, :] = input_data[:, -2, 1::2, :]
expected_output[:, :, -1, :] = input_data[:, 1::2, -2, :]
expected_output[:, -1, -1, :] = input_data[:, -2, -2, :]
self._VerifyValues(
nn_ops.max_pool3d,
input_sizes=[1, 5, 27, 27, 64],
window=[1, 2, 2],
strides=[1, 2, 2],
padding="SAME",
expected=expected_output.flatten())
def testKernelSmallerThanStride(self):
self._VerifyValues(
nn_ops.max_pool3d,
input_sizes=[1, 3, 3, 3, 1],
window=[1, 1, 1],
strides=[2, 2, 2],
padding="SAME",
expected=[1, 3, 7, 9, 19, 21, 25, 27])
self._VerifyValues(
nn_ops.max_pool3d,
input_sizes=[1, 7, 7, 7, 1],
window=[2, 2, 2],
strides=[3, 3, 3],
padding="VALID",
expected=[58, 61, 79, 82, 205, 208, 226, 229])
self._VerifyValues(
nn_ops.avg_pool3d,
input_sizes=[1, 3, 3, 3, 1],
window=[1, 1, 1],
strides=[2, 2, 2],
padding="SAME",
expected=[1, 3, 7, 9, 19, 21, 25, 27])
self._VerifyValues(
nn_ops.avg_pool3d,
input_sizes=[1, 7, 7, 7, 1],
window=[2, 2, 2],
strides=[3, 3, 3],
padding="VALID",
expected=[29.5, 32.5, 50.5, 53.5, 176.5, 179.5, 197.5, 200.5])
def _VerifyGradient(self, pool_func, pool_grad_func, input_sizes, ksize,
strides, padding):
"""Verifies the output values of the pooling gradient function.
Args:
pool_func: Forward pooling function
      pool_grad_func: Pooling gradient function corresponding to pool_func
input_sizes: Input tensor dimensions.
ksize: The kernel size dimensions
strides: The stride dimensions
padding: Padding type.
"""
ksize = [1] + ksize + [1]
strides = [1] + strides + [1]
total_size = np.prod(input_sizes)
x = np.arange(1, total_size + 1, dtype=np.float32).reshape(input_sizes)
with self.test_session() as sess:
# Use the forward pool function to compute some corresponding outputs
# (needed for the CPU device, and we need the shape in both cases).
with ops.device("CPU"):
inputs = array_ops.placeholder(dtypes.float32, shape=input_sizes)
outputs = pool_func(
inputs,
ksize=ksize,
strides=strides,
padding=padding)
output_vals = np.array(sess.run(outputs, {inputs: x}))
output_gradient_vals = np.arange(
1, output_vals.size + 1, dtype=np.float32)
output_gradient_vals = output_gradient_vals.reshape(output_vals.shape)
# Use the Tensorflow CPU pooling gradient to compute the expected input
# gradients.
with ops.device("CPU"):
output_gradients = array_ops.placeholder(
dtypes.float32, shape=output_vals.shape)
expected_input_gradients = pool_grad_func(
inputs,
outputs,
output_gradients,
ksize=ksize,
strides=strides,
padding=padding)
expected_input_gradient_vals = sess.run(
expected_input_gradients,
{inputs: x,
output_gradients: output_gradient_vals})
# Run the gradient op on the XLA device
with self.test_scope():
outputs = array_ops.placeholder(dtypes.float32, shape=output_vals.shape)
actual_input_gradients = pool_grad_func(
inputs,
outputs,
output_gradients,
ksize=ksize,
strides=strides,
padding=padding)
actual = sess.run(actual_input_gradients, {
inputs: x,
outputs: output_vals,
output_gradients: output_gradient_vals
})
# Compare the Tensorflow and XLA results.
self.assertAllClose(
expected_input_gradient_vals.flatten(),
actual.flatten(),
rtol=1e-5,
atol=1e-6)
self.assertShapeEqual(actual, inputs)
def testMaxPoolGradValidPadding1_1_3d(self):
self._VerifyGradient(
nn_ops.max_pool3d,
gen_nn_ops._max_pool3d_grad,
input_sizes=[1, 3, 3, 3, 1],
ksize=[1, 1, 1],
strides=[1, 1, 1],
padding="VALID")
def testMaxPoolGradValidPadding2_1_6_3d(self):
self._VerifyGradient(
nn_ops.max_pool3d,
gen_nn_ops._max_pool3d_grad,
input_sizes=[2, 3, 3, 6, 3],
ksize=[2, 2, 2],
strides=[1, 1, 1],
padding="VALID")
def testMaxPoolGradValidPadding2_1_7_3d(self):
self._VerifyGradient(
nn_ops.max_pool3d,
gen_nn_ops._max_pool3d_grad,
input_sizes=[2, 3, 5, 7, 3],
ksize=[2, 2, 2],
strides=[1, 1, 1],
padding="VALID")
def testMaxPoolGradValidPadding2_2_3d(self):
self._VerifyGradient(
nn_ops.max_pool3d,
gen_nn_ops._max_pool3d_grad,
input_sizes=[2, 2, 2, 2, 3],
ksize=[2, 2, 2],
strides=[2, 2, 2],
padding="VALID")
def testMaxPoolGradSamePadding1_1_3d(self):
self._VerifyGradient(
nn_ops.max_pool3d,
gen_nn_ops._max_pool3d_grad,
input_sizes=[2, 3, 2, 4, 1],
ksize=[1, 1, 1],
strides=[1, 1, 1],
padding="SAME")
def testMaxPoolGradSamePadding2_1_3d(self):
self._VerifyGradient(
nn_ops.max_pool3d,
gen_nn_ops._max_pool3d_grad,
input_sizes=[2, 3, 2, 4, 1],
ksize=[2, 2, 2],
strides=[1, 1, 1],
padding="SAME")
def testMaxPoolGradSamePadding2_2_3d(self):
self._VerifyGradient(
nn_ops.max_pool3d,
gen_nn_ops._max_pool3d_grad,
input_sizes=[2, 5, 2, 4, 3],
ksize=[2, 2, 2],
strides=[2, 2, 2],
padding="SAME")
def testMaxPoolGradSamePadding3_1_3d(self):
self._VerifyGradient(
nn_ops.max_pool3d,
gen_nn_ops._max_pool3d_grad,
input_sizes=[1, 3, 3, 7, 1],
ksize=[3, 3, 3],
strides=[1, 1, 1],
padding="SAME")
def testAvgPoolGradValidPadding1_1_3d(self):
self._VerifyGradient(
nn_ops.avg_pool3d,
_AvgPoolGrad,
input_sizes=[2, 3, 3, 3, 3],
ksize=[1, 1, 1],
strides=[1, 1, 1],
padding="VALID")
def testAvgPoolGradValidPadding2_1_3d(self):
self._VerifyGradient(
nn_ops.avg_pool3d,
_AvgPoolGrad,
input_sizes=[2, 3, 3, 3, 3],
ksize=[2, 2, 2],
strides=[1, 1, 1],
padding="VALID")
def testAvgPoolGradValidPadding2_2_3d(self):
self._VerifyGradient(
nn_ops.avg_pool3d,
_AvgPoolGrad,
input_sizes=[2, 2, 2, 2, 3],
ksize=[2, 2, 2],
strides=[2, 2, 2],
padding="VALID")
def testAvgPoolGradSamePadding1_1_3d(self):
self._VerifyGradient(
nn_ops.avg_pool3d,
_AvgPoolGrad,
input_sizes=[2, 3, 2, 4, 3],
ksize=[1, 1, 1],
strides=[1, 1, 1],
padding="SAME")
def testAvgPoolGradSamePadding2_1_3d(self):
self._VerifyGradient(
nn_ops.avg_pool3d,
_AvgPoolGrad,
input_sizes=[1, 2, 2, 2, 1],
ksize=[2, 2, 2],
strides=[1, 1, 1],
padding="SAME")
def testAvgPoolGradSamePadding2_2_3d(self):
self._VerifyGradient(
nn_ops.avg_pool3d,
_AvgPoolGrad,
input_sizes=[2, 5, 2, 4, 3],
ksize=[2, 2, 2],
strides=[2, 2, 2],
padding="SAME")
def testAvgPoolGradSamePadding3_1_3d(self):
self._VerifyGradient(
nn_ops.avg_pool3d,
_AvgPoolGrad,
input_sizes=[1, 3, 6, 7, 1],
ksize=[3, 3, 3],
strides=[1, 1, 1],
padding="SAME")
if __name__ == "__main__":
test.main()
| apache-2.0 | -5,878,281,486,346,029,000 | 30.615 | 80 | 0.577099 | false |
teamtuga4/teamtuga4ever.repository | plugin.video.traquinas/resources/lib/resolvers/v_vids.py | 23 | 1385 | # -*- coding: utf-8 -*-
'''
Specto Add-on
Copyright (C) 2015 lambda
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import urllib
from resources.lib.libraries import client
def resolve(url):
    try:
        # Fetch the landing page and collect the hidden inputs of form 'F1'
        # so the download request can be replayed with the expected fields.
        result = client.request(url)
        post = {}
        f = client.parseDOM(result, 'Form', attrs = {'name': 'F1'})[0]
        k = client.parseDOM(f, 'input', ret='name', attrs = {'type': 'hidden'})
        for i in k: post.update({i: client.parseDOM(f, 'input', ret='value', attrs = {'name': i})[0]})
        # Ask for the free (non-premium) download method.
        post.update({'method_free': '', 'method_premium': ''})
        result = client.request(url, post=post)
        # The direct media link is exposed by the download button anchor.
        url = client.parseDOM(result, 'a', ret='href', attrs = {'id': 'downloadbutton'})[0]
        return url
    except:
        return
| gpl-2.0 | -372,157,471,895,818,600 | 31.209302 | 102 | 0.644043 | false |
uzh/vm-mad | vmmad/provider/libcloud.py | 1 | 9309 | #! /usr/bin/env python
# -*- coding: utf-8 -*-
"""
Interfaces to cloud providers, using `Apache LibCloud <http://libcloud.apache.org>`
"""
# Copyright (C) 2011, 2012 ETH Zurich and University of Zurich. All rights reserved.
#
# Authors:
# Riccardo Murri <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import absolute_import
__docformat__ = 'reStructuredText'
__version__ = '$Revision$'
# stdlib imports
from abc import abstractmethod
from copy import copy
import os
import sys
# libcloud imports
import libcloud.compute.types
import libcloud.compute.providers
# local imports
from vmmad import log
from vmmad.orchestrator import VmInfo
from vmmad.provider import NodeProvider
class CloudNodeProvider(NodeProvider):
"""
Abstract base class implementing common functionality for all
LibCloud providers.
"""
@staticmethod
def _vminfo_state_from_libcloud_status(status):
"""
Return the `orchestrator.VmInfo` state word corresponding to
LibCloud's `NodeState`.
"""
return {
libcloud.compute.types.NodeState.PENDING: VmInfo.STARTING,
libcloud.compute.types.NodeState.REBOOTING: VmInfo.STARTING,
libcloud.compute.types.NodeState.RUNNING: None,
libcloud.compute.types.NodeState.TERMINATED: VmInfo.DOWN,
libcloud.compute.types.NodeState.UNKNOWN: VmInfo.OTHER,
}[status]
class DummyCloud(CloudNodeProvider):
"""
    Interface to the "dummy" cloud provider of `Apache LibCloud <http://libcloud.apache.org/>`.
"""
def __init__(self, image='1', kind='1'):
self.image = image
self.kind = kind
log.debug("Creating LibCloud's 'Dummy' provider ...")
driver = libcloud.compute.providers.get_driver(libcloud.compute.types.Provider.DUMMY)
self.provider = driver(0)
# LibCloud's "dummy" provider always starts two instances; remove them
for node in copy(self.provider.list_nodes()):
node.destroy()
log.info("Using cloud provider '%s'.", self.provider.__class__.__name__)
log.info("Listing available images ...")
self._images = dict((img.id, img) for img in self.provider.list_images())
log.debug("Available images: %s", self._images.keys())
if image not in self._images:
raise RuntimeError("Image '%s' not available on %s"
% (image, self.provider.__class__.__name__))
log.info("... done: %d images available.", len(self._images))
log.info("Listing available kinds ...")
self._kinds = dict((kind.id, kind) for kind in self.provider.list_sizes())
log.debug("Available kinds: %s", self._kinds.keys())
if kind not in self._kinds:
raise RuntimeError("Kind '%s' not available on %s"
% (kind, self.provider.__class__.__name__))
log.info("... done: %d kinds available.", len(self._kinds))
log.info("VMs will use image '%s' (%s) on hardware kind '%s' (%s)",
self.image, self._images[self.image].name,
self.kind, self._kinds[self.kind].name)
# associate the Node ID we get from the cloud provider with
# the VM object we get from the orchestrator
self._instance_to_vm_map = { }
def start_vm(self, vm):
vm.instance = self.provider.create_node(
name=str(vm.vmid), image=self._images[self.image], size=self._kinds[self.kind])
uuid = vm.instance.uuid
assert uuid not in self._instance_to_vm_map, (
"Instance UUID %s already registered as belonging to VM %s"
% (uuid, vm.vmid))
vm.cloud = self.provider
self._instance_to_vm_map[uuid] = vm
assert uuid in self._instance_to_vm_map, (
"BUG: Instance UUID %s has not been inserted in `self._instance_to_vm_map`"
% uuid)
def stop_vm(self, vm):
uuid = vm.instance.uuid
assert uuid in self._instance_to_vm_map, (
"Instance UUID %s (of VM %s) not registered to any instance!"
% (uuid, vm.vmid))
self.provider.destroy_node(vm.instance)
del self._instance_to_vm_map[uuid]
vm.state = VmInfo.DOWN
def update_vm_status(self, vms):
nodes = [ node for node in self.provider.list_nodes()
if node.id in self._instance_to_vm_map ]
for node in nodes:
vm = self._instance_to_vm_map[node.uuid]
vm.instance = node
state = self._vminfo_state_from_libcloud_status(node.state)
if state is not None:
vm.state = state
class EC2Cloud(CloudNodeProvider):
"""
Interface to Amazon EC2 on top of `Apache LibCloud <http://libcloud.apache.org/>`.
"""
def __init__(self, image, kind, access_id=None, secret_key=None):
self.image = image
self.kind = kind
if access_id is not None:
self.access_id = access_id
self.secret_key = secret_key
else:
# use same environment variables as Boto
self.access_id = os.environ['AWS_ACCESS_KEY_ID']
self.secret_key = os.environ['AWS_SECRET_ACCESS_KEY']
log.debug("Creating EC2 cloud provider with access ID '%s' ...", access_id)
driver = libcloud.compute.providers.get_driver(libcloud.compute.types.Provider.EC2)
self.provider = driver(self.access_id, self.secret_key)
log.info("Using cloud provider '%s'.", self.provider.friendly_name)
log.info("Listing available images ...")
self._images = dict((img.id, img) for img in self.provider.list_images())
if image not in self._images:
raise RuntimeError("Image '%s' not available on %s"
% (image, self.provider.friendly_name))
log.info("... done: %d images available.", len(self._images))
log.info("Listing available sizes ...")
self._kinds = dict((kind.id, kind) for kind in self.provider.list_sizes())
if kind not in self._kinds:
raise RuntimeError("Kind '%s' not available on %s"
% (kind, self.provider.friendly_name))
log.info("... done: %d kinds available.", len(self._kinds))
log.info("VMs will use image '%s' (%s) on hardware kind '%s' (%s)",
self.image, self._images[self.image].name,
self.kind, self._kinds[self.kind].name)
# log.info("Getting list of running instances ...")
# self.instances = dict((node.uuid, node) for node in self.provider.list_nodes())
# log.info("... Done: %d instances available.", len(self._sizes))
# associate the Node ID we get from the cloud provider with
# the VM object we get from the orchestrator
self._instance_to_vm_map = { }
def start_vm(self, vm):
vm.instance = self.provider.create_node(
name=str(vm.vmid), image=self._images[self.image],
size=self._kinds[self.kind],
ex_keyname='vm-mad', ex_securitygroup='vm-mad',
ex_userdata=("VMMAD_AUTH='%s'" % vm.auth))
vm.cloud = self.provider
self._instance_to_vm_map[vm.instance.uuid] = vm
def stop_vm(self, vm):
# XXX: this is tricky: we must:
# 1. gracefully shutdown the node, and (after a timeout) proceed to:
# 2. destroy the node
# In addition this should not block the main Orchestrator thread.
uuid = vm.instance.uuid
self.provider.destroy_node(vm.instance)
del self._instance_to_vm_map[uuid]
def update_vm_status(self, vms):
nodes = self.provider.list_nodes(ex_node_ids=[vm.instance.id for vm in vms])
for node in nodes:
if node.uuid in self._instance_to_vm_map:
vm = self._instance_to_vm_map[node.uuid]
vm.instance = node
state = self._vminfo_state_from_libcloud_status(node.state)
if state is not None:
vm.state = state
else:
# Ignore VMs that were not started by us. There are
# two reasons for this policy:
#
# - the same AWS account could be used for other purposes
# or `Orchestrator` instances, so we should not assume that
# all VMs are under our control.
#
# - the AWS interface keeps reporting *terminated* instances
# for some time after they have been shut down.
#
log.debug("Ignoring VM '%s', which was not started by this orchestrator.",
node.uuid)
| apache-2.0 | -7,938,056,302,283,737,000 | 38.782051 | 93 | 0.601354 | false |
ehashman/oh-mainline | vendor/packages/Django/tests/regressiontests/signals_regress/tests.py | 102 | 3907 | from __future__ import absolute_import
from django.db import models
from django.test import TestCase
from .models import Author, Book
class SignalsRegressTests(TestCase):
"""
Testing signals before/after saving and deleting.
"""
def get_signal_output(self, fn, *args, **kwargs):
# Flush any existing signal output
self.signal_output = []
fn(*args, **kwargs)
return self.signal_output
def pre_save_test(self, signal, sender, instance, **kwargs):
self.signal_output.append('pre_save signal, %s' % instance)
if kwargs.get('raw'):
self.signal_output.append('Is raw')
def post_save_test(self, signal, sender, instance, **kwargs):
self.signal_output.append('post_save signal, %s' % instance)
if 'created' in kwargs:
if kwargs['created']:
self.signal_output.append('Is created')
else:
self.signal_output.append('Is updated')
if kwargs.get('raw'):
self.signal_output.append('Is raw')
def pre_delete_test(self, signal, sender, instance, **kwargs):
self.signal_output.append('pre_save signal, %s' % instance)
self.signal_output.append('instance.id is not None: %s' % (instance.id != None))
def post_delete_test(self, signal, sender, instance, **kwargs):
self.signal_output.append('post_delete signal, %s' % instance)
self.signal_output.append('instance.id is not None: %s' % (instance.id != None))
def setUp(self):
self.signal_output = []
# Save up the number of connected signals so that we can check at the end
# that all the signals we register get properly unregistered (#9989)
self.pre_signals = (len(models.signals.pre_save.receivers),
len(models.signals.post_save.receivers),
len(models.signals.pre_delete.receivers),
len(models.signals.post_delete.receivers))
models.signals.pre_save.connect(self.pre_save_test)
models.signals.post_save.connect(self.post_save_test)
models.signals.pre_delete.connect(self.pre_delete_test)
models.signals.post_delete.connect(self.post_delete_test)
def tearDown(self):
models.signals.post_delete.disconnect(self.post_delete_test)
models.signals.pre_delete.disconnect(self.pre_delete_test)
models.signals.post_save.disconnect(self.post_save_test)
models.signals.pre_save.disconnect(self.pre_save_test)
# Check that all our signals got disconnected properly.
post_signals = (len(models.signals.pre_save.receivers),
len(models.signals.post_save.receivers),
len(models.signals.pre_delete.receivers),
len(models.signals.post_delete.receivers))
self.assertEqual(self.pre_signals, post_signals)
def test_model_signals(self):
""" Model saves should throw some signals. """
a1 = Author(name='Neal Stephenson')
self.assertEqual(self.get_signal_output(a1.save), [
"pre_save signal, Neal Stephenson",
"post_save signal, Neal Stephenson",
"Is created"
])
b1 = Book(name='Snow Crash')
self.assertEqual(self.get_signal_output(b1.save), [
"pre_save signal, Snow Crash",
"post_save signal, Snow Crash",
"Is created"
])
def test_m2m_signals(self):
""" Assigning and removing to/from m2m shouldn't generate an m2m signal """
b1 = Book(name='Snow Crash')
self.get_signal_output(b1.save)
a1 = Author(name='Neal Stephenson')
self.get_signal_output(a1.save)
self.assertEqual(self.get_signal_output(setattr, b1, 'authors', [a1]), [])
self.assertEqual(self.get_signal_output(setattr, b1, 'authors', []), [])
| agpl-3.0 | -4,604,521,263,658,101,000 | 40.126316 | 88 | 0.620425 | false |
tntnatbry/tensorflow | tensorflow/examples/learn/iris_custom_model.py | 50 | 2613 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Example of Estimator for Iris plant dataset."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from sklearn import cross_validation
from sklearn import datasets
from sklearn import metrics
import tensorflow as tf
layers = tf.contrib.layers
learn = tf.contrib.learn
def my_model(features, target):
"""DNN with three hidden layers, and dropout of 0.1 probability."""
# Convert the target to a one-hot tensor of shape (length of features, 3) and
  # with an on-value of 1 for each one-hot vector of length 3.
target = tf.one_hot(target, 3, 1, 0)
# Create three fully connected layers respectively of size 10, 20, and 10 with
# each layer having a dropout probability of 0.1.
normalizer_fn = layers.dropout
normalizer_params = {'keep_prob': 0.9}
features = layers.stack(
features,
layers.fully_connected, [10, 20, 10],
normalizer_fn=normalizer_fn,
normalizer_params=normalizer_params)
# Compute logits (1 per class) and compute loss.
logits = layers.fully_connected(features, 3, activation_fn=None)
loss = tf.losses.softmax_cross_entropy(target, logits)
# Create a tensor for training op.
train_op = tf.contrib.layers.optimize_loss(
loss,
tf.contrib.framework.get_global_step(),
optimizer='Adagrad',
learning_rate=0.1)
return ({
'class': tf.argmax(logits, 1),
'prob': tf.nn.softmax(logits)
}, loss, train_op)
def main(unused_argv):
iris = datasets.load_iris()
x_train, x_test, y_train, y_test = cross_validation.train_test_split(
iris.data, iris.target, test_size=0.2, random_state=42)
classifier = learn.Estimator(model_fn=my_model)
classifier.fit(x_train, y_train, steps=1000)
y_predicted = [
p['class'] for p in classifier.predict(
x_test, as_iterable=True)
]
score = metrics.accuracy_score(y_test, y_predicted)
print('Accuracy: {0:f}'.format(score))
if __name__ == '__main__':
tf.app.run()
| apache-2.0 | -1,357,143,937,272,532,200 | 32.5 | 80 | 0.704171 | false |
ClearCorp/odoo-clearcorp | TODO-9.0/base_currency_symbol/__terp__.py | 4 | 2547 | # -*- encoding: utf-8 -*-
##############################################################################
#
# __terp__.py
# base_currency_symbol
# First author: Carlos Vásquez <[email protected]> (ClearCorp S.A.)
# Copyright (c) 2010-TODAY ClearCorp S.A. (http://clearcorp.co.cr). All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification, are
# permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this list of
# conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice, this list
# of conditions and the following disclaimer in the documentation and/or other materials
# provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY <COPYRIGHT HOLDER> ``AS IS'' AND ANY EXPRESS OR IMPLIED
# WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
# FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
# ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# The views and conclusions contained in the software and documentation are those of the
# authors and should not be interpreted as representing official policies, either expressed
# or implied, of ClearCorp S.A..
#
##############################################################################
{
'name': 'Base currency symbol',
'version': '0.1',
'url': 'http://launchpad.net/openerp-ccorp-addons',
'author': 'ClearCorp S.A.',
'website': 'http://clearcorp.co.cr',
'category': 'General Modules/Base',
'description': """Adds symbol to currency:
Use symbol_prefix and symbol_suffix depending on the currency standard.
""",
'depends': ['base'],
'init_xml': [],
'demo_xml': [],
'update_xml': [
'base_currency_symbol_data.xml',
'base_currency_symbol_view.xml',
],
'license': 'Other OSI approved licence',
'installable': True,
'active': True,
}
| agpl-3.0 | 5,226,458,743,355,502,000 | 45.290909 | 97 | 0.663001 | false |
matplotlib/viscm | tests.py | 1 | 4429 | from viscm.gui import *
from viscm.bezierbuilder import *
import numpy as np
import matplotlib as mpl
from matplotlib.backends.qt_compat import QtGui, QtCore
from matplotlib.backends.backend_qt4agg import FigureCanvasQTAgg as FigureCanvas
cms = {"viscm/examples/sample_linear.jscm",
"viscm/examples/sample_diverging.jscm",
"viscm/examples/sample_diverging_continuous.jscm"}
def test_editor_loads_native():
for k in cms:
with open(k) as f:
data = json.loads(f.read())
cm = Colormap(None, "CatmulClark", "CAM02-UCS")
cm.load(k)
viscm = viscm_editor(uniform_space=cm.uniform_space, cmtype=cm.cmtype, method=cm.method, **cm.params)
assert viscm.name == data["name"]
extensions = data["extensions"]["https://matplotlib.org/viscm"]
xp, yp, fixed = viscm.control_point_model.get_control_points()
assert extensions["fixed"] == fixed
assert len(extensions["xp"]) == len(xp)
assert len(extensions["yp"]) == len(yp)
assert len(xp) == len(yp)
for i in range(len(xp)):
assert extensions["xp"][i] == xp[i]
assert extensions["yp"][i] == yp[i]
assert extensions["min_Jp"] == viscm.min_Jp
assert extensions["max_Jp"] == viscm.max_Jp
assert extensions["filter_k"] == viscm.filter_k
assert extensions["cmtype"] == viscm.cmtype
colors = data["colors"]
colors = [[int(c[i:i + 2], 16) / 256 for i in range(0, 6, 2)] for c in [colors[i:i + 6] for i in range(0, len(colors), 6)]]
editor_colors = viscm.cmap_model.get_sRGB(num=256)[0].tolist()
for i in range(len(colors)):
for z in range(3):
assert colors[i][z] == np.rint(editor_colors[i][z] / 256)
# def test_editor_add_point():
# # Testing linear
# fig = plt.figure()
# figure_canvas = FigureCanvas(fig)
# linear = viscm_editor(min_Jp=40, max_Jp=60, xp=[-10, 10], yp=[0,0], figure=fig, cmtype="linear")
# Jp, ap, bp = linear.cmap_model.get_Jpapbp(3)
# eJp, eap, ebp = [40, 50, 60], [-10, 0, 10], [0, 0, 0]
# for i in range(3):
# assert approxeq(Jp[i], eJp[i])
# assert approxeq(ap[i], eap[i])
# assert approxeq(bp[i], ebp[i])
# rgb = linear.cmap_model.get_sRGB(3)[0]
# ergb = [[ 0.27446483, 0.37479529, 0.34722738],
# [ 0.44884374, 0.44012037, 0.43848162],
# [ 0.63153956, 0.49733664, 0.53352363]]
# for i in range(3):
# for z in range(3):
# assert approxeq(rgb[i][z], ergb[i][z])
# # Testing adding a point to linear
# linear.bezier_builder.mode = "add"
# qtEvent = QtGui.QMouseEvent(QtCore.QEvent.MouseButtonPress, QtCore.QPoint(), QtCore.Qt.LeftButton, QtCore.Qt.LeftButton, QtCore.Qt.ShiftModifier)
# event = mpl.backend_bases.MouseEvent("button_press_event", figure_canvas, 0, 10, guiEvent=qtEvent)
# event.xdata = 0
# event.ydata = 10
# event.inaxes = linear.bezier_builder.ax
# linear.bezier_builder.on_button_press(event)
# Jp, ap, bp = linear.cmap_model.get_Jpapbp(3)
# eJp, eap, ebp = [40, 50, 60], [-10, 0, 10], [0, 5, 0]
# for i in range(3):
# assert approxeq(Jp[i], eJp[i])
# assert approxeq(ap[i], eap[i])
# assert approxeq(bp[i], ebp[i])
# rgb = linear.cmap_model.get_sRGB(3)[0]
# ergb = [[ 0.27446483, 0.37479529, 0.34722738],
# [ 0.46101392, 0.44012069, 0.38783966],
# [ 0.63153956, 0.49733664, 0.53352363]]
# for i in range(3):
# for z in range(3):
# assert approxeq(rgb[i][z], ergb[i][z])
# # Removing a point from linear
# linear.bezier_builder.mode = "remove"
# qtEvent = QtGui.QMouseEvent(QtCore.QEvent.MouseButtonPress, QtCore.QPoint(), QtCore.Qt.LeftButton, QtCore.Qt.LeftButton, QtCore.Qt.ControlModifier)
# event = mpl.backend_bases.MouseEvent("button_press_event", figure_canvas, 0, 10, guiEvent=qtEvent)
# event.xdata = 0
# event.ydata = 10
# event.inaxes = linear.bezier_builder.ax
# linear.bezier_builder.on_button_press(event)
# # Jp, ap, bp = linear.cmap_model.get_Jpapbp(3)
# # print(Jp, ap, bp)
# # print(rgb)
# # use mpl transformations
# print(linear.control_point_model.get_control_points())
# # print(linear.cmap_model.get_Jpapbp(3))
def approxeq(x, y, err=0.0001):
return abs(y - x) < err
| mit | -1,164,865,570,586,637,800 | 40.009259 | 153 | 0.602393 | false |
glizer/nw.js | test/remoting/testcfg.py | 68 | 2628 | import test
import os
from os.path import join, dirname, exists, basename, isdir
import re
import utils
class RemotingTestCase(test.TestCase):
def __init__(self, path, file, arch, mode, nwdir, context, config, additional=[]):
super(RemotingTestCase, self).__init__(context, path, arch, mode, nwdir)
self.file = file
self.config = config
self.arch = arch
self.mode = mode
self.additional_flags = additional
self.nwdir = nwdir
def GetTmpDir(self):
return "%s.%d" % (self.tmpdir, self.thread_id)
def GetChromeDriver(self, arch, mode, nwdir):
if utils.IsWindows():
return os.path.abspath(join(nwdir, 'chromedriver.exe'))
return os.path.abspath(join(nwdir, 'chromedriver'))
def AfterRun(self, result):
return
def BeforeRun(self):
return
def GetLabel(self):
return "%s %s" % (self.mode, self.GetName())
def GetName(self):
return self.path[-1]
def GetEnv(self):
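    # Expose the bundled webdriver Python bindings and the chromedriver binary to the child test.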
libpath = join(self.file, '..', '..', '..', '..', '..', 'third_party/webdriver/pylib')
return {'PYTHONPATH': libpath, 'CHROMEDRIVER': self.GetChromeDriver(self.arch, self.mode, self.nwdir)}
def GetCommand(self):
result = ['python']
result += [self.file + '/test.py']
return result
def IsFailureOutput(self, output):
return output.exit_code != 0
def GetSource(self):
return open(self.file).read()
class RemotingTestConfiguration(test.TestConfiguration):
def __init__(self, context, root, section, additional=[]):
super(RemotingTestConfiguration, self).__init__(context, root)
self.section = section
self.additional_flags = additional
def Ls(self, path):
def SelectTest(name):
return os.path.isdir(os.path.join(path, name))
return [f[0:] for f in os.listdir(path) if SelectTest(f)]
def ListTests(self, current_path, path, arch, mode, nwdir):
all_tests = [current_path + [t] for t in self.Ls(join(self.root))]
result = []
for test in all_tests:
if self.Contains(path, test):
file_path = join(self.root, reduce(join, test[1:], ""))
result.append(RemotingTestCase(test, file_path, arch, mode, nwdir, self.context,
self, self.additional_flags))
return result
def GetBuildRequirements(self):
return ['sample', 'sample=shell']
def GetTestStatus(self, sections, defs):
status_file = join(self.root, '%s.status' % (self.section))
if exists(status_file):
test.ReadConfigurationInto(status_file, sections, defs)
def GetConfiguration(context, root):
return RemotingTestConfiguration(context, root, 'remoting')
| mit | -2,742,679,846,780,379,000 | 29.55814 | 108 | 0.657154 | false |
etingof/pysnmp | pysnmp/smi/mibs/TRANSPORT-ADDRESS-MIB.py | 1 | 27607 | #
# This file is part of pysnmp software.
#
# Copyright (c) 2005-2019, Ilya Etingof <[email protected]>
# License: http://snmplabs.com/pysnmp/license.html
#
# ASN.1 source http://mibs.snmplabs.com:80/asn1/TRANSPORT-ADDRESS-MIB
# Produced by pysmi-0.4.0 at Sun Feb 17 08:56:38 2019
#
# Parts of otherwise autogenerated MIB has been updated manually.
#
import socket
from pyasn1.compat.octets import int2oct
from pyasn1.compat.octets import oct2int
from pysnmp import error
has_ipv6 = socket.has_ipv6
if hasattr(socket, 'inet_ntop') and hasattr(socket, 'inet_pton'):
inet_ntop = socket.inet_ntop
inet_pton = socket.inet_pton
else:
import sys
if sys.platform != "win32":
from socket import inet_ntoa, inet_aton
inet_ntop = lambda x, y: inet_ntoa(y)
inet_pton = lambda x, y: inet_aton(y)
has_ipv6 = False
elif has_ipv6:
import struct # The case of old Python at old Windows
def inet_pton(address_family, ip_string):
if address_family == socket.AF_INET:
return socket.inet_aton(ip_string)
elif address_family != socket.AF_INET6:
raise socket.error(
'Unknown address family %s' % (address_family,)
)
groups = ip_string.split(":")
spaces = groups.count('')
if '.' in groups[-1]:
groups[-1:] = ["%x" % x for x in struct.unpack("!HH", socket.inet_aton(groups[-1]))]
if spaces == 1:
idx = groups.index('')
groups[idx:idx + 1] = ['0'] * (8 - len(groups) + 1)
elif spaces == 2:
zeros = ['0'] * (8 - len(groups) + 2)
if ip_string.startswith('::'):
groups[:2] = zeros
elif ip_string.endswith('::'):
groups[-2:] = zeros
else:
raise socket.error(
'Invalid IPv6 address: "%s"' % (ip_string,)
)
elif spaces == 3:
if ip_string != '::':
raise socket.error(
'Invalid IPv6 address: "%s"' % (ip_string,)
)
return '\x00' * 16
elif spaces > 3:
raise socket.error(
'Invalid IPv6 address: "%s"' % (ip_string,)
)
groups = [t for t in [int(t, 16) for t in groups] if t & 0xFFFF == t]
if len(groups) != 8:
raise socket.error(
'Invalid IPv6 address: "%s"' % (ip_string,)
)
return struct.pack('!8H', *groups)
def inet_ntop(address_family, packed_ip):
if address_family == socket.AF_INET:
                return socket.inet_ntoa(packed_ip)
elif address_family != socket.AF_INET6:
raise socket.error(
'Unknown address family %s' % (address_family,)
)
if len(packed_ip) != 16:
raise socket.error(
'incorrect address length: %s' % len(packed_ip)
)
groups = list(struct.unpack('!8H', packed_ip))
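            # Locate the longest run of zero groups so it can be collapsed into '::'.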
cur_base = best_base = cur_len = best_len = -1
for idx in range(8):
if groups[idx]:
if cur_base != -1:
if best_base == -1 or cur_len > best_len:
best_base, best_len = cur_base, cur_len
cur_base = -1
else:
if cur_base == -1:
cur_base, cur_len = idx, 1
else:
cur_len += 1
if cur_base != -1:
if best_base == -1 or cur_len > best_len:
best_base, best_len = cur_base, cur_len
if best_base != -1 and best_len > 1:
groups[best_base:best_base + best_len] = [':']
if groups[0] == ':':
groups.insert(0, ':')
if groups[-1] == ':':
groups.append(':')
f = lambda x: x != ':' and '%x' % x or ''
return ':'.join([f(x) for x in groups])
if 'mibBuilder' not in globals():
import sys
sys.stderr.write(__doc__)
sys.exit(1)
(Integer,
OctetString,
ObjectIdentifier) = mibBuilder.importSymbols(
"ASN1",
"Integer",
"OctetString",
"ObjectIdentifier")
(NamedValues,) = mibBuilder.importSymbols(
"ASN1-ENUMERATION",
"NamedValues")
(ConstraintsIntersection,
SingleValueConstraint,
ValueRangeConstraint,
ValueSizeConstraint,
ConstraintsUnion) = mibBuilder.importSymbols(
"ASN1-REFINEMENT",
"ConstraintsIntersection",
"SingleValueConstraint",
"ValueRangeConstraint",
"ValueSizeConstraint",
"ConstraintsUnion")
(ModuleCompliance,
NotificationGroup) = mibBuilder.importSymbols(
"SNMPv2-CONF",
"ModuleCompliance",
"NotificationGroup")
(Counter32,
TimeTicks,
MibScalar,
MibTable,
MibTableRow,
MibTableColumn,
Counter64,
IpAddress,
ObjectIdentity,
Gauge32,
MibIdentifier,
iso,
Unsigned32,
Bits,
NotificationType,
Integer32,
ModuleIdentity,
mib_2) = mibBuilder.importSymbols(
"SNMPv2-SMI",
"Counter32",
"TimeTicks",
"MibScalar",
"MibTable",
"MibTableRow",
"MibTableColumn",
"Counter64",
"IpAddress",
"ObjectIdentity",
"Gauge32",
"MibIdentifier",
"iso",
"Unsigned32",
"Bits",
"NotificationType",
"Integer32",
"ModuleIdentity",
"mib-2")
(DisplayString,
TextualConvention) = mibBuilder.importSymbols(
"SNMPv2-TC",
"DisplayString",
"TextualConvention")
transportAddressMIB = ModuleIdentity(
(1, 3, 6, 1, 2, 1, 100)
)
transportAddressMIB.setRevisions(
("2002-11-01 00:00",)
)
transportAddressMIB.setLastUpdated("200211010000Z")
if mibBuilder.loadTexts:
transportAddressMIB.setOrganization("""\
IETF Operations and Management Area
""")
transportAddressMIB.setContactInfo("""\
Juergen Schoenwaelder (Editor) TU Braunschweig Bueltenweg 74/75 38106
Braunschweig, Germany Phone: +49 531 391-3289 EMail: [email protected]
Send comments to <[email protected]>.
""")
if mibBuilder.loadTexts:
transportAddressMIB.setDescription("""\
This MIB module provides commonly used transport address definitions. Copyright
(C) The Internet Society (2002). This version of this MIB module is part of RFC
3419; see the RFC itself for full legal notices.
""")
class TransportDomain(TextualConvention, ObjectIdentifier):
status = "current"
if mibBuilder.loadTexts:
description = """\
A value that represents a transport domain. Some possible values, such as
transportDomainUdpIpv4, are defined in this module. Other possible values can
be defined in other MIB modules.
"""
class TransportAddressType(TextualConvention, Integer32):
status = "current"
subtypeSpec = Integer32.subtypeSpec
subtypeSpec += ConstraintsUnion(
SingleValueConstraint(
*(0,
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16)
)
)
namedValues = NamedValues(
*(("local", 13),
("sctpDns", 16),
("sctpIpv4", 9),
("sctpIpv4z", 11),
("sctpIpv6", 10),
("sctpIpv6z", 12),
("tcpDns", 15),
("tcpIpv4", 5),
("tcpIpv4z", 7),
("tcpIpv6", 6),
("tcpIpv6z", 8),
("udpDns", 14),
("udpIpv4", 1),
("udpIpv4z", 3),
("udpIpv6", 2),
("udpIpv6z", 4),
("unknown", 0))
)
if mibBuilder.loadTexts:
description = """\
A value that represents a transport domain. This is the enumerated version of
the transport domain registrations in this MIB module. The enumerated values
have the following meaning: unknown(0) unknown transport address type
udpIpv4(1) transportDomainUdpIpv4 udpIpv6(2) transportDomainUdpIpv6 udpIpv4z(3)
transportDomainUdpIpv4z udpIpv6z(4) transportDomainUdpIpv6z tcpIpv4(5)
transportDomainTcpIpv4 tcpIpv6(6) transportDomainTcpIpv6 tcpIpv4z(7)
transportDomainTcpIpv4z tcpIpv6z(8) transportDomainTcpIpv6z sctpIpv4(9)
transportDomainSctpIpv4 sctpIpv6(10) transportDomainSctpIpv6 sctpIpv4z(11)
transportDomainSctpIpv4z sctpIpv6z(12) transportDomainSctpIpv6z local(13)
transportDomainLocal udpDns(14) transportDomainUdpDns tcpDns(15)
transportDomainTcpDns sctpDns(16) transportDomainSctpDns This textual
convention can be used to represent transport domains in situations where a
syntax of TransportDomain is unwieldy (for example, when used as an index). The
usage of this textual convention implies that additional transport domains can
only be supported by updating this MIB module. This extensibility restriction
does not apply for the TransportDomain textual convention which allows MIB
authors to define additional transport domains independently in other MIB
modules.
"""
class TransportAddress(TextualConvention, OctetString):
status = "current"
subtypeSpec = OctetString.subtypeSpec
subtypeSpec += ConstraintsUnion(
ValueSizeConstraint(0, 255),
)
if mibBuilder.loadTexts:
description = """\
Denotes a generic transport address. A TransportAddress value is always
interpreted within the context of a TransportAddressType or TransportDomain
value. Every usage of the TransportAddress textual convention MUST specify the
TransportAddressType or TransportDomain object which provides the context.
Furthermore, MIB authors SHOULD define a separate TransportAddressType or
TransportDomain object for each TransportAddress object. It is suggested that
the TransportAddressType or TransportDomain is logically registered before the
object(s) which use the TransportAddress textual convention if they appear in
the same logical row. The value of a TransportAddress object must always be
consistent with the value of the associated TransportAddressType or
TransportDomain object. Attempts to set a TransportAddress object to a value
which is inconsistent with the associated TransportAddressType or
TransportDomain must fail with an inconsistentValue error. When this textual
convention is used as a syntax of an index object, there may be issues with the
limit of 128 sub-identifiers specified in SMIv2, STD 58. In this case, the
OBJECT-TYPE declaration MUST include a 'SIZE' clause to limit the number of
potential instance sub-identifiers.
"""
class TransportAddressIPv4(TextualConvention, OctetString):
status = "current"
displayHint = "1d.1d.1d.1d:2d"
subtypeSpec = OctetString.subtypeSpec
subtypeSpec += ConstraintsUnion(
ValueSizeConstraint(6, 6),
)
if mibBuilder.loadTexts:
description = """\
Represents a transport address consisting of an IPv4 address and a port number
(as used for example by UDP, TCP and SCTP): octets contents encoding 1-4 IPv4
address network-byte order 5-6 port number network-byte order This textual
convention SHOULD NOT be used directly in object definitions since it restricts
addresses to a specific format. However, if it is used, it MAY be used either
on its own or in conjunction with TransportAddressType or TransportDomain as a
pair.
"""
fixedLength = 6
def prettyIn(self, value):
if isinstance(value, tuple):
# Wild hack -- need to implement TextualConvention.prettyIn
value = inet_pton(socket.AF_INET, value[0]) + int2oct((value[1] >> 8) & 0xff) + int2oct(value[1] & 0xff)
return OctetString.prettyIn(self, value)
# Socket address syntax coercion
def __asSocketAddress(self):
if not hasattr(self, '__tuple_value'):
v = self.asOctets()
self.__tuple_value = (
inet_ntop(socket.AF_INET, v[:4]),
oct2int(v[4]) << 8 | oct2int(v[5]),
)
return self.__tuple_value
def __iter__(self):
return iter(self.__asSocketAddress())
def __getitem__(self, item):
return self.__asSocketAddress()[item]
class TransportAddressIPv6(TextualConvention, OctetString):
status = "current"
displayHint = "0a[2x:2x:2x:2x:2x:2x:2x:2x]0a:2d"
subtypeSpec = OctetString.subtypeSpec
subtypeSpec += ConstraintsUnion(
ValueSizeConstraint(18, 18),
)
if mibBuilder.loadTexts:
description = """\
Represents a transport address consisting of an IPv6 address and a port number
(as used for example by UDP, TCP and SCTP): octets contents encoding 1-16 IPv6
address network-byte order 17-18 port number network-byte order This textual
convention SHOULD NOT be used directly in object definitions since it restricts
addresses to a specific format. However, if it is used, it MAY be used either
on its own or in conjunction with TransportAddressType or TransportDomain as a
pair.
"""
fixedLength = 18
def prettyIn(self, value):
if not has_ipv6:
raise error.PySnmpError('IPv6 not supported by platform')
if isinstance(value, tuple):
value = inet_pton(socket.AF_INET6, value[0]) + int2oct((value[1] >> 8) & 0xff) + int2oct(value[1] & 0xff)
return OctetString.prettyIn(self, value)
# Socket address syntax coercion
def __asSocketAddress(self):
if not hasattr(self, '__tuple_value'):
if not has_ipv6:
raise error.PySnmpError('IPv6 not supported by platform')
v = self.asOctets()
self.__tuple_value = (
inet_ntop(socket.AF_INET6, v[:16]),
oct2int(v[16]) << 8 | oct2int(v[17]),
0, # flowinfo
0) # scopeid
return self.__tuple_value
def __iter__(self):
return iter(self.__asSocketAddress())
def __getitem__(self, item):
return self.__asSocketAddress()[item]
class TransportAddressIPv4z(TextualConvention, OctetString):
status = "current"
displayHint = "1d.1d.1d.1d%4d:2d"
subtypeSpec = OctetString.subtypeSpec
subtypeSpec += ConstraintsUnion(
ValueSizeConstraint(10, 10),
)
if mibBuilder.loadTexts:
description = """\
Represents a transport address consisting of an IPv4 address, a zone index and
a port number (as used for example by UDP, TCP and SCTP): octets contents
encoding 1-4 IPv4 address network-byte order 5-8 zone index network-byte order
9-10 port number network-byte order This textual convention SHOULD NOT be used
directly in object definitions since it restricts addresses to a specific
format. However, if it is used, it MAY be used either on its own or in
conjunction with TransportAddressType or TransportDomain as a pair.
"""
fixedLength = 10
class TransportAddressIPv6z(TextualConvention, OctetString):
status = "current"
displayHint = "0a[2x:2x:2x:2x:2x:2x:2x:2x%4d]0a:2d"
subtypeSpec = OctetString.subtypeSpec
subtypeSpec += ConstraintsUnion(
ValueSizeConstraint(22, 22),
)
if mibBuilder.loadTexts:
description = """\
Represents a transport address consisting of an IPv6 address, a zone index and
a port number (as used for example by UDP, TCP and SCTP): octets contents
encoding 1-16 IPv6 address network-byte order 17-20 zone index network-byte
order 21-22 port number network-byte order This textual convention SHOULD NOT
be used directly in object definitions since it restricts addresses to a
specific format. However, if it is used, it MAY be used either on its own or in
conjunction with TransportAddressType or TransportDomain as a pair.
"""
fixedLength = 22
class TransportAddressLocal(TextualConvention, OctetString):
status = "current"
displayHint = "1a"
subtypeSpec = OctetString.subtypeSpec
subtypeSpec += ConstraintsUnion(
ValueSizeConstraint(1, 255),
)
if mibBuilder.loadTexts:
description = """\
Represents a POSIX Local IPC transport address: octets contents encoding all
POSIX Local IPC address string The Posix Local IPC transport domain subsumes
UNIX domain sockets. This textual convention SHOULD NOT be used directly in
object definitions since it restricts addresses to a specific format. However,
if it is used, it MAY be used either on its own or in conjunction with
TransportAddressType or TransportDomain as a pair. When this textual convention
is used as a syntax of an index object, there may be issues with the limit of
128 sub-identifiers specified in SMIv2, STD 58. In this case, the OBJECT-TYPE
declaration MUST include a 'SIZE' clause to limit the number of potential
instance sub-identifiers.
"""
class TransportAddressDns(TextualConvention, OctetString):
status = "current"
displayHint = "1a"
subtypeSpec = OctetString.subtypeSpec
subtypeSpec += ConstraintsUnion(
ValueSizeConstraint(1, 255),
)
if mibBuilder.loadTexts:
description = """\
Represents a DNS domain name followed by a colon ':' (ASCII character 0x3A) and
a port number in ASCII. The name SHOULD be fully qualified whenever possible.
Values of this textual convention are not directly useable as transport-layer
addressing information, and require runtime resolution. As such, applications
that write them must be prepared for handling errors if such values are not
supported, or cannot be resolved (if resolution occurs at the time of the
management operation). The DESCRIPTION clause of TransportAddress objects that
may have TransportAddressDns values must fully describe how (and when) such
names are to be resolved to IP addresses and vice versa. This textual
convention SHOULD NOT be used directly in object definitions since it restricts
addresses to a specific format. However, if it is used, it MAY be used either
on its own or in conjunction with TransportAddressType or TransportDomain as a
pair. When this textual convention is used as a syntax of an index object,
there may be issues with the limit of 128 sub-identifiers specified in SMIv2,
STD 58. In this case, the OBJECT-TYPE declaration MUST include a 'SIZE' clause
to limit the number of potential instance sub-identifiers.
"""
_TransportDomains_ObjectIdentity = ObjectIdentity
transportDomains = _TransportDomains_ObjectIdentity(
(1, 3, 6, 1, 2, 1, 100, 1)
)
_TransportDomainUdpIpv4_ObjectIdentity = ObjectIdentity
transportDomainUdpIpv4 = _TransportDomainUdpIpv4_ObjectIdentity(
(1, 3, 6, 1, 2, 1, 100, 1, 1)
)
if mibBuilder.loadTexts:
transportDomainUdpIpv4.setStatus("current")
if mibBuilder.loadTexts:
transportDomainUdpIpv4.setDescription("""\
The UDP over IPv4 transport domain. The corresponding transport address is of
type TransportAddressIPv4 for global IPv4 addresses.
""")
_TransportDomainUdpIpv6_ObjectIdentity = ObjectIdentity
transportDomainUdpIpv6 = _TransportDomainUdpIpv6_ObjectIdentity(
(1, 3, 6, 1, 2, 1, 100, 1, 2)
)
if mibBuilder.loadTexts:
transportDomainUdpIpv6.setStatus("current")
if mibBuilder.loadTexts:
transportDomainUdpIpv6.setDescription("""\
The UDP over IPv6 transport domain. The corresponding transport address is of
type TransportAddressIPv6 for global IPv6 addresses.
""")
_TransportDomainUdpIpv4z_ObjectIdentity = ObjectIdentity
transportDomainUdpIpv4z = _TransportDomainUdpIpv4z_ObjectIdentity(
(1, 3, 6, 1, 2, 1, 100, 1, 3)
)
if mibBuilder.loadTexts:
transportDomainUdpIpv4z.setStatus("current")
if mibBuilder.loadTexts:
transportDomainUdpIpv4z.setDescription("""\
The UDP over IPv4 transport domain. The corresponding transport address is of
type TransportAddressIPv4z for scoped IPv4 addresses with a zone index.
""")
_TransportDomainUdpIpv6z_ObjectIdentity = ObjectIdentity
transportDomainUdpIpv6z = _TransportDomainUdpIpv6z_ObjectIdentity(
(1, 3, 6, 1, 2, 1, 100, 1, 4)
)
if mibBuilder.loadTexts:
transportDomainUdpIpv6z.setStatus("current")
if mibBuilder.loadTexts:
transportDomainUdpIpv6z.setDescription("""\
The UDP over IPv6 transport domain. The corresponding transport address is of
type TransportAddressIPv6z for scoped IPv6 addresses with a zone index.
""")
_TransportDomainTcpIpv4_ObjectIdentity = ObjectIdentity
transportDomainTcpIpv4 = _TransportDomainTcpIpv4_ObjectIdentity(
(1, 3, 6, 1, 2, 1, 100, 1, 5)
)
if mibBuilder.loadTexts:
transportDomainTcpIpv4.setStatus("current")
if mibBuilder.loadTexts:
transportDomainTcpIpv4.setDescription("""\
The TCP over IPv4 transport domain. The corresponding transport address is of
type TransportAddressIPv4 for global IPv4 addresses.
""")
_TransportDomainTcpIpv6_ObjectIdentity = ObjectIdentity
transportDomainTcpIpv6 = _TransportDomainTcpIpv6_ObjectIdentity(
(1, 3, 6, 1, 2, 1, 100, 1, 6)
)
if mibBuilder.loadTexts:
transportDomainTcpIpv6.setStatus("current")
if mibBuilder.loadTexts:
transportDomainTcpIpv6.setDescription("""\
The TCP over IPv6 transport domain. The corresponding transport address is of
type TransportAddressIPv6 for global IPv6 addresses.
""")
_TransportDomainTcpIpv4z_ObjectIdentity = ObjectIdentity
transportDomainTcpIpv4z = _TransportDomainTcpIpv4z_ObjectIdentity(
(1, 3, 6, 1, 2, 1, 100, 1, 7)
)
if mibBuilder.loadTexts:
transportDomainTcpIpv4z.setStatus("current")
if mibBuilder.loadTexts:
transportDomainTcpIpv4z.setDescription("""\
The TCP over IPv4 transport domain. The corresponding transport address is of
type TransportAddressIPv4z for scoped IPv4 addresses with a zone index.
""")
_TransportDomainTcpIpv6z_ObjectIdentity = ObjectIdentity
transportDomainTcpIpv6z = _TransportDomainTcpIpv6z_ObjectIdentity(
(1, 3, 6, 1, 2, 1, 100, 1, 8)
)
if mibBuilder.loadTexts:
transportDomainTcpIpv6z.setStatus("current")
if mibBuilder.loadTexts:
transportDomainTcpIpv6z.setDescription("""\
The TCP over IPv6 transport domain. The corresponding transport address is of
type TransportAddressIPv6z for scoped IPv6 addresses with a zone index.
""")
_TransportDomainSctpIpv4_ObjectIdentity = ObjectIdentity
transportDomainSctpIpv4 = _TransportDomainSctpIpv4_ObjectIdentity(
(1, 3, 6, 1, 2, 1, 100, 1, 9)
)
if mibBuilder.loadTexts:
transportDomainSctpIpv4.setStatus("current")
if mibBuilder.loadTexts:
transportDomainSctpIpv4.setDescription("""\
The SCTP over IPv4 transport domain. The corresponding transport address is of
type TransportAddressIPv4 for global IPv4 addresses. This transport domain
usually represents the primary address on multihomed SCTP endpoints.
""")
_TransportDomainSctpIpv6_ObjectIdentity = ObjectIdentity
transportDomainSctpIpv6 = _TransportDomainSctpIpv6_ObjectIdentity(
(1, 3, 6, 1, 2, 1, 100, 1, 10)
)
if mibBuilder.loadTexts:
transportDomainSctpIpv6.setStatus("current")
if mibBuilder.loadTexts:
transportDomainSctpIpv6.setDescription("""\
The SCTP over IPv6 transport domain. The corresponding transport address is of
type TransportAddressIPv6 for global IPv6 addresses. This transport domain
usually represents the primary address on multihomed SCTP endpoints.
""")
_TransportDomainSctpIpv4z_ObjectIdentity = ObjectIdentity
transportDomainSctpIpv4z = _TransportDomainSctpIpv4z_ObjectIdentity(
(1, 3, 6, 1, 2, 1, 100, 1, 11)
)
if mibBuilder.loadTexts:
transportDomainSctpIpv4z.setStatus("current")
if mibBuilder.loadTexts:
transportDomainSctpIpv4z.setDescription("""\
The SCTP over IPv4 transport domain. The corresponding transport address is of
type TransportAddressIPv4z for scoped IPv4 addresses with a zone index. This
transport domain usually represents the primary address on multihomed SCTP
endpoints.
""")
_TransportDomainSctpIpv6z_ObjectIdentity = ObjectIdentity
transportDomainSctpIpv6z = _TransportDomainSctpIpv6z_ObjectIdentity(
(1, 3, 6, 1, 2, 1, 100, 1, 12)
)
if mibBuilder.loadTexts:
transportDomainSctpIpv6z.setStatus("current")
if mibBuilder.loadTexts:
transportDomainSctpIpv6z.setDescription("""\
The SCTP over IPv6 transport domain. The corresponding transport address is of
type TransportAddressIPv6z for scoped IPv6 addresses with a zone index. This
transport domain usually represents the primary address on multihomed SCTP
endpoints.
""")
_TransportDomainLocal_ObjectIdentity = ObjectIdentity
transportDomainLocal = _TransportDomainLocal_ObjectIdentity(
(1, 3, 6, 1, 2, 1, 100, 1, 13)
)
if mibBuilder.loadTexts:
transportDomainLocal.setStatus("current")
if mibBuilder.loadTexts:
transportDomainLocal.setDescription("""\
The Posix Local IPC transport domain. The corresponding transport address is of
type TransportAddressLocal. The Posix Local IPC transport domain incorporates
the well-known UNIX domain sockets.
""")
_TransportDomainUdpDns_ObjectIdentity = ObjectIdentity
transportDomainUdpDns = _TransportDomainUdpDns_ObjectIdentity(
(1, 3, 6, 1, 2, 1, 100, 1, 14)
)
if mibBuilder.loadTexts:
transportDomainUdpDns.setStatus("current")
if mibBuilder.loadTexts:
transportDomainUdpDns.setDescription("""\
The UDP transport domain using fully qualified domain names. The corresponding
transport address is of type TransportAddressDns.
""")
_TransportDomainTcpDns_ObjectIdentity = ObjectIdentity
transportDomainTcpDns = _TransportDomainTcpDns_ObjectIdentity(
(1, 3, 6, 1, 2, 1, 100, 1, 15)
)
if mibBuilder.loadTexts:
transportDomainTcpDns.setStatus("current")
if mibBuilder.loadTexts:
transportDomainTcpDns.setDescription("""\
The TCP transport domain using fully qualified domain names. The corresponding
transport address is of type TransportAddressDns.
""")
_TransportDomainSctpDns_ObjectIdentity = ObjectIdentity
transportDomainSctpDns = _TransportDomainSctpDns_ObjectIdentity(
(1, 3, 6, 1, 2, 1, 100, 1, 16)
)
if mibBuilder.loadTexts:
transportDomainSctpDns.setStatus("current")
if mibBuilder.loadTexts:
transportDomainSctpDns.setDescription("""\
The SCTP transport domain using fully qualified domain names. The corresponding
transport address is of type TransportAddressDns.
""")
mibBuilder.exportSymbols(
"TRANSPORT-ADDRESS-MIB",
**{"TransportDomain": TransportDomain,
"TransportAddressType": TransportAddressType,
"TransportAddress": TransportAddress,
"TransportAddressIPv4": TransportAddressIPv4,
"TransportAddressIPv6": TransportAddressIPv6,
"TransportAddressIPv4z": TransportAddressIPv4z,
"TransportAddressIPv6z": TransportAddressIPv6z,
"TransportAddressLocal": TransportAddressLocal,
"TransportAddressDns": TransportAddressDns,
"transportAddressMIB": transportAddressMIB,
"transportDomains": transportDomains,
"transportDomainUdpIpv4": transportDomainUdpIpv4,
"transportDomainUdpIpv6": transportDomainUdpIpv6,
"transportDomainUdpIpv4z": transportDomainUdpIpv4z,
"transportDomainUdpIpv6z": transportDomainUdpIpv6z,
"transportDomainTcpIpv4": transportDomainTcpIpv4,
"transportDomainTcpIpv6": transportDomainTcpIpv6,
"transportDomainTcpIpv4z": transportDomainTcpIpv4z,
"transportDomainTcpIpv6z": transportDomainTcpIpv6z,
"transportDomainSctpIpv4": transportDomainSctpIpv4,
"transportDomainSctpIpv6": transportDomainSctpIpv6,
"transportDomainSctpIpv4z": transportDomainSctpIpv4z,
"transportDomainSctpIpv6z": transportDomainSctpIpv6z,
"transportDomainLocal": transportDomainLocal,
"transportDomainUdpDns": transportDomainUdpDns,
"transportDomainTcpDns": transportDomainTcpDns,
"transportDomainSctpDns": transportDomainSctpDns}
)
| bsd-2-clause | 7,718,383,914,486,025,000 | 36.106183 | 117 | 0.699315 | false |
jantman/pelican-plugins | pelican_comment_system/avatars.py | 25 | 2732 | # -*- coding: utf-8 -*-
"""
Author: Bernhard Scheirle
"""
from __future__ import unicode_literals
import logging
import os
import hashlib
logger = logging.getLogger(__name__)
_log = "pelican_comment_system: avatars: "
try:
from . identicon import identicon
_identiconImported = True
except ImportError as e:
logger.warning(_log + "identicon deactivated: " + str(e))
_identiconImported = False
# Global Variables
_identicon_save_path = None
_identicon_output_path = None
_identicon_data = None
_identicon_size = None
_initialized = False
_authors = None
_missingAvatars = []
def _ready():
if not _initialized:
logger.warning(_log + "Module not initialized. use init")
if not _identicon_data:
logger.debug(_log + "No identicon data set")
return _identiconImported and _initialized and _identicon_data
def init(pelican_output_path, identicon_output_path, identicon_data,
identicon_size, authors):
global _identicon_save_path
global _identicon_output_path
global _identicon_data
global _identicon_size
global _initialized
global _authors
global _missingAvatars
_identicon_save_path = os.path.join(pelican_output_path,
identicon_output_path)
_identicon_output_path = identicon_output_path
_identicon_data = identicon_data
_identicon_size = identicon_size
_authors = authors
_missingAvatars = []
_initialized = True
def _createIdenticonOutputFolder():
if not _ready():
return
if not os.path.exists(_identicon_save_path):
os.makedirs(_identicon_save_path)
def getAvatarPath(comment_id, metadata):
if not _ready():
return ''
md5 = hashlib.md5()
author = tuple()
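    # Hash the configured metadata fields and collect them into a tuple that
    # identifies the comment author.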
for data in _identicon_data:
if data in metadata:
string = "{}".format(metadata[data])
md5.update(string.encode('utf-8'))
author += tuple([string])
else:
logger.warning(_log + data +
" is missing in comment: " + comment_id)
if author in _authors:
return _authors[author]
global _missingAvatars
code = md5.hexdigest()
if not code in _missingAvatars:
_missingAvatars.append(code)
return os.path.join(_identicon_output_path, '%s.png' % code)
def generateAndSaveMissingAvatars():
_createIdenticonOutputFolder()
global _missingAvatars
for code in _missingAvatars:
avatar_path = '%s.png' % code
avatar = identicon.render_identicon(int(code, 16), _identicon_size)
avatar_save_path = os.path.join(_identicon_save_path, avatar_path)
avatar.save(avatar_save_path, 'PNG')
_missingAvatars = []
| agpl-3.0 | -2,695,387,279,387,074,000 | 25.019048 | 75 | 0.644949 | false |
Potato42/primitivepyg | primitivepyg/convertcolors.py | 1 | 1734 | """
convertcolors.py provides functions for converting various color formats to (red, green, blue, alpha)
"""
RED_MASK = 0xff000000
GREEN_MASK = 0x00ff0000
BLUE_MASK = 0x0000ff00
ALPHA_MASK = 0x000000ff
def color_from_hex(hex_color:int)->(int,int,int,int):
"""
    Takes a hex value of the form 0xRRGGBBAA and returns a tuple containing the R, G, B, and A components, each from 0 to 255.
"""
# this commented-out code adds ambiguity as there would be no way to have a 0 for red and also have an alpha.
# if hex_color <= 0xffffff: # if alpha value not included
# # fill in the AA part with 255
# hex_color <<= 8
# hex_color |= 0xff
return (hex_color & RED_MASK) >> 24, (hex_color & GREEN_MASK) >> 16,\
(hex_color & BLUE_MASK) >> 8, (hex_color & ALPHA_MASK)
def get_color(color)->(int,int,int,int):
"""
Return an R, G, B, A tuple from color, which can be any of the following formats:
0xRRGGBBAA
(grey,)
(grey,alpha)
(red,green,blue)
(red,green,blue,alpha)
"""
if isinstance(color, int):
# color is a hex color
return color_from_hex(color)
if isinstance(color, tuple): # todo: perhaps allow all tuple-like types?
# color is already a tuple, but may need to be formatted correctly
if len(color) == 1: # grey scale
return color[0],color[0],color[0],255
if len(color) == 2: # alpha grey scale
return color[0],color[0],color[0],color[1]
if len(color) == 3: # R,G,B
return color+(255,)
if len(color) == 4: # R,G,B,A
return color
raise ValueError("color has invalid length")
raise ValueError("color must be int or tuple") | mit | 4,884,712,632,055,236,000 | 37.555556 | 130 | 0.616494 | false |
jostschmithals/three.js | utils/exporters/blender/addons/io_three/exporter/texture.py | 173 | 1407 | from .. import constants, logger
from . import base_classes, image, api
class Texture(base_classes.BaseNode):
"""Class that wraps a texture node"""
def __init__(self, node, parent):
logger.debug("Texture().__init__(%s)", node)
base_classes.BaseNode.__init__(self, node, parent, constants.TEXTURE)
num = constants.NUMERIC
img_inst = self.scene.image(api.texture.file_name(self.node))
if not img_inst:
image_node = api.texture.image_node(self.node)
img_inst = image.Image(image_node.name, self.scene)
self.scene[constants.IMAGES].append(img_inst)
self[constants.IMAGE] = img_inst[constants.UUID]
wrap = api.texture.wrap(self.node)
self[constants.WRAP] = (num[wrap[0]], num[wrap[1]])
if constants.WRAPPING.REPEAT in wrap:
self[constants.REPEAT] = api.texture.repeat(self.node)
self[constants.ANISOTROPY] = api.texture.anisotropy(self.node)
self[constants.MAG_FILTER] = num[api.texture.mag_filter(self.node)]
self[constants.MIN_FILTER] = num[api.texture.min_filter(self.node)]
self[constants.MAPPING] = num[api.texture.mapping(self.node)]
@property
def image(self):
"""
:return: the image object of the current texture
:rtype: image.Image
"""
return self.scene.image(self[constants.IMAGE])
| mit | -2,458,399,075,161,477,600 | 32.5 | 77 | 0.63113 | false |
ricotabor/opendrop | opendrop/__main__.py | 2 | 1861 | # Copyright © 2020, Joseph Berry, Rico Tabor ([email protected])
# OpenDrop is released under the GNU GPL License. You are free to
# modify and distribute the code, but always under the same license
# (i.e. you cannot make commercial derivatives).
#
# If you use this software in your research, please cite the following
# journal articles:
#
# J. D. Berry, M. J. Neeson, R. R. Dagastine, D. Y. C. Chan and
# R. F. Tabor, Measurement of surface and interfacial tension using
# pendant drop tensiometry. Journal of Colloid and Interface Science 454
# (2015) 226–237. https://doi.org/10.1016/j.jcis.2015.05.012
#
# E. Huang, T. Denning, A. Skoufis, J. Qi, R. R. Dagastine, R. F. Tabor
# and J. D. Berry, OpenDrop: Open-source software for pendant drop
# tensiometry & contact angle measurements, submitted to the Journal of
# Open Source Software
#
# These citations help us not only to understand who is using and
# developing OpenDrop, and for what purpose, but also to justify
# continued development of this code and other open source resources.
#
# OpenDrop is distributed WITHOUT ANY WARRANTY; without even the
# implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
# PURPOSE. See the GNU General Public License for more details. You
# should have received a copy of the GNU General Public License along
# with this software. If not, see <https://www.gnu.org/licenses/>.
import sys
from opendrop.app import OpendropApplication
from opendrop.appfw import Injector
def main(*argv) -> int:
# https://stackoverflow.com/questions/13514031/py2exe-with-multiprocessing-fails-to-run-the-processes#27547300
import multiprocessing
multiprocessing.freeze_support()
injector = Injector()
app = injector.create_object(OpendropApplication)
return app.run(argv)
if __name__ == '__main__':
sys.exit(main(*sys.argv))
| gpl-2.0 | -2,147,662,062,066,860,300 | 39.391304 | 114 | 0.744887 | false |
rx2130/Leetcode | python/37 Sudoku Solver.py | 1 | 1671 | class Solution(object):
def solveSudoku(self, board):
"""
:type board: List[List[str]]
:rtype: void Do not return anything, modify board in-place instead.
"""
if board is None or len(board) != 9 or len(board[0]) != 9:
return
for row in range(9):
board[row] = list(board[row])
self.solve(board)
for row in board:
print(row)
def solve(self, board):
for i in range(9):
for j in range(9):
if board[i][j] == '.':
for c in range(1, 10):
if self.isValid(board, i, j, c):
board[i][j] = str(c)
if self.solve(board):
return True
else:
board[i][j] = '.'
return False
return True
def isValid(self, board, i, j, c):
for row in range(9):
if board[row][j] == str(c):
return False
for col in range(9):
if board[i][col] == str(c):
return False
for row in range(i // 3 * 3, i // 3 * 3 + 3):
for col in range(j // 3 * 3, j // 3 * 3 + 3):
if board[row][col] == str(c):
return False
return True
board = ["..9748...", "7........", ".2.1.9...", "..7...24.", ".64.1.59.",
".98...3..", "...8.3.2.", "........6", "...2759.."]
test = Solution()
test.solveSudoku(board)
# ["519748632","783652419","426139875","357986241","264317598","198524367","975863124","832491756","641275983"]
| apache-2.0 | -5,658,215,617,263,024,000 | 34.553191 | 111 | 0.421903 | false |
ericdwang/django-analytical | analytical/tests/test_tag_spring_metrics.py | 4 | 2549 | """
Tests for the Spring Metrics template tags and filters.
"""
import re
from django.contrib.auth.models import User, AnonymousUser
from django.http import HttpRequest
from django.template import Context
from django.test.utils import override_settings
from analytical.templatetags.spring_metrics import SpringMetricsNode
from analytical.tests.utils import TagTestCase
from analytical.utils import AnalyticalException
@override_settings(SPRING_METRICS_TRACKING_ID='12345678')
class SpringMetricsTagTestCase(TagTestCase):
"""
Tests for the ``spring_metrics`` template tag.
"""
def test_tag(self):
r = self.render_tag('spring_metrics', 'spring_metrics')
self.assertTrue("_springMetq.push(['id', '12345678']);" in r, r)
def test_node(self):
r = SpringMetricsNode().render(Context({}))
self.assertTrue("_springMetq.push(['id', '12345678']);" in r, r)
@override_settings(SPRING_METRICS_TRACKING_ID=None)
def test_no_site_id(self):
self.assertRaises(AnalyticalException, SpringMetricsNode)
@override_settings(SPRING_METRICS_TRACKING_ID='123xyz')
def test_wrong_site_id(self):
self.assertRaises(AnalyticalException, SpringMetricsNode)
@override_settings(ANALYTICAL_AUTO_IDENTIFY=True)
def test_identify(self):
r = SpringMetricsNode().render(Context({'user':
User(email='[email protected]')}))
self.assertTrue("_springMetq.push(['setdata', "
"{'email': '[email protected]'}]);" in r, r)
@override_settings(ANALYTICAL_AUTO_IDENTIFY=True)
def test_identify_anonymous_user(self):
r = SpringMetricsNode().render(Context({'user': AnonymousUser()}))
self.assertFalse("_springMetq.push(['setdata', {'email':" in r, r)
def test_custom(self):
r = SpringMetricsNode().render(Context({'spring_metrics_var1': 'val1',
'spring_metrics_var2': 'val2'}))
self.assertTrue("_springMetq.push(['setdata', {'var1': 'val1'}]);" in r,
r)
self.assertTrue("_springMetq.push(['setdata', {'var2': 'val2'}]);" in r,
r)
@override_settings(ANALYTICAL_INTERNAL_IPS=['1.1.1.1'])
def test_render_internal_ip(self):
req = HttpRequest()
req.META['REMOTE_ADDR'] = '1.1.1.1'
context = Context({'request': req})
r = SpringMetricsNode().render(context)
self.assertTrue(r.startswith(
'<!-- Spring Metrics disabled on internal IP address'), r)
self.assertTrue(r.endswith('-->'), r)
| mit | -3,707,138,534,453,119,500 | 37.044776 | 80 | 0.652413 | false |
ceph/autotest | frontend/settings.py | 2 | 4228 | # Django settings for frontend project.
import os
import common
from autotest_lib.client.common_lib import global_config
DEBUG = True
TEMPLATE_DEBUG = DEBUG
FULL_ADMIN = False
ADMINS = (
# ('Your Name', '[email protected]'),
)
MANAGERS = ADMINS
DATABASE_ENGINE = 'mysql' # 'postgresql_psycopg2', 'postgresql',
# 'mysql', 'sqlite3' or 'ado_mssql'.
DATABASE_PORT = '' # Set to empty string for default.
# Not used with sqlite3.
c = global_config.global_config
_section = 'AUTOTEST_WEB'
DATABASE_HOST = c.get_config_value(_section, "host")
# Or path to database file if using sqlite3.
DATABASE_NAME = c.get_config_value(_section, "database")
# The following not used with sqlite3.
DATABASE_USER = c.get_config_value(_section, "user")
DATABASE_PASSWORD = c.get_config_value(_section, "password", default='')
DATABASE_READONLY_HOST = c.get_config_value(_section, "readonly_host",
default=DATABASE_HOST)
DATABASE_READONLY_USER = c.get_config_value(_section, "readonly_user",
default=DATABASE_USER)
if DATABASE_READONLY_USER != DATABASE_USER:
DATABASE_READONLY_PASSWORD = c.get_config_value(_section,
"readonly_password",
default='')
else:
DATABASE_READONLY_PASSWORD = DATABASE_PASSWORD
# prefix applied to all URLs - useful if requests are coming through apache,
# and you need this app to coexist with others
URL_PREFIX = 'afe/server/'
TKO_URL_PREFIX = 'new_tko/server/'
PLANNER_URL_PREFIX = 'planner/server/'
# Local time zone for this installation. Choices can be found here:
# http://www.postgresql.org/docs/8.1/static/datetime-keywords.html#DATETIME-TIMEZONE-SET-TABLE
# although not all variations may be possible on all operating systems.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = 'America/Los_Angeles'
# Language code for this installation. All choices can be found here:
# http://www.w3.org/TR/REC-html40/struct/dirlang.html#langcodes
# http://blogs.law.harvard.edu/tech/stories/storyReader$15
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# Absolute path to the directory that holds media.
# Example: "/home/media/media.lawrence.com/"
MEDIA_ROOT = ''
# URL that handles the media served from MEDIA_ROOT.
# Example: "http://media.lawrence.com"
MEDIA_URL = ''
# URL prefix for admin media -- CSS, JavaScript and images. Make sure to use a
# trailing slash.
# Examples: "http://foo.com/media/", "/media/".
ADMIN_MEDIA_PREFIX = '/media/'
# Make this unique, and don't share it with anybody.
SECRET_KEY = 'pn-t15u(epetamdflb%dqaaxw+5u&2#0u-jah70w1l*_9*)=n7'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.load_template_source',
'django.template.loaders.app_directories.load_template_source',
# 'django.template.loaders.eggs.load_template_source',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'frontend.apache_auth.ApacheAuthMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.middleware.doc.XViewMiddleware',
'frontend.shared.json_html_formatter.JsonToHtmlMiddleware',
)
ROOT_URLCONF = 'frontend.urls'
TEMPLATE_DIRS = (
# Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
os.path.abspath(os.path.dirname(__file__) + '/templates'),  # trailing comma so this stays a tuple
)
INSTALLED_APPS = (
'frontend.afe',
'frontend.tko',
'frontend.planner',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
)
AUTHENTICATION_BACKENDS = (
'frontend.apache_auth.SimpleAuthBackend',
)
| gpl-2.0 | 4,005,310,545,773,176,000 | 33.655738 | 94 | 0.684957 | false |
drcapulet/sentry | src/sentry/migrations/0120_auto__add_grouprulestatus.py | 36 | 23983 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'GroupRuleStatus'
db.create_table('sentry_grouprulestatus', (
('id', self.gf('sentry.db.models.fields.bounded.BoundedBigAutoField')(primary_key=True)),
('project', self.gf('sentry.db.models.fields.FlexibleForeignKey')(to=orm['sentry.Project'])),
('rule', self.gf('sentry.db.models.fields.FlexibleForeignKey')(to=orm['sentry.Rule'])),
('group', self.gf('sentry.db.models.fields.FlexibleForeignKey')(to=orm['sentry.Group'])),
('status', self.gf('django.db.models.fields.PositiveSmallIntegerField')(default=0)),
('date_added', self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime.now)),
))
db.send_create_signal('sentry', ['GroupRuleStatus'])
def backwards(self, orm):
# Deleting model 'GroupRuleStatus'
db.delete_table('sentry_grouprulestatus')
models = {
'sentry.accessgroup': {
'Meta': {'unique_together': "(('team', 'name'),)", 'object_name': 'AccessGroup'},
'data': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'managed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'members': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['sentry.User']", 'symmetrical': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'projects': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['sentry.Project']", 'symmetrical': 'False'}),
'team': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Team']"}),
'type': ('django.db.models.fields.IntegerField', [], {'default': '50'})
},
'sentry.activity': {
'Meta': {'object_name': 'Activity'},
'data': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'event': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Event']", 'null': 'True'}),
'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Group']", 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'ident': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'type': ('django.db.models.fields.PositiveIntegerField', [], {}),
'user': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.User']", 'null': 'True'})
},
'sentry.alert': {
'Meta': {'object_name': 'Alert'},
'data': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Group']", 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'message': ('django.db.models.fields.TextField', [], {}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'related_groups': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'related_alerts'", 'symmetrical': 'False', 'through': "orm['sentry.AlertRelatedGroup']", 'to': "orm['sentry.Group']"}),
'status': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'db_index': 'True'})
},
'sentry.alertrelatedgroup': {
'Meta': {'unique_together': "(('group', 'alert'),)", 'object_name': 'AlertRelatedGroup'},
'alert': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Alert']"}),
'data': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'})
},
'sentry.event': {
'Meta': {'unique_together': "(('project', 'event_id'),)", 'object_name': 'Event', 'db_table': "'sentry_message'"},
'checksum': ('django.db.models.fields.CharField', [], {'max_length': '32', 'db_index': 'True'}),
'data': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'event_id': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True', 'db_column': "'message_id'"}),
'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'blank': 'True', 'related_name': "'event_set'", 'null': 'True', 'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'message': ('django.db.models.fields.TextField', [], {}),
'num_comments': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'null': 'True'}),
'platform': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'time_spent': ('django.db.models.fields.IntegerField', [], {'null': 'True'})
},
'sentry.eventmapping': {
'Meta': {'unique_together': "(('project', 'event_id'),)", 'object_name': 'EventMapping'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'event_id': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"})
},
'sentry.group': {
'Meta': {'unique_together': "(('project', 'checksum'),)", 'object_name': 'Group', 'db_table': "'sentry_groupedmessage'"},
'active_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'}),
'checksum': ('django.db.models.fields.CharField', [], {'max_length': '32', 'db_index': 'True'}),
'culprit': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'db_column': "'view'", 'blank': 'True'}),
'data': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'first_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'is_public': ('django.db.models.fields.NullBooleanField', [], {'default': 'False', 'null': 'True', 'blank': 'True'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'default': '40', 'db_index': 'True', 'blank': 'True'}),
'logger': ('django.db.models.fields.CharField', [], {'default': "'root'", 'max_length': '64', 'db_index': 'True', 'blank': 'True'}),
'message': ('django.db.models.fields.TextField', [], {}),
'num_comments': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'null': 'True'}),
'platform': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'resolved_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'status': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'db_index': 'True'}),
'time_spent_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'time_spent_total': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'times_seen': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1', 'db_index': 'True'})
},
'sentry.groupbookmark': {
'Meta': {'unique_together': "(('project', 'user', 'group'),)", 'object_name': 'GroupBookmark'},
'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'bookmark_set'", 'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'bookmark_set'", 'to': "orm['sentry.Project']"}),
'user': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'sentry_bookmark_set'", 'to': "orm['sentry.User']"})
},
'sentry.groupmeta': {
'Meta': {'unique_together': "(('group', 'key'),)", 'object_name': 'GroupMeta'},
'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'value': ('django.db.models.fields.TextField', [], {})
},
'sentry.grouprulestatus': {
'Meta': {'object_name': 'GroupRuleStatus'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'rule': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Rule']"}),
'status': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'})
},
'sentry.groupseen': {
'Meta': {'unique_together': "(('user', 'group'),)", 'object_name': 'GroupSeen'},
'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'user': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.User']", 'db_index': 'False'})
},
'sentry.grouptagkey': {
'Meta': {'unique_together': "(('project', 'group', 'key'),)", 'object_name': 'GroupTagKey'},
'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'values_seen': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
},
'sentry.grouptagvalue': {
'Meta': {'unique_together': "(('project', 'key', 'value', 'group'),)", 'object_name': 'GroupTagValue', 'db_table': "'sentry_messagefiltervalue'"},
'first_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True'}),
'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'grouptag'", 'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'grouptag'", 'null': 'True', 'to': "orm['sentry.Project']"}),
'times_seen': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
'sentry.lostpasswordhash': {
'Meta': {'object_name': 'LostPasswordHash'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'hash': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'user': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.User']", 'unique': 'True'})
},
'sentry.option': {
'Meta': {'object_name': 'Option'},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '64'}),
'value': ('sentry.db.models.fields.pickle.UnicodePickledObjectField', [], {})
},
'sentry.pendingteammember': {
'Meta': {'unique_together': "(('team', 'email'),)", 'object_name': 'PendingTeamMember'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'team': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'pending_member_set'", 'to': "orm['sentry.Team']"}),
'type': ('django.db.models.fields.IntegerField', [], {'default': '50'})
},
'sentry.project': {
'Meta': {'unique_together': "(('team', 'slug'),)", 'object_name': 'Project'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'owner': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'sentry_owned_project_set'", 'null': 'True', 'to': "orm['sentry.User']"}),
'platform': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True'}),
'public': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50', 'null': 'True'}),
'status': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'db_index': 'True'}),
'team': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Team']", 'null': 'True'})
},
'sentry.projectkey': {
'Meta': {'object_name': 'ProjectKey'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'key_set'", 'to': "orm['sentry.Project']"}),
'public_key': ('django.db.models.fields.CharField', [], {'max_length': '32', 'unique': 'True', 'null': 'True'}),
'roles': ('django.db.models.fields.BigIntegerField', [], {'default': '1'}),
'secret_key': ('django.db.models.fields.CharField', [], {'max_length': '32', 'unique': 'True', 'null': 'True'}),
'user': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.User']", 'null': 'True'}),
'user_added': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'keys_added_set'", 'null': 'True', 'to': "orm['sentry.User']"})
},
'sentry.projectoption': {
'Meta': {'unique_together': "(('project', 'key'),)", 'object_name': 'ProjectOption', 'db_table': "'sentry_projectoptions'"},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'value': ('sentry.db.models.fields.pickle.UnicodePickledObjectField', [], {})
},
'sentry.rule': {
'Meta': {'object_name': 'Rule'},
'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"})
},
'sentry.tagkey': {
'Meta': {'unique_together': "(('project', 'key'),)", 'object_name': 'TagKey', 'db_table': "'sentry_filterkey'"},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'values_seen': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
},
'sentry.tagvalue': {
'Meta': {'unique_together': "(('project', 'key', 'value'),)", 'object_name': 'TagValue', 'db_table': "'sentry_filtervalue'"},
'data': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'first_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'times_seen': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
'sentry.team': {
'Meta': {'object_name': 'Team'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'members': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'team_memberships'", 'symmetrical': 'False', 'through': "orm['sentry.TeamMember']", 'to': "orm['sentry.User']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'owner': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.User']"}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50'}),
'status': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
},
'sentry.teammember': {
'Meta': {'unique_together': "(('team', 'user'),)", 'object_name': 'TeamMember'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'team': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'member_set'", 'to': "orm['sentry.Team']"}),
'type': ('django.db.models.fields.IntegerField', [], {'default': '50'}),
'user': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'sentry_teammember_set'", 'to': "orm['sentry.User']"})
},
'sentry.user': {
'Meta': {'object_name': 'User', 'db_table': "'auth_user'"},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '128'})
},
'sentry.useroption': {
'Meta': {'unique_together': "(('user', 'project', 'key'),)", 'object_name': 'UserOption'},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'user': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.User']"}),
'value': ('sentry.db.models.fields.pickle.UnicodePickledObjectField', [], {})
}
}
complete_apps = ['sentry']
| bsd-3-clause | 3,956,484,335,433,759,000 | 82.856643 | 223 | 0.560063 | false |
tony0924/itriqemu | scripts/tracetool/backend/stderr.py | 94 | 1162 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Stderr built-in backend.
"""
__author__ = "Lluís Vilanova <[email protected]>"
__copyright__ = "Copyright 2012, Lluís Vilanova <[email protected]>"
__license__ = "GPL version 2 or (at your option) any later version"
__maintainer__ = "Stefan Hajnoczi"
__email__ = "[email protected]"
from tracetool import out
PUBLIC = True
def c(events):
pass
def h(events):
out('#include <stdio.h>',
'#include "trace/control.h"',
'',
)
for e in events:
argnames = ", ".join(e.args.names())
if len(e.args) > 0:
argnames = ", " + argnames
out('static inline void trace_%(name)s(%(args)s)',
'{',
' bool _state = trace_event_get_state(%(event_id)s);',
' if (_state) {',
' fprintf(stderr, "%(name)s " %(fmt)s "\\n" %(argnames)s);',
' }',
'}',
name = e.name,
args = e.args,
event_id = "TRACE_" + e.name.upper(),
fmt = e.fmt.rstrip("\n"),
argnames = argnames,
)
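# Illustrative sketch (not part of the backend): for a hypothetical trace event
# declared as qemu_vfree(void *ptr) "ptr %p", the template above would emit
# roughly the following C helper:
#
#   static inline void trace_qemu_vfree(void *ptr)
#   {
#       bool _state = trace_event_get_state(TRACE_QEMU_VFREE);
#       if (_state) {
#           fprintf(stderr, "qemu_vfree " "ptr %p" "\n" , ptr);
#       }
#   }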
| gpl-2.0 | -8,138,510,596,667,711,000 | 23.166667 | 79 | 0.483621 | false |
eldipa/athena | athenaproj/athena/models.py | 1 | 7528 | from django.db import models
import json, string
from django.core.exceptions import ValidationError
class Objective(models.Model):
name = models.CharField(max_length=200)
description = models.CharField(max_length=240, blank=True)
def __unicode__(self):
return self.name
class How(models.Model):
name = models.CharField(max_length=200)
description = models.CharField(max_length=240, blank=True)
def __unicode__(self):
return self.name
class Plan(models.Model):
name = models.CharField(max_length=200)
description = models.TextField()
def __unicode__(self):
return self.name
class TestSet(models.Model):
name = models.CharField(max_length=200)
description = models.TextField()
objectives = models.ManyToManyField(Objective, null=False)
how = models.ManyToManyField(How, null=False)
plan = models.ForeignKey(Plan, null=False, blank=False)
parameters = models.TextField(blank=True)
keyword_count = 8
class TestTemplate(string.Template):
delimiter = '{{'
pattern = r'''
\{\{(?:
(?P<escaped>!)|
(?P<named>[_a-z][_a-z0-9]*)\}\}|
(?P<braced>[_a-z][_a-z0-9]*)\}\}|
(?P<invalid>)
)
'''
def required(self):
required = []
for escaped, named, braced, invalid in self.pattern.findall(self.template):
if named or braced:
required.append(named if named else braced)
return required
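# Illustrative sketch (hypothetical values): with the '{{' delimiter above,
#   t = TestSet.TestTemplate("Login as {{user}}")
#   t.required()                     -> ['user']
#   t.substitute({'user': 'alice'})  -> 'Login as alice'
# '{{!' renders a literal '{{', and an unterminated '{{' is treated as invalid.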
def __unicode__(self):
test_count = self.test_count()
is_parametric = self.is_parametric()
return "%s (%s)" % (self.name, "singleton set" if not is_parametric else ("%i test%s in the set" % (test_count, "" if test_count == 1 else "s")))
def _get_keywords_and_rows(self):
try:
data = json.loads(self.parameters)
except:
data = []
# guard against blank/invalid parameters: data may be [] after the except above
keywords = filter(None, data[0]) if data else []
return keywords, [filter(None, row) for row in data[1:]]
def _get_row_count(self):
_, rows = self._get_keywords_and_rows()
return len(rows)
def test_count(self):
row_count = self._get_row_count()
return row_count if row_count else 1
def is_parametric(self):
row_count = self._get_row_count()
return row_count > 0
def tests(self):
k = ("name", "description")
if not self.is_parametric():
return [dict(zip(k, (self.name, self.description)))]
keywords, rows = self._get_keywords_and_rows()
results = []
name_template = self.TestTemplate(self.name)
description_template = self.TestTemplate(self.description)
for r in rows:
params = dict(zip(keywords, r))
results.append(dict(zip(k, (name_template.substitute(params), description_template.substitute(params)))))
return results
def clean(self):
models.Model.clean(self)
# JSON integrity -------------------------------
try:
data = json.loads(self.parameters)
except Exception, e:
raise ValidationError("Internal Error (possible BUG): The parameters are not a valid json. '%s'" % str(e))
# Clean up --------------------------
empties = []
for i, row in enumerate(data[1:]):
if not filter(None, map(lambda s: s.strip(), row)):
empties.append(i)
for i in empties:
data[i+1] = []
data[1:] = filter(None, data[1:]) #remove empties rows (tests)
for i, l in enumerate(data):
for j, s in enumerate(l):
data[i][j] = data[i][j].strip()
self.parameters = json.dumps(data) #restore
# Template integrity --------------------------
try:
descr_template = self.TestTemplate(self.description)
except Exception, e:
raise ValidationError("The description is an invalid template: '%s'" % str(e))
try:
name_template = self.TestTemplate(self.name)
except Exception, e:
raise ValidationError("The name is an invalid template: '%s'" % str(e))
# JSON integrity -------------------------------
try:
# expected list of lists.
# the first is a list of column names (keywords)
# then, each list is the list of values of each row
# each value of the object is string.
# the list must have the first object (list of columns)
assert isinstance(data, list), "The data returned is not a list (expected list of objects). Found '%s'" % data.__class__
assert data, "The data cannot be empty."
column_names = data[0]
assert isinstance(column_names, list), "The first object in data must be a list of column names (keywords). Found '%s'" % column_names.__class__
rows = data[1:]
for row in rows:
assert isinstance(row, list), "The data is a list of non-lists (one object is the type '%s'). Expected a list of list." % row.__class__
assert len(row) == len(column_names), "The row size %i does not match the column count %i." % (len(row), len(column_names))
except Exception, e:
raise ValidationError("Internal Error (possible BUG): The parameters is a valid json but has a diferent format. '%s'" % str(e))
# Table integrity ---------------------------
empty_columns = [i for i, c in enumerate(column_names) if not c.strip()]
for j, r in enumerate(rows):
for i in empty_columns:
if r[i].strip():
raise ValidationError("The test (row) %i, keyword (column) %i has the value '%s' but that column has not name (not keyword defined)." % (j+1, i+1, r[i]))
normalized_column_names = filter(None, map(lambda s: s.strip(), column_names))
if len(set(normalized_column_names)) != len(normalized_column_names):
raise ValidationError("Two or more column names (keywords) are repeated.")
# Template - Table relationship ---------------------
column_names = set(normalized_column_names)
required = set(descr_template.required())
required_but_not_defined = required.difference(column_names)
used_in_name_but_not_defined = set(name_template.required()).difference(column_names)
defined_but_not_required = column_names.difference(required.union(set(name_template.required())))
errors = []
if required_but_not_defined:
errors.append(("Some keywords are required (in the description) to be defined: %s" % str(map(str, required_but_not_defined))))
if defined_but_not_required:
errors.append(("Some keywords were defined but they are not used: %s" % str(map(str, defined_but_not_required))))
if used_in_name_but_not_defined:
errors.append(("Some keywords are required (in the name) to be defined: %s" % str(map(str, used_in_name_but_not_defined))))
if column_names and not rows:
errors.append(("You are using keywords but you don't define any test."))
if errors:
raise ValidationError("\n".join(errors))
class Comment(models.Model):
test = models.ForeignKey(TestSet, null=True, blank=True)
responding_to = models.ForeignKey('self', blank=True, null=True)
text = models.TextField()
def clean(self):
if self.test is None and self.responding_to is None:
raise ValidationError("The comment must refer to a test or to another comment.")
models.Model.clean(self)
def __unicode__(self):
t = self.text[:64]
if len(self.text) > 64:
t += "..."
return t
| gpl-2.0 | -7,051,104,130,827,571,000 | 34.847619 | 168 | 0.604145 | false |
brianlsharp/MissionPlanner | Lib/encodings/unicode_escape.py | 103 | 1229 | """ Python 'unicode-escape' Codec
Written by Marc-Andre Lemburg ([email protected]).
(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
"""
import codecs
### Codec APIs
class Codec(codecs.Codec):
# Note: Binding these as C functions will result in the class not
# converting them to methods. This is intended.
encode = codecs.unicode_escape_encode
decode = codecs.unicode_escape_decode
class IncrementalEncoder(codecs.IncrementalEncoder):
def encode(self, input, final=False):
return codecs.unicode_escape_encode(input, self.errors)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
def decode(self, input, final=False):
return codecs.unicode_escape_decode(input, self.errors)[0]
class StreamWriter(Codec,codecs.StreamWriter):
pass
class StreamReader(Codec,codecs.StreamReader):
pass
### encodings module API
def getregentry():
return codecs.CodecInfo(
name='unicode-escape',
encode=Codec.encode,
decode=Codec.decode,
incrementalencoder=IncrementalEncoder,
incrementaldecoder=IncrementalDecoder,
streamwriter=StreamWriter,
streamreader=StreamReader,
)
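# Illustrative usage sketch (standard library behaviour, not specific to this
# module): the codec escapes non-ASCII and control characters on encode and
# interprets backslash escapes on decode, e.g.
#   u"caf\u00e9 \n".encode("unicode_escape")  -> 'caf\\xe9 \\n'
#   "caf\\xe9".decode("unicode_escape")       -> u'caf\xe9'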
| gpl-3.0 | 9,170,883,655,038,947,000 | 25.311111 | 69 | 0.692433 | false |
varunkamra/kuma | vendor/packages/pygments/lexers/algebra.py | 72 | 6167 | # -*- coding: utf-8 -*-
"""
pygments.lexers.algebra
~~~~~~~~~~~~~~~~~~~~~~~
Lexers for computer algebra systems.
:copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
from pygments.lexer import RegexLexer, bygroups, words
from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
Number, Punctuation
__all__ = ['GAPLexer', 'MathematicaLexer', 'MuPADLexer']
class GAPLexer(RegexLexer):
"""
For `GAP <http://www.gap-system.org>`_ source code.
.. versionadded:: 2.0
"""
name = 'GAP'
aliases = ['gap']
filenames = ['*.g', '*.gd', '*.gi', '*.gap']
tokens = {
'root': [
(r'#.*$', Comment.Single),
(r'"(?:[^"\\]|\\.)*"', String),
(r'\(|\)|\[|\]|\{|\}', Punctuation),
(r'''(?x)\b(?:
if|then|elif|else|fi|
for|while|do|od|
repeat|until|
break|continue|
function|local|return|end|
rec|
quit|QUIT|
IsBound|Unbind|
TryNextMethod|
Info|Assert
)\b''', Keyword),
(r'''(?x)\b(?:
true|false|fail|infinity
)\b''',
Name.Constant),
(r'''(?x)\b(?:
(Declare|Install)([A-Z][A-Za-z]+)|
BindGlobal|BIND_GLOBAL
)\b''',
Name.Builtin),
(r'\.|,|:=|;|=|\+|-|\*|/|\^|>|<', Operator),
(r'''(?x)\b(?:
and|or|not|mod|in
)\b''',
Operator.Word),
(r'''(?x)
(?:\w+|`[^`]*`)
(?:::\w+|`[^`]*`)*''', Name.Variable),
(r'[0-9]+(?:\.[0-9]*)?(?:e[0-9]+)?', Number),
(r'\.[0-9]+(?:e[0-9]+)?', Number),
(r'.', Text)
]
}
class MathematicaLexer(RegexLexer):
"""
Lexer for `Mathematica <http://www.wolfram.com/mathematica/>`_ source code.
.. versionadded:: 2.0
"""
name = 'Mathematica'
aliases = ['mathematica', 'mma', 'nb']
filenames = ['*.nb', '*.cdf', '*.nbp', '*.ma']
mimetypes = ['application/mathematica',
'application/vnd.wolfram.mathematica',
'application/vnd.wolfram.mathematica.package',
'application/vnd.wolfram.cdf']
# http://reference.wolfram.com/mathematica/guide/Syntax.html
operators = (
";;", "=", "=.", "!=" "==", ":=", "->", ":>", "/.", "+", "-", "*", "/",
"^", "&&", "||", "!", "<>", "|", "/;", "?", "@", "//", "/@", "@@",
"@@@", "~~", "===", "&", "<", ">", "<=", ">=",
)
punctuation = (",", ";", "(", ")", "[", "]", "{", "}")
def _multi_escape(entries):
return '(%s)' % ('|'.join(re.escape(entry) for entry in entries))
tokens = {
'root': [
(r'(?s)\(\*.*?\*\)', Comment),
(r'([a-zA-Z]+[A-Za-z0-9]*`)', Name.Namespace),
(r'([A-Za-z0-9]*_+[A-Za-z0-9]*)', Name.Variable),
(r'#\d*', Name.Variable),
(r'([a-zA-Z]+[a-zA-Z0-9]*)', Name),
(r'-?[0-9]+\.[0-9]*', Number.Float),
(r'-?[0-9]*\.[0-9]+', Number.Float),
(r'-?[0-9]+', Number.Integer),
(words(operators), Operator),
(words(punctuation), Punctuation),
(r'".*?"', String),
(r'\s+', Text.Whitespace),
],
}
class MuPADLexer(RegexLexer):
"""
A `MuPAD <http://www.mupad.com>`_ lexer.
Contributed by Christopher Creutzig <[email protected]>.
.. versionadded:: 0.8
"""
name = 'MuPAD'
aliases = ['mupad']
filenames = ['*.mu']
tokens = {
'root': [
(r'//.*?$', Comment.Single),
(r'/\*', Comment.Multiline, 'comment'),
(r'"(?:[^"\\]|\\.)*"', String),
(r'\(|\)|\[|\]|\{|\}', Punctuation),
(r'''(?x)\b(?:
next|break|end|
axiom|end_axiom|category|end_category|domain|end_domain|inherits|
if|%if|then|elif|else|end_if|
case|of|do|otherwise|end_case|
while|end_while|
repeat|until|end_repeat|
for|from|to|downto|step|end_for|
proc|local|option|save|begin|end_proc|
delete|frame
)\b''', Keyword),
(r'''(?x)\b(?:
DOM_ARRAY|DOM_BOOL|DOM_COMPLEX|DOM_DOMAIN|DOM_EXEC|DOM_EXPR|
DOM_FAIL|DOM_FLOAT|DOM_FRAME|DOM_FUNC_ENV|DOM_HFARRAY|DOM_IDENT|
DOM_INT|DOM_INTERVAL|DOM_LIST|DOM_NIL|DOM_NULL|DOM_POLY|DOM_PROC|
DOM_PROC_ENV|DOM_RAT|DOM_SET|DOM_STRING|DOM_TABLE|DOM_VAR
)\b''', Name.Class),
(r'''(?x)\b(?:
PI|EULER|E|CATALAN|
NIL|FAIL|undefined|infinity|
TRUE|FALSE|UNKNOWN
)\b''',
Name.Constant),
(r'\b(?:dom|procname)\b', Name.Builtin.Pseudo),
(r'\.|,|:|;|=|\+|-|\*|/|\^|@|>|<|\$|\||!|\'|%|~=', Operator),
(r'''(?x)\b(?:
and|or|not|xor|
assuming|
div|mod|
union|minus|intersect|in|subset
)\b''',
Operator.Word),
(r'\b(?:I|RDN_INF|RD_NINF|RD_NAN)\b', Number),
# (r'\b(?:adt|linalg|newDomain|hold)\b', Name.Builtin),
(r'''(?x)
((?:[a-zA-Z_#][\w#]*|`[^`]*`)
(?:::[a-zA-Z_#][\w#]*|`[^`]*`)*)(\s*)([(])''',
bygroups(Name.Function, Text, Punctuation)),
(r'''(?x)
(?:[a-zA-Z_#][\w#]*|`[^`]*`)
(?:::[a-zA-Z_#][\w#]*|`[^`]*`)*''', Name.Variable),
(r'[0-9]+(?:\.[0-9]*)?(?:e[0-9]+)?', Number),
(r'\.[0-9]+(?:e[0-9]+)?', Number),
(r'.', Text)
],
'comment': [
(r'[^*/]', Comment.Multiline),
(r'/\*', Comment.Multiline, '#push'),
(r'\*/', Comment.Multiline, '#pop'),
(r'[*/]', Comment.Multiline)
]
}
| mpl-2.0 | -7,802,489,386,847,769,000 | 31.97861 | 81 | 0.401816 | false |
dtroyer/python-openstacksdk | openstack/tests/functional/cloud/test_groups.py | 1 | 4031 | # Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
#
# See the License for the specific language governing permissions and
# limitations under the License.
"""
test_groups
----------------------------------
Functional tests for `shade` keystone group resource.
"""
import openstack.cloud
from openstack.tests.functional.cloud import base
class TestGroup(base.BaseFunctionalTestCase):
def setUp(self):
super(TestGroup, self).setUp()
i_ver = self.operator_cloud.config.get_api_version('identity')
if i_ver in ('2', '2.0'):
self.skipTest('Identity service does not support groups')
self.group_prefix = self.getUniqueString('group')
self.addCleanup(self._cleanup_groups)
def _cleanup_groups(self):
exception_list = list()
for group in self.operator_cloud.list_groups():
if group['name'].startswith(self.group_prefix):
try:
self.operator_cloud.delete_group(group['id'])
except Exception as e:
exception_list.append(str(e))
continue
if exception_list:
# Raise an error: we must make users aware that something went
# wrong
raise openstack.cloud.OpenStackCloudException(
'\n'.join(exception_list))
def test_create_group(self):
group_name = self.group_prefix + '_create'
group = self.operator_cloud.create_group(group_name, 'test group')
for key in ('id', 'name', 'description', 'domain_id'):
self.assertIn(key, group)
self.assertEqual(group_name, group['name'])
self.assertEqual('test group', group['description'])
def test_delete_group(self):
group_name = self.group_prefix + '_delete'
group = self.operator_cloud.create_group(group_name, 'test group')
self.assertIsNotNone(group)
self.assertTrue(self.operator_cloud.delete_group(group_name))
results = self.operator_cloud.search_groups(
filters=dict(name=group_name))
self.assertEqual(0, len(results))
def test_delete_group_not_exists(self):
self.assertFalse(self.operator_cloud.delete_group('xInvalidGroupx'))
def test_search_groups(self):
group_name = self.group_prefix + '_search'
# Shouldn't find any group with this name yet
results = self.operator_cloud.search_groups(
filters=dict(name=group_name))
self.assertEqual(0, len(results))
# Now create a new group
group = self.operator_cloud.create_group(group_name, 'test group')
self.assertEqual(group_name, group['name'])
# Now we should find only the new group
results = self.operator_cloud.search_groups(
filters=dict(name=group_name))
self.assertEqual(1, len(results))
self.assertEqual(group_name, results[0]['name'])
def test_update_group(self):
group_name = self.group_prefix + '_update'
group_desc = 'test group'
group = self.operator_cloud.create_group(group_name, group_desc)
self.assertEqual(group_name, group['name'])
self.assertEqual(group_desc, group['description'])
updated_group_name = group_name + '_xyz'
updated_group_desc = group_desc + ' updated'
updated_group = self.operator_cloud.update_group(
group_name,
name=updated_group_name,
description=updated_group_desc)
self.assertEqual(updated_group_name, updated_group['name'])
self.assertEqual(updated_group_desc, updated_group['description'])
| apache-2.0 | 2,655,857,871,646,167,600 | 35.981651 | 76 | 0.637559 | false |
SophieTh/und_Sophie_2016 | pySRU/tests/SimulationTest.py | 1 | 5980 | # coding: utf-8
# /*##########################################################################
#
# Copyright (c) 2016 European Synchrotron Radiation Facility
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
# ###########################################################################*/
__authors__ = ["S Thery, M Glass, M Sanchez del Rio - ESRF ISDD Advanced Analysis and Modelling"]
__license__ = "MIT"
__date__ = "31/08/2016"
import unittest
import numpy as np
import scipy.constants as codata
from pySRU.MagneticStructureUndulatorPlane import MagneticStructureUndulatorPlane as Undulator
from pySRU.ElectronBeam import ElectronBeam
from pySRU.Source import Source
from pySRU.TrajectoryFactory import TrajectoryFactory,TRAJECTORY_METHOD_ANALYTIC, TRAJECTORY_METHOD_ODE
from pySRU.RadiationFactory import RadiationFactory , RADIATION_METHOD_APPROX_FARFIELD,RADIATION_METHOD_NEAR_FIELD
from pySRU.Simulation import Simulation,create_simulation
class UndulatorSimulationTest(unittest.TestCase):
def test_simulation(self):
electron_beam_test = ElectronBeam(Electron_energy=1.3, I_current=1.0)
beam_ESRF = ElectronBeam(Electron_energy=6.0, I_current=0.2)
undulator_test = Undulator(K=1.87, period_length=0.035, length=0.035 * 14)
ESRF18 = Undulator(K=1.68, period_length=0.018, length=2.0)
sim_test = create_simulation(magnetic_structure=undulator_test,electron_beam=electron_beam_test,
traj_method=TRAJECTORY_METHOD_ANALYTIC,rad_method=RADIATION_METHOD_NEAR_FIELD)
self.assertFalse(sim_test.radiation.distance == None)
source_test=sim_test.source
self.assertFalse(all(sim_test.trajectory_fact.initial_condition==
source_test.choose_initial_contidion_automatic()))
ref=sim_test.copy()
rad_max = sim_test.radiation.max()
# test change
sim_test.change_radiation_method(RADIATION_METHOD_APPROX_FARFIELD)
self.assertEqual(sim_test.radiation_fact.method, RADIATION_METHOD_APPROX_FARFIELD)
self.assertFalse(ref.radiation_fact.method==sim_test.radiation_fact.method)
self.assertFalse(np.all(ref.radiation.intensity == sim_test.radiation.intensity))
self.assertAlmostEqual(ref.radiation.intensity[0][0]/rad_max, sim_test.radiation.intensity[0][0]/rad_max, 3)
sim_test.change_trajectory_method(TRAJECTORY_METHOD_ODE)
self.assertEqual(sim_test.trajectory_fact.method, TRAJECTORY_METHOD_ODE)
self.assertFalse(ref.trajectory_fact.method==sim_test.trajectory_fact.method)
time_diff=np.abs(ref.trajectory.t - sim_test.trajectory.t)
self.assertTrue(np.all(time_diff<=1e-19))
self.assertFalse(np.all(ref.trajectory.x == sim_test.trajectory.x))
self.assertFalse(np.all(ref.radiation.intensity == sim_test.radiation.intensity))
rad_max = sim_test.radiation.max()
self.assertAlmostEqual(ref.radiation.intensity[0][0]/rad_max, sim_test.radiation.intensity[0][0]/rad_max, 1)
sim_test.change_Nb_pts_trajectory(ref.trajectory_fact.Nb_pts+1)
self.assertEqual(sim_test.trajectory_fact.Nb_pts,ref.trajectory_fact.Nb_pts+1)
self.assertEqual(sim_test.trajectory.nb_points(), ref.trajectory_fact.Nb_pts+1)
self.assertFalse(ref.trajectory_fact.Nb_pts == sim_test.trajectory_fact.Nb_pts)
self.assertAlmostEqual(ref.radiation.intensity[0][0]/rad_max,sim_test.radiation.intensity[0][0]/rad_max,1)
sim_test.change_Nb_pts_radiation(100)
self.assertEqual(sim_test.radiation_fact.Nb_pts,100)
self.assertFalse(np.all(ref.radiation.X == sim_test.radiation.X))
self.assertTrue(ref.radiation.X.min() == sim_test.radiation.X.min())
self.assertTrue(ref.radiation.X.max() == sim_test.radiation.X.max())
self.assertTrue(ref.radiation.Y.min() == sim_test.radiation.Y.min())
self.assertTrue(ref.radiation.Y.max() == sim_test.radiation.Y.max())
self.assertFalse(len(ref.radiation.X) == len(sim_test.radiation.X))
sim_test.change_distance(50)
self.assertEqual(sim_test.radiation.distance,50)
self.assertFalse(ref.radiation.distance == sim_test.radiation.distance)
sim_test.change_photon_frequency(source_test.harmonic_frequency(1) * 0.8)
self.assertEqual(sim_test.radiation_fact.photon_frequency,source_test.harmonic_frequency(1)*0.8)
self.assertFalse(ref.radiation_fact.photon_frequency == sim_test.radiation_fact.photon_frequency)
# nb_pts=np.arange(500,2001,500,dtype='int')
# err=sim_test.error_radiation_method_nb_pts_traj(RADIATION_METHOD_APPROX_FARFIELD,nb_pts=nb_pts)
# self.assertLessEqual((err.max() / rad_max), 1e-2, 1)
#
# distance=np.arange(20,101,20,dtype='int')
# err = sim_test.error_radiation_method_distance(RADIATION_METHOD_APPROX_FARFIELD, D=distance)
# self.assertLessEqual(err.max()/rad_max, 1e-2, 1)
| mit | 6,584,440,623,411,914,000 | 54.37037 | 116 | 0.701672 | false |
sanja7s/SR_Twitter | src_SR/articles_in_CVs.py | 1 | 3380 | #!/usr/bin/env python
# -*- coding: UTF-8 -*-
"""
Join article names for the popular concepts found in each community
"""
from collections import defaultdict
import glob, os
IN_DIR = "../../../DATA/CV"
#########################################################
# SR
#########################################################
X = "0.6"
working_subfolder = "SR_communities/"
# the communities we analyze (from the SR graph)
spec_users = working_subfolder + "communitiesSR_" + str(X) + ".txt"
#########################################################
#########################################################
# Mention
#########################################################
#X = "" #dummy
#working_subfolder = "mention_communities/"
# the communities we analyze (from the mention graph)
#spec_users = working_subfolder + "communitiesMent" + str(X) + ".txt"
#########################################################
# return the top communities (those with at least sizeN members), as many as there are,
# in the form of a dictionary: {community_id: defaultdict{id_usr1: 1, id_usr2: 1, ...}},
# plus a map (res3) that gives the community id of each user in those top communities,
# and finally the whole set of communities (not limited in size) with a similar map (res4)
def read_in_communities(sizeN=300):
res = defaultdict(int)
res7s = defaultdict(int)
res3 = defaultdict(int)
res3 = defaultdict(lambda: -1, res3)
res4 = defaultdict(int)
res4 = defaultdict(lambda: -1, res4)
f = open(spec_users, "r")
for line in f:
line = line.split()
user_id = line[0]
com_id = line[1]
if com_id not in res:
res[com_id] = defaultdict(int)
res[com_id][user_id] = 1
for com in res:
if len(res[com]) >= sizeN:
res7s[com] = res[com]
for usr in res[com]:
res4[usr] = com
for com in res7s:
for usr in res7s[com]:
res3[usr] = com
return res7s, res3, res, res4
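# Illustrative sketch (hypothetical ids): for sizeN=2 and a communities file with
# lines "u1 c1", "u2 c1", "u3 c2", read_in_communities(2) would return roughly
#   res7s = {'c1': {'u1': 1, 'u2': 1}}           # communities with >= sizeN members
#   res3  = {'u1': 'c1', 'u2': 'c1'}             # user -> top community map
#   res   = {'c1': {...}, 'c2': {'u3': 1}}       # all communities
#   res4  = {'u1': 'c1', 'u2': 'c1', 'u3': 'c2'} # user -> community map (all)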
f_in_article_IDs = "articles_selected"
#
# read in all article IDs
#
def read_article_IDs(): #TODO fin
article_IDs = defaultdict(int)
cnt = 0
with open(f_in_article_IDs) as f:
for line in f:
line = line[:-1].split('\t')
aid = line[0]
aname = line[1]
article_IDs[aid] = aname
if cnt % 10000 == 0:
print line
cnt += 1
return article_IDs
#
# extract all concepts in a community and sort them by popularity
#
def save_popular_articles(com_id, article_IDs):
f_in = working_subfolder + "top_concepts_SR_" + str(X) + "_COM_" + com_id + ".tab"
f = open(f_in, "r")
f_out = working_subfolder + com_id + "_COM_" + str(X) + "top_articles.tab"
f2 = open(f_out, "w")
for line in f:
line = line[:-1].split('\t')
aid = line[0]
aTF = line[1]
aname = article_IDs[aid]
f2.write(str(aTF) + '\t' + str(aname) + '\t' + str(aid) + '\n')
print "Processed community %s " % (com_id)
###
### call the others
###
def main():
os.chdir(IN_DIR)
# number of nodes in a community
sizeN = 300
top_communities, com_id_map, all_communities, all_com_id_map = read_in_communities(sizeN)
N = len(top_communities)
print N, "top communities found ", "of size larger than ", sizeN
NALL = len(all_communities)
print NALL, "all communities found"
article_IDs = read_article_IDs()
#################################################################
for community in top_communities:
save_popular_articles(community, article_IDs)
#################################################################
main()
| mit | 4,141,837,262,998,916,600 | 25.40625 | 90 | 0.559172 | false |
nextgis-extra/tests | lib_gdal/gdrivers/isis2.py | 1 | 3417 | #!/usr/bin/env python
###############################################################################
# $Id: isis2.py 32163 2015-12-13 17:44:50Z goatbar $
#
# Project: GDAL/OGR Test Suite
# Purpose: Test read functionality for ISIS2 driver.
# Author: Even Rouault <even dot rouault @ mines-paris dot org>
#
###############################################################################
# Copyright (c) 2008, Even Rouault <even dot rouault at mines-paris dot org>
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
###############################################################################
import sys
sys.path.append( '../pymod' )
import gdaltest
###############################################################################
# Read a truncated and modified version of arvidson_original.cub from
# ftp://ftpflag.wr.usgs.gov/dist/pigpen/venus/venustopo_download/ovda_dtm.zip
def isis2_1():
tst = gdaltest.GDALTest( 'ISIS2', 'arvidson_original_truncated.cub', 1, 382 )
expected_prj = """PROJCS["SIMPLE_CYLINDRICAL VENUS",
GEOGCS["GCS_VENUS",
DATUM["D_VENUS",
SPHEROID["VENUS",6051000,0]],
PRIMEM["Reference_Meridian",0],
UNIT["degree",0.0174532925199433]],
PROJECTION["Equirectangular"],
PARAMETER["latitude_of_origin",0],
PARAMETER["central_meridian",0],
PARAMETER["false_easting",0],
PARAMETER["false_northing",0]],
PARAMETER["standard_parallel_1",-6.5]"""
expected_gt = (10157400.403618813, 1200.0000476837158, 0.0, -585000.02324581146, 0.0, -1200.0000476837158)
return tst.testOpen( check_prj = expected_prj,
check_gt = expected_gt )
###############################################################################
# Test simple creation on disk.
def isis2_2():
tst = gdaltest.GDALTest( 'ISIS2', 'byte.tif', 1, 4672 )
return tst.testCreate()
###############################################################################
# Test a different data type with some options.
def isis2_3():
tst = gdaltest.GDALTest( 'ISIS2', 'float32.tif', 1, 4672,
options = ['LABELING_METHOD=DETACHED', 'IMAGE_EXTENSION=qub'] )
return tst.testCreateCopy( vsimem=1 )
gdaltest_list = [
isis2_1,
isis2_2,
isis2_3 ]
if __name__ == '__main__':
gdaltest.setup_run( 'isis2' )
gdaltest.run_tests( gdaltest_list )
gdaltest.summarize()
| gpl-2.0 | 8,372,541,780,123,177,000 | 36.141304 | 110 | 0.598478 | false |
selwin/Django-facebook | open_facebook/api.py | 1 | 32830 | # -*- coding: utf-8 -*-
'''
Open Facebook allows you to use Facebook's Open Graph API with simple Python code
**Features**
* Supported and maintained
* Tested so people can contribute
* Facebook exceptions are mapped
* Logging
**Basic examples**::
facebook = OpenFacebook(access_token)
# Getting info about me
facebook.get('me')
# Learning some more about fashiolista
facebook.get('fashiolista')
# Writing your first comment
facebook.set('fashiolista/comments', message='I love Fashiolista!')
# Posting to a users wall
facebook.set('me/feed', message='check out fashiolista',
url='http://www.fashiolista.com')
# Liking a page
facebook.set('fashiolista/likes')
# Getting who likes cocacola
facebook.set('cocacola/likes')
# Use fql to retrieve your name
facebook.fql('SELECT name FROM user WHERE uid = me()')
# Executing fql in batch
facebook.batch_fql([
'SELECT uid, name, pic_square FROM user WHERE uid = me()',
'SELECT uid, rsvp_status FROM event_member WHERE eid=12345678',
])
# Uploading pictures
photo_urls = [
'http://e.fashiocdn.com/images/entities/0/7/B/I/9/0.365x365.jpg',
'http://e.fashiocdn.com/images/entities/0/5/e/e/r/0.365x365.jpg',
]
for photo in photo_urls:
print facebook.set('me/feed', message='Check out Fashiolista',
picture=photo, url='http://www.fashiolista.com')
**Getting an access token**
Once you get your access token, Open Facebook gives you access to the Facebook API
There are 3 ways of getting a facebook access_token and these are currently
implemented by Django Facebook.
1. code is passed as request parameter and traded for an
access_token using the api
2. code is passed through a signed cookie and traded for an access_token
3. access_token is passed directly (retrieved through javascript, which
would be bad security, or through one of the mobile flows.)
If you are looking to develop your own flow for a different framework, have a look at
Facebook's documentation:
http://developers.facebook.com/docs/authentication/
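As a rough illustration of the first flow (all ids, secrets and urls below are
placeholders, and the response format follows Facebook's OAuth documentation
rather than anything specific to this module)::
    response = FacebookConnection.request(
        'oauth/access_token',
        client_id='your_app_id',
        client_secret='your_app_secret',
        redirect_uri='http://example.com/facebook/connect/',
        code=code,
    )
    facebook = OpenFacebook(response['access_token'])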
Also have a look at the :class:`.FacebookRequired` decorator and :func:`get_persistent_graph` function to
understand the required functionality
**Api docs**:
'''
from django.http import QueryDict
from django.utils import six
from django.utils.http import urlencode
from django_facebook import settings as facebook_settings
from open_facebook import exceptions as facebook_exceptions
from open_facebook.utils import json, encode_params, send_warning, memoized, \
stop_statsd, start_statsd
import logging
from django_facebook.utils import to_int
import ssl
import re
try:
# python 2 imports
from urlparse import urlparse
from urllib2 import build_opener, HTTPError, URLError
except ImportError:
# python 3 imports
from urllib.error import HTTPError, URLError
from urllib.parse import urlparse
from urllib.request import build_opener
logger = logging.getLogger(__name__)
# base timeout, actual timeout will increase when requests fail
REQUEST_TIMEOUT = 10
# two retries were too few; sometimes Facebook is a bit flaky
REQUEST_ATTEMPTS = 3
class FacebookConnection(object):
'''
Shared utility class implementing the parsing
of Facebook API responses
'''
api_url = 'https://graph.facebook.com/'
# this older url is still used for fql requests
old_api_url = 'https://api.facebook.com/method/'
@classmethod
def request(cls, path='', post_data=None, old_api=False, **params):
'''
Main function for sending the request to facebook
**Example**::
FacebookConnection.request('me')
:param path:
The path to request, examples: /me/friends/, /me/likes/
:param post_data:
A dictionary of data to post
:param params:
The GET params to include
'''
api_base_url = cls.old_api_url if old_api else cls.api_url
if getattr(cls, 'access_token', None):
params['access_token'] = cls.access_token
url = '%s%s?%s' % (api_base_url, path, urlencode(params))
response = cls._request(url, post_data)
return response
@classmethod
def _request(cls, url, post_data=None, timeout=REQUEST_TIMEOUT,
attempts=REQUEST_ATTEMPTS):
# change fb__explicitly_shared to fb:explicitly_shared
if post_data:
post_data = dict(
(k.replace('__', ':'), v) for k, v in post_data.items())
logger.info('requesting url %s with post data %s', url, post_data)
post_request = (post_data is not None or 'method=post' in url)
if post_request and facebook_settings.FACEBOOK_READ_ONLY:
logger.info('running in readonly mode')
response = dict(id=123456789, setting_read_only=True)
return response
# nicely identify ourselves before sending the request
opener = build_opener()
opener.addheaders = [('User-agent', 'Open Facebook Python')]
# get the statsd path to track response times with
path = urlparse(url).path
statsd_path = path.replace('.', '_')
# give it a few shots, connection is buggy at times
timeout_mp = 0
while attempts:
# gradually increase the timeout upon failure
timeout_mp += 1
extended_timeout = timeout * timeout_mp
response_file = None
encoded_params = encode_params(post_data) if post_data else None
post_string = (urlencode(encoded_params).encode('utf-8')
if post_data else None)
try:
start_statsd('facebook.%s' % statsd_path)
try:
response_file = opener.open(
url, post_string, timeout=extended_timeout)
response = response_file.read().decode('utf8')
except (HTTPError,) as e:
response_file = e
response = response_file.read().decode('utf8')
                    # Facebook sends error codes for many of their flows;
                    # we still want the json to allow for proper handling
msg_format = 'FB request, error type %s, code %s'
logger.warn(msg_format, type(e), getattr(e, 'code', None))
# detect if its a server or application error
server_error = cls.is_server_error(e, response)
if server_error:
# trigger a retry
raise URLError(
'Facebook is down %s' % response)
break
except (HTTPError, URLError, ssl.SSLError) as e:
# These are often temporary errors, so we will retry before
# failing
error_format = 'Facebook encountered a timeout (%ss) or error %s'
logger.warn(error_format, extended_timeout, str(e))
attempts -= 1
if not attempts:
# if we have no more attempts actually raise the error
error_instance = facebook_exceptions.convert_unreachable_exception(
e)
error_msg = 'Facebook request failed after several retries, raising error %s'
logger.warn(error_msg, error_instance)
raise error_instance
finally:
if response_file:
response_file.close()
stop_statsd('facebook.%s' % statsd_path)
        # The Facebook response is either:
        # - valid json
        # - a string which is a querydict (a=b&c=d...etc)
        # - an HTML page stating FB is having trouble (but that shouldn't
        #   reach this part of the code)
try:
parsed_response = json.loads(response)
logger.info('facebook send response %s' % parsed_response)
except Exception as e:
# using exception because we need to support multiple json libs :S
parsed_response = QueryDict(response, True)
logger.info('facebook send response %s' % parsed_response)
if parsed_response and isinstance(parsed_response, dict):
# of course we have two different syntaxes
if parsed_response.get('error'):
cls.raise_error(parsed_response['error']['type'],
parsed_response['error']['message'],
parsed_response['error'].get('code'))
elif parsed_response.get('error_code'):
cls.raise_error(parsed_response['error_code'],
parsed_response['error_msg'])
return parsed_response
@classmethod
def is_server_error(cls, e, response):
'''
Checks an HTTPError to see if Facebook is down or we are using the
API in the wrong way
        Facebook doesn't clearly distinguish between the two, so this is a bit
of a hack
'''
from open_facebook.utils import is_json
server_error = False
if hasattr(e, 'code') and e.code == 500:
server_error = True
# Facebook status codes are used for application logic
# http://fbdevwiki.com/wiki/Error_codes#User_Permission_Errors
# The only way I know to detect an actual server error is to check if
# it looks like their error page
# TODO: think of a better solution....
error_matchers = [
'<title>Facebook | Error</title>',
'Sorry, something went wrong.'
]
is_error_page = all(
[matcher in response for matcher in error_matchers])
if is_error_page:
server_error = True
# if it looks like json, facebook is probably not down
if is_json(response):
server_error = False
return server_error
@classmethod
def raise_error(cls, error_type, message, error_code=None):
'''
Lookup the best error class for the error and raise it
**Example**::
FacebookConnection.raise_error(10, 'OAuthException')
:param error_type:
the error type from the facebook api call
:param message:
the error message from the facebook api call
:param error_code:
optionally the error code which facebook send
'''
default_error_class = facebook_exceptions.OpenFacebookException
# get the error code
error_code = error_code or cls.get_code_from_message(message)
# also see http://fbdevwiki.com/wiki/Error_codes#User_Permission_Errors
logger.info('Trying to match error code %s to error class', error_code)
# lookup by error code takes precedence
error_class = cls.match_error_code(error_code)
# try to get error class by direct lookup
if not error_class:
if not isinstance(error_type, int):
error_class = getattr(facebook_exceptions, error_type, None)
if error_class and not issubclass(error_class, default_error_class):
error_class = None
# hack for missing parameters
if 'Missing' in message and 'parameter' in message:
error_class = facebook_exceptions.MissingParameter
# hack for Unsupported delete request
if 'Unsupported delete request' in message:
error_class = facebook_exceptions.UnsupportedDeleteRequest
# fallback to the default
if not error_class:
error_class = default_error_class
logger.info('Matched error to class %s', error_class)
error_message = message
if error_code:
# this is handy when adding new exceptions for facebook errors
error_message = u'%s (error code %s)' % (message, error_code)
raise error_class(error_message)
@classmethod
def get_code_from_message(cls, message):
# map error classes to facebook error codes
# find the error code
error_code = None
        error_code_re = re.compile(r'\(#(\d+)\)')
matches = error_code_re.match(message)
matching_groups = matches.groups() if matches else None
if matching_groups:
error_code = to_int(matching_groups[0]) or None
return error_code
@classmethod
def get_sorted_exceptions(cls):
from open_facebook.exceptions import get_exception_classes
exception_classes = get_exception_classes()
exception_classes.sort(key=lambda e: e.range())
return exception_classes
@classmethod
def match_error_code(cls, error_code):
'''
Return the right exception class for the error code
'''
exception_classes = cls.get_sorted_exceptions()
error_class = None
for class_ in exception_classes:
codes_list = class_.codes_list()
# match the error class
matching_error_class = None
for code in codes_list:
if isinstance(code, tuple):
start, stop = code
if error_code and start <= error_code <= stop:
matching_error_class = class_
logger.info('Matched error on code %s', code)
elif isinstance(code, (int, six.integer_types)):
if int(code) == error_code:
matching_error_class = class_
logger.info('Matched error on code %s', code)
else:
                    raise ValueError(
                        "Don't know how to handle %s of "
                        "type %s" % (code, type(code)))
# tell about the happy news if we found something
if matching_error_class:
error_class = matching_error_class
break
return error_class
class FacebookAuthorization(FacebookConnection):
'''
Methods for getting us an access token
There are several flows we must support
* js authentication flow (signed cookie)
* facebook app authentication flow (signed cookie)
* facebook oauth redirect (code param in url)
These 3 options need to be converted to an access token
Also handles several testing scenarios
* get app access token
* create test user
* get_or_create_test_user
'''
@classmethod
def convert_code(cls, code,
redirect_uri='http://local.mellowmorning.com:8000/facebook/connect/'):
'''
Turns a code into an access token
**Example**::
FacebookAuthorization.convert_code(code)
:param code:
The code to convert
:param redirect_uri:
The redirect uri with which the code was requested
:returns: dict
'''
kwargs = cls._client_info()
kwargs['code'] = code
kwargs['redirect_uri'] = redirect_uri
response = cls.request('oauth/access_token', **kwargs)
return response
@classmethod
def extend_access_token(cls, access_token):
'''
https://developers.facebook.com/roadmap/offline-access-removal/
We can extend the token only once per day
Normal short lived tokens last 1-2 hours
Long lived tokens (given by extending) last 60 days
**Example**::
FacebookAuthorization.extend_access_token(access_token)
:param access_token:
The access_token to extend
:returns: dict
'''
kwargs = cls._client_info()
kwargs['grant_type'] = 'fb_exchange_token'
kwargs['fb_exchange_token'] = access_token
response = cls.request('oauth/access_token', **kwargs)
return response
@classmethod
def _client_info(cls):
kwargs = dict(client_id=facebook_settings.FACEBOOK_APP_ID)
kwargs['client_secret'] = facebook_settings.FACEBOOK_APP_SECRET
return kwargs
@classmethod
def parse_signed_data(cls, signed_request,
secret=facebook_settings.FACEBOOK_APP_SECRET):
'''
Thanks to
http://stackoverflow.com/questions/3302946/how-to-base64-url-decode-in-python
and
http://sunilarora.org/parsing-signedrequest-parameter-in-python-bas
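        A rough usage sketch (``signed_request`` is assumed to be the signed
        value Facebook sends to your app)::
            data = FacebookAuthorization.parse_signed_data(signed_request)
            if data:
                user_id = data.get('user_id')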
'''
from open_facebook.utils import base64_url_decode_php_style, smart_str
l = signed_request.split('.', 2)
encoded_sig = l[0]
payload = l[1]
from open_facebook.utils import json
sig = base64_url_decode_php_style(encoded_sig)
import hmac
import hashlib
data = json.loads(base64_url_decode_php_style(payload).decode('utf-8'))
algo = data.get('algorithm').upper()
if algo != 'HMAC-SHA256':
            error_format = 'Unknown algorithm; we only support HMAC-SHA256, got %s'
error_message = error_format % algo
send_warning(error_message)
logger.error('Unknown algorithm')
return None
else:
expected_sig = hmac.new(smart_str(secret), msg=smart_str(payload),
digestmod=hashlib.sha256).digest()
if not sig == expected_sig:
                error_format = "Signature %s didn't match the expected signature %s"
error_message = error_format % (sig, expected_sig)
send_warning(error_message)
return None
else:
logger.debug('valid signed request received..')
return data
@classmethod
def get_app_access_token(cls):
'''
Get the access_token for the app that can be used for
insights and creating test users
application_id = retrieved from the developer page
application_secret = retrieved from the developer page
returns the application access_token
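        **Example** (assumes FACEBOOK_APP_ID and FACEBOOK_APP_SECRET are set)::
            app_access_token = FacebookAuthorization.get_app_access_token()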
'''
kwargs = {
'grant_type': 'client_credentials',
'client_id': facebook_settings.FACEBOOK_APP_ID,
'client_secret': facebook_settings.FACEBOOK_APP_SECRET,
}
response = cls.request('oauth/access_token', **kwargs)
return response['access_token']
@memoized
@classmethod
def get_cached_app_access_token(cls):
'''
Caches the access token in memory, good for speeding up testing
'''
app_access_token = cls.get_app_access_token()
return app_access_token
@classmethod
def create_test_user(cls, app_access_token, permissions=None, name=None):
'''
Creates a test user with the given permissions and name
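        **Example** (a sketch, the permission list is only illustrative)::
            app_access_token = FacebookAuthorization.get_app_access_token()
            test_user = FacebookAuthorization.create_test_user(
                app_access_token, permissions=['read_stream'])
            print(test_user.access_token)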
:param app_access_token:
The application's access token
:param permissions:
The list of permissions to request for the test user
:param name:
Optionally specify the name
'''
if not permissions:
permissions = ['read_stream', 'publish_stream',
'user_photos,offline_access']
if isinstance(permissions, list):
permissions = ','.join(permissions)
default_name = 'Permissions %s' % permissions.replace(
',', ' ').replace('_', '')
name = name or default_name
kwargs = {
'access_token': app_access_token,
'installed': True,
'name': name,
'method': 'post',
'permissions': permissions,
}
path = '%s/accounts/test-users' % facebook_settings.FACEBOOK_APP_ID
# add the test user data to the test user data class
test_user_data = cls.request(path, **kwargs)
test_user_data['name'] = name
test_user = TestUser(test_user_data)
return test_user
@classmethod
def get_or_create_test_user(cls, app_access_token, name=None, permissions=None, force_create=False):
'''
        There is no supported way of getting or creating a test user.
        However:
        - creating a test user takes around 5s
        - you can only create 500 test users
        So this slows your testing flow quite a bit.
        This method checks your existing test users and queries their names
        (the requested permissions are stored in the name).
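        **Example**::
            app_access_token = FacebookAuthorization.get_app_access_token()
            test_user = FacebookAuthorization.get_or_create_test_user(
                app_access_token, permissions=['read_stream', 'publish_actions'])
            graph = test_user.graph()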
'''
if not permissions:
permissions = ['read_stream', 'publish_stream', 'publish_actions',
'user_photos,offline_access']
if isinstance(permissions, list):
permissions = ','.join(permissions)
# hacking the permissions into the name of the test user
default_name = 'Permissions %s' % permissions.replace(
',', ' ').replace('_', '')
name = name or default_name
# retrieve all test users
test_users = cls.get_test_users(app_access_token)
user_id_dict = dict([(int(u['id']), u) for u in test_users])
user_ids = map(str, user_id_dict.keys())
# use fql to figure out their names
facebook = OpenFacebook(app_access_token)
users = facebook.fql('SELECT uid, name FROM user WHERE uid in (%s)' %
','.join(user_ids))
users_dict = dict([(u['name'], u['uid']) for u in users])
user_id = users_dict.get(name)
if force_create and user_id:
# we need the users access_token, the app access token doesn't
# always work, seems to be a bug in the Facebook api
test_user_data = user_id_dict[user_id]
cls.delete_test_user(test_user_data['access_token'], user_id)
user_id = None
if user_id:
# we found our user, extend the data a bit
test_user_data = user_id_dict[user_id]
test_user_data['name'] = name
test_user = TestUser(test_user_data)
else:
# create the user
test_user = cls.create_test_user(
app_access_token, permissions, name)
return test_user
@classmethod
def get_test_users(cls, app_access_token):
kwargs = dict(access_token=app_access_token)
path = '%s/accounts/test-users' % facebook_settings.FACEBOOK_APP_ID
# retrieve all test users
response = cls.request(path, **kwargs)
test_users = response['data']
return test_users
@classmethod
def delete_test_user(cls, app_access_token, test_user_id):
kwargs = dict(access_token=app_access_token, method='delete')
path = '%s/' % test_user_id
# retrieve all test users
response = cls.request(path, **kwargs)
return response
@classmethod
def delete_test_users(cls, app_access_token):
# retrieve all test users
test_users = cls.get_test_users(app_access_token)
test_user_ids = [u['id'] for u in test_users]
for test_user_id in test_user_ids:
cls.delete_test_user(app_access_token, test_user_id)
class OpenFacebook(FacebookConnection):
'''
The main api class, initialize using
**Example**::
graph = OpenFacebook(access_token)
print(graph.get('me'))
'''
def __init__(self, access_token=None, prefetched_data=None,
expires=None, current_user_id=None, version=None):
'''
:param access_token:
The facebook Access token
'''
self.access_token = access_token
# extra data coming from signed cookies
self.prefetched_data = prefetched_data
# store to enable detection for offline usage
self.expires = expires
# hook to store the current user id if representing the
# facebook connection to a logged in user :)
self.current_user_id = current_user_id
if version is None:
version = 'v1.0'
self.version = version
def __getstate__(self):
'''
Turns the object into something easy to serialize
'''
state = dict(
access_token=self.access_token,
prefetched_data=self.prefetched_data,
expires=self.expires,
)
return state
def __setstate__(self, state):
'''
Restores the object from the state dict
'''
self.access_token = state['access_token']
self.prefetched_data = state['prefetched_data']
self.expires = state['expires']
def is_authenticated(self):
'''
Ask facebook if we have access to the users data
:returns: bool
'''
try:
me = self.me()
except facebook_exceptions.OpenFacebookException as e:
if isinstance(e, facebook_exceptions.OAuthException):
raise
me = None
authenticated = bool(me)
return authenticated
def get(self, path, version=None, **kwargs):
'''
Make a Facebook API call
**Example**::
open_facebook.get('me')
open_facebook.get('me', fields='id,name')
:param path:
The path to use for making the API call
:returns: dict
'''
version = version or self.version
kwargs['version'] = version
response = self.request(path, **kwargs)
return response
def get_many(self, *ids, **kwargs):
'''
Make a batched Facebook API call
For multiple ids
**Example**::
open_facebook.get_many('me', 'starbucks')
open_facebook.get_many('me', 'starbucks', fields='id,name')
:param path:
The path to use for making the API call
:returns: dict
'''
kwargs['ids'] = ','.join(ids)
return self.request(**kwargs)
def set(self, path, params=None, version=None, **post_data):
'''
Write data to facebook
**Example**::
open_facebook.set('me/feed', message='testing open facebook')
:param path:
The path to use for making the API call
:param params:
A dictionary of get params
:param post_data:
The kwargs for posting to facebook
:returns: dict
'''
version = version or self.version
assert self.access_token, 'Write operations require an access token'
if not params:
params = {}
params['method'] = 'post'
params['version'] = version
response = self.request(path, post_data=post_data, **params)
return response
def delete(self, path, *args, **kwargs):
'''
Delete the given bit of data
**Example**::
graph.delete(12345)
:param path:
the id of the element to remove
'''
kwargs['method'] = 'delete'
self.request(path, *args, **kwargs)
def fql(self, query, **kwargs):
'''
Runs the specified query against the Facebook FQL API.
**Example**::
open_facebook.fql('SELECT name FROM user WHERE uid = me()')
:param query:
The query to execute
:param kwargs:
Extra options to send to facebook
:returns: dict
'''
kwargs['q'] = query
path = 'fql'
response = self.request(path, **kwargs)
        # return only the data for backward compatibility
return response['data']
def batch_fql(self, queries_dict):
'''
        Runs a dict of named FQL queries as a single batched request and
        returns the results keyed by query name:
        **Example**::
            response = facebook.batch_fql({
                'name': 'SELECT uid, name, pic_square FROM user WHERE uid = me()',
                'rsvp': 'SELECT uid, rsvp_status FROM event_member WHERE eid=12345678',
            })
            # accessing the results
            response['name']
            response['rsvp']
        :param queries_dict:
            A dictionary of queries to execute
:returns: dict
'''
query = json.dumps(queries_dict)
query_results = self.fql(query)
named_results = dict(
[(r['name'], r['fql_result_set']) for r in query_results])
return named_results
def me(self):
'''
Cached method of requesting information about me
'''
me = getattr(self, '_me', None)
if me is None:
# self._me = me = self.get('me')
self._me = me = self.get('me', fields="id,name,email,verified")
return me
def permissions(self):
'''
Shortcut for self.get('me/permissions') with some extra parsing
to turn it into a dictionary of booleans
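        **Example** (the exact keys depend on what the user granted)::
            permissions = open_facebook.permissions()
            # e.g. {'email': True, 'publish_actions': True}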
:returns: dict
'''
permissions_dict = {}
try:
permissions = {}
permissions_response = self.get('me/permissions')
# determine whether we're dealing with 1.0 or 2.0+
for permission in permissions_response.get('data', []):
# graph api 2.0+, returns multiple dicts with keys 'status' and
# 'permission'
if any(value in ['granted', 'declined'] for value in permission.values()):
for perm in permissions_response['data']:
grant = perm.get('status') == 'granted'
name = perm.get('permission')
# just in case something goes sideways
if grant and name:
permissions_dict[name] = grant
# graph api 1.0, returns single dict as {permission: intval}
elif any(value in [0, 1, '0', '1'] for value in permission.values()):
permissions = permissions_response['data'][0]
permissions_dict = dict([(k, bool(int(v)))
for k, v in permissions.items()
if v == '1' or v == 1])
break
except facebook_exceptions.OAuthException:
pass
return permissions_dict
def has_permissions(self, required_permissions):
'''
Validate if all the required_permissions are currently given
by the user
**Example**::
open_facebook.has_permissions(['publish_actions','read_stream'])
:param required_permissions:
A list of required permissions
:returns: bool
'''
permissions_dict = self.permissions()
# see if we have all permissions
has_permissions = True
for permission in required_permissions:
if permission not in permissions_dict:
has_permissions = False
return has_permissions
def my_image_url(self, size='large'):
'''
Returns the image url from your profile
Shortcut for me/picture
:param size:
the type of the image to request, see facebook for available formats
:returns: string
'''
query_dict = QueryDict('', True)
query_dict['type'] = size
query_dict['access_token'] = self.access_token
url = '%sme/picture?%s' % (self.api_url, query_dict.urlencode())
return url
def request(self, path='', post_data=None, old_api=False, version=None, **params):
url = self.get_request_url(path=path, old_api=old_api, version=version,
**params)
logger.info('requesting url %s', url)
response = self._request(url, post_data)
return response
def get_request_url(self, path='', old_api=False, version=None, **params):
'''
Gets the url for the request.
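        **Example**::
            url = open_facebook.get_request_url('me', fields='id,name')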
'''
api_base_url = self.old_api_url if old_api else self.api_url
version = version or self.version
if getattr(self, 'access_token', None):
params['access_token'] = self.access_token
if api_base_url.endswith('/'):
api_base_url = api_base_url[:-1]
if path and path.startswith('/'):
path = path[1:]
url = '/'.join([api_base_url, version, path])
return '%s?%s' % (url, urlencode(params))
class TestUser(object):
'''
Simple wrapper around test users
'''
def __init__(self, data):
self.name = data['name']
self.id = data['id']
self.access_token = data['access_token']
self.data = data
def graph(self):
graph = OpenFacebook(self.access_token)
return graph
def __repr__(self):
return 'Test user %s' % self.name
| bsd-3-clause | 7,857,979,562,056,322,000 | 32.741007 | 105 | 0.579775 | false |
Kromey/roglick | roglick/dungeon/maps/conway_dungeon.py | 1 | 6436 | from roglick.dungeon.base import Map,Room,Tile
from roglick.dungeon import tiles
from roglick.lib import libtcod
class Cave(object):
def __init__(self):
self.cells = []
@property
def center(self):
if 0 >= self.size:
return None
sum_x = 0
sum_y = 0
for cell in self.cells:
sum_x += cell[0]
sum_y += cell[1]
return (int(sum_x/self.size), int(sum_y/self.size))
@property
def size(self):
return len(self.cells)
class ConwayDungeon(Map):
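    """Cave-style map built with a cellular-automaton ("Conway"-like) pass.
    The grid is seeded with randomly opened cells, smoothed by repeatedly
    visiting cells and applying a neighbour-count rule, the resulting caves
    are discovered with a flood fill, and finally the caves are connected
    by tunnels.
    """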
def __init__(self, width, height, random, *args, **kwargs):
super().__init__(width, height, random)
self.make_map(*args, **kwargs)
def make_map(self, open_prob=45, close_neighbors=4, visits=1, smoothing_passes=2, cave_min_size=10):
super().make_map()
self._caves = []
self._open_cells(open_prob)
for x in range(int(visits * self.width * self.height)):
self._visit_random_cell(close_neighbors)
self._smooth_cave(smoothing_passes)
self._find_caves(cave_min_size)
self._connect_caves()
def _open_cells(self, open_prob):
for x in range(self.width):
if x == 0 or x == self.width-1:
continue
for y in range(self.height):
if y == 0 or y == self.height-1:
continue
if self._random.get_int(0,100) < open_prob:
self.tiles[x][y] = Tile(**tiles.floor)
def _visit_random_cell(self, close_neighbors):
x = self._random.get_int(1, self.width-2)
y = self._random.get_int(1, self.height-2)
neighbors = self._count_neighbors(x, y)
if neighbors > close_neighbors:
self.tiles[x][y] = Tile(**tiles.wall)
else:
self.tiles[x][y] = Tile(**tiles.floor)
def _smooth_cave(self, smoothing_passes):
for i in range(smoothing_passes):
for x in range(self.width):
if x == 0 or x == self.width-1:
continue
for y in range(self.height):
if y == 0 or y == self.height-1:
continue
if not self.tiles[x][y].passable and self._count_orth_neighbors(x, y) <= 1:
self.tiles[x][y] = Tile(**tiles.floor)
for i in range(smoothing_passes):
for x in range(self.width):
if x == 0 or x == self.width-1:
continue
for y in range(self.height):
if y == 0 or y == self.height-1:
continue
if self.tiles[x][y].passable and self._count_orth_neighbors(x, y) >= 4:
self.tiles[x][y] = Tile(**tiles.wall)
def _find_caves(self, cave_min_size):
visited = [[False for y in range(self.height)]
for x in range(self.width)]
for x in range(self.width):
for y in range(self.height):
if visited[x][y]:
continue
if self.tiles[x][y].passable:
cave = self._map_cave(visited, x, y)
if cave.size < cave_min_size:
# Too small, fill it in
for cx,cy in cave.cells:
self.tiles[cx][cy] = Tile(**tiles.wall)
else:
self._caves.append(cave)
else:
visited[x][y] = True
def _connect_caves(self):
while len(self._caves) > 1:
cave = self._caves.pop()
cx,cy = cave.center
# Find the "nearest" cave to this one by comparing centers
other = self._caves[0]
ox,oy = other.center
d2 = self.distance_squared(cx, cy, ox, oy)
for i in range(1, len(self._caves)):
ox,oy = self._caves[i].center
i_d2 = self.distance_squared(cx, cy, ox, oy)
if i_d2 < d2:
other = self._caves[i]
d2 = i_d2
# Found nearest cave, find nearest points
# Not necessarily actually nearest two points; first we find the
# point in one cave nearest the other's center, then the point in
# the other nearest to that one.
x1,y1 = cave.center
x2,y2 = other.center
d2 = self.distance_squared(x1, y1, x2, y2)
for i_x2,i_y2 in other.cells:
i_d2 = self.distance_squared(x1, y1, i_x2, i_y2)
if i_d2 < d2:
x2,y2 = i_x2,i_y2
d2 = i_d2
for i_x1,i_y1 in cave.cells:
i_d2 = self.distance_squared(i_x1, i_y1, x2, y2)
if i_d2 < d2:
x1,y1 = i_x1,i_y1
d2 = i_d2
self.create_tunnel(x1, y1, x2, y2)
def _count_neighbors(self, x, y):
neighbors = 0
for dx in range(-1,2):
for dy in range(-1,2):
if dx or dy:
tx = x+dx
ty = y+dy
if tx >=0 and tx < self.width and ty >= 0 and ty < self.height:
if not self.tiles[tx][ty].passable:
neighbors += 1
return neighbors
def _count_orth_neighbors(self, x, y):
neighbors = 0
for dx in range(-1,2):
if dx:
tx = x+dx
ty = y
if tx >=0 and tx < self.width and ty >= 0 and ty < self.height:
if not self.tiles[tx][ty].passable:
neighbors += 1
for dy in range(-1,2):
if dy:
tx = x
ty = y+dy
if tx >=0 and tx < self.width and ty >= 0 and ty < self.height:
if not self.tiles[tx][ty].passable:
neighbors += 1
return neighbors
def _map_cave(self, visited, x, y):
if visited[x][y] or not self.tiles[x][y].passable:
raise ValueError("Cell ({x},{y}) not valid start for cave".format(
x=x, y=y))
cave = Cave()
for cell in self.flood_fill(x, y):
cave.cells.append(cell)
x,y = cell
visited[x][y] = True
return cave
| mit | -6,934,859,963,298,470,000 | 32.175258 | 104 | 0.470323 | false |
tumbl3w33d/ansible | lib/ansible/modules/network/aci/aci_rest.py | 13 | 14126 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2017, Dag Wieers (@dagwieers) <[email protected]>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'certified'}
DOCUMENTATION = r'''
---
module: aci_rest
short_description: Direct access to the Cisco APIC REST API
description:
- Enables the management of the Cisco ACI fabric through direct access to the Cisco APIC REST API.
- Thanks to the idempotent nature of the APIC, this module is idempotent and reports changes.
version_added: '2.4'
requirements:
- lxml (when using XML payload)
- xmljson >= 0.1.8 (when using XML payload)
- python 2.7+ (when using xmljson)
options:
method:
description:
- The HTTP method of the request.
- Using C(delete) is typically used for deleting objects.
- Using C(get) is typically used for querying objects.
- Using C(post) is typically used for modifying objects.
type: str
choices: [ delete, get, post ]
default: get
aliases: [ action ]
path:
description:
- URI being used to execute API calls.
- Must end in C(.xml) or C(.json).
type: str
required: yes
aliases: [ uri ]
content:
description:
- When used instead of C(src), sets the payload of the API request directly.
- This may be convenient to template simple requests.
- For anything complex use the C(template) lookup plugin (see examples)
or the M(template) module with parameter C(src).
type: raw
src:
description:
- Name of the absolute path of the filename that includes the body
of the HTTP request being sent to the ACI fabric.
- If you require a templated payload, use the C(content) parameter
together with the C(template) lookup plugin, or use M(template).
type: path
aliases: [ config_file ]
extends_documentation_fragment: aci
notes:
- Certain payloads are known not to be idempotent, so be careful when constructing payloads,
e.g. using C(status="created") will cause idempotency issues, use C(status="modified") instead.
More information in :ref:`the ACI documentation <aci_guide_known_issues>`.
- Certain payloads (and used paths) are known to report no changes happened when changes did happen.
This is a known APIC problem and has been reported to the vendor. A workaround for this issue exists.
More information in :ref:`the ACI documentation <aci_guide_known_issues>`.
- XML payloads require the C(lxml) and C(xmljson) python libraries. For JSON payloads nothing special is needed.
seealso:
- module: aci_tenant
- name: Cisco APIC REST API Configuration Guide
description: More information about the APIC REST API.
link: http://www.cisco.com/c/en/us/td/docs/switches/datacenter/aci/apic/sw/2-x/rest_cfg/2_1_x/b_Cisco_APIC_REST_API_Configuration_Guide.html
author:
- Dag Wieers (@dagwieers)
'''
EXAMPLES = r'''
- name: Add a tenant using certificate authentication
aci_rest:
host: apic
username: admin
private_key: pki/admin.key
method: post
path: /api/mo/uni.xml
src: /home/cisco/ansible/aci/configs/aci_config.xml
delegate_to: localhost
- name: Add a tenant from a templated payload file from templates/
aci_rest:
host: apic
username: admin
private_key: pki/admin.key
method: post
path: /api/mo/uni.xml
content: "{{ lookup('template', 'aci/tenant.xml.j2') }}"
delegate_to: localhost
- name: Add a tenant using inline YAML
aci_rest:
host: apic
username: admin
private_key: pki/admin.key
validate_certs: no
path: /api/mo/uni.json
method: post
content:
fvTenant:
attributes:
name: Sales
descr: Sales department
delegate_to: localhost
- name: Add a tenant using a JSON string
aci_rest:
host: apic
username: admin
private_key: pki/admin.key
validate_certs: no
path: /api/mo/uni.json
method: post
content:
{
"fvTenant": {
"attributes": {
"name": "Sales",
"descr": "Sales department"
}
}
}
delegate_to: localhost
- name: Add a tenant using an XML string
aci_rest:
host: apic
username: admin
private_key: pki/{{ aci_username }}.key
validate_certs: no
path: /api/mo/uni.xml
method: post
    content: '<fvTenant name="Sales" descr="Sales department"/>'
delegate_to: localhost
- name: Get tenants using password authentication
aci_rest:
host: apic
username: admin
password: SomeSecretPassword
method: get
path: /api/node/class/fvTenant.json
delegate_to: localhost
register: query_result
- name: Configure contracts
aci_rest:
host: apic
username: admin
private_key: pki/admin.key
method: post
path: /api/mo/uni.xml
src: /home/cisco/ansible/aci/configs/contract_config.xml
delegate_to: localhost
- name: Register leaves and spines
aci_rest:
host: apic
username: admin
private_key: pki/admin.key
validate_certs: no
method: post
path: /api/mo/uni/controller/nodeidentpol.xml
content: |
<fabricNodeIdentPol>
<fabricNodeIdentP name="{{ item.name }}" nodeId="{{ item.nodeid }}" status="{{ item.status }}" serial="{{ item.serial }}"/>
</fabricNodeIdentPol>
with_items:
- '{{ apic_leavesspines }}'
delegate_to: localhost
- name: Wait for all controllers to become ready
aci_rest:
host: apic
username: admin
private_key: pki/admin.key
validate_certs: no
path: /api/node/class/topSystem.json?query-target-filter=eq(topSystem.role,"controller")
register: apics
until: "'totalCount' in apics and apics.totalCount|int >= groups['apic']|count"
retries: 120
delay: 30
delegate_to: localhost
run_once: yes
'''
RETURN = r'''
error_code:
description: The REST ACI return code, useful for troubleshooting on failure
returned: always
type: int
sample: 122
error_text:
description: The REST ACI descriptive text, useful for troubleshooting on failure
returned: always
type: str
sample: unknown managed object class foo
imdata:
description: Converted output returned by the APIC REST (register this for post-processing)
returned: always
type: str
sample: [{"error": {"attributes": {"code": "122", "text": "unknown managed object class foo"}}}]
payload:
description: The (templated) payload send to the APIC REST API (xml or json)
returned: always
type: str
sample: '<foo bar="boo"/>'
raw:
description: The raw output returned by the APIC REST API (xml or json)
returned: parse error
type: str
sample: '<?xml version="1.0" encoding="UTF-8"?><imdata totalCount="1"><error code="122" text="unknown managed object class foo"/></imdata>'
response:
description: HTTP response string
returned: always
type: str
sample: 'HTTP Error 400: Bad Request'
status:
description: HTTP status code
returned: always
type: int
sample: 400
totalCount:
description: Number of items in the imdata array
returned: always
type: str
sample: '0'
url:
description: URL used for APIC REST call
returned: success
type: str
sample: https://1.2.3.4/api/mo/uni/tn-[Dag].json?rsp-subtree=modified
'''
import json
import os
try:
from ansible.module_utils.six.moves.urllib.parse import parse_qsl, urlencode, urlparse, urlunparse
HAS_URLPARSE = True
except Exception:
HAS_URLPARSE = False
# Optional, only used for XML payload
try:
import lxml.etree # noqa
HAS_LXML_ETREE = True
except ImportError:
HAS_LXML_ETREE = False
# Optional, only used for XML payload
try:
from xmljson import cobra # noqa
HAS_XMLJSON_COBRA = True
except ImportError:
HAS_XMLJSON_COBRA = False
# Optional, only used for YAML validation
try:
import yaml
HAS_YAML = True
except Exception:
HAS_YAML = False
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.network.aci.aci import ACIModule, aci_argument_spec
from ansible.module_utils.urls import fetch_url
from ansible.module_utils._text import to_text
def update_qsl(url, params):
''' Add or update a URL query string '''
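    # e.g. update_qsl('https://apic/api/mo/uni.json?foo=1', {'rsp-subtree': 'modified'})
    # would give roughly 'https://apic/api/mo/uni.json?foo=1&rsp-subtree=modified'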
if HAS_URLPARSE:
url_parts = list(urlparse(url))
query = dict(parse_qsl(url_parts[4]))
query.update(params)
url_parts[4] = urlencode(query)
return urlunparse(url_parts)
elif '?' in url:
return url + '&' + '&'.join(['%s=%s' % (k, v) for k, v in params.items()])
else:
return url + '?' + '&'.join(['%s=%s' % (k, v) for k, v in params.items()])
class ACIRESTModule(ACIModule):
def changed(self, d):
''' Check ACI response for changes '''
if isinstance(d, dict):
for k, v in d.items():
if k == 'status' and v in ('created', 'modified', 'deleted'):
return True
elif self.changed(v) is True:
return True
elif isinstance(d, list):
for i in d:
if self.changed(i) is True:
return True
return False
def response_type(self, rawoutput, rest_type='xml'):
''' Handle APIC response output '''
if rest_type == 'json':
self.response_json(rawoutput)
else:
self.response_xml(rawoutput)
# Use APICs built-in idempotency
if HAS_URLPARSE:
self.result['changed'] = self.changed(self.imdata)
def main():
argument_spec = aci_argument_spec()
argument_spec.update(
path=dict(type='str', required=True, aliases=['uri']),
method=dict(type='str', default='get', choices=['delete', 'get', 'post'], aliases=['action']),
src=dict(type='path', aliases=['config_file']),
content=dict(type='raw'),
)
module = AnsibleModule(
argument_spec=argument_spec,
mutually_exclusive=[['content', 'src']],
)
content = module.params.get('content')
path = module.params.get('path')
src = module.params.get('src')
# Report missing file
file_exists = False
if src:
if os.path.isfile(src):
file_exists = True
else:
module.fail_json(msg="Cannot find/access src '%s'" % src)
# Find request type
if path.find('.xml') != -1:
rest_type = 'xml'
if not HAS_LXML_ETREE:
module.fail_json(msg='The lxml python library is missing, or lacks etree support.')
if not HAS_XMLJSON_COBRA:
module.fail_json(msg='The xmljson python library is missing, or lacks cobra support.')
elif path.find('.json') != -1:
rest_type = 'json'
else:
module.fail_json(msg='Failed to find REST API payload type (neither .xml nor .json).')
aci = ACIRESTModule(module)
aci.result['status'] = -1 # Ensure we always return a status
# We include the payload as it may be templated
payload = content
if file_exists:
with open(src, 'r') as config_object:
# TODO: Would be nice to template this, requires action-plugin
payload = config_object.read()
# Validate payload
if rest_type == 'json':
if content and isinstance(content, dict):
# Validate inline YAML/JSON
payload = json.dumps(payload)
elif payload and isinstance(payload, str) and HAS_YAML:
try:
# Validate YAML/JSON string
payload = json.dumps(yaml.safe_load(payload))
except Exception as e:
module.fail_json(msg='Failed to parse provided JSON/YAML payload: %s' % to_text(e), exception=to_text(e), payload=payload)
elif rest_type == 'xml' and HAS_LXML_ETREE:
if content and isinstance(content, dict) and HAS_XMLJSON_COBRA:
# Validate inline YAML/JSON
# FIXME: Converting from a dictionary to XML is unsupported at this time
# payload = etree.tostring(payload)
pass
elif payload and isinstance(payload, str):
try:
# Validate XML string
payload = lxml.etree.tostring(lxml.etree.fromstring(payload))
except Exception as e:
module.fail_json(msg='Failed to parse provided XML payload: %s' % to_text(e), payload=payload)
# Perform actual request using auth cookie (Same as aci.request(), but also supports XML)
if 'port' in aci.params and aci.params.get('port') is not None:
aci.url = '%(protocol)s://%(host)s:%(port)s/' % aci.params + path.lstrip('/')
else:
aci.url = '%(protocol)s://%(host)s/' % aci.params + path.lstrip('/')
if aci.params.get('method') != 'get':
path += '?rsp-subtree=modified'
aci.url = update_qsl(aci.url, {'rsp-subtree': 'modified'})
# Sign and encode request as to APIC's wishes
if aci.params.get('private_key') is not None:
aci.cert_auth(path=path, payload=payload)
aci.method = aci.params.get('method').upper()
# Perform request
resp, info = fetch_url(module, aci.url,
data=payload,
headers=aci.headers,
method=aci.method,
timeout=aci.params.get('timeout'),
use_proxy=aci.params.get('use_proxy'))
aci.response = info.get('msg')
aci.status = info.get('status')
# Report failure
if info.get('status') != 200:
try:
# APIC error
aci.response_type(info.get('body'), rest_type)
aci.fail_json(msg='APIC Error %(code)s: %(text)s' % aci.error)
except KeyError:
# Connection error
aci.fail_json(msg='Connection failed for %(url)s. %(msg)s' % info)
aci.response_type(resp.read(), rest_type)
aci.result['imdata'] = aci.imdata
aci.result['totalCount'] = aci.totalCount
# Report success
aci.exit_json(**aci.result)
if __name__ == '__main__':
main()
| gpl-3.0 | 6,615,357,320,688,893,000 | 30.959276 | 142 | 0.637973 | false |
hengqujushi/shadowsocks | shadowsocks/asyncdns.py | 655 | 17416 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2014-2015 clowwindy
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import absolute_import, division, print_function, \
with_statement
import os
import socket
import struct
import re
import logging
from shadowsocks import common, lru_cache, eventloop, shell
CACHE_SWEEP_INTERVAL = 30
VALID_HOSTNAME = re.compile(br"(?!-)[A-Z\d-]{1,63}(?<!-)$", re.IGNORECASE)
common.patch_socket()
# rfc1035
# format
# +---------------------+
# | Header |
# +---------------------+
# | Question | the question for the name server
# +---------------------+
# | Answer | RRs answering the question
# +---------------------+
# | Authority | RRs pointing toward an authority
# +---------------------+
# | Additional | RRs holding additional information
# +---------------------+
#
# header
# 1 1 1 1 1 1
# 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
# | ID |
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
# |QR| Opcode |AA|TC|RD|RA| Z | RCODE |
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
# | QDCOUNT |
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
# | ANCOUNT |
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
# | NSCOUNT |
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
# | ARCOUNT |
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
QTYPE_ANY = 255
QTYPE_A = 1
QTYPE_AAAA = 28
QTYPE_CNAME = 5
QTYPE_NS = 2
QCLASS_IN = 1
def build_address(address):
address = address.strip(b'.')
labels = address.split(b'.')
results = []
for label in labels:
l = len(label)
if l > 63:
return None
results.append(common.chr(l))
results.append(label)
results.append(b'\0')
return b''.join(results)
def build_request(address, qtype):
request_id = os.urandom(2)
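    # remaining header fields: flags (only RD/recursion desired set), QDCOUNT=1,
    # ANCOUNT=0, NSCOUNT=0, ARCOUNT=0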
header = struct.pack('!BBHHHH', 1, 0, 1, 0, 0, 0)
addr = build_address(address)
qtype_qclass = struct.pack('!HH', qtype, QCLASS_IN)
return request_id + header + addr + qtype_qclass
def parse_ip(addrtype, data, length, offset):
if addrtype == QTYPE_A:
return socket.inet_ntop(socket.AF_INET, data[offset:offset + length])
elif addrtype == QTYPE_AAAA:
return socket.inet_ntop(socket.AF_INET6, data[offset:offset + length])
elif addrtype in [QTYPE_CNAME, QTYPE_NS]:
return parse_name(data, offset)[1]
else:
return data[offset:offset + length]
def parse_name(data, offset):
p = offset
labels = []
l = common.ord(data[p])
while l > 0:
if (l & (128 + 64)) == (128 + 64):
# pointer
pointer = struct.unpack('!H', data[p:p + 2])[0]
pointer &= 0x3FFF
r = parse_name(data, pointer)
labels.append(r[1])
p += 2
# pointer is the end
return p - offset, b'.'.join(labels)
else:
labels.append(data[p + 1:p + 1 + l])
p += 1 + l
l = common.ord(data[p])
return p - offset + 1, b'.'.join(labels)
# rfc1035
# record
# 1 1 1 1 1 1
# 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
# | |
# / /
# / NAME /
# | |
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
# | TYPE |
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
# | CLASS |
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
# | TTL |
# | |
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
# | RDLENGTH |
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--|
# / RDATA /
# / /
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
def parse_record(data, offset, question=False):
nlen, name = parse_name(data, offset)
if not question:
record_type, record_class, record_ttl, record_rdlength = struct.unpack(
'!HHiH', data[offset + nlen:offset + nlen + 10]
)
ip = parse_ip(record_type, data, record_rdlength, offset + nlen + 10)
return nlen + 10 + record_rdlength, \
(name, ip, record_type, record_class, record_ttl)
else:
record_type, record_class = struct.unpack(
'!HH', data[offset + nlen:offset + nlen + 4]
)
return nlen + 4, (name, None, record_type, record_class, None, None)
def parse_header(data):
if len(data) >= 12:
header = struct.unpack('!HBBHHHH', data[:12])
res_id = header[0]
res_qr = header[1] & 128
res_tc = header[1] & 2
res_ra = header[2] & 128
res_rcode = header[2] & 15
# assert res_tc == 0
# assert res_rcode in [0, 3]
res_qdcount = header[3]
res_ancount = header[4]
res_nscount = header[5]
res_arcount = header[6]
return (res_id, res_qr, res_tc, res_ra, res_rcode, res_qdcount,
res_ancount, res_nscount, res_arcount)
return None
def parse_response(data):
try:
if len(data) >= 12:
header = parse_header(data)
if not header:
return None
res_id, res_qr, res_tc, res_ra, res_rcode, res_qdcount, \
res_ancount, res_nscount, res_arcount = header
qds = []
ans = []
offset = 12
for i in range(0, res_qdcount):
l, r = parse_record(data, offset, True)
offset += l
if r:
qds.append(r)
for i in range(0, res_ancount):
l, r = parse_record(data, offset)
offset += l
if r:
ans.append(r)
for i in range(0, res_nscount):
l, r = parse_record(data, offset)
offset += l
for i in range(0, res_arcount):
l, r = parse_record(data, offset)
offset += l
response = DNSResponse()
if qds:
response.hostname = qds[0][0]
for an in qds:
response.questions.append((an[1], an[2], an[3]))
for an in ans:
response.answers.append((an[1], an[2], an[3]))
return response
except Exception as e:
shell.print_exception(e)
return None
def is_valid_hostname(hostname):
if len(hostname) > 255:
return False
    if hostname[-1:] == b'.':
hostname = hostname[:-1]
return all(VALID_HOSTNAME.match(x) for x in hostname.split(b'.'))
class DNSResponse(object):
def __init__(self):
self.hostname = None
self.questions = [] # each: (addr, type, class)
self.answers = [] # each: (addr, type, class)
def __str__(self):
return '%s: %s' % (self.hostname, str(self.answers))
STATUS_IPV4 = 0
STATUS_IPV6 = 1
class DNSResolver(object):
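    """Minimal asynchronous DNS resolver.
    Nameservers come from ``server_list`` or /etc/resolv.conf, /etc/hosts is
    honoured, answers are kept in an LRU cache, and an A lookup that returns
    no address falls back to an AAAA query.
    """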
def __init__(self, server_list=None):
self._loop = None
self._hosts = {}
self._hostname_status = {}
self._hostname_to_cb = {}
self._cb_to_hostname = {}
self._cache = lru_cache.LRUCache(timeout=300)
self._sock = None
if server_list is None:
self._servers = None
self._parse_resolv()
else:
self._servers = server_list
self._parse_hosts()
# TODO monitor hosts change and reload hosts
# TODO parse /etc/gai.conf and follow its rules
def _parse_resolv(self):
self._servers = []
try:
with open('/etc/resolv.conf', 'rb') as f:
content = f.readlines()
for line in content:
line = line.strip()
if line:
if line.startswith(b'nameserver'):
parts = line.split()
if len(parts) >= 2:
server = parts[1]
if common.is_ip(server) == socket.AF_INET:
if type(server) != str:
server = server.decode('utf8')
self._servers.append(server)
except IOError:
pass
if not self._servers:
self._servers = ['8.8.4.4', '8.8.8.8']
def _parse_hosts(self):
etc_path = '/etc/hosts'
if 'WINDIR' in os.environ:
etc_path = os.environ['WINDIR'] + '/system32/drivers/etc/hosts'
try:
with open(etc_path, 'rb') as f:
for line in f.readlines():
line = line.strip()
parts = line.split()
if len(parts) >= 2:
ip = parts[0]
if common.is_ip(ip):
for i in range(1, len(parts)):
hostname = parts[i]
if hostname:
self._hosts[hostname] = ip
except IOError:
self._hosts['localhost'] = '127.0.0.1'
def add_to_loop(self, loop):
if self._loop:
raise Exception('already add to loop')
self._loop = loop
# TODO when dns server is IPv6
self._sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM,
socket.SOL_UDP)
self._sock.setblocking(False)
loop.add(self._sock, eventloop.POLL_IN, self)
loop.add_periodic(self.handle_periodic)
def _call_callback(self, hostname, ip, error=None):
callbacks = self._hostname_to_cb.get(hostname, [])
for callback in callbacks:
if callback in self._cb_to_hostname:
del self._cb_to_hostname[callback]
if ip or error:
callback((hostname, ip), error)
else:
callback((hostname, None),
Exception('unknown hostname %s' % hostname))
if hostname in self._hostname_to_cb:
del self._hostname_to_cb[hostname]
if hostname in self._hostname_status:
del self._hostname_status[hostname]
def _handle_data(self, data):
response = parse_response(data)
if response and response.hostname:
hostname = response.hostname
ip = None
for answer in response.answers:
if answer[1] in (QTYPE_A, QTYPE_AAAA) and \
answer[2] == QCLASS_IN:
ip = answer[0]
break
if not ip and self._hostname_status.get(hostname, STATUS_IPV6) \
== STATUS_IPV4:
self._hostname_status[hostname] = STATUS_IPV6
self._send_req(hostname, QTYPE_AAAA)
else:
if ip:
self._cache[hostname] = ip
self._call_callback(hostname, ip)
elif self._hostname_status.get(hostname, None) == STATUS_IPV6:
for question in response.questions:
if question[1] == QTYPE_AAAA:
self._call_callback(hostname, None)
break
def handle_event(self, sock, fd, event):
if sock != self._sock:
return
if event & eventloop.POLL_ERR:
logging.error('dns socket err')
self._loop.remove(self._sock)
self._sock.close()
# TODO when dns server is IPv6
self._sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM,
socket.SOL_UDP)
self._sock.setblocking(False)
self._loop.add(self._sock, eventloop.POLL_IN, self)
else:
data, addr = sock.recvfrom(1024)
if addr[0] not in self._servers:
logging.warn('received a packet other than our dns')
return
self._handle_data(data)
def handle_periodic(self):
self._cache.sweep()
def remove_callback(self, callback):
hostname = self._cb_to_hostname.get(callback)
if hostname:
del self._cb_to_hostname[callback]
arr = self._hostname_to_cb.get(hostname, None)
if arr:
arr.remove(callback)
if not arr:
del self._hostname_to_cb[hostname]
if hostname in self._hostname_status:
del self._hostname_status[hostname]
def _send_req(self, hostname, qtype):
req = build_request(hostname, qtype)
for server in self._servers:
logging.debug('resolving %s with type %d using server %s',
hostname, qtype, server)
self._sock.sendto(req, (server, 53))
def resolve(self, hostname, callback):
if type(hostname) != bytes:
hostname = hostname.encode('utf8')
if not hostname:
callback(None, Exception('empty hostname'))
elif common.is_ip(hostname):
callback((hostname, hostname), None)
elif hostname in self._hosts:
logging.debug('hit hosts: %s', hostname)
ip = self._hosts[hostname]
callback((hostname, ip), None)
elif hostname in self._cache:
logging.debug('hit cache: %s', hostname)
ip = self._cache[hostname]
callback((hostname, ip), None)
else:
if not is_valid_hostname(hostname):
callback(None, Exception('invalid hostname: %s' % hostname))
return
arr = self._hostname_to_cb.get(hostname, None)
if not arr:
self._hostname_status[hostname] = STATUS_IPV4
self._send_req(hostname, QTYPE_A)
self._hostname_to_cb[hostname] = [callback]
self._cb_to_hostname[callback] = hostname
else:
arr.append(callback)
# TODO send again only if waited too long
self._send_req(hostname, QTYPE_A)
def close(self):
if self._sock:
if self._loop:
self._loop.remove_periodic(self.handle_periodic)
self._loop.remove(self._sock)
self._sock.close()
self._sock = None
def test():
dns_resolver = DNSResolver()
loop = eventloop.EventLoop()
dns_resolver.add_to_loop(loop)
global counter
counter = 0
def make_callback():
global counter
def callback(result, error):
global counter
# TODO: what can we assert?
print(result, error)
counter += 1
if counter == 9:
dns_resolver.close()
loop.stop()
a_callback = callback
return a_callback
assert(make_callback() != make_callback())
dns_resolver.resolve(b'google.com', make_callback())
dns_resolver.resolve('google.com', make_callback())
dns_resolver.resolve('example.com', make_callback())
dns_resolver.resolve('ipv6.google.com', make_callback())
dns_resolver.resolve('www.facebook.com', make_callback())
dns_resolver.resolve('ns2.google.com', make_callback())
dns_resolver.resolve('invalid.@!#$%^&[email protected]', make_callback())
dns_resolver.resolve('toooooooooooooooooooooooooooooooooooooooooooooooooo'
'ooooooooooooooooooooooooooooooooooooooooooooooooooo'
'long.hostname', make_callback())
dns_resolver.resolve('toooooooooooooooooooooooooooooooooooooooooooooooooo'
'ooooooooooooooooooooooooooooooooooooooooooooooooooo'
'ooooooooooooooooooooooooooooooooooooooooooooooooooo'
'ooooooooooooooooooooooooooooooooooooooooooooooooooo'
'ooooooooooooooooooooooooooooooooooooooooooooooooooo'
'ooooooooooooooooooooooooooooooooooooooooooooooooooo'
'long.hostname', make_callback())
loop.run()
if __name__ == '__main__':
test()
| apache-2.0 | 3,827,486,546,087,732,700 | 34.983471 | 79 | 0.478525 | false |
sanjayankur31/nest-simulator | pynest/examples/balancedneuron.py | 8 | 7344 | # -*- coding: utf-8 -*-
#
# balancedneuron.py
#
# This file is part of NEST.
#
# Copyright (C) 2004 The NEST Initiative
#
# NEST is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# NEST is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with NEST. If not, see <http://www.gnu.org/licenses/>.
"""
Balanced neuron example
-----------------------
This script simulates a neuron driven by an excitatory and an
inhibitory population of neurons firing Poisson spike trains. The aim
is to find a firing rate for the inhibitory population that will make
the neuron fire at the same rate as the excitatory population.
Optimization is performed using the ``bisection`` method from Scipy,
simulating the network repeatedly.
This example is also shown in the article [1]_
References
~~~~~~~~~~
.. [1] Eppler JM, Helias M, Muller E, Diesmann M, Gewaltig MO (2009). PyNEST: A convenient interface to the NEST
simulator, Front. Neuroinform.
http://dx.doi.org/10.3389/neuro.11.012.2008
"""
###############################################################################
# First, we import all necessary modules for simulation, analysis and
# plotting. Scipy should be imported before nest.
from scipy.optimize import bisect
import nest
import nest.voltage_trace
import matplotlib.pyplot as plt
###############################################################################
# Additionally, we set the verbosity using ``set_verbosity`` to
# suppress info messages.
nest.set_verbosity("M_WARNING")
nest.ResetKernel()
###############################################################################
# Second, the simulation parameters are assigned to variables.
t_sim = 25000.0 # how long we simulate
n_ex = 16000 # size of the excitatory population
n_in = 4000 # size of the inhibitory population
r_ex = 5.0 # mean rate of the excitatory population
r_in = 20.5 # initial rate of the inhibitory population
epsc = 45.0 # peak amplitude of excitatory synaptic currents
ipsc = -45.0 # peak amplitude of inhibitory synaptic currents
d = 1.0 # synaptic delay
lower = 15.0 # lower bound of the search interval
upper = 25.0 # upper bound of the search interval
prec = 0.01 # how close need the excitatory rates be
###############################################################################
# Third, the nodes are created using ``Create``. We store the returned
# handles in variables for later reference.
neuron = nest.Create("iaf_psc_alpha")
noise = nest.Create("poisson_generator", 2)
voltmeter = nest.Create("voltmeter")
spikerecorder = nest.Create("spike_recorder")
###################################################################################
# Fourth, the ``poisson_generator`` (`noise`) is configured.
# Note that we need not set parameters for the neuron, the spike recorder, and
# the voltmeter, since they have satisfactory defaults.
noise.rate = [n_ex * r_ex, n_in * r_in]
###############################################################################
# Fifth, the ``iaf_psc_alpha`` is connected to the ``spike_recorder`` and the
# ``voltmeter``, as are the two Poisson generators to the neuron. The command
# ``Connect`` has different variants. Plain `Connect` just takes the handles of
# pre- and postsynaptic nodes and uses the default values for weight and
# delay. It can also be called with a list of weights, as in the connection
# of the noise below.
# Note that the connection direction for the ``voltmeter`` is reversed compared
# to the ``spike_recorder``, because it observes the neuron instead of
# receiving events from it. Thus, ``Connect`` reflects the direction of signal
# flow in the simulation kernel rather than the physical process of inserting
# an electrode into the neuron. The latter semantics is presently not
# available in NEST.
nest.Connect(neuron, spikerecorder)
nest.Connect(voltmeter, neuron)
nest.Connect(noise, neuron, syn_spec={'weight': [[epsc, ipsc]], 'delay': 1.0})
###############################################################################
# To determine the optimal rate of the neurons in the inhibitory population,
# the network is simulated several times for different values of the
# inhibitory rate while measuring the rate of the target neuron. This is done
# by calling ``Simulate`` until the rate of the target neuron matches the rate
# of the neurons in the excitatory population with a certain accuracy. The
# algorithm is implemented in two steps:
#
# First, the function ``output_rate`` is defined to measure the firing rate
# of the target neuron for a given rate of the inhibitory neurons.
def output_rate(guess):
print("Inhibitory rate estimate: %5.2f Hz" % guess)
rate = float(abs(n_in * guess))
noise[1].rate = rate
spikerecorder.n_events = 0
nest.Simulate(t_sim)
out = spikerecorder.n_events * 1000.0 / t_sim
print(" -> Neuron rate: %6.2f Hz (goal: %4.2f Hz)" % (out, r_ex))
return out
###############################################################################
# The function takes the firing rate of the inhibitory neurons as an
# argument. It scales the rate with the size of the inhibitory population and
# configures the inhibitory Poisson generator (`noise[1]`) accordingly.
# Then, the spike counter of the ``spike_recorder`` is reset to zero. The
# network is simulated using ``Simulate``, which takes the desired simulation
# time in milliseconds and advances the network state by this amount of time.
# During simulation, the ``spike_recorder`` counts the spikes of the target
# neuron and the total number is read out at the end of the simulation
# period. The return value of ``output_rate()`` is the firing rate of the
# target neuron in Hz.
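###############################################################################
# As a concrete illustration of the scaling step (our note, not part of the
# original example): for a guess of 20.0 Hz and ``n_in`` = 4000 inhibitory
# neurons, the generator rate is set to 4000 * 20.0 = 80000.0 spikes per
# second, i.e. the summed rate of the entire inhibitory population.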
#
# Second, the scipy function ``bisect`` is used to determine the optimal
# firing rate of the neurons of the inhibitory population.
in_rate = bisect(lambda x: output_rate(x) - r_ex, lower, upper, xtol=prec)
print("Optimal rate for the inhibitory population: %.2f Hz" % in_rate)
###############################################################################
# The function ``bisect`` takes four arguments: first a function whose
# zero crossing is to be determined. Here, the firing rate of the target
# neuron should equal the firing rate of the neurons of the excitatory
# population. Thus we define an anonymous function (using `lambda`) that
# returns the difference between the actual rate of the target neuron and the
# rate of the excitatory Poisson generator, given a rate for the inhibitory
# neurons. The next two arguments are the lower and upper bound of the
# interval in which to search for the zero crossing. The fourth argument of
# ``bisect`` is the desired absolute precision (``xtol``) of the zero crossing.
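###############################################################################
# For illustration only, a hand-written equivalent of the bisection that
# ``bisect`` performs above. The helper name ``manual_bisect`` and its
# arguments are ours (not part of NEST or SciPy); the function is defined
# here as a sketch and is never called.
def manual_bisect(f, lo, hi, xtol):
    f_lo = f(lo)                 # assumes f(lo) and f(hi) differ in sign
    while hi - lo > xtol:
        mid = 0.5 * (lo + hi)
        f_mid = f(mid)
        if f_lo * f_mid <= 0.0:  # zero crossing lies in [lo, mid]
            hi = mid
        else:                    # zero crossing lies in [mid, hi]
            lo, f_lo = mid, f_mid
    return 0.5 * (lo + hi)
###############################################################################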
#
# Finally, we plot the target neuron's membrane potential as a function of
# time.
nest.voltage_trace.from_device(voltmeter)
plt.show()
| gpl-2.0 | -4,636,725,491,730,874,000 | 42.2 | 113 | 0.665986 | false |
memkeytm/p2pool | p2pool/test/test_node.py | 198 | 10503 | from __future__ import division
import base64
import random
import tempfile
from twisted.internet import defer, reactor
from twisted.python import failure
from twisted.trial import unittest
from twisted.web import client, resource, server
from p2pool import data, node, work
from p2pool.bitcoin import data as bitcoin_data, networks, worker_interface
from p2pool.util import deferral, jsonrpc, math, variable
class bitcoind(object): # can be used as p2p factory, p2p protocol, or rpc jsonrpc proxy
def __init__(self):
self.blocks = [0x000000000000016c169477c25421250ec5d32cf9c6d38538b5de970a2355fd89]
self.headers = {0x16c169477c25421250ec5d32cf9c6d38538b5de970a2355fd89: {
'nonce': 1853158954,
'timestamp': 1351658517,
'merkle_root': 2282849479936278423916707524932131168473430114569971665822757638339486597658L,
'version': 1,
'previous_block': 1048610514577342396345362905164852351970507722694242579238530L,
'bits': bitcoin_data.FloatingInteger(bits=0x1a0513c5, target=0x513c50000000000000000000000000000000000000000000000L),
}}
self.conn = variable.Variable(self)
self.new_headers = variable.Event()
self.new_block = variable.Event()
self.new_tx = variable.Event()
# p2p factory
def getProtocol(self):
return self
# p2p protocol
def send_block(self, block):
pass
def send_tx(self, tx):
pass
def get_block_header(self, block_hash):
return self.headers[block_hash]
# rpc jsonrpc proxy
def rpc_help(self):
return '\ngetblock '
def rpc_getblock(self, block_hash_hex):
block_hash = int(block_hash_hex, 16)
return dict(height=self.blocks.index(block_hash))
def __getattr__(self, name):
if name.startswith('rpc_'):
return lambda *args, **kwargs: failure.Failure(jsonrpc.Error_for_code(-32601)('Method not found'))
def rpc_getblocktemplate(self, param):
if param['mode'] == 'template':
pass
elif param['mode'] == 'submit':
result = param['data']
block = bitcoin_data.block_type.unpack(result.decode('hex'))
if sum(tx_out['value'] for tx_out in block['txs'][0]['tx_outs']) != sum(tx['tx_outs'][0]['value'] for tx in block['txs'][1:]) + 5000000000:
print 'invalid fee'
if block['header']['previous_block'] != self.blocks[-1]:
return False
if bitcoin_data.hash256(result.decode('hex')) > block['header']['bits'].target:
return False
header_hash = bitcoin_data.hash256(bitcoin_data.block_header_type.pack(block['header']))
self.blocks.append(header_hash)
self.headers[header_hash] = block['header']
reactor.callLater(0, self.new_block.happened)
return True
else:
raise jsonrpc.Error_for_code(-1)('invalid request')
txs = []
for i in xrange(100):
fee = i
txs.append(dict(
data=bitcoin_data.tx_type.pack(dict(version=1, tx_ins=[], tx_outs=[dict(value=fee, script='hello!'*100)], lock_time=0)).encode('hex'),
fee=fee,
))
return {
"version" : 2,
"previousblockhash" : '%064x' % (self.blocks[-1],),
"transactions" : txs,
"coinbaseaux" : {
"flags" : "062f503253482f"
},
"coinbasevalue" : 5000000000 + sum(tx['fee'] for tx in txs),
"target" : "0000000000000513c50000000000000000000000000000000000000000000000",
"mintime" : 1351655621,
"mutable" : [
"time",
"transactions",
"prevblock"
],
"noncerange" : "00000000ffffffff",
"sigoplimit" : 20000,
"sizelimit" : 1000000,
"curtime" : 1351659940,
"bits" : "21008000",
"height" : len(self.blocks),
}
@apply
class mm_provider(object):
def __getattr__(self, name):
print '>>>>>>>', name
def rpc_getauxblock(self, request, result1=None, result2=None):
if result1 is not None:
print result1, result2
return True
return {
"target" : "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", # 2**256*2/3
"hash" : "2756ea0315d46dc3d8d974f34380873fc88863845ac01a658ef11bc3b368af52",
"chainid" : 1
}
mynet = math.Object(
NAME='mynet',
PARENT=networks.nets['litecoin_testnet'],
SHARE_PERIOD=5, # seconds
CHAIN_LENGTH=20*60//3, # shares
REAL_CHAIN_LENGTH=20*60//3, # shares
TARGET_LOOKBEHIND=200, # shares
SPREAD=3, # blocks
IDENTIFIER='cca5e24ec6408b1e'.decode('hex'),
PREFIX='ad9614f6466a39cf'.decode('hex'),
P2P_PORT=19338,
MIN_TARGET=2**256 - 1,
MAX_TARGET=2**256 - 1,
PERSIST=False,
WORKER_PORT=19327,
BOOTSTRAP_ADDRS='72.14.191.28'.split(' '),
ANNOUNCE_CHANNEL='#p2pool-alt',
VERSION_CHECK=lambda v: True,
)
class MiniNode(object):
@classmethod
@defer.inlineCallbacks
def start(cls, net, factory, bitcoind, peer_ports, merged_urls):
self = cls()
self.n = node.Node(factory, bitcoind, [], [], net)
yield self.n.start()
self.n.p2p_node = node.P2PNode(self.n, port=0, max_incoming_conns=1000000, addr_store={}, connect_addrs=[('127.0.0.1', peer_port) for peer_port in peer_ports])
self.n.p2p_node.start()
wb = work.WorkerBridge(node=self.n, my_pubkey_hash=random.randrange(2**160), donation_percentage=random.uniform(0, 10), merged_urls=merged_urls, worker_fee=3)
self.wb = wb
web_root = resource.Resource()
worker_interface.WorkerInterface(wb).attach_to(web_root)
self.web_port = reactor.listenTCP(0, server.Site(web_root))
defer.returnValue(self)
@defer.inlineCallbacks
def stop(self):
yield self.web_port.stopListening()
yield self.n.p2p_node.stop()
yield self.n.stop()
del self.web_port, self.n
class Test(unittest.TestCase):
@defer.inlineCallbacks
def test_node(self):
bitd = bitcoind()
mm_root = resource.Resource()
mm_root.putChild('', jsonrpc.HTTPServer(mm_provider))
mm_port = reactor.listenTCP(0, server.Site(mm_root))
n = node.Node(bitd, bitd, [], [], mynet)
yield n.start()
wb = work.WorkerBridge(node=n, my_pubkey_hash=42, donation_percentage=2, merged_urls=[('http://127.0.0.1:%i' % (mm_port.getHost().port,), '')], worker_fee=3)
web_root = resource.Resource()
worker_interface.WorkerInterface(wb).attach_to(web_root)
port = reactor.listenTCP(0, server.Site(web_root))
proxy = jsonrpc.HTTPProxy('http://127.0.0.1:' + str(port.getHost().port),
headers=dict(Authorization='Basic ' + base64.b64encode('user/0:password')))
yield deferral.sleep(3)
for i in xrange(100):
blah = yield proxy.rpc_getwork()
yield proxy.rpc_getwork(blah['data'])
yield deferral.sleep(3)
assert len(n.tracker.items) == 100
assert n.tracker.verified.get_height(n.best_share_var.value) == 100
wb.stop()
n.stop()
yield port.stopListening()
del n, wb, web_root, port, proxy
import gc
gc.collect()
gc.collect()
gc.collect()
yield deferral.sleep(20) # waiting for work_poller to exit
yield mm_port.stopListening()
#test_node.timeout = 15
@defer.inlineCallbacks
def test_nodes(self):
N = 3
SHARES = 600
bitd = bitcoind()
nodes = []
for i in xrange(N):
nodes.append((yield MiniNode.start(mynet, bitd, bitd, [mn.n.p2p_node.serverfactory.listen_port.getHost().port for mn in nodes], [])))
yield deferral.sleep(3)
for i in xrange(SHARES):
proxy = jsonrpc.HTTPProxy('http://127.0.0.1:' + str(random.choice(nodes).web_port.getHost().port),
headers=dict(Authorization='Basic ' + base64.b64encode('user/0:password')))
blah = yield proxy.rpc_getwork()
yield proxy.rpc_getwork(blah['data'])
yield deferral.sleep(.05)
print i
print type(nodes[0].n.tracker.items[nodes[0].n.best_share_var.value])
# crawl web pages
from p2pool import web
stop_event = variable.Event()
web2_root = web.get_web_root(nodes[0].wb, tempfile.mkdtemp(), variable.Variable(None), stop_event)
web2_port = reactor.listenTCP(0, server.Site(web2_root))
for name in web2_root.listNames() + ['web/' + x for x in web2_root.getChildWithDefault('web', None).listNames()]:
if name in ['web/graph_data', 'web/share', 'web/share_data']: continue
print
print name
try:
res = yield client.getPage('http://127.0.0.1:%i/%s' % (web2_port.getHost().port, name))
except:
import traceback
traceback.print_exc()
else:
print repr(res)[:100]
print
yield web2_port.stopListening()
stop_event.happened()
del web2_root
yield deferral.sleep(3)
for i, n in enumerate(nodes):
assert len(n.n.tracker.items) == SHARES, (i, len(n.n.tracker.items))
assert n.n.tracker.verified.get_height(n.n.best_share_var.value) == SHARES, (i, n.n.tracker.verified.get_height(n.n.best_share_var.value))
assert type(n.n.tracker.items[nodes[0].n.best_share_var.value]) is (data.Share.SUCCESSOR if data.Share.SUCCESSOR is not None else data.Share)
assert type(n.n.tracker.items[n.n.tracker.get_nth_parent_hash(nodes[0].n.best_share_var.value, SHARES - 5)]) is data.Share
for n in nodes:
yield n.stop()
del nodes, n
import gc
gc.collect()
gc.collect()
gc.collect()
yield deferral.sleep(20) # waiting for work_poller to exit
test_nodes.timeout = 300
| gpl-3.0 | -391,486,003,411,714,100 | 36.510714 | 167 | 0.58231 | false |
dulems/hue | desktop/core/src/desktop/lib/view_util.py | 38 | 2507 | #!/usr/bin/env python
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities for views (text and number formatting, etc)"""
import math
import datetime
def big_filesizeformat(bytes):
  if bytes is None or bytes == "":
return "N/A"
assert bytes >= 0
# Special case small numbers (including 0), because they're exact.
if bytes < 1024:
return "%d B" % bytes
units = ["B", "KB", "MB", "GB", "TB", "PB"]
index = int(math.floor(math.log(bytes, 1024)))
index = min(len(units) - 1, index)
return( "%.1f %s" % (bytes / math.pow(1024, index), units[index]) )
def format_time_diff(start=None, end=None):
"""
formats the difference between two times as Xd:Xh:Xm:Xs
"""
if (end is None):
end = datetime.datetime.now()
diff = end - start
minutes, seconds = divmod(diff.seconds, 60)
hours, minutes = divmod(minutes, 60)
days = diff.days
output = []
written = False
if days:
written = True
output.append("%dd" % days)
if written or hours:
written = True
output.append("%dh" % hours)
if written or minutes:
output.append("%dm" % minutes)
output.append("%ds" % seconds)
return ":".join(output)
def format_duration_in_millis(duration=0):
"""
  formats a duration given in milliseconds as Xd:Xh:Xm:Xs
"""
seconds, millis = divmod(duration, 1000)
minutes, seconds = divmod(seconds, 60)
hours, minutes = divmod(minutes, 60)
days, hours = divmod(hours, 24)
output = []
written = False
if days:
written = True
output.append("%dd" % days)
if written or hours:
written = True
output.append("%dh" % hours)
if written or minutes:
output.append("%dm" % minutes)
output.append("%ds" % seconds)
return ":".join(output)
| apache-2.0 | 3,395,397,459,671,389,700 | 30.3375 | 74 | 0.665736 | false |
40223246/w16b_test | static/Brython3.1.3-20150514-095342/Lib/pydoc.py | 637 | 102017 | #!/usr/bin/env python3
"""Generate Python documentation in HTML or text for interactive use.
In the Python interpreter, do "from pydoc import help" to provide
help. Calling help(thing) on a Python object documents the object.
Or, at the shell command line outside of Python:
Run "pydoc <name>" to show documentation on something. <name> may be
the name of a function, module, package, or a dotted reference to a
class or function within a module or module in a package. If the
argument contains a path segment delimiter (e.g. slash on Unix,
backslash on Windows) it is treated as the path to a Python source file.
Run "pydoc -k <keyword>" to search for a keyword in the synopsis lines
of all available modules.
Run "pydoc -p <port>" to start an HTTP server on the given port on the
local machine. Port number 0 can be used to get an arbitrary unused port.
Run "pydoc -b" to start an HTTP server on an arbitrary unused port and
open a Web browser to interactively browse documentation. The -p option
can be used with the -b option to explicitly specify the server port.
Run "pydoc -w <name>" to write out the HTML documentation for a module
to a file named "<name>.html".
Module docs for core modules are assumed to be in
http://docs.python.org/X.Y/library/
This can be overridden by setting the PYTHONDOCS environment variable
to a different URL or to a local directory containing the Library
Reference Manual pages.
"""
__all__ = ['help']
__author__ = "Ka-Ping Yee <[email protected]>"
__date__ = "26 February 2001"
__credits__ = """Guido van Rossum, for an excellent programming language.
Tommy Burnette, the original creator of manpy.
Paul Prescod, for all his work on onlinehelp.
Richard Chamberlain, for the first implementation of textdoc.
"""
# Known bugs that can't be fixed here:
# - imp.load_module() cannot be prevented from clobbering existing
# loaded modules, so calling synopsis() on a binary module file
# changes the contents of any existing module with the same name.
# - If the __file__ attribute on a module is a relative path and
# the current directory is changed with os.chdir(), an incorrect
# path will be displayed.
import builtins
import imp
import importlib.machinery
#brython fix me
import inspect
import io
import os
#brython fix me
#import pkgutil
import platform
import re
import sys
import time
import tokenize
import warnings
from collections import deque
from reprlib import Repr
#fix me brython
#from traceback import extract_tb, format_exception_only
# --------------------------------------------------------- common routines
def pathdirs():
"""Convert sys.path into a list of absolute, existing, unique paths."""
dirs = []
normdirs = []
for dir in sys.path:
dir = os.path.abspath(dir or '.')
normdir = os.path.normcase(dir)
if normdir not in normdirs and os.path.isdir(dir):
dirs.append(dir)
normdirs.append(normdir)
return dirs
def getdoc(object):
"""Get the doc string or comments for an object."""
result = inspect.getdoc(object) or inspect.getcomments(object)
return result and re.sub('^ *\n', '', result.rstrip()) or ''
def splitdoc(doc):
"""Split a doc string into a synopsis line (if any) and the rest."""
lines = doc.strip().split('\n')
if len(lines) == 1:
return lines[0], ''
elif len(lines) >= 2 and not lines[1].rstrip():
return lines[0], '\n'.join(lines[2:])
return '', '\n'.join(lines)
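def _splitdoc_example():
    """Illustrative sketch (our addition, never called by the module)."""
    # The docstring below has a synopsis line, a blank line, and a body, so
    # splitdoc() returns ('Return the frobbed widget.', 'Longer description.').
    return splitdoc('Return the frobbed widget.\n\nLonger description.')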
def classname(object, modname):
"""Get a class name and qualify it with a module name if necessary."""
name = object.__name__
if object.__module__ != modname:
name = object.__module__ + '.' + name
return name
def isdata(object):
"""Check if an object is of a type that probably means it's data."""
return not (inspect.ismodule(object) or inspect.isclass(object) or
inspect.isroutine(object) or inspect.isframe(object) or
inspect.istraceback(object) or inspect.iscode(object))
def replace(text, *pairs):
"""Do a series of global replacements on a string."""
while pairs:
text = pairs[1].join(text.split(pairs[0]))
pairs = pairs[2:]
return text
def cram(text, maxlen):
"""Omit part of a string if needed to make it fit in a maximum length."""
if len(text) > maxlen:
pre = max(0, (maxlen-3)//2)
post = max(0, maxlen-3-pre)
return text[:pre] + '...' + text[len(text)-post:]
return text
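def _cram_example():
    """Illustrative sketch (our addition, never called by the module)."""
    # With maxlen=10, cram() keeps 3 leading and 4 trailing characters around
    # an ellipsis, so the call below returns the 10-character '012...cdef'.
    return cram('0123456789abcdef', 10)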
_re_stripid = re.compile(r' at 0x[0-9a-f]{6,16}(>+)$', re.IGNORECASE)
def stripid(text):
"""Remove the hexadecimal id from a Python object representation."""
# The behaviour of %p is implementation-dependent in terms of case.
#fix me brython
#return _re_stripid.sub(r'\1', text)
return text
def _is_some_method(obj):
return (inspect.isfunction(obj) or
inspect.ismethod(obj) or
inspect.isbuiltin(obj) or
inspect.ismethoddescriptor(obj))
def allmethods(cl):
methods = {}
for key, value in inspect.getmembers(cl, _is_some_method):
methods[key] = 1
for base in cl.__bases__:
methods.update(allmethods(base)) # all your base are belong to us
for key in methods.keys():
methods[key] = getattr(cl, key)
return methods
def _split_list(s, predicate):
"""Split sequence s via predicate, and return pair ([true], [false]).
The return value is a 2-tuple of lists,
([x for x in s if predicate(x)],
[x for x in s if not predicate(x)])
"""
yes = []
no = []
for x in s:
if predicate(x):
yes.append(x)
else:
no.append(x)
return yes, no
def visiblename(name, all=None, obj=None):
"""Decide whether to show documentation on a variable."""
# Certain special names are redundant or internal.
if name in {'__author__', '__builtins__', '__cached__', '__credits__',
'__date__', '__doc__', '__file__', '__initializing__',
'__loader__', '__module__', '__name__', '__package__',
'__path__', '__qualname__', '__slots__', '__version__'}:
return 0
# Private names are hidden, but special names are displayed.
if name.startswith('__') and name.endswith('__'): return 1
# Namedtuples have public fields and methods with a single leading underscore
if name.startswith('_') and hasattr(obj, '_fields'):
return True
if all is not None:
# only document that which the programmer exported in __all__
return name in all
else:
return not name.startswith('_')
def classify_class_attrs(object):
"""Wrap inspect.classify_class_attrs, with fixup for data descriptors."""
results = []
for (name, kind, cls, value) in inspect.classify_class_attrs(object):
if inspect.isdatadescriptor(value):
kind = 'data descriptor'
results.append((name, kind, cls, value))
return results
# ----------------------------------------------------- module manipulation
def ispackage(path):
"""Guess whether a path refers to a package directory."""
if os.path.isdir(path):
for ext in ('.py', '.pyc', '.pyo'):
if os.path.isfile(os.path.join(path, '__init__' + ext)):
return True
return False
def source_synopsis(file):
line = file.readline()
while line[:1] == '#' or not line.strip():
line = file.readline()
if not line: break
line = line.strip()
if line[:4] == 'r"""': line = line[1:]
if line[:3] == '"""':
line = line[3:]
if line[-1:] == '\\': line = line[:-1]
while not line.strip():
line = file.readline()
if not line: break
result = line.split('"""')[0].strip()
else: result = None
return result
def synopsis(filename, cache={}):
"""Get the one-line summary out of a module file."""
mtime = os.stat(filename).st_mtime
lastupdate, result = cache.get(filename, (None, None))
if lastupdate is None or lastupdate < mtime:
try:
file = tokenize.open(filename)
except IOError:
# module can't be opened, so skip it
return None
binary_suffixes = importlib.machinery.BYTECODE_SUFFIXES[:]
binary_suffixes += importlib.machinery.EXTENSION_SUFFIXES[:]
if any(filename.endswith(x) for x in binary_suffixes):
# binary modules have to be imported
file.close()
if any(filename.endswith(x) for x in
importlib.machinery.BYTECODE_SUFFIXES):
loader = importlib.machinery.SourcelessFileLoader('__temp__',
filename)
else:
loader = importlib.machinery.ExtensionFileLoader('__temp__',
filename)
try:
module = loader.load_module('__temp__')
except:
return None
result = (module.__doc__ or '').splitlines()[0]
del sys.modules['__temp__']
else:
# text modules can be directly examined
result = source_synopsis(file)
file.close()
cache[filename] = (mtime, result)
return result
class ErrorDuringImport(Exception):
"""Errors that occurred while trying to import something to document it."""
def __init__(self, filename, exc_info):
self.filename = filename
self.exc, self.value, self.tb = exc_info
def __str__(self):
exc = self.exc.__name__
return 'problem in %s - %s: %s' % (self.filename, exc, self.value)
def importfile(path):
"""Import a Python source file or compiled file given its path."""
magic = imp.get_magic()
with open(path, 'rb') as file:
if file.read(len(magic)) == magic:
kind = imp.PY_COMPILED
else:
kind = imp.PY_SOURCE
file.seek(0)
filename = os.path.basename(path)
name, ext = os.path.splitext(filename)
try:
module = imp.load_module(name, file, path, (ext, 'r', kind))
except:
raise ErrorDuringImport(path, sys.exc_info())
return module
def safeimport(path, forceload=0, cache={}):
"""Import a module; handle errors; return None if the module isn't found.
If the module *is* found but an exception occurs, it's wrapped in an
ErrorDuringImport exception and reraised. Unlike __import__, if a
package path is specified, the module at the end of the path is returned,
not the package at the beginning. If the optional 'forceload' argument
is 1, we reload the module from disk (unless it's a dynamic extension)."""
try:
# If forceload is 1 and the module has been previously loaded from
# disk, we always have to reload the module. Checking the file's
# mtime isn't good enough (e.g. the module could contain a class
# that inherits from another module that has changed).
if forceload and path in sys.modules:
if path not in sys.builtin_module_names:
# Remove the module from sys.modules and re-import to try
# and avoid problems with partially loaded modules.
# Also remove any submodules because they won't appear
# in the newly loaded module's namespace if they're already
# in sys.modules.
subs = [m for m in sys.modules if m.startswith(path + '.')]
for key in [path] + subs:
# Prevent garbage collection.
cache[key] = sys.modules[key]
del sys.modules[key]
module = __import__(path)
except:
# Did the error occur before or after the module was found?
(exc, value, tb) = info = sys.exc_info()
if path in sys.modules:
# An error occurred while executing the imported module.
raise ErrorDuringImport(sys.modules[path].__file__, info)
elif exc is SyntaxError:
# A SyntaxError occurred before we could execute the module.
raise ErrorDuringImport(value.filename, info)
#fix me brython
#elif exc is ImportError and value.name == path:
elif exc is ImportError and str(value) == str(path):
# No such module in the path.
return None
else:
# Some other error occurred during the importing process.
raise ErrorDuringImport(path, sys.exc_info())
for part in path.split('.')[1:]:
try: module = getattr(module, part)
except AttributeError: return None
return module
# ---------------------------------------------------- formatter base class
class Doc:
PYTHONDOCS = os.environ.get("PYTHONDOCS",
"http://docs.python.org/%d.%d/library"
% sys.version_info[:2])
def document(self, object, name=None, *args):
"""Generate documentation for an object."""
args = (object, name) + args
# 'try' clause is to attempt to handle the possibility that inspect
# identifies something in a way that pydoc itself has issues handling;
# think 'super' and how it is a descriptor (which raises the exception
# by lacking a __name__ attribute) and an instance.
if inspect.isgetsetdescriptor(object): return self.docdata(*args)
if inspect.ismemberdescriptor(object): return self.docdata(*args)
try:
if inspect.ismodule(object): return self.docmodule(*args)
if inspect.isclass(object): return self.docclass(*args)
if inspect.isroutine(object): return self.docroutine(*args)
except AttributeError:
pass
if isinstance(object, property): return self.docproperty(*args)
return self.docother(*args)
def fail(self, object, name=None, *args):
"""Raise an exception for unimplemented types."""
message = "don't know how to document object%s of type %s" % (
name and ' ' + repr(name), type(object).__name__)
raise TypeError(message)
docmodule = docclass = docroutine = docother = docproperty = docdata = fail
def getdocloc(self, object):
"""Return the location of module docs or None"""
try:
file = inspect.getabsfile(object)
except TypeError:
file = '(built-in)'
docloc = os.environ.get("PYTHONDOCS", self.PYTHONDOCS)
basedir = os.path.join(sys.base_exec_prefix, "lib",
"python%d.%d" % sys.version_info[:2])
if (isinstance(object, type(os)) and
(object.__name__ in ('errno', 'exceptions', 'gc', 'imp',
'marshal', 'posix', 'signal', 'sys',
'_thread', 'zipimport') or
(file.startswith(basedir) and
not file.startswith(os.path.join(basedir, 'site-packages')))) and
object.__name__ not in ('xml.etree', 'test.pydoc_mod')):
if docloc.startswith("http://"):
docloc = "%s/%s" % (docloc.rstrip("/"), object.__name__)
else:
docloc = os.path.join(docloc, object.__name__ + ".html")
else:
docloc = None
return docloc
# -------------------------------------------- HTML documentation generator
class HTMLRepr(Repr):
"""Class for safely making an HTML representation of a Python object."""
def __init__(self):
Repr.__init__(self)
self.maxlist = self.maxtuple = 20
self.maxdict = 10
self.maxstring = self.maxother = 100
def escape(self, text):
return replace(text, '&', '&', '<', '<', '>', '>')
def repr(self, object):
return Repr.repr(self, object)
def repr1(self, x, level):
if hasattr(type(x), '__name__'):
methodname = 'repr_' + '_'.join(type(x).__name__.split())
if hasattr(self, methodname):
return getattr(self, methodname)(x, level)
return self.escape(cram(stripid(repr(x)), self.maxother))
def repr_string(self, x, level):
test = cram(x, self.maxstring)
testrepr = repr(test)
if '\\' in test and '\\' not in replace(testrepr, r'\\', ''):
# Backslashes are only literal in the string and are never
# needed to make any special characters, so show a raw string.
return 'r' + testrepr[0] + self.escape(test) + testrepr[0]
return re.sub(r'((\\[\\abfnrtv\'"]|\\[0-9]..|\\x..|\\u....)+)',
r'<font color="#c040c0">\1</font>',
self.escape(testrepr))
repr_str = repr_string
def repr_instance(self, x, level):
try:
return self.escape(cram(stripid(repr(x)), self.maxstring))
except:
return self.escape('<%s instance>' % x.__class__.__name__)
repr_unicode = repr_string
class HTMLDoc(Doc):
"""Formatter class for HTML documentation."""
# ------------------------------------------- HTML formatting utilities
_repr_instance = HTMLRepr()
repr = _repr_instance.repr
escape = _repr_instance.escape
def page(self, title, contents):
"""Format an HTML page."""
return '''\
<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.0 Transitional//EN">
<html><head><title>Python: %s</title>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8">
</head><body bgcolor="#f0f0f8">
%s
</body></html>''' % (title, contents)
def heading(self, title, fgcol, bgcol, extras=''):
"""Format a page heading."""
return '''
<table width="100%%" cellspacing=0 cellpadding=2 border=0 summary="heading">
<tr bgcolor="%s">
<td valign=bottom> <br>
<font color="%s" face="helvetica, arial"> <br>%s</font></td
><td align=right valign=bottom
><font color="%s" face="helvetica, arial">%s</font></td></tr></table>
''' % (bgcol, fgcol, title, fgcol, extras or ' ')
def section(self, title, fgcol, bgcol, contents, width=6,
prelude='', marginalia=None, gap=' '):
"""Format a section with a heading."""
if marginalia is None:
marginalia = '<tt>' + ' ' * width + '</tt>'
result = '''<p>
<table width="100%%" cellspacing=0 cellpadding=2 border=0 summary="section">
<tr bgcolor="%s">
<td colspan=3 valign=bottom> <br>
<font color="%s" face="helvetica, arial">%s</font></td></tr>
''' % (bgcol, fgcol, title)
if prelude:
result = result + '''
<tr bgcolor="%s"><td rowspan=2>%s</td>
<td colspan=2>%s</td></tr>
<tr><td>%s</td>''' % (bgcol, marginalia, prelude, gap)
else:
result = result + '''
<tr><td bgcolor="%s">%s</td><td>%s</td>''' % (bgcol, marginalia, gap)
return result + '\n<td width="100%%">%s</td></tr></table>' % contents
def bigsection(self, title, *args):
"""Format a section with a big heading."""
title = '<big><strong>%s</strong></big>' % title
return self.section(title, *args)
def preformat(self, text):
"""Format literal preformatted text."""
text = self.escape(text.expandtabs())
return replace(text, '\n\n', '\n \n', '\n\n', '\n \n',
' ', ' ', '\n', '<br>\n')
def multicolumn(self, list, format, cols=4):
"""Format a list of items into a multi-column list."""
result = ''
rows = (len(list)+cols-1)//cols
for col in range(cols):
result = result + '<td width="%d%%" valign=top>' % (100//cols)
for i in range(rows*col, rows*col+rows):
if i < len(list):
result = result + format(list[i]) + '<br>\n'
result = result + '</td>'
return '<table width="100%%" summary="list"><tr>%s</tr></table>' % result
def grey(self, text): return '<font color="#909090">%s</font>' % text
def namelink(self, name, *dicts):
"""Make a link for an identifier, given name-to-URL mappings."""
for dict in dicts:
if name in dict:
return '<a href="%s">%s</a>' % (dict[name], name)
return name
def classlink(self, object, modname):
"""Make a link for a class."""
name, module = object.__name__, sys.modules.get(object.__module__)
if hasattr(module, name) and getattr(module, name) is object:
return '<a href="%s.html#%s">%s</a>' % (
module.__name__, name, classname(object, modname))
return classname(object, modname)
def modulelink(self, object):
"""Make a link for a module."""
return '<a href="%s.html">%s</a>' % (object.__name__, object.__name__)
def modpkglink(self, modpkginfo):
"""Make a link for a module or package to display in an index."""
name, path, ispackage, shadowed = modpkginfo
if shadowed:
return self.grey(name)
if path:
url = '%s.%s.html' % (path, name)
else:
url = '%s.html' % name
if ispackage:
text = '<strong>%s</strong> (package)' % name
else:
text = name
return '<a href="%s">%s</a>' % (url, text)
def filelink(self, url, path):
"""Make a link to source file."""
return '<a href="file:%s">%s</a>' % (url, path)
def markup(self, text, escape=None, funcs={}, classes={}, methods={}):
"""Mark up some plain text, given a context of symbols to look for.
Each context dictionary maps object names to anchor names."""
escape = escape or self.escape
results = []
here = 0
pattern = re.compile(r'\b((http|ftp)://\S+[\w/]|'
r'RFC[- ]?(\d+)|'
r'PEP[- ]?(\d+)|'
r'(self\.)?(\w+))')
while True:
match = pattern.search(text, here)
if not match: break
start, end = match.span()
results.append(escape(text[here:start]))
all, scheme, rfc, pep, selfdot, name = match.groups()
if scheme:
url = escape(all).replace('"', '"')
results.append('<a href="%s">%s</a>' % (url, url))
elif rfc:
url = 'http://www.rfc-editor.org/rfc/rfc%d.txt' % int(rfc)
results.append('<a href="%s">%s</a>' % (url, escape(all)))
elif pep:
url = 'http://www.python.org/dev/peps/pep-%04d/' % int(pep)
results.append('<a href="%s">%s</a>' % (url, escape(all)))
elif text[end:end+1] == '(':
results.append(self.namelink(name, methods, funcs, classes))
elif selfdot:
results.append('self.<strong>%s</strong>' % name)
else:
results.append(self.namelink(name, classes))
here = end
results.append(escape(text[here:]))
return ''.join(results)
# ---------------------------------------------- type-specific routines
def formattree(self, tree, modname, parent=None):
"""Produce HTML for a class tree as given by inspect.getclasstree()."""
result = ''
for entry in tree:
if type(entry) is type(()):
c, bases = entry
result = result + '<dt><font face="helvetica, arial">'
result = result + self.classlink(c, modname)
if bases and bases != (parent,):
parents = []
for base in bases:
parents.append(self.classlink(base, modname))
result = result + '(' + ', '.join(parents) + ')'
result = result + '\n</font></dt>'
elif type(entry) is type([]):
result = result + '<dd>\n%s</dd>\n' % self.formattree(
entry, modname, c)
return '<dl>\n%s</dl>\n' % result
def docmodule(self, object, name=None, mod=None, *ignored):
"""Produce HTML documentation for a module object."""
name = object.__name__ # ignore the passed-in name
try:
all = object.__all__
except AttributeError:
all = None
parts = name.split('.')
links = []
for i in range(len(parts)-1):
links.append(
'<a href="%s.html"><font color="#ffffff">%s</font></a>' %
('.'.join(parts[:i+1]), parts[i]))
linkedname = '.'.join(links + parts[-1:])
head = '<big><big><strong>%s</strong></big></big>' % linkedname
try:
path = inspect.getabsfile(object)
url = path
if sys.platform == 'win32':
import nturl2path
url = nturl2path.pathname2url(path)
filelink = self.filelink(url, path)
except TypeError:
filelink = '(built-in)'
info = []
if hasattr(object, '__version__'):
version = str(object.__version__)
if version[:11] == '$' + 'Revision: ' and version[-1:] == '$':
version = version[11:-1].strip()
info.append('version %s' % self.escape(version))
if hasattr(object, '__date__'):
info.append(self.escape(str(object.__date__)))
if info:
head = head + ' (%s)' % ', '.join(info)
docloc = self.getdocloc(object)
if docloc is not None:
docloc = '<br><a href="%(docloc)s">Module Reference</a>' % locals()
else:
docloc = ''
result = self.heading(
head, '#ffffff', '#7799ee',
'<a href=".">index</a><br>' + filelink + docloc)
modules = inspect.getmembers(object, inspect.ismodule)
classes, cdict = [], {}
for key, value in inspect.getmembers(object, inspect.isclass):
# if __all__ exists, believe it. Otherwise use old heuristic.
if (all is not None or
(inspect.getmodule(value) or object) is object):
if visiblename(key, all, object):
classes.append((key, value))
cdict[key] = cdict[value] = '#' + key
for key, value in classes:
for base in value.__bases__:
key, modname = base.__name__, base.__module__
module = sys.modules.get(modname)
if modname != name and module and hasattr(module, key):
if getattr(module, key) is base:
if not key in cdict:
cdict[key] = cdict[base] = modname + '.html#' + key
funcs, fdict = [], {}
for key, value in inspect.getmembers(object, inspect.isroutine):
# if __all__ exists, believe it. Otherwise use old heuristic.
if (all is not None or
inspect.isbuiltin(value) or inspect.getmodule(value) is object):
if visiblename(key, all, object):
funcs.append((key, value))
fdict[key] = '#-' + key
if inspect.isfunction(value): fdict[value] = fdict[key]
data = []
for key, value in inspect.getmembers(object, isdata):
if visiblename(key, all, object):
data.append((key, value))
doc = self.markup(getdoc(object), self.preformat, fdict, cdict)
doc = doc and '<tt>%s</tt>' % doc
result = result + '<p>%s</p>\n' % doc
if hasattr(object, '__path__'):
modpkgs = []
for importer, modname, ispkg in pkgutil.iter_modules(object.__path__):
modpkgs.append((modname, name, ispkg, 0))
modpkgs.sort()
contents = self.multicolumn(modpkgs, self.modpkglink)
result = result + self.bigsection(
'Package Contents', '#ffffff', '#aa55cc', contents)
elif modules:
contents = self.multicolumn(
modules, lambda t: self.modulelink(t[1]))
result = result + self.bigsection(
'Modules', '#ffffff', '#aa55cc', contents)
if classes:
classlist = [value for (key, value) in classes]
contents = [
self.formattree(inspect.getclasstree(classlist, 1), name)]
for key, value in classes:
contents.append(self.document(value, key, name, fdict, cdict))
result = result + self.bigsection(
'Classes', '#ffffff', '#ee77aa', ' '.join(contents))
if funcs:
contents = []
for key, value in funcs:
contents.append(self.document(value, key, name, fdict, cdict))
result = result + self.bigsection(
'Functions', '#ffffff', '#eeaa77', ' '.join(contents))
if data:
contents = []
for key, value in data:
contents.append(self.document(value, key))
result = result + self.bigsection(
'Data', '#ffffff', '#55aa55', '<br>\n'.join(contents))
if hasattr(object, '__author__'):
contents = self.markup(str(object.__author__), self.preformat)
result = result + self.bigsection(
'Author', '#ffffff', '#7799ee', contents)
if hasattr(object, '__credits__'):
contents = self.markup(str(object.__credits__), self.preformat)
result = result + self.bigsection(
'Credits', '#ffffff', '#7799ee', contents)
return result
def docclass(self, object, name=None, mod=None, funcs={}, classes={},
*ignored):
"""Produce HTML documentation for a class object."""
realname = object.__name__
name = name or realname
bases = object.__bases__
contents = []
push = contents.append
# Cute little class to pump out a horizontal rule between sections.
class HorizontalRule:
def __init__(self):
self.needone = 0
def maybe(self):
if self.needone:
push('<hr>\n')
self.needone = 1
hr = HorizontalRule()
# List the mro, if non-trivial.
mro = deque(inspect.getmro(object))
if len(mro) > 2:
hr.maybe()
push('<dl><dt>Method resolution order:</dt>\n')
for base in mro:
push('<dd>%s</dd>\n' % self.classlink(base,
object.__module__))
push('</dl>\n')
def spill(msg, attrs, predicate):
ok, attrs = _split_list(attrs, predicate)
if ok:
hr.maybe()
push(msg)
for name, kind, homecls, value in ok:
try:
value = getattr(object, name)
except Exception:
# Some descriptors may meet a failure in their __get__.
# (bug #1785)
push(self._docdescriptor(name, value, mod))
else:
push(self.document(value, name, mod,
funcs, classes, mdict, object))
push('\n')
return attrs
def spilldescriptors(msg, attrs, predicate):
ok, attrs = _split_list(attrs, predicate)
if ok:
hr.maybe()
push(msg)
for name, kind, homecls, value in ok:
push(self._docdescriptor(name, value, mod))
return attrs
def spilldata(msg, attrs, predicate):
ok, attrs = _split_list(attrs, predicate)
if ok:
hr.maybe()
push(msg)
for name, kind, homecls, value in ok:
base = self.docother(getattr(object, name), name, mod)
if callable(value) or inspect.isdatadescriptor(value):
doc = getattr(value, "__doc__", None)
else:
doc = None
if doc is None:
push('<dl><dt>%s</dl>\n' % base)
else:
doc = self.markup(getdoc(value), self.preformat,
funcs, classes, mdict)
doc = '<dd><tt>%s</tt>' % doc
push('<dl><dt>%s%s</dl>\n' % (base, doc))
push('\n')
return attrs
attrs = [(name, kind, cls, value)
for name, kind, cls, value in classify_class_attrs(object)
if visiblename(name, obj=object)]
mdict = {}
for key, kind, homecls, value in attrs:
mdict[key] = anchor = '#' + name + '-' + key
try:
value = getattr(object, name)
except Exception:
# Some descriptors may meet a failure in their __get__.
# (bug #1785)
pass
try:
# The value may not be hashable (e.g., a data attr with
# a dict or list value).
mdict[value] = anchor
except TypeError:
pass
while attrs:
if mro:
thisclass = mro.popleft()
else:
thisclass = attrs[0][2]
attrs, inherited = _split_list(attrs, lambda t: t[2] is thisclass)
if thisclass is builtins.object:
attrs = inherited
continue
elif thisclass is object:
tag = 'defined here'
else:
tag = 'inherited from %s' % self.classlink(thisclass,
object.__module__)
tag += ':<br>\n'
# Sort attrs by name.
attrs.sort(key=lambda t: t[0])
# Pump out the attrs, segregated by kind.
attrs = spill('Methods %s' % tag, attrs,
lambda t: t[1] == 'method')
attrs = spill('Class methods %s' % tag, attrs,
lambda t: t[1] == 'class method')
attrs = spill('Static methods %s' % tag, attrs,
lambda t: t[1] == 'static method')
attrs = spilldescriptors('Data descriptors %s' % tag, attrs,
lambda t: t[1] == 'data descriptor')
attrs = spilldata('Data and other attributes %s' % tag, attrs,
lambda t: t[1] == 'data')
assert attrs == []
attrs = inherited
contents = ''.join(contents)
if name == realname:
title = '<a name="%s">class <strong>%s</strong></a>' % (
name, realname)
else:
title = '<strong>%s</strong> = <a name="%s">class %s</a>' % (
name, name, realname)
if bases:
parents = []
for base in bases:
parents.append(self.classlink(base, object.__module__))
title = title + '(%s)' % ', '.join(parents)
doc = self.markup(getdoc(object), self.preformat, funcs, classes, mdict)
doc = doc and '<tt>%s<br> </tt>' % doc
return self.section(title, '#000000', '#ffc8d8', contents, 3, doc)
def formatvalue(self, object):
"""Format an argument default value as text."""
return self.grey('=' + self.repr(object))
def docroutine(self, object, name=None, mod=None,
funcs={}, classes={}, methods={}, cl=None):
"""Produce HTML documentation for a function or method object."""
realname = object.__name__
name = name or realname
anchor = (cl and cl.__name__ or '') + '-' + name
note = ''
skipdocs = 0
if inspect.ismethod(object):
imclass = object.__self__.__class__
if cl:
if imclass is not cl:
note = ' from ' + self.classlink(imclass, mod)
else:
if object.__self__ is not None:
note = ' method of %s instance' % self.classlink(
object.__self__.__class__, mod)
else:
note = ' unbound %s method' % self.classlink(imclass,mod)
object = object.__func__
if name == realname:
title = '<a name="%s"><strong>%s</strong></a>' % (anchor, realname)
else:
if (cl and realname in cl.__dict__ and
cl.__dict__[realname] is object):
reallink = '<a href="#%s">%s</a>' % (
cl.__name__ + '-' + realname, realname)
skipdocs = 1
else:
reallink = realname
title = '<a name="%s"><strong>%s</strong></a> = %s' % (
anchor, name, reallink)
if inspect.isfunction(object):
            args, varargs, varkw, defaults, kwonlyargs, kwdefaults, ann = \
                inspect.getfullargspec(object)
            argspec = inspect.formatargspec(
                args, varargs, varkw, defaults, kwonlyargs, kwdefaults, ann,
formatvalue=self.formatvalue,
formatannotation=inspect.formatannotationrelativeto(object))
if realname == '<lambda>':
title = '<strong>%s</strong> <em>lambda</em> ' % name
# XXX lambda's won't usually have func_annotations['return']
                # since the syntax doesn't support it, but it is possible.
# So removing parentheses isn't truly safe.
argspec = argspec[1:-1] # remove parentheses
else:
argspec = '(...)'
decl = title + argspec + (note and self.grey(
'<font face="helvetica, arial">%s</font>' % note))
if skipdocs:
return '<dl><dt>%s</dt></dl>\n' % decl
else:
doc = self.markup(
getdoc(object), self.preformat, funcs, classes, methods)
doc = doc and '<dd><tt>%s</tt></dd>' % doc
return '<dl><dt>%s</dt>%s</dl>\n' % (decl, doc)
def _docdescriptor(self, name, value, mod):
results = []
push = results.append
if name:
push('<dl><dt><strong>%s</strong></dt>\n' % name)
if value.__doc__ is not None:
doc = self.markup(getdoc(value), self.preformat)
push('<dd><tt>%s</tt></dd>\n' % doc)
push('</dl>\n')
return ''.join(results)
def docproperty(self, object, name=None, mod=None, cl=None):
"""Produce html documentation for a property."""
return self._docdescriptor(name, object, mod)
def docother(self, object, name=None, mod=None, *ignored):
"""Produce HTML documentation for a data object."""
lhs = name and '<strong>%s</strong> = ' % name or ''
return lhs + self.repr(object)
def docdata(self, object, name=None, mod=None, cl=None):
"""Produce html documentation for a data descriptor."""
return self._docdescriptor(name, object, mod)
def index(self, dir, shadowed=None):
"""Generate an HTML index for a directory of modules."""
modpkgs = []
if shadowed is None: shadowed = {}
for importer, name, ispkg in pkgutil.iter_modules([dir]):
if any((0xD800 <= ord(ch) <= 0xDFFF) for ch in name):
# ignore a module if its name contains a surrogate character
continue
modpkgs.append((name, '', ispkg, name in shadowed))
shadowed[name] = 1
modpkgs.sort()
contents = self.multicolumn(modpkgs, self.modpkglink)
return self.bigsection(dir, '#ffffff', '#ee77aa', contents)
# -------------------------------------------- text documentation generator
class TextRepr(Repr):
"""Class for safely making a text representation of a Python object."""
def __init__(self):
Repr.__init__(self)
self.maxlist = self.maxtuple = 20
self.maxdict = 10
self.maxstring = self.maxother = 100
#def repr1(self, x, level):
# if hasattr(type(x), '__name__'):
# methodname = 'repr_' + '_'.join(type(x).__name__.split())
# if hasattr(self, methodname):
# return getattr(self, methodname)(x, level)
# return cram(stripid(repr(x)), self.maxother)
def repr_string(self, x, level):
test = cram(x, self.maxstring)
testrepr = repr(test)
if '\\' in test and '\\' not in replace(testrepr, r'\\', ''):
# Backslashes are only literal in the string and are never
# needed to make any special characters, so show a raw string.
return 'r' + testrepr[0] + test + testrepr[0]
return testrepr
repr_str = repr_string
def repr_instance(self, x, level):
try:
return cram(stripid(repr(x)), self.maxstring)
except:
return '<%s instance>' % x.__class__.__name__
class TextDoc(Doc):
"""Formatter class for text documentation."""
# ------------------------------------------- text formatting utilities
_repr_instance = TextRepr()
repr = _repr_instance.repr
def bold(self, text):
"""Format a string in bold by overstriking."""
return ''.join(ch + '\b' + ch for ch in text)
def indent(self, text, prefix=' '):
"""Indent text by prepending a given prefix to each line."""
if not text: return ''
lines = [prefix + line for line in text.split('\n')]
if lines: lines[-1] = lines[-1].rstrip()
return '\n'.join(lines)
def section(self, title, contents):
"""Format a section with a given heading."""
clean_contents = self.indent(contents).rstrip()
return self.bold(title) + '\n' + clean_contents + '\n\n'
# ---------------------------------------------- type-specific routines
def formattree(self, tree, modname, parent=None, prefix=''):
"""Render in text a class tree as returned by inspect.getclasstree()."""
result = ''
for entry in tree:
if type(entry) is type(()):
c, bases = entry
result = result + prefix + classname(c, modname)
if bases and bases != (parent,):
parents = (classname(c, modname) for c in bases)
result = result + '(%s)' % ', '.join(parents)
result = result + '\n'
elif type(entry) is type([]):
result = result + self.formattree(
entry, modname, c, prefix + ' ')
return result
def docmodule(self, object, name=None, mod=None):
"""Produce text documentation for a given module object."""
name = object.__name__ # ignore the passed-in name
synop, desc = splitdoc(getdoc(object))
result = self.section('NAME', name + (synop and ' - ' + synop))
all = getattr(object, '__all__', None)
docloc = self.getdocloc(object)
if docloc is not None:
result = result + self.section('MODULE REFERENCE', docloc + """
The following documentation is automatically generated from the Python
source files. It may be incomplete, incorrect or include features that
are considered implementation detail and may vary between Python
implementations. When in doubt, consult the module reference at the
location listed above.
""")
if desc:
result = result + self.section('DESCRIPTION', desc)
classes = []
for key, value in inspect.getmembers(object, inspect.isclass):
# if __all__ exists, believe it. Otherwise use old heuristic.
if (all is not None
or (inspect.getmodule(value) or object) is object):
if visiblename(key, all, object):
classes.append((key, value))
funcs = []
for key, value in inspect.getmembers(object, inspect.isroutine):
# if __all__ exists, believe it. Otherwise use old heuristic.
if (all is not None or
inspect.isbuiltin(value) or inspect.getmodule(value) is object):
if visiblename(key, all, object):
funcs.append((key, value))
data = []
for key, value in inspect.getmembers(object, isdata):
if visiblename(key, all, object):
data.append((key, value))
modpkgs = []
modpkgs_names = set()
if hasattr(object, '__path__'):
for importer, modname, ispkg in pkgutil.iter_modules(object.__path__):
modpkgs_names.add(modname)
if ispkg:
modpkgs.append(modname + ' (package)')
else:
modpkgs.append(modname)
modpkgs.sort()
result = result + self.section(
'PACKAGE CONTENTS', '\n'.join(modpkgs))
# Detect submodules as sometimes created by C extensions
submodules = []
for key, value in inspect.getmembers(object, inspect.ismodule):
if value.__name__.startswith(name + '.') and key not in modpkgs_names:
submodules.append(key)
if submodules:
submodules.sort()
result = result + self.section(
'SUBMODULES', '\n'.join(submodules))
if classes:
classlist = [value for key, value in classes]
contents = [self.formattree(
inspect.getclasstree(classlist, 1), name)]
for key, value in classes:
contents.append(self.document(value, key, name))
result = result + self.section('CLASSES', '\n'.join(contents))
if funcs:
contents = []
for key, value in funcs:
contents.append(self.document(value, key, name))
result = result + self.section('FUNCTIONS', '\n'.join(contents))
if data:
contents = []
for key, value in data:
contents.append(self.docother(value, key, name, maxlen=70))
result = result + self.section('DATA', '\n'.join(contents))
if hasattr(object, '__version__'):
version = str(object.__version__)
if version[:11] == '$' + 'Revision: ' and version[-1:] == '$':
version = version[11:-1].strip()
result = result + self.section('VERSION', version)
if hasattr(object, '__date__'):
result = result + self.section('DATE', str(object.__date__))
if hasattr(object, '__author__'):
result = result + self.section('AUTHOR', str(object.__author__))
if hasattr(object, '__credits__'):
result = result + self.section('CREDITS', str(object.__credits__))
try:
file = inspect.getabsfile(object)
except TypeError:
file = '(built-in)'
result = result + self.section('FILE', file)
return result
def docclass(self, object, name=None, mod=None, *ignored):
"""Produce text documentation for a given class object."""
realname = object.__name__
name = name or realname
bases = object.__bases__
def makename(c, m=object.__module__):
return classname(c, m)
if name == realname:
title = 'class ' + self.bold(realname)
else:
title = self.bold(name) + ' = class ' + realname
if bases:
parents = map(makename, bases)
title = title + '(%s)' % ', '.join(parents)
doc = getdoc(object)
contents = doc and [doc + '\n'] or []
push = contents.append
# List the mro, if non-trivial.
mro = deque(inspect.getmro(object))
if len(mro) > 2:
push("Method resolution order:")
for base in mro:
push(' ' + makename(base))
push('')
# Cute little class to pump out a horizontal rule between sections.
class HorizontalRule:
def __init__(self):
self.needone = 0
def maybe(self):
if self.needone:
push('-' * 70)
self.needone = 1
hr = HorizontalRule()
def spill(msg, attrs, predicate):
ok, attrs = _split_list(attrs, predicate)
if ok:
hr.maybe()
push(msg)
for name, kind, homecls, value in ok:
try:
value = getattr(object, name)
except Exception:
# Some descriptors may meet a failure in their __get__.
# (bug #1785)
push(self._docdescriptor(name, value, mod))
else:
push(self.document(value,
name, mod, object))
return attrs
def spilldescriptors(msg, attrs, predicate):
ok, attrs = _split_list(attrs, predicate)
if ok:
hr.maybe()
push(msg)
for name, kind, homecls, value in ok:
push(self._docdescriptor(name, value, mod))
return attrs
def spilldata(msg, attrs, predicate):
ok, attrs = _split_list(attrs, predicate)
if ok:
hr.maybe()
push(msg)
for name, kind, homecls, value in ok:
if callable(value) or inspect.isdatadescriptor(value):
doc = getdoc(value)
else:
doc = None
push(self.docother(getattr(object, name),
name, mod, maxlen=70, doc=doc) + '\n')
return attrs
attrs = [(name, kind, cls, value)
for name, kind, cls, value in classify_class_attrs(object)
if visiblename(name, obj=object)]
while attrs:
if mro:
thisclass = mro.popleft()
else:
thisclass = attrs[0][2]
attrs, inherited = _split_list(attrs, lambda t: t[2] is thisclass)
if thisclass is builtins.object:
attrs = inherited
continue
elif thisclass is object:
tag = "defined here"
else:
tag = "inherited from %s" % classname(thisclass,
object.__module__)
# Sort attrs by name.
attrs.sort()
# Pump out the attrs, segregated by kind.
attrs = spill("Methods %s:\n" % tag, attrs,
lambda t: t[1] == 'method')
attrs = spill("Class methods %s:\n" % tag, attrs,
lambda t: t[1] == 'class method')
attrs = spill("Static methods %s:\n" % tag, attrs,
lambda t: t[1] == 'static method')
attrs = spilldescriptors("Data descriptors %s:\n" % tag, attrs,
lambda t: t[1] == 'data descriptor')
attrs = spilldata("Data and other attributes %s:\n" % tag, attrs,
lambda t: t[1] == 'data')
assert attrs == []
attrs = inherited
contents = '\n'.join(contents)
if not contents:
return title + '\n'
return title + '\n' + self.indent(contents.rstrip(), ' | ') + '\n'
def formatvalue(self, object):
"""Format an argument default value as text."""
return '=' + self.repr(object)
def docroutine(self, object, name=None, mod=None, cl=None):
"""Produce text documentation for a function or method object."""
realname = object.__name__
name = name or realname
note = ''
skipdocs = 0
if inspect.ismethod(object):
imclass = object.__self__.__class__
if cl:
if imclass is not cl:
note = ' from ' + classname(imclass, mod)
else:
if object.__self__ is not None:
note = ' method of %s instance' % classname(
object.__self__.__class__, mod)
else:
note = ' unbound %s method' % classname(imclass,mod)
object = object.__func__
if name == realname:
title = self.bold(realname)
else:
if (cl and realname in cl.__dict__ and
cl.__dict__[realname] is object):
skipdocs = 1
title = self.bold(name) + ' = ' + realname
if inspect.isfunction(object):
args, varargs, varkw, defaults, kwonlyargs, kwdefaults, ann = \
inspect.getfullargspec(object)
argspec = inspect.formatargspec(
args, varargs, varkw, defaults, kwonlyargs, kwdefaults, ann,
formatvalue=self.formatvalue,
formatannotation=inspect.formatannotationrelativeto(object))
if realname == '<lambda>':
title = self.bold(name) + ' lambda '
# XXX lambda's won't usually have func_annotations['return']
                # since the syntax doesn't support it, but it is possible.
# So removing parentheses isn't truly safe.
argspec = argspec[1:-1] # remove parentheses
else:
argspec = '(...)'
decl = title + argspec + note
if skipdocs:
return decl + '\n'
else:
doc = getdoc(object) or ''
return decl + '\n' + (doc and self.indent(doc).rstrip() + '\n')
def _docdescriptor(self, name, value, mod):
results = []
push = results.append
if name:
push(self.bold(name))
push('\n')
doc = getdoc(value) or ''
if doc:
push(self.indent(doc))
push('\n')
return ''.join(results)
def docproperty(self, object, name=None, mod=None, cl=None):
"""Produce text documentation for a property."""
return self._docdescriptor(name, object, mod)
def docdata(self, object, name=None, mod=None, cl=None):
"""Produce text documentation for a data descriptor."""
return self._docdescriptor(name, object, mod)
def docother(self, object, name=None, mod=None, parent=None, maxlen=None, doc=None):
"""Produce text documentation for a data object."""
repr = self.repr(object)
if maxlen:
line = (name and name + ' = ' or '') + repr
chop = maxlen - len(line)
if chop < 0: repr = repr[:chop] + '...'
line = (name and self.bold(name) + ' = ' or '') + repr
if doc is not None:
line += '\n' + self.indent(str(doc))
return line
class _PlainTextDoc(TextDoc):
"""Subclass of TextDoc which overrides string styling"""
def bold(self, text):
return text
# --------------------------------------------------------- user interfaces
def pager(text):
"""The first time this is called, determine what kind of pager to use."""
global pager
pager = getpager()
pager(text)
def getpager():
"""Decide what method to use for paging through text."""
if not hasattr(sys.stdout, "isatty"):
return plainpager
if not sys.stdin.isatty() or not sys.stdout.isatty():
return plainpager
if 'PAGER' in os.environ:
if sys.platform == 'win32': # pipes completely broken in Windows
return lambda text: tempfilepager(plain(text), os.environ['PAGER'])
elif os.environ.get('TERM') in ('dumb', 'emacs'):
return lambda text: pipepager(plain(text), os.environ['PAGER'])
else:
return lambda text: pipepager(text, os.environ['PAGER'])
if os.environ.get('TERM') in ('dumb', 'emacs'):
return plainpager
if sys.platform == 'win32' or sys.platform.startswith('os2'):
return lambda text: tempfilepager(plain(text), 'more <')
if hasattr(os, 'system') and os.system('(less) 2>/dev/null') == 0:
return lambda text: pipepager(text, 'less')
import tempfile
(fd, filename) = tempfile.mkstemp()
os.close(fd)
try:
if hasattr(os, 'system') and os.system('more "%s"' % filename) == 0:
return lambda text: pipepager(text, 'more')
else:
return ttypager
finally:
os.unlink(filename)
def plain(text):
"""Remove boldface formatting from text."""
return re.sub('.\b', '', text)
def pipepager(text, cmd):
"""Page through text by feeding it to another program."""
pipe = os.popen(cmd, 'w')
try:
pipe.write(text)
pipe.close()
except IOError:
pass # Ignore broken pipes caused by quitting the pager program.
def tempfilepager(text, cmd):
"""Page through text by invoking a program on a temporary file."""
import tempfile
filename = tempfile.mktemp()
file = open(filename, 'w')
file.write(text)
file.close()
try:
os.system(cmd + ' "' + filename + '"')
finally:
os.unlink(filename)
def ttypager(text):
"""Page through text on a text terminal."""
lines = plain(text).split('\n')
try:
import tty
fd = sys.stdin.fileno()
old = tty.tcgetattr(fd)
tty.setcbreak(fd)
getchar = lambda: sys.stdin.read(1)
except (ImportError, AttributeError):
tty = None
getchar = lambda: sys.stdin.readline()[:-1][:1]
try:
        r = inc = int(os.environ.get('LINES', 25)) - 1  # env value arrives as a string
sys.stdout.write('\n'.join(lines[:inc]) + '\n')
while lines[r:]:
sys.stdout.write('-- more --')
sys.stdout.flush()
c = getchar()
if c in ('q', 'Q'):
sys.stdout.write('\r \r')
break
elif c in ('\r', '\n'):
sys.stdout.write('\r \r' + lines[r] + '\n')
r = r + 1
continue
if c in ('b', 'B', '\x1b'):
r = r - inc - inc
if r < 0: r = 0
sys.stdout.write('\n' + '\n'.join(lines[r:r+inc]) + '\n')
r = r + inc
finally:
if tty:
tty.tcsetattr(fd, tty.TCSAFLUSH, old)
def plainpager(text):
"""Simply print unformatted text. This is the ultimate fallback."""
sys.stdout.write(plain(text))
def describe(thing):
"""Produce a short description of the given thing."""
if inspect.ismodule(thing):
if thing.__name__ in sys.builtin_module_names:
return 'built-in module ' + thing.__name__
if hasattr(thing, '__path__'):
return 'package ' + thing.__name__
else:
return 'module ' + thing.__name__
if inspect.isbuiltin(thing):
return 'built-in function ' + thing.__name__
if inspect.isgetsetdescriptor(thing):
return 'getset descriptor %s.%s.%s' % (
thing.__objclass__.__module__, thing.__objclass__.__name__,
thing.__name__)
if inspect.ismemberdescriptor(thing):
return 'member descriptor %s.%s.%s' % (
thing.__objclass__.__module__, thing.__objclass__.__name__,
thing.__name__)
if inspect.isclass(thing):
return 'class ' + thing.__name__
if inspect.isfunction(thing):
return 'function ' + thing.__name__
if inspect.ismethod(thing):
return 'method ' + thing.__name__
return type(thing).__name__
def locate(path, forceload=0):
"""Locate an object by name or dotted path, importing as necessary."""
parts = [part for part in path.split('.') if part]
module, n = None, 0
while n < len(parts):
nextmodule = safeimport('.'.join(parts[:n+1]), forceload)
if nextmodule: module, n = nextmodule, n + 1
else: break
if module:
object = module
else:
object = builtins
for part in parts[n:]:
try:
object = getattr(object, part)
except AttributeError:
return None
return object
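# --- editorial note (added; not part of the original module) ---------------
# locate() imports as much of a dotted path as it can, then follows the rest
# as attribute lookups. Illustrative calls (results depend on the interpreter):
#
#     >>> locate('os.path.join')   # imports os.path, then getattr(..., 'join')
#     <function join at 0x...>
#     >>> locate('no.such.name')   # unresolvable paths yield None
# ---------------------------------------------------------------------------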
# --------------------------------------- interactive interpreter interface
text = TextDoc()
plaintext = _PlainTextDoc()
html = HTMLDoc()
def resolve(thing, forceload=0):
"""Given an object or a path to an object, get the object and its name."""
if isinstance(thing, str):
object = locate(thing, forceload)
if not object:
raise ImportError('no Python documentation found for %r' % thing)
return object, thing
else:
name = getattr(thing, '__name__', None)
return thing, name if isinstance(name, str) else None
def render_doc(thing, title='Python Library Documentation: %s', forceload=0,
renderer=None):
"""Render text documentation, given an object or a path to an object."""
if renderer is None:
renderer = text
object, name = resolve(thing, forceload)
desc = describe(object)
module = inspect.getmodule(object)
if name and '.' in name:
desc += ' in ' + name[:name.rfind('.')]
elif module and module is not object:
desc += ' in module ' + module.__name__
if not (inspect.ismodule(object) or
inspect.isclass(object) or
inspect.isroutine(object) or
inspect.isgetsetdescriptor(object) or
inspect.ismemberdescriptor(object) or
isinstance(object, property)):
# If the passed object is a piece of data or an instance,
# document its available methods instead of its value.
object = type(object)
desc += ' object'
return title % desc + '\n\n' + renderer.document(object, name)
def doc(thing, title='Python Library Documentation: %s', forceload=0,
output=None):
"""Display text documentation, given an object or a path to an object."""
try:
if output is None:
pager(render_doc(thing, title, forceload))
else:
output.write(render_doc(thing, title, forceload, plaintext))
except (ImportError, ErrorDuringImport) as value:
print(value)
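# --- editorial note (added; not part of the original module) ---------------
# doc() is the plain-text entry point the interactive help system ends up
# using. Hedged usage sketch, assuming an interactive session:
#
#     >>> doc('json.dumps')                  # resolve the dotted name, page the text
#     >>> doc(str.split, output=sys.stderr)  # skip the pager, write plain text
# ---------------------------------------------------------------------------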
def writedoc(thing, forceload=0):
"""Write HTML documentation to a file in the current directory."""
try:
object, name = resolve(thing, forceload)
page = html.page(describe(object), html.document(object, name))
file = open(name + '.html', 'w', encoding='utf-8')
file.write(page)
file.close()
print('wrote', name + '.html')
except (ImportError, ErrorDuringImport) as value:
print(value)
def writedocs(dir, pkgpath='', done=None):
"""Write out HTML documentation for all modules in a directory tree."""
if done is None: done = {}
for importer, modname, ispkg in pkgutil.walk_packages([dir], pkgpath):
writedoc(modname)
return
class Helper:
# These dictionaries map a topic name to either an alias, or a tuple
# (label, seealso-items). The "label" is the label of the corresponding
# section in the .rst file under Doc/ and an index into the dictionary
# in pydoc_data/topics.py.
#
# CAUTION: if you change one of these dictionaries, be sure to adapt the
# list of needed labels in Doc/tools/sphinxext/pyspecific.py and
# regenerate the pydoc_data/topics.py file by running
# make pydoc-topics
# in Doc/ and copying the output file into the Lib/ directory.
keywords = {
'False': '',
'None': '',
'True': '',
'and': 'BOOLEAN',
'as': 'with',
'assert': ('assert', ''),
'break': ('break', 'while for'),
'class': ('class', 'CLASSES SPECIALMETHODS'),
'continue': ('continue', 'while for'),
'def': ('function', ''),
'del': ('del', 'BASICMETHODS'),
'elif': 'if',
'else': ('else', 'while for'),
'except': 'try',
'finally': 'try',
'for': ('for', 'break continue while'),
'from': 'import',
'global': ('global', 'nonlocal NAMESPACES'),
'if': ('if', 'TRUTHVALUE'),
'import': ('import', 'MODULES'),
'in': ('in', 'SEQUENCEMETHODS'),
'is': 'COMPARISON',
'lambda': ('lambda', 'FUNCTIONS'),
'nonlocal': ('nonlocal', 'global NAMESPACES'),
'not': 'BOOLEAN',
'or': 'BOOLEAN',
'pass': ('pass', ''),
'raise': ('raise', 'EXCEPTIONS'),
'return': ('return', 'FUNCTIONS'),
'try': ('try', 'EXCEPTIONS'),
'while': ('while', 'break continue if TRUTHVALUE'),
'with': ('with', 'CONTEXTMANAGERS EXCEPTIONS yield'),
'yield': ('yield', ''),
}
# Either add symbols to this dictionary or to the symbols dictionary
# directly: Whichever is easier. They are merged later.
_symbols_inverse = {
'STRINGS' : ("'", "'''", "r'", "b'", '"""', '"', 'r"', 'b"'),
'OPERATORS' : ('+', '-', '*', '**', '/', '//', '%', '<<', '>>', '&',
'|', '^', '~', '<', '>', '<=', '>=', '==', '!=', '<>'),
'COMPARISON' : ('<', '>', '<=', '>=', '==', '!=', '<>'),
'UNARY' : ('-', '~'),
'AUGMENTEDASSIGNMENT' : ('+=', '-=', '*=', '/=', '%=', '&=', '|=',
'^=', '<<=', '>>=', '**=', '//='),
'BITWISE' : ('<<', '>>', '&', '|', '^', '~'),
'COMPLEX' : ('j', 'J')
}
symbols = {
'%': 'OPERATORS FORMATTING',
'**': 'POWER',
',': 'TUPLES LISTS FUNCTIONS',
'.': 'ATTRIBUTES FLOAT MODULES OBJECTS',
'...': 'ELLIPSIS',
':': 'SLICINGS DICTIONARYLITERALS',
'@': 'def class',
'\\': 'STRINGS',
'_': 'PRIVATENAMES',
'__': 'PRIVATENAMES SPECIALMETHODS',
'`': 'BACKQUOTES',
'(': 'TUPLES FUNCTIONS CALLS',
')': 'TUPLES FUNCTIONS CALLS',
'[': 'LISTS SUBSCRIPTS SLICINGS',
']': 'LISTS SUBSCRIPTS SLICINGS'
}
for topic, symbols_ in _symbols_inverse.items():
for symbol in symbols_:
topics = symbols.get(symbol, topic)
if topic not in topics:
topics = topics + ' ' + topic
symbols[symbol] = topics
topics = {
'TYPES': ('types', 'STRINGS UNICODE NUMBERS SEQUENCES MAPPINGS '
'FUNCTIONS CLASSES MODULES FILES inspect'),
'STRINGS': ('strings', 'str UNICODE SEQUENCES STRINGMETHODS '
'FORMATTING TYPES'),
'STRINGMETHODS': ('string-methods', 'STRINGS FORMATTING'),
'FORMATTING': ('formatstrings', 'OPERATORS'),
'UNICODE': ('strings', 'encodings unicode SEQUENCES STRINGMETHODS '
'FORMATTING TYPES'),
'NUMBERS': ('numbers', 'INTEGER FLOAT COMPLEX TYPES'),
'INTEGER': ('integers', 'int range'),
'FLOAT': ('floating', 'float math'),
'COMPLEX': ('imaginary', 'complex cmath'),
'SEQUENCES': ('typesseq', 'STRINGMETHODS FORMATTING range LISTS'),
'MAPPINGS': 'DICTIONARIES',
'FUNCTIONS': ('typesfunctions', 'def TYPES'),
'METHODS': ('typesmethods', 'class def CLASSES TYPES'),
'CODEOBJECTS': ('bltin-code-objects', 'compile FUNCTIONS TYPES'),
'TYPEOBJECTS': ('bltin-type-objects', 'types TYPES'),
'FRAMEOBJECTS': 'TYPES',
'TRACEBACKS': 'TYPES',
'NONE': ('bltin-null-object', ''),
'ELLIPSIS': ('bltin-ellipsis-object', 'SLICINGS'),
'FILES': ('bltin-file-objects', ''),
'SPECIALATTRIBUTES': ('specialattrs', ''),
'CLASSES': ('types', 'class SPECIALMETHODS PRIVATENAMES'),
'MODULES': ('typesmodules', 'import'),
'PACKAGES': 'import',
'EXPRESSIONS': ('operator-summary', 'lambda or and not in is BOOLEAN '
'COMPARISON BITWISE SHIFTING BINARY FORMATTING POWER '
'UNARY ATTRIBUTES SUBSCRIPTS SLICINGS CALLS TUPLES '
'LISTS DICTIONARIES'),
'OPERATORS': 'EXPRESSIONS',
'PRECEDENCE': 'EXPRESSIONS',
'OBJECTS': ('objects', 'TYPES'),
'SPECIALMETHODS': ('specialnames', 'BASICMETHODS ATTRIBUTEMETHODS '
'CALLABLEMETHODS SEQUENCEMETHODS MAPPINGMETHODS '
'NUMBERMETHODS CLASSES'),
'BASICMETHODS': ('customization', 'hash repr str SPECIALMETHODS'),
'ATTRIBUTEMETHODS': ('attribute-access', 'ATTRIBUTES SPECIALMETHODS'),
'CALLABLEMETHODS': ('callable-types', 'CALLS SPECIALMETHODS'),
'SEQUENCEMETHODS': ('sequence-types', 'SEQUENCES SEQUENCEMETHODS '
'SPECIALMETHODS'),
'MAPPINGMETHODS': ('sequence-types', 'MAPPINGS SPECIALMETHODS'),
'NUMBERMETHODS': ('numeric-types', 'NUMBERS AUGMENTEDASSIGNMENT '
'SPECIALMETHODS'),
'EXECUTION': ('execmodel', 'NAMESPACES DYNAMICFEATURES EXCEPTIONS'),
'NAMESPACES': ('naming', 'global nonlocal ASSIGNMENT DELETION DYNAMICFEATURES'),
'DYNAMICFEATURES': ('dynamic-features', ''),
'SCOPING': 'NAMESPACES',
'FRAMES': 'NAMESPACES',
'EXCEPTIONS': ('exceptions', 'try except finally raise'),
'CONVERSIONS': ('conversions', ''),
'IDENTIFIERS': ('identifiers', 'keywords SPECIALIDENTIFIERS'),
'SPECIALIDENTIFIERS': ('id-classes', ''),
'PRIVATENAMES': ('atom-identifiers', ''),
'LITERALS': ('atom-literals', 'STRINGS NUMBERS TUPLELITERALS '
'LISTLITERALS DICTIONARYLITERALS'),
'TUPLES': 'SEQUENCES',
'TUPLELITERALS': ('exprlists', 'TUPLES LITERALS'),
'LISTS': ('typesseq-mutable', 'LISTLITERALS'),
'LISTLITERALS': ('lists', 'LISTS LITERALS'),
'DICTIONARIES': ('typesmapping', 'DICTIONARYLITERALS'),
'DICTIONARYLITERALS': ('dict', 'DICTIONARIES LITERALS'),
'ATTRIBUTES': ('attribute-references', 'getattr hasattr setattr ATTRIBUTEMETHODS'),
'SUBSCRIPTS': ('subscriptions', 'SEQUENCEMETHODS'),
'SLICINGS': ('slicings', 'SEQUENCEMETHODS'),
'CALLS': ('calls', 'EXPRESSIONS'),
'POWER': ('power', 'EXPRESSIONS'),
'UNARY': ('unary', 'EXPRESSIONS'),
'BINARY': ('binary', 'EXPRESSIONS'),
'SHIFTING': ('shifting', 'EXPRESSIONS'),
'BITWISE': ('bitwise', 'EXPRESSIONS'),
'COMPARISON': ('comparisons', 'EXPRESSIONS BASICMETHODS'),
'BOOLEAN': ('booleans', 'EXPRESSIONS TRUTHVALUE'),
'ASSERTION': 'assert',
'ASSIGNMENT': ('assignment', 'AUGMENTEDASSIGNMENT'),
'AUGMENTEDASSIGNMENT': ('augassign', 'NUMBERMETHODS'),
'DELETION': 'del',
'RETURNING': 'return',
'IMPORTING': 'import',
'CONDITIONAL': 'if',
'LOOPING': ('compound', 'for while break continue'),
'TRUTHVALUE': ('truth', 'if while and or not BASICMETHODS'),
'DEBUGGING': ('debugger', 'pdb'),
'CONTEXTMANAGERS': ('context-managers', 'with'),
}
def __init__(self, input=None, output=None):
self._input = input
self._output = output
#fix me brython
self.input = self._input or sys.stdin
self.output = self._output or sys.stdout
#fix me brython
#input = property(lambda self: self._input or sys.stdin)
#output = property(lambda self: self._output or sys.stdout)
def __repr__(self):
if inspect.stack()[1][3] == '?':
self()
return ''
return '<pydoc.Helper instance>'
_GoInteractive = object()
def __call__(self, request=_GoInteractive):
if request is not self._GoInteractive:
self.help(request)
else:
self.intro()
self.interact()
self.output.write('''
You are now leaving help and returning to the Python interpreter.
If you want to ask for help on a particular object directly from the
interpreter, you can type "help(object)". Executing "help('string')"
has the same effect as typing a particular string at the help> prompt.
''')
def interact(self):
self.output.write('\n')
while True:
try:
request = self.getline('help> ')
if not request: break
except (KeyboardInterrupt, EOFError):
break
request = replace(request, '"', '', "'", '').strip()
if request.lower() in ('q', 'quit'): break
self.help(request)
def getline(self, prompt):
"""Read one line, using input() when appropriate."""
if self.input is sys.stdin:
return input(prompt)
else:
self.output.write(prompt)
self.output.flush()
return self.input.readline()
def help(self, request):
if type(request) is type(''):
request = request.strip()
if request == 'help': self.intro()
elif request == 'keywords': self.listkeywords()
elif request == 'symbols': self.listsymbols()
elif request == 'topics': self.listtopics()
elif request == 'modules': self.listmodules()
elif request[:8] == 'modules ':
self.listmodules(request.split()[1])
elif request in self.symbols: self.showsymbol(request)
elif request in ['True', 'False', 'None']:
# special case these keywords since they are objects too
doc(eval(request), 'Help on %s:')
elif request in self.keywords: self.showtopic(request)
elif request in self.topics: self.showtopic(request)
elif request: doc(request, 'Help on %s:', output=self._output)
elif isinstance(request, Helper): self()
else: doc(request, 'Help on %s:', output=self._output)
self.output.write('\n')
def intro(self):
self.output.write('''
Welcome to Python %s! This is the interactive help utility.
If this is your first time using Python, you should definitely check out
the tutorial on the Internet at http://docs.python.org/%s/tutorial/.
Enter the name of any module, keyword, or topic to get help on writing
Python programs and using Python modules. To quit this help utility and
return to the interpreter, just type "quit".
To get a list of available modules, keywords, or topics, type "modules",
"keywords", or "topics". Each module also comes with a one-line summary
of what it does; to list the modules whose summaries contain a given word
such as "spam", type "modules spam".
''' % tuple([sys.version[:3]]*2))
def list(self, items, columns=4, width=80):
items = list(sorted(items))
colw = width // columns
rows = (len(items) + columns - 1) // columns
for row in range(rows):
for col in range(columns):
i = col * rows + row
if i < len(items):
self.output.write(items[i])
if col < columns - 1:
self.output.write(' ' + ' ' * (colw - 1 - len(items[i])))
self.output.write('\n')
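    # --- editorial note (added; not part of the original class) ------------
    # list() lays items out column-major: with 9 items and 4 columns it uses
    # (9 + 3) // 4 == 3 rows, and item i lands at row i % 3, column i // 3.
    # For example self.list(['a','b','c','d','e','f','g','h','i']) prints
    #     a    d    g
    #     b    e    h
    #     c    f    i
    # with each cell padded toward width // columns characters.
    # ------------------------------------------------------------------------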
def listkeywords(self):
self.output.write('''
Here is a list of the Python keywords. Enter any keyword to get more help.
''')
self.list(self.keywords.keys())
def listsymbols(self):
self.output.write('''
Here is a list of the punctuation symbols which Python assigns special meaning
to. Enter any symbol to get more help.
''')
self.list(self.symbols.keys())
def listtopics(self):
self.output.write('''
Here is a list of available topics. Enter any topic name to get more help.
''')
self.list(self.topics.keys())
def showtopic(self, topic, more_xrefs=''):
try:
import pydoc_data.topics
except ImportError:
self.output.write('''
Sorry, topic and keyword documentation is not available because the
module "pydoc_data.topics" could not be found.
''')
return
target = self.topics.get(topic, self.keywords.get(topic))
if not target:
self.output.write('no documentation found for %s\n' % repr(topic))
return
if type(target) is type(''):
return self.showtopic(target, more_xrefs)
label, xrefs = target
try:
doc = pydoc_data.topics.topics[label]
except KeyError:
self.output.write('no documentation found for %s\n' % repr(topic))
return
pager(doc.strip() + '\n')
if more_xrefs:
xrefs = (xrefs or '') + ' ' + more_xrefs
if xrefs:
import formatter
buffer = io.StringIO()
formatter.DumbWriter(buffer).send_flowing_data(
'Related help topics: ' + ', '.join(xrefs.split()) + '\n')
self.output.write('\n%s\n' % buffer.getvalue())
def _gettopic(self, topic, more_xrefs=''):
"""Return unbuffered tuple of (topic, xrefs).
If an error occurs here, the exception is caught and displayed by
the url handler.
This function duplicates the showtopic method but returns its
result directly so it can be formatted for display in an html page.
"""
try:
import pydoc_data.topics
except ImportError:
return('''
Sorry, topic and keyword documentation is not available because the
module "pydoc_data.topics" could not be found.
''' , '')
target = self.topics.get(topic, self.keywords.get(topic))
if not target:
raise ValueError('could not find topic')
if isinstance(target, str):
return self._gettopic(target, more_xrefs)
label, xrefs = target
doc = pydoc_data.topics.topics[label]
if more_xrefs:
xrefs = (xrefs or '') + ' ' + more_xrefs
return doc, xrefs
def showsymbol(self, symbol):
target = self.symbols[symbol]
topic, _, xrefs = target.partition(' ')
self.showtopic(topic, xrefs)
def listmodules(self, key=''):
if key:
self.output.write('''
Here is a list of matching modules. Enter any module name to get more help.
''')
apropos(key)
else:
self.output.write('''
Please wait a moment while I gather a list of all available modules...
''')
modules = {}
def callback(path, modname, desc, modules=modules):
if modname and modname[-9:] == '.__init__':
modname = modname[:-9] + ' (package)'
if modname.find('.') < 0:
modules[modname] = 1
def onerror(modname):
callback(None, modname, None)
ModuleScanner().run(callback, onerror=onerror)
self.list(modules.keys())
self.output.write('''
Enter any module name to get more help. Or, type "modules spam" to search
for modules whose descriptions contain the word "spam".
''')
help = Helper()
class Scanner:
"""A generic tree iterator."""
def __init__(self, roots, children, descendp):
self.roots = roots[:]
self.state = []
self.children = children
self.descendp = descendp
def next(self):
if not self.state:
if not self.roots:
return None
root = self.roots.pop(0)
self.state = [(root, self.children(root))]
node, children = self.state[-1]
if not children:
self.state.pop()
return self.next()
child = children.pop(0)
if self.descendp(child):
self.state.append((child, self.children(child)))
return child
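# --- editorial note (added; not part of the original module) ---------------
# Scanner walks a tree lazily, driven by two callables: children(node) lists a
# node's children and descendp(node) says whether to descend into it.
# Hypothetical use over nested lists (names invented for illustration):
#
#     s = Scanner([[1, [2, 3]]],
#                 children=lambda n: list(n) if isinstance(n, list) else [],
#                 descendp=lambda n: isinstance(n, list))
#     # successive s.next() calls yield 1, [2, 3], 2, 3, then None.
# ---------------------------------------------------------------------------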
class ModuleScanner:
"""An interruptible scanner that searches module synopses."""
def run(self, callback, key=None, completer=None, onerror=None):
if key: key = key.lower()
self.quit = False
seen = {}
for modname in sys.builtin_module_names:
if modname != '__main__':
seen[modname] = 1
if key is None:
callback(None, modname, '')
else:
name = __import__(modname).__doc__ or ''
desc = name.split('\n')[0]
name = modname + ' - ' + desc
if name.lower().find(key) >= 0:
callback(None, modname, desc)
for importer, modname, ispkg in pkgutil.walk_packages(onerror=onerror):
if self.quit:
break
if key is None:
callback(None, modname, '')
else:
try:
loader = importer.find_module(modname)
except SyntaxError:
# raised by tests for bad coding cookies or BOM
continue
if hasattr(loader, 'get_source'):
try:
source = loader.get_source(modname)
except Exception:
if onerror:
onerror(modname)
continue
desc = source_synopsis(io.StringIO(source)) or ''
if hasattr(loader, 'get_filename'):
path = loader.get_filename(modname)
else:
path = None
else:
try:
module = loader.load_module(modname)
except ImportError:
if onerror:
onerror(modname)
continue
desc = (module.__doc__ or '').splitlines()[0]
path = getattr(module,'__file__',None)
name = modname + ' - ' + desc
if name.lower().find(key) >= 0:
callback(path, modname, desc)
if completer:
completer()
def apropos(key):
"""Print all the one-line module summaries that contain a substring."""
def callback(path, modname, desc):
if modname[-9:] == '.__init__':
modname = modname[:-9] + ' (package)'
print(modname, desc and '- ' + desc)
def onerror(modname):
pass
with warnings.catch_warnings():
warnings.filterwarnings('ignore') # ignore problems during import
ModuleScanner().run(callback, key, onerror=onerror)
# --------------------------------------- enhanced Web browser interface
def _start_server(urlhandler, port):
"""Start an HTTP server thread on a specific port.
Start an HTML/text server thread, so HTML or text documents can be
browsed dynamically and interactively with a Web browser. Example use:
>>> import time
>>> import pydoc
Define a URL handler. To determine what the client is asking
for, check the URL and content_type.
Then get or generate some text or HTML code and return it.
>>> def my_url_handler(url, content_type):
... text = 'the URL sent was: (%s, %s)' % (url, content_type)
... return text
Start server thread on port 0.
If you use port 0, the server will pick a random port number.
You can then use serverthread.port to get the port number.
>>> port = 0
>>> serverthread = pydoc._start_server(my_url_handler, port)
Check that the server is really started. If it is, open browser
and get first page. Use serverthread.url as the starting page.
>>> if serverthread.serving:
... import webbrowser
The next two lines are commented out so a browser doesn't open if
doctest is run on this module.
#... webbrowser.open(serverthread.url)
#True
Let the server do its thing. We just need to monitor its status.
Use time.sleep so the loop doesn't hog the CPU.
>>> starttime = time.time()
>>> timeout = 1 #seconds
This is a short timeout for testing purposes.
>>> while serverthread.serving:
... time.sleep(.01)
... if serverthread.serving and time.time() - starttime > timeout:
... serverthread.stop()
... break
Print any errors that may have occurred.
>>> print(serverthread.error)
None
"""
import http.server
import email.message
import select
import threading
class DocHandler(http.server.BaseHTTPRequestHandler):
def do_GET(self):
"""Process a request from an HTML browser.
The URL received is in self.path.
Get an HTML page from self.urlhandler and send it.
"""
if self.path.endswith('.css'):
content_type = 'text/css'
else:
content_type = 'text/html'
self.send_response(200)
self.send_header('Content-Type', '%s; charset=UTF-8' % content_type)
self.end_headers()
self.wfile.write(self.urlhandler(
self.path, content_type).encode('utf-8'))
def log_message(self, *args):
# Don't log messages.
pass
class DocServer(http.server.HTTPServer):
def __init__(self, port, callback):
self.host = (sys.platform == 'mac') and '127.0.0.1' or 'localhost'
self.address = ('', port)
self.callback = callback
self.base.__init__(self, self.address, self.handler)
self.quit = False
def serve_until_quit(self):
while not self.quit:
rd, wr, ex = select.select([self.socket.fileno()], [], [], 1)
if rd:
self.handle_request()
self.server_close()
def server_activate(self):
self.base.server_activate(self)
if self.callback:
self.callback(self)
class ServerThread(threading.Thread):
def __init__(self, urlhandler, port):
self.urlhandler = urlhandler
self.port = int(port)
threading.Thread.__init__(self)
self.serving = False
self.error = None
def run(self):
"""Start the server."""
try:
DocServer.base = http.server.HTTPServer
DocServer.handler = DocHandler
DocHandler.MessageClass = email.message.Message
DocHandler.urlhandler = staticmethod(self.urlhandler)
docsvr = DocServer(self.port, self.ready)
self.docserver = docsvr
docsvr.serve_until_quit()
except Exception as e:
self.error = e
def ready(self, server):
self.serving = True
self.host = server.host
self.port = server.server_port
self.url = 'http://%s:%d/' % (self.host, self.port)
def stop(self):
"""Stop the server and this thread nicely"""
self.docserver.quit = True
self.serving = False
self.url = None
thread = ServerThread(urlhandler, port)
thread.start()
# Wait until thread.serving is True to make sure we are
# really up before returning.
while not thread.error and not thread.serving:
time.sleep(.01)
return thread
def _url_handler(url, content_type="text/html"):
"""The pydoc url handler for use with the pydoc server.
If the content_type is 'text/css', the _pydoc.css style
    sheet is read and returned if it exists.
If the content_type is 'text/html', then the result of
get_html_page(url) is returned.
"""
class _HTMLDoc(HTMLDoc):
def page(self, title, contents):
"""Format an HTML page."""
css_path = "pydoc_data/_pydoc.css"
css_link = (
'<link rel="stylesheet" type="text/css" href="%s">' %
css_path)
return '''\
<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.0 Transitional//EN">
<html><head><title>Pydoc: %s</title>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8">
%s</head><body bgcolor="#f0f0f8">%s<div style="clear:both;padding-top:.5em;">%s</div>
</body></html>''' % (title, css_link, html_navbar(), contents)
def filelink(self, url, path):
return '<a href="getfile?key=%s">%s</a>' % (url, path)
html = _HTMLDoc()
def html_navbar():
version = html.escape("%s [%s, %s]" % (platform.python_version(),
platform.python_build()[0],
platform.python_compiler()))
return """
<div style='float:left'>
Python %s<br>%s
</div>
<div style='float:right'>
<div style='text-align:center'>
<a href="index.html">Module Index</a>
: <a href="topics.html">Topics</a>
: <a href="keywords.html">Keywords</a>
</div>
<div>
<form action="get" style='display:inline;'>
<input type=text name=key size=15>
<input type=submit value="Get">
</form>
<form action="search" style='display:inline;'>
<input type=text name=key size=15>
<input type=submit value="Search">
</form>
</div>
</div>
""" % (version, html.escape(platform.platform(terse=True)))
def html_index():
"""Module Index page."""
def bltinlink(name):
return '<a href="%s.html">%s</a>' % (name, name)
heading = html.heading(
'<big><big><strong>Index of Modules</strong></big></big>',
'#ffffff', '#7799ee')
names = [name for name in sys.builtin_module_names
if name != '__main__']
contents = html.multicolumn(names, bltinlink)
contents = [heading, '<p>' + html.bigsection(
'Built-in Modules', '#ffffff', '#ee77aa', contents)]
seen = {}
for dir in sys.path:
contents.append(html.index(dir, seen))
contents.append(
'<p align=right><font color="#909090" face="helvetica,'
'arial"><strong>pydoc</strong> by Ka-Ping Yee'
            '<ping@lfw.org></font>')
return 'Index of Modules', ''.join(contents)
def html_search(key):
"""Search results page."""
# scan for modules
search_result = []
def callback(path, modname, desc):
if modname[-9:] == '.__init__':
modname = modname[:-9] + ' (package)'
search_result.append((modname, desc and '- ' + desc))
with warnings.catch_warnings():
warnings.filterwarnings('ignore') # ignore problems during import
ModuleScanner().run(callback, key)
# format page
def bltinlink(name):
return '<a href="%s.html">%s</a>' % (name, name)
results = []
heading = html.heading(
'<big><big><strong>Search Results</strong></big></big>',
'#ffffff', '#7799ee')
for name, desc in search_result:
results.append(bltinlink(name) + desc)
contents = heading + html.bigsection(
'key = %s' % key, '#ffffff', '#ee77aa', '<br>'.join(results))
return 'Search Results', contents
def html_getfile(path):
"""Get and display a source file listing safely."""
path = path.replace('%20', ' ')
with tokenize.open(path) as fp:
lines = html.escape(fp.read())
body = '<pre>%s</pre>' % lines
heading = html.heading(
'<big><big><strong>File Listing</strong></big></big>',
'#ffffff', '#7799ee')
contents = heading + html.bigsection(
'File: %s' % path, '#ffffff', '#ee77aa', body)
return 'getfile %s' % path, contents
def html_topics():
"""Index of topic texts available."""
def bltinlink(name):
return '<a href="topic?key=%s">%s</a>' % (name, name)
heading = html.heading(
'<big><big><strong>INDEX</strong></big></big>',
'#ffffff', '#7799ee')
names = sorted(Helper.topics.keys())
contents = html.multicolumn(names, bltinlink)
contents = heading + html.bigsection(
'Topics', '#ffffff', '#ee77aa', contents)
return 'Topics', contents
def html_keywords():
"""Index of keywords."""
heading = html.heading(
'<big><big><strong>INDEX</strong></big></big>',
'#ffffff', '#7799ee')
names = sorted(Helper.keywords.keys())
def bltinlink(name):
return '<a href="topic?key=%s">%s</a>' % (name, name)
contents = html.multicolumn(names, bltinlink)
contents = heading + html.bigsection(
'Keywords', '#ffffff', '#ee77aa', contents)
return 'Keywords', contents
def html_topicpage(topic):
"""Topic or keyword help page."""
buf = io.StringIO()
htmlhelp = Helper(buf, buf)
contents, xrefs = htmlhelp._gettopic(topic)
if topic in htmlhelp.keywords:
title = 'KEYWORD'
else:
title = 'TOPIC'
heading = html.heading(
'<big><big><strong>%s</strong></big></big>' % title,
'#ffffff', '#7799ee')
contents = '<pre>%s</pre>' % html.markup(contents)
contents = html.bigsection(topic , '#ffffff','#ee77aa', contents)
if xrefs:
xrefs = sorted(xrefs.split())
def bltinlink(name):
return '<a href="topic?key=%s">%s</a>' % (name, name)
xrefs = html.multicolumn(xrefs, bltinlink)
xrefs = html.section('Related help topics: ',
'#ffffff', '#ee77aa', xrefs)
return ('%s %s' % (title, topic),
''.join((heading, contents, xrefs)))
def html_getobj(url):
obj = locate(url, forceload=1)
if obj is None and url != 'None':
raise ValueError('could not find object')
title = describe(obj)
content = html.document(obj, url)
return title, content
def html_error(url, exc):
heading = html.heading(
'<big><big><strong>Error</strong></big></big>',
'#ffffff', '#7799ee')
contents = '<br>'.join(html.escape(line) for line in
format_exception_only(type(exc), exc))
contents = heading + html.bigsection(url, '#ffffff', '#bb0000',
contents)
return "Error - %s" % url, contents
def get_html_page(url):
"""Generate an HTML page for url."""
complete_url = url
if url.endswith('.html'):
url = url[:-5]
try:
if url in ("", "index"):
title, content = html_index()
elif url == "topics":
title, content = html_topics()
elif url == "keywords":
title, content = html_keywords()
elif '=' in url:
op, _, url = url.partition('=')
if op == "search?key":
title, content = html_search(url)
elif op == "getfile?key":
title, content = html_getfile(url)
elif op == "topic?key":
# try topics first, then objects.
try:
title, content = html_topicpage(url)
except ValueError:
title, content = html_getobj(url)
elif op == "get?key":
# try objects first, then topics.
if url in ("", "index"):
title, content = html_index()
else:
try:
title, content = html_getobj(url)
except ValueError:
title, content = html_topicpage(url)
else:
raise ValueError('bad pydoc url')
else:
title, content = html_getobj(url)
except Exception as exc:
# Catch any errors and display them in an error page.
title, content = html_error(complete_url, exc)
return html.page(title, content)
if url.startswith('/'):
url = url[1:]
if content_type == 'text/css':
path_here = os.path.dirname(os.path.realpath(__file__))
css_path = os.path.join(path_here, url)
with open(css_path) as fp:
return ''.join(fp.readlines())
elif content_type == 'text/html':
return get_html_page(url)
# Errors outside the url handler are caught by the server.
raise TypeError('unknown content type %r for url %s' % (content_type, url))
def browse(port=0, *, open_browser=True):
"""Start the enhanced pydoc Web server and open a Web browser.
Use port '0' to start the server on an arbitrary port.
Set open_browser to False to suppress opening a browser.
"""
import webbrowser
serverthread = _start_server(_url_handler, port)
if serverthread.error:
print(serverthread.error)
return
if serverthread.serving:
server_help_msg = 'Server commands: [b]rowser, [q]uit'
if open_browser:
webbrowser.open(serverthread.url)
try:
print('Server ready at', serverthread.url)
print(server_help_msg)
while serverthread.serving:
cmd = input('server> ')
cmd = cmd.lower()
if cmd == 'q':
break
elif cmd == 'b':
webbrowser.open(serverthread.url)
else:
print(server_help_msg)
except (KeyboardInterrupt, EOFError):
print()
finally:
if serverthread.serving:
serverthread.stop()
print('Server stopped')
# -------------------------------------------------- command-line interface
def ispath(x):
return isinstance(x, str) and x.find(os.sep) >= 0
def cli():
"""Command-line interface (looks at sys.argv to decide what to do)."""
import getopt
class BadUsage(Exception): pass
# Scripts don't get the current directory in their path by default
# unless they are run with the '-m' switch
if '' not in sys.path:
scriptdir = os.path.dirname(sys.argv[0])
if scriptdir in sys.path:
sys.path.remove(scriptdir)
sys.path.insert(0, '.')
try:
opts, args = getopt.getopt(sys.argv[1:], 'bk:p:w')
writing = False
start_server = False
open_browser = False
port = None
for opt, val in opts:
if opt == '-b':
start_server = True
open_browser = True
if opt == '-k':
apropos(val)
return
if opt == '-p':
start_server = True
port = val
if opt == '-w':
writing = True
if start_server:
if port is None:
port = 0
browse(port, open_browser=open_browser)
return
if not args: raise BadUsage
for arg in args:
if ispath(arg) and not os.path.exists(arg):
print('file %r does not exist' % arg)
break
try:
if ispath(arg) and os.path.isfile(arg):
arg = importfile(arg)
if writing:
if ispath(arg) and os.path.isdir(arg):
writedocs(arg)
else:
writedoc(arg)
else:
help.help(arg)
except ErrorDuringImport as value:
print(value)
except (getopt.error, BadUsage):
cmd = os.path.splitext(os.path.basename(sys.argv[0]))[0]
print("""pydoc - the Python documentation tool
{cmd} <name> ...
Show text documentation on something. <name> may be the name of a
Python keyword, topic, function, module, or package, or a dotted
reference to a class or function within a module or module in a
package. If <name> contains a '{sep}', it is used as the path to a
Python source file to document. If name is 'keywords', 'topics',
or 'modules', a listing of these things is displayed.
{cmd} -k <keyword>
Search for a keyword in the synopsis lines of all available modules.
{cmd} -p <port>
Start an HTTP server on the given port on the local machine. Port
number 0 can be used to get an arbitrary unused port.
{cmd} -b
Start an HTTP server on an arbitrary unused port and open a Web browser
to interactively browse documentation. The -p option can be used with
the -b option to explicitly specify the server port.
{cmd} -w <name> ...
Write out the HTML documentation for a module to a file in the current
directory. If <name> contains a '{sep}', it is treated as a filename; if
it names a directory, documentation is written for all the contents.
""".format(cmd=cmd, sep=os.sep))
if __name__ == '__main__':
cli()
| agpl-3.0 | 5,632,927,379,735,907,000 | 37.863619 | 91 | 0.534558 | false |
ProjectSWGCore/NGECore2 | scripts/mobiles/talus/lost_aqualish_scout.py | 2 | 1650 | import sys
from services.spawn import MobileTemplate
from services.spawn import WeaponTemplate
from resources.datatables import WeaponType
from resources.datatables import Difficulty
from resources.datatables import Options
from java.util import Vector
def addTemplate(core):
mobileTemplate = MobileTemplate()
mobileTemplate.setCreatureName('lost_aqualish_scout')
mobileTemplate.setLevel(36)
mobileTemplate.setDifficulty(Difficulty.NORMAL)
mobileTemplate.setMinSpawnDistance(4)
mobileTemplate.setMaxSpawnDistance(8)
mobileTemplate.setDeathblow(False)
mobileTemplate.setScale(1)
mobileTemplate.setSocialGroup("lost aqualish")
mobileTemplate.setAssistRange(6)
mobileTemplate.setStalker(True)
mobileTemplate.setOptionsBitmask(Options.AGGRESSIVE | Options.ATTACKABLE)
templates = Vector()
templates.add('object/mobile/shared_dressed_lost_aqualish_scout_male_01.iff')
templates.add('object/mobile/shared_dressed_lost_aqualish_scout_female_01.iff')
mobileTemplate.setTemplates(templates)
weaponTemplates = Vector()
weapontemplate = WeaponTemplate('object/weapon/ranged/rifle/shared_rifle_t21.iff', WeaponType.CARBINE, 1.0, 15, 'energy')
weaponTemplates.add(weapontemplate)
mobileTemplate.setWeaponTemplateVector(weaponTemplates)
attacks = Vector()
mobileTemplate.setDefaultAttack('rangedShot')
mobileTemplate.setAttacks(attacks)
lootPoolNames_1 = ['Junk']
lootPoolChances_1 = [100]
lootGroupChance_1 = 100
mobileTemplate.addToLootGroups(lootPoolNames_1,lootPoolChances_1,lootGroupChance_1)
core.spawnService.addMobileTemplate('lost_aqualish_scout', mobileTemplate)
return | lgpl-3.0 | 9,109,191,525,048,535,000 | 33.913043 | 122 | 0.804242 | false |
xbot/alfred-pushbullet | lib/websocket/_logging.py | 49 | 1865 | """
websocket - WebSocket client library for Python
Copyright (C) 2010 Hiroki Ohtani(liris)
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor,
Boston, MA 02110-1335 USA
"""
import logging
_logger = logging.getLogger('websocket')
_traceEnabled = False
__all__ = ["enableTrace", "dump", "error", "debug", "trace",
"isEnabledForError", "isEnabledForDebug"]
def enableTrace(tracable):
"""
    turn on/off the traceability.
    tracable: boolean value. if set True, traceability is enabled.
"""
global _traceEnabled
_traceEnabled = tracable
if tracable:
if not _logger.handlers:
_logger.addHandler(logging.StreamHandler())
_logger.setLevel(logging.DEBUG)
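# --- editorial note (added; not part of the original module) ---------------
# Typical application-side use, assuming the package is importable as
# "websocket" and re-exports this helper: call it once before opening a
# connection so frame dumps and debug messages reach the added StreamHandler.
#
#     import websocket
#     websocket.enableTrace(True)
# ---------------------------------------------------------------------------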
def dump(title, message):
if _traceEnabled:
_logger.debug("--- " + title + " ---")
_logger.debug(message)
_logger.debug("-----------------------")
def error(msg):
_logger.error(msg)
def debug(msg):
_logger.debug(msg)
def trace(msg):
if _traceEnabled:
_logger.debug(msg)
def isEnabledForError():
return _logger.isEnabledFor(logging.ERROR)
def isEnabledForDebug():
return _logger.isEnabledFor(logging.DEBUG)
| mit | -7,423,002,487,810,101,000 | 25.267606 | 71 | 0.677212 | false |
crystalhaohua0408/principles-of-computing | homework7.py | 5 | 6428 | # Homework 7 for Principles of Computing class, by k., 08/01/2014
# class Puzzle from program template at http://www.codeskulptor.org/#poc_fifteen_template.py
'''
Loyd's Fifteen puzzle - solver and visualizer
Note that solved configuration has the blank (zero) tile in upper left
Use the arrows key to swap this tile with its neighbors
'''
#import poc_fifteen_gui
class Puzzle:
"""
Class representation for the Fifteen puzzle
"""
def __init__(self, puzzle_height, puzzle_width, initial_grid=None):
"""
Initialize puzzle with default height and width
Returns a Puzzle object
"""
self._height = puzzle_height
self._width = puzzle_width
self._grid = [[col + puzzle_width * row
for col in range(self._width)]
for row in range(self._height)]
if initial_grid != None:
for row in range(puzzle_height):
for col in range(puzzle_width):
self._grid[row][col] = initial_grid[row][col]
def __str__(self):
"""
        Generate string representation for puzzle
Returns a string
"""
ans = ""
for row in range(self._height):
ans += str(self._grid[row])
ans += "\n"
return ans
#####################################
# GUI methods
def get_height(self):
"""
Getter for puzzle height
Returns an integer
"""
return self._height
def get_width(self):
"""
Getter for puzzle width
Returns an integer
"""
return self._width
def get_number(self, row, col):
"""
Getter for the number at tile position pos
Returns an integer
"""
return self._grid[row][col]
def set_number(self, row, col, value):
"""
Setter for the number at tile position pos
"""
self._grid[row][col] = value
def clone(self):
"""
Make a copy of the puzzle to update during solving
Returns a Puzzle object
"""
new_puzzle = Puzzle(self._height, self._width, self._grid)
return new_puzzle
########################################################
# Core puzzle methods
def current_position(self, solved_row, solved_col):
"""
Locate the current position of the tile that will be at
position (solved_row, solved_col) when the puzzle is solved
Returns a tuple of two integers
"""
solved_value = (solved_col + self._width * solved_row)
for row in range(self._height):
for col in range(self._width):
if self._grid[row][col] == solved_value:
return (row, col)
assert False, "Value " + str(solved_value) + " not found"
def update_puzzle(self, move_string):
"""
Updates the puzzle state based on the provided move string
"""
zero_row, zero_col = self.current_position(0, 0)
for direction in move_string:
if direction == "l":
assert zero_col > 0, "move off grid: " + direction
self._grid[zero_row][zero_col] = self._grid[zero_row][zero_col - 1]
self._grid[zero_row][zero_col - 1] = 0
zero_col -= 1
elif direction == "r":
assert zero_col < self._width - 1, "move off grid: " + direction
self._grid[zero_row][zero_col] = self._grid[zero_row][zero_col + 1]
self._grid[zero_row][zero_col + 1] = 0
zero_col += 1
elif direction == "u":
assert zero_row > 0, "move off grid: " + direction
self._grid[zero_row][zero_col] = self._grid[zero_row - 1][zero_col]
self._grid[zero_row - 1][zero_col] = 0
zero_row -= 1
elif direction == "d":
assert zero_row < self._height - 1, "move off grid: " + direction
self._grid[zero_row][zero_col] = self._grid[zero_row + 1][zero_col]
self._grid[zero_row + 1][zero_col] = 0
zero_row += 1
else:
assert False, "invalid direction: " + direction
# start interactive simulation
#poc_fifteen_gui.FifteenGUI(Puzzle(4, 4))
# Question 1
question1 = Puzzle(4, 4)
print 'Prep for Question 1 follows...\n', question1
question1.update_puzzle('drdr')
print 'Configuration after move \'drdr\':\n', question1
# Question 2
question2 = Puzzle(4, 4)
question2.update_puzzle('ddrdrudlulurrrlldluurrrdllldr')
print '\nPrep for Question 2 follows...\n', question2
question2.update_puzzle('urullddruld')
print 'Configuration after move \'urullddruld\':\n', question2
# Question 3
question3 = Puzzle(2, 2)
print '\nPrep for Question 3 follows...\n', question3
question3.update_puzzle('rdlu')
print 'First move:\n', question3
question3.update_puzzle('rdlu')
print 'Second move:\n', question3
question3.update_puzzle('rdlu')
print 'Configuration after third move:\n', question3
# Question 4
question4 = Puzzle(2, 2, [[0, 2], [3, 1]])
print '\nPrep for Question 4 follows...\n', question4
question4.update_puzzle('rdlu')
print 'Configuration after move \'rdlu\':\n', question4
# Question 5
question5 = Puzzle(2, 2, [[0, 3], [1, 2]])
print '\nPrep for Question 5 follows...\n', question5
question5.update_puzzle('drul')
print 'Configuration after move \'drul\':\n', question5
# Question 8
question8 = Puzzle(4, 4, [[4, 13, 1, 3], [5, 10, 2, 7], [8, 12, 6, 11], [9, 0, 14, 16]])
print '\nPrep for Question 8 follows...\n', question8
question8.update_puzzle('uuu')
print 'Configuration after move \'uuu\':\n', question8
question8.update_puzzle('lddru')
print 'Configuration after move \'lddru\':\n', question8
question8.update_puzzle('lddruld')
print 'Configuration after move \'lddruld\':\n', question8
# Question 9
question9 = Puzzle(3, 2, [[1, 2], [0, 4], [3, 5]])
print '\nPrep for Question 9 follows...\n', question9
question9.update_puzzle('ruldrdlurdluurddlur')
print 'Configuration after move \'ruldrdlurdluurddlur\':\n', question9
# Question 10
question10 = Puzzle(2, 3, [[3, 4, 1], [0, 2, 5]])
print '\nPrep for Question 10 follows...\n', question10
question10.update_puzzle('urdlurrdluldrruld')
print 'Configuration after move \'urdlurrdluldrruld\':\n', question10
| mit | -279,326,744,133,514,400 | 30.509804 | 92 | 0.583852 | false |
jindongh/boto | boto/ec2/elb/listener.py | 152 | 3377 | # Copyright (c) 2006-2012 Mitch Garnaat http://garnaat.org/
# Copyright (c) 2012 Amazon.com, Inc. or its affiliates.
# All Rights Reserved
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
from boto.ec2.elb.listelement import ListElement
class Listener(object):
"""
Represents an EC2 Load Balancer Listener tuple
"""
def __init__(self, load_balancer=None, load_balancer_port=0,
instance_port=0, protocol='', ssl_certificate_id=None, instance_protocol=None):
self.load_balancer = load_balancer
self.load_balancer_port = load_balancer_port
self.instance_port = instance_port
self.protocol = protocol
self.instance_protocol = instance_protocol
self.ssl_certificate_id = ssl_certificate_id
self.policy_names = ListElement()
def __repr__(self):
r = "(%d, %d, '%s'" % (self.load_balancer_port, self.instance_port, self.protocol)
if self.instance_protocol:
r += ", '%s'" % self.instance_protocol
if self.ssl_certificate_id:
r += ', %s' % (self.ssl_certificate_id)
r += ')'
return r
def startElement(self, name, attrs, connection):
if name == 'PolicyNames':
return self.policy_names
return None
def endElement(self, name, value, connection):
if name == 'LoadBalancerPort':
self.load_balancer_port = int(value)
elif name == 'InstancePort':
self.instance_port = int(value)
elif name == 'InstanceProtocol':
self.instance_protocol = value
elif name == 'Protocol':
self.protocol = value
elif name == 'SSLCertificateId':
self.ssl_certificate_id = value
else:
setattr(self, name, value)
def get_tuple(self):
return self.load_balancer_port, self.instance_port, self.protocol
def get_complex_tuple(self):
return self.load_balancer_port, self.instance_port, self.protocol, self.instance_protocol
def __getitem__(self, key):
if key == 0:
return self.load_balancer_port
if key == 1:
return self.instance_port
if key == 2:
return self.protocol
if key == 3:
return self.instance_protocol
if key == 4:
return self.ssl_certificate_id
raise KeyError
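# --- editorial note (added; not part of the original module) ---------------
# A Listener can be read through attributes, as a 3-tuple, or by index (see
# get_tuple()/__getitem__ above). Hypothetical standalone use:
#
#     l = Listener(load_balancer_port=80, instance_port=8080, protocol='HTTP')
#     l.get_tuple()    # (80, 8080, 'HTTP')
#     l[2]             # 'HTTP'
# ---------------------------------------------------------------------------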
| mit | -8,834,580,491,162,530,000 | 37.816092 | 97 | 0.650874 | false |
dago/ansible-modules-core | cloud/amazon/cloudformation.py | 17 | 10939 | #!/usr/bin/python
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: cloudformation
short_description: create a AWS CloudFormation stack
description:
    - Launches an AWS CloudFormation stack and waits for it to complete.
version_added: "1.1"
options:
stack_name:
description:
- name of the cloudformation stack
required: true
default: null
aliases: []
disable_rollback:
description:
- If a stacks fails to form, rollback will remove the stack
required: false
default: "false"
choices: [ "true", "false" ]
aliases: []
template_parameters:
description:
- a list of hashes of all the template variables for the stack
required: false
default: {}
aliases: []
region:
description:
- The AWS region to use. If not specified then the value of the EC2_REGION environment variable, if any, is used.
required: true
default: null
aliases: ['aws_region', 'ec2_region']
state:
description:
- If state is "present", stack will be created. If state is "present" and if stack exists and template has changed, it will be updated.
If state is "absent", stack will be removed.
required: true
default: null
aliases: []
template:
description:
- the path of the cloudformation template
required: true
default: null
aliases: []
stack_policy:
description:
- the path of the cloudformation stack policy
required: false
default: null
aliases: []
version_added: "x.x"
tags:
description:
      - Dictionary of tags to associate with stack and its resources during stack creation. Cannot be updated later.
Requires at least Boto version 2.6.0.
required: false
default: null
aliases: []
version_added: "1.4"
aws_secret_key:
description:
- AWS secret key. If not set then the value of the AWS_SECRET_KEY environment variable is used.
required: false
default: null
aliases: [ 'ec2_secret_key', 'secret_key' ]
version_added: "1.5"
aws_access_key:
description:
- AWS access key. If not set then the value of the AWS_ACCESS_KEY environment variable is used.
required: false
default: null
aliases: [ 'ec2_access_key', 'access_key' ]
version_added: "1.5"
region:
description:
- The AWS region to use. If not specified then the value of the EC2_REGION environment variable, if any, is used.
required: false
aliases: ['aws_region', 'ec2_region']
version_added: "1.5"
requirements: [ "boto" ]
author: James S. Martin
'''
EXAMPLES = '''
# Basic task example
tasks:
- name: launch ansible cloudformation example
cloudformation:
stack_name: "ansible-cloudformation"
state: "present"
region: "us-east-1"
disable_rollback: true
template: "files/cloudformation-example.json"
template_parameters:
KeyName: "jmartin"
DiskType: "ephemeral"
InstanceType: "m1.small"
ClusterSize: 3
tags:
Stack: "ansible-cloudformation"
'''
import json
import time
try:
import boto
import boto.cloudformation.connection
except ImportError:
print "failed=True msg='boto required for this module'"
sys.exit(1)
def boto_exception(err):
'''generic error message handler'''
if hasattr(err, 'error_message'):
error = err.error_message
elif hasattr(err, 'message'):
error = err.message
else:
error = '%s: %s' % (Exception, err)
return error
def boto_version_required(version_tuple):
parts = boto.Version.split('.')
boto_version = []
try:
for part in parts:
boto_version.append(int(part))
except:
boto_version.append(-1)
return tuple(boto_version) >= tuple(version_tuple)
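# --- editorial note (added; not part of the original module) ---------------
# boto_version_required() compares the installed boto version tuple-wise; if a
# component is not an integer the parse stops and -1 is appended, so unusual
# version strings compare low. For example, with boto.Version == '2.9.6',
# boto_version_required((2, 6, 0)) is True.
# ---------------------------------------------------------------------------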
def stack_operation(cfn, stack_name, operation):
'''gets the status of a stack while it is created/updated/deleted'''
existed = []
result = {}
operation_complete = False
while operation_complete == False:
try:
stack = cfn.describe_stacks(stack_name)[0]
existed.append('yes')
except:
if 'yes' in existed:
result = dict(changed=True,
output='Stack Deleted',
events=map(str, list(stack.describe_events())))
else:
result = dict(changed= True, output='Stack Not Found')
break
if '%s_COMPLETE' % operation == stack.stack_status:
result = dict(changed=True,
events = map(str, list(stack.describe_events())),
output = 'Stack %s complete' % operation)
break
if 'ROLLBACK_COMPLETE' == stack.stack_status or '%s_ROLLBACK_COMPLETE' % operation == stack.stack_status:
result = dict(changed=True, failed=True,
events = map(str, list(stack.describe_events())),
output = 'Problem with %s. Rollback complete' % operation)
break
elif '%s_FAILED' % operation == stack.stack_status:
result = dict(changed=True, failed=True,
events = map(str, list(stack.describe_events())),
output = 'Stack %s failed' % operation)
break
else:
time.sleep(5)
return result
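# --- editorial note (added; not part of the original module) ---------------
# stack_operation() polls describe_stacks() every 5 seconds until the stack
# reaches <operation>_COMPLETE, hits a rollback/failed status, or disappears
# (the expected end state of a DELETE). The returned dict typically carries
# 'changed', 'output' and the collected stack 'events', e.g.
# stack_operation(cfn, 'ansible-cloudformation', 'CREATE').
# ---------------------------------------------------------------------------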
def main():
argument_spec = ec2_argument_spec()
argument_spec.update(dict(
stack_name=dict(required=True),
template_parameters=dict(required=False, type='dict', default={}),
state=dict(default='present', choices=['present', 'absent']),
template=dict(default=None, required=True),
stack_policy=dict(default=None, required=False),
disable_rollback=dict(default=False, type='bool'),
tags=dict(default=None)
)
)
module = AnsibleModule(
argument_spec=argument_spec,
)
state = module.params['state']
stack_name = module.params['stack_name']
template_body = open(module.params['template'], 'r').read()
if module.params['stack_policy'] is not None:
stack_policy_body = open(module.params['stack_policy'], 'r').read()
else:
stack_policy_body = None
disable_rollback = module.params['disable_rollback']
template_parameters = module.params['template_parameters']
tags = module.params['tags']
ec2_url, aws_access_key, aws_secret_key, region = get_ec2_creds(module)
kwargs = dict()
if tags is not None:
if not boto_version_required((2,6,0)):
module.fail_json(msg='Module parameter "tags" requires at least Boto version 2.6.0')
kwargs['tags'] = tags
# convert the template parameters ansible passes into a tuple for boto
template_parameters_tup = [(k, v) for k, v in template_parameters.items()]
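    # --- editorial note (added; not part of the original module) -----------
    # boto expects parameters as a list of (name, value) pairs rather than the
    # dict Ansible supplies; with the EXAMPLES task above,
    # {'KeyName': 'jmartin', 'ClusterSize': 3} becomes
    # [('KeyName', 'jmartin'), ('ClusterSize', 3)] (order follows the dict).
    # ------------------------------------------------------------------------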
stack_outputs = {}
try:
cfn = boto.cloudformation.connect_to_region(
region,
aws_access_key_id=aws_access_key,
aws_secret_access_key=aws_secret_key,
)
except boto.exception.NoAuthHandlerFound, e:
module.fail_json(msg=str(e))
update = False
result = {}
operation = None
# if state is present we are going to ensure that the stack is either
# created or updated
if state == 'present':
try:
cfn.create_stack(stack_name, parameters=template_parameters_tup,
template_body=template_body,
stack_policy_body=stack_policy_body,
disable_rollback=disable_rollback,
capabilities=['CAPABILITY_IAM'],
**kwargs)
operation = 'CREATE'
except Exception, err:
error_msg = boto_exception(err)
if 'AlreadyExistsException' in error_msg or 'already exists' in error_msg:
update = True
else:
module.fail_json(msg=error_msg)
if not update:
result = stack_operation(cfn, stack_name, operation)
# if the state is present and the stack already exists, we try to update it
# AWS will tell us if the stack template and parameters are the same and
# don't need to be updated.
if update:
try:
cfn.update_stack(stack_name, parameters=template_parameters_tup,
template_body=template_body,
stack_policy_body=stack_policy_body,
disable_rollback=disable_rollback,
capabilities=['CAPABILITY_IAM'])
operation = 'UPDATE'
except Exception, err:
error_msg = boto_exception(err)
if 'No updates are to be performed.' in error_msg:
result = dict(changed=False, output='Stack is already up-to-date.')
else:
module.fail_json(msg=error_msg)
if operation == 'UPDATE':
result = stack_operation(cfn, stack_name, operation)
# check the status of the stack while we are creating/updating it.
# and get the outputs of the stack
if state == 'present' or update:
stack = cfn.describe_stacks(stack_name)[0]
for output in stack.outputs:
stack_outputs[output.key] = output.value
result['stack_outputs'] = stack_outputs
# absent state is different because of the way delete_stack works.
# problem is it it doesn't give an error if stack isn't found
# so must describe the stack first
if state == 'absent':
try:
cfn.describe_stacks(stack_name)
operation = 'DELETE'
except Exception, err:
error_msg = boto_exception(err)
if 'Stack:%s does not exist' % stack_name in error_msg:
result = dict(changed=False, output='Stack not found.')
else:
module.fail_json(msg=error_msg)
if operation == 'DELETE':
cfn.delete_stack(stack_name)
result = stack_operation(cfn, stack_name, operation)
module.exit_json(**result)
# import module snippets
from ansible.module_utils.basic import *
from ansible.module_utils.ec2 import *
main()
| gpl-3.0 | 1,931,798,520,711,330,000 | 33.184375 | 142 | 0.610202 | false |
cbandera/hometop_HT3 | HT3/sw/HT3_Logger.py | 1 | 1502 | #! /usr/bin/python3
#
#################################################################
## Copyright (c) 2013 Norbert S. <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#################################################################
# Ver:0.1.5 / Datum 25.05.2014
# Ver:0.1.6 / Datum 10.01.2015 'reading configuration changed'
# Ver:0.1.7.1/ Datum 04.03.2015 'socket option' activated
# logging from ht_utils added
#################################################################
import sys, time
sys.path.append('lib')
import ht3_worker
configurationfilename='./etc/config/HT3_db_cfg.xml'
logfilename="ht_logger.log"
#### reconfiguration has to be done in configuration-file ####
HT3_logger=ht3_worker.ht3_cworker(configurationfilename, hexdump_window=False, gui_active=False, logfilename_in=logfilename)
HT3_logger.run()
while True:
time.sleep(2)
| gpl-3.0 | 5,379,266,533,217,548,000 | 40.722222 | 124 | 0.63249 | false |
brave/browser-ios | Client/Assets/scrape_plugins.py | 3 | 1503 | #!/usr/bin/env python
from lxml import html
import os
import requests
import shutil
import urllib
def main():
if not os.path.exists("SearchPlugins"):
os.makedirs("SearchPlugins")
locales = getLocaleList()
for locale in locales:
files = getFileList(locale)
if files == None:
continue
print("found searchplugins")
for file in files:
downloadedFile = getFile(locale, file)
directory = os.path.join("SearchPlugins", locale)
if not os.path.exists(directory):
os.makedirs(directory)
shutil.move(downloadedFile, os.path.join(directory, file))
def getLocaleList():
response = requests.get('http://hg.mozilla.org/releases/mozilla-aurora/raw-file/default/mobile/android/locales/all-locales')
return response.text.strip().split("\n")
def getFileList(locale):
print("scraping: %s..." % locale)
url = "https://hg.mozilla.org/releases/l10n/mozilla-aurora/%s/file/default/mobile/searchplugins" % locale
response = requests.get(url)
if not response.ok:
return
tree = html.fromstring(response.content)
return tree.xpath('//a[@class="list"]/text()')
def getFile(locale, file):
print(" downloading: %s..." % file)
url = "https://hg.mozilla.org/releases/l10n/mozilla-aurora/%s/raw-file/default/mobile/searchplugins/%s" % (locale, file)
result = urllib.urlretrieve(url)
return result[0]
if __name__ == "__main__":
main()
| mpl-2.0 | 4,678,307,389,273,892,000 | 29.673469 | 128 | 0.646707 | false |
jpaalasm/pyglet | pyglet/media/drivers/openal/lib_openal.py | 45 | 27846 | # ----------------------------------------------------------------------------
# pyglet
# Copyright (c) 2006-2008 Alex Holkner
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
# * Neither the name of pyglet nor the names of its
# contributors may be used to endorse or promote products
# derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# ----------------------------------------------------------------------------
'''Wrapper for openal
Generated with:
../tools/wraptypes/wrap.py /usr/include/AL/al.h -lopenal -olib_openal.py
.. Hacked to remove non-existent library functions.
TODO add alGetError check.
.. alListener3i and alListeneriv are present in my OS X 10.4 but not another
10.4 user's installation. They've also been removed for compatibility.
'''
__docformat__ = 'restructuredtext'
__version__ = '$Id$'
import ctypes
from ctypes import *
import sys
import pyglet.lib
_lib = pyglet.lib.load_library('openal', win32='openal32',
framework='/System/Library/Frameworks/OpenAL.framework')
_int_types = (c_int16, c_int32)
if hasattr(ctypes, 'c_int64'):
# Some builds of ctypes apparently do not have c_int64
# defined; it's a pretty good bet that these builds do not
# have 64-bit pointers.
_int_types += (ctypes.c_int64,)
for t in _int_types:
if sizeof(t) == sizeof(c_size_t):
c_ptrdiff_t = t
class c_void(Structure):
# c_void_p is a buggy return type, converting to int, so
# POINTER(None) == c_void_p is actually written as
# POINTER(c_void), so it can be treated as a real pointer.
_fields_ = [('dummy', c_int)]
AL_API = 0 # /usr/include/AL/al.h:39
ALAPI = 0 # /usr/include/AL/al.h:59
AL_INVALID = -1 # /usr/include/AL/al.h:61
AL_ILLEGAL_ENUM = 0 # /usr/include/AL/al.h:62
AL_ILLEGAL_COMMAND = 0 # /usr/include/AL/al.h:63
ALboolean = c_int # Better return type than c_char, as generated
ALchar = c_char # /usr/include/AL/al.h:73
ALbyte = c_char # /usr/include/AL/al.h:76
ALubyte = c_ubyte # /usr/include/AL/al.h:79
ALshort = c_short # /usr/include/AL/al.h:82
ALushort = c_ushort # /usr/include/AL/al.h:85
ALint = c_int # /usr/include/AL/al.h:88
ALuint = c_uint # /usr/include/AL/al.h:91
ALsizei = c_int # /usr/include/AL/al.h:94
ALenum = c_int # /usr/include/AL/al.h:97
ALfloat = c_float # /usr/include/AL/al.h:100
ALdouble = c_double # /usr/include/AL/al.h:103
ALvoid = None # /usr/include/AL/al.h:106
AL_NONE = 0 # /usr/include/AL/al.h:112
AL_FALSE = 0 # /usr/include/AL/al.h:115
AL_TRUE = 1 # /usr/include/AL/al.h:118
AL_SOURCE_RELATIVE = 514 # /usr/include/AL/al.h:121
AL_CONE_INNER_ANGLE = 4097 # /usr/include/AL/al.h:130
AL_CONE_OUTER_ANGLE = 4098 # /usr/include/AL/al.h:137
AL_PITCH = 4099 # /usr/include/AL/al.h:145
AL_POSITION = 4100 # /usr/include/AL/al.h:157
AL_DIRECTION = 4101 # /usr/include/AL/al.h:160
AL_VELOCITY = 4102 # /usr/include/AL/al.h:163
AL_LOOPING = 4103 # /usr/include/AL/al.h:171
AL_BUFFER = 4105 # /usr/include/AL/al.h:178
AL_GAIN = 4106 # /usr/include/AL/al.h:191
AL_MIN_GAIN = 4109 # /usr/include/AL/al.h:200
AL_MAX_GAIN = 4110 # /usr/include/AL/al.h:209
AL_ORIENTATION = 4111 # /usr/include/AL/al.h:216
AL_SOURCE_STATE = 4112 # /usr/include/AL/al.h:221
AL_INITIAL = 4113 # /usr/include/AL/al.h:222
AL_PLAYING = 4114 # /usr/include/AL/al.h:223
AL_PAUSED = 4115 # /usr/include/AL/al.h:224
AL_STOPPED = 4116 # /usr/include/AL/al.h:225
AL_BUFFERS_QUEUED = 4117 # /usr/include/AL/al.h:230
AL_BUFFERS_PROCESSED = 4118 # /usr/include/AL/al.h:231
AL_SEC_OFFSET = 4132 # /usr/include/AL/al.h:236
AL_SAMPLE_OFFSET = 4133 # /usr/include/AL/al.h:237
AL_BYTE_OFFSET = 4134 # /usr/include/AL/al.h:238
AL_SOURCE_TYPE = 4135 # /usr/include/AL/al.h:246
AL_STATIC = 4136 # /usr/include/AL/al.h:247
AL_STREAMING = 4137 # /usr/include/AL/al.h:248
AL_UNDETERMINED = 4144 # /usr/include/AL/al.h:249
AL_FORMAT_MONO8 = 4352 # /usr/include/AL/al.h:252
AL_FORMAT_MONO16 = 4353 # /usr/include/AL/al.h:253
AL_FORMAT_STEREO8 = 4354 # /usr/include/AL/al.h:254
AL_FORMAT_STEREO16 = 4355 # /usr/include/AL/al.h:255
AL_REFERENCE_DISTANCE = 4128 # /usr/include/AL/al.h:265
AL_ROLLOFF_FACTOR = 4129 # /usr/include/AL/al.h:273
AL_CONE_OUTER_GAIN = 4130 # /usr/include/AL/al.h:282
AL_MAX_DISTANCE = 4131 # /usr/include/AL/al.h:292
AL_FREQUENCY = 8193 # /usr/include/AL/al.h:300
AL_BITS = 8194 # /usr/include/AL/al.h:301
AL_CHANNELS = 8195 # /usr/include/AL/al.h:302
AL_SIZE = 8196 # /usr/include/AL/al.h:303
AL_UNUSED = 8208 # /usr/include/AL/al.h:310
AL_PENDING = 8209 # /usr/include/AL/al.h:311
AL_PROCESSED = 8210 # /usr/include/AL/al.h:312
AL_NO_ERROR = 0 # /usr/include/AL/al.h:316
AL_INVALID_NAME = 40961 # /usr/include/AL/al.h:321
AL_INVALID_ENUM = 40962 # /usr/include/AL/al.h:326
AL_INVALID_VALUE = 40963 # /usr/include/AL/al.h:331
AL_INVALID_OPERATION = 40964 # /usr/include/AL/al.h:336
AL_OUT_OF_MEMORY = 40965 # /usr/include/AL/al.h:342
AL_VENDOR = 45057 # /usr/include/AL/al.h:346
AL_VERSION = 45058 # /usr/include/AL/al.h:347
AL_RENDERER = 45059 # /usr/include/AL/al.h:348
AL_EXTENSIONS = 45060 # /usr/include/AL/al.h:349
AL_DOPPLER_FACTOR = 49152 # /usr/include/AL/al.h:356
AL_DOPPLER_VELOCITY = 49153 # /usr/include/AL/al.h:361
AL_SPEED_OF_SOUND = 49155 # /usr/include/AL/al.h:366
AL_DISTANCE_MODEL = 53248 # /usr/include/AL/al.h:375
AL_INVERSE_DISTANCE = 53249 # /usr/include/AL/al.h:376
AL_INVERSE_DISTANCE_CLAMPED = 53250 # /usr/include/AL/al.h:377
AL_LINEAR_DISTANCE = 53251 # /usr/include/AL/al.h:378
AL_LINEAR_DISTANCE_CLAMPED = 53252 # /usr/include/AL/al.h:379
AL_EXPONENT_DISTANCE = 53253 # /usr/include/AL/al.h:380
AL_EXPONENT_DISTANCE_CLAMPED = 53254 # /usr/include/AL/al.h:381
# /usr/include/AL/al.h:386
alEnable = _lib.alEnable
alEnable.restype = None
alEnable.argtypes = [ALenum]
# /usr/include/AL/al.h:388
alDisable = _lib.alDisable
alDisable.restype = None
alDisable.argtypes = [ALenum]
# /usr/include/AL/al.h:390
alIsEnabled = _lib.alIsEnabled
alIsEnabled.restype = ALboolean
alIsEnabled.argtypes = [ALenum]
# /usr/include/AL/al.h:396
alGetString = _lib.alGetString
alGetString.restype = POINTER(ALchar)
alGetString.argtypes = [ALenum]
# /usr/include/AL/al.h:398
alGetBooleanv = _lib.alGetBooleanv
alGetBooleanv.restype = None
alGetBooleanv.argtypes = [ALenum, POINTER(ALboolean)]
# /usr/include/AL/al.h:400
alGetIntegerv = _lib.alGetIntegerv
alGetIntegerv.restype = None
alGetIntegerv.argtypes = [ALenum, POINTER(ALint)]
# /usr/include/AL/al.h:402
alGetFloatv = _lib.alGetFloatv
alGetFloatv.restype = None
alGetFloatv.argtypes = [ALenum, POINTER(ALfloat)]
# /usr/include/AL/al.h:404
alGetDoublev = _lib.alGetDoublev
alGetDoublev.restype = None
alGetDoublev.argtypes = [ALenum, POINTER(ALdouble)]
# /usr/include/AL/al.h:406
alGetBoolean = _lib.alGetBoolean
alGetBoolean.restype = ALboolean
alGetBoolean.argtypes = [ALenum]
# /usr/include/AL/al.h:408
alGetInteger = _lib.alGetInteger
alGetInteger.restype = ALint
alGetInteger.argtypes = [ALenum]
# /usr/include/AL/al.h:410
alGetFloat = _lib.alGetFloat
alGetFloat.restype = ALfloat
alGetFloat.argtypes = [ALenum]
# /usr/include/AL/al.h:412
alGetDouble = _lib.alGetDouble
alGetDouble.restype = ALdouble
alGetDouble.argtypes = [ALenum]
# /usr/include/AL/al.h:419
alGetError = _lib.alGetError
alGetError.restype = ALenum
alGetError.argtypes = []
# /usr/include/AL/al.h:427
alIsExtensionPresent = _lib.alIsExtensionPresent
alIsExtensionPresent.restype = ALboolean
alIsExtensionPresent.argtypes = [POINTER(ALchar)]
# /usr/include/AL/al.h:429
alGetProcAddress = _lib.alGetProcAddress
alGetProcAddress.restype = POINTER(c_void)
alGetProcAddress.argtypes = [POINTER(ALchar)]
# /usr/include/AL/al.h:431
alGetEnumValue = _lib.alGetEnumValue
alGetEnumValue.restype = ALenum
alGetEnumValue.argtypes = [POINTER(ALchar)]
# /usr/include/AL/al.h:450
alListenerf = _lib.alListenerf
alListenerf.restype = None
alListenerf.argtypes = [ALenum, ALfloat]
# /usr/include/AL/al.h:452
alListener3f = _lib.alListener3f
alListener3f.restype = None
alListener3f.argtypes = [ALenum, ALfloat, ALfloat, ALfloat]
# /usr/include/AL/al.h:454
alListenerfv = _lib.alListenerfv
alListenerfv.restype = None
alListenerfv.argtypes = [ALenum, POINTER(ALfloat)]
# /usr/include/AL/al.h:456
alListeneri = _lib.alListeneri
alListeneri.restype = None
alListeneri.argtypes = [ALenum, ALint]
# /usr/include/AL/al.h:458
#alListener3i = _lib.alListener3i
#alListener3i.restype = None
#alListener3i.argtypes = [ALenum, ALint, ALint, ALint]
# /usr/include/AL/al.h:460
#alListeneriv = _lib.alListeneriv
#alListeneriv.restype = None
#alListeneriv.argtypes = [ALenum, POINTER(ALint)]
# /usr/include/AL/al.h:465
alGetListenerf = _lib.alGetListenerf
alGetListenerf.restype = None
alGetListenerf.argtypes = [ALenum, POINTER(ALfloat)]
# /usr/include/AL/al.h:467
alGetListener3f = _lib.alGetListener3f
alGetListener3f.restype = None
alGetListener3f.argtypes = [ALenum, POINTER(ALfloat), POINTER(ALfloat), POINTER(ALfloat)]
# /usr/include/AL/al.h:469
alGetListenerfv = _lib.alGetListenerfv
alGetListenerfv.restype = None
alGetListenerfv.argtypes = [ALenum, POINTER(ALfloat)]
# /usr/include/AL/al.h:471
alGetListeneri = _lib.alGetListeneri
alGetListeneri.restype = None
alGetListeneri.argtypes = [ALenum, POINTER(ALint)]
# /usr/include/AL/al.h:473
alGetListener3i = _lib.alGetListener3i
alGetListener3i.restype = None
alGetListener3i.argtypes = [ALenum, POINTER(ALint), POINTER(ALint), POINTER(ALint)]
# /usr/include/AL/al.h:475
alGetListeneriv = _lib.alGetListeneriv
alGetListeneriv.restype = None
alGetListeneriv.argtypes = [ALenum, POINTER(ALint)]
# /usr/include/AL/al.h:512
alGenSources = _lib.alGenSources
alGenSources.restype = None
alGenSources.argtypes = [ALsizei, POINTER(ALuint)]
# /usr/include/AL/al.h:515
alDeleteSources = _lib.alDeleteSources
alDeleteSources.restype = None
alDeleteSources.argtypes = [ALsizei, POINTER(ALuint)]
# /usr/include/AL/al.h:518
alIsSource = _lib.alIsSource
alIsSource.restype = ALboolean
alIsSource.argtypes = [ALuint]
# /usr/include/AL/al.h:523
alSourcef = _lib.alSourcef
alSourcef.restype = None
alSourcef.argtypes = [ALuint, ALenum, ALfloat]
# /usr/include/AL/al.h:525
alSource3f = _lib.alSource3f
alSource3f.restype = None
alSource3f.argtypes = [ALuint, ALenum, ALfloat, ALfloat, ALfloat]
# /usr/include/AL/al.h:527
alSourcefv = _lib.alSourcefv
alSourcefv.restype = None
alSourcefv.argtypes = [ALuint, ALenum, POINTER(ALfloat)]
# /usr/include/AL/al.h:529
alSourcei = _lib.alSourcei
alSourcei.restype = None
alSourcei.argtypes = [ALuint, ALenum, ALint]
# /usr/include/AL/al.h:531
#alSource3i = _lib.alSource3i
#alSource3i.restype = None
#alSource3i.argtypes = [ALuint, ALenum, ALint, ALint, ALint]
# /usr/include/AL/al.h:533
#alSourceiv = _lib.alSourceiv
#alSourceiv.restype = None
#alSourceiv.argtypes = [ALuint, ALenum, POINTER(ALint)]
# /usr/include/AL/al.h:538
alGetSourcef = _lib.alGetSourcef
alGetSourcef.restype = None
alGetSourcef.argtypes = [ALuint, ALenum, POINTER(ALfloat)]
# /usr/include/AL/al.h:540
alGetSource3f = _lib.alGetSource3f
alGetSource3f.restype = None
alGetSource3f.argtypes = [ALuint, ALenum, POINTER(ALfloat), POINTER(ALfloat), POINTER(ALfloat)]
# /usr/include/AL/al.h:542
alGetSourcefv = _lib.alGetSourcefv
alGetSourcefv.restype = None
alGetSourcefv.argtypes = [ALuint, ALenum, POINTER(ALfloat)]
# /usr/include/AL/al.h:544
alGetSourcei = _lib.alGetSourcei
alGetSourcei.restype = None
alGetSourcei.argtypes = [ALuint, ALenum, POINTER(ALint)]
# /usr/include/AL/al.h:546
#alGetSource3i = _lib.alGetSource3i
#alGetSource3i.restype = None
#alGetSource3i.argtypes = [ALuint, ALenum, POINTER(ALint), POINTER(ALint), POINTER(ALint)]
# /usr/include/AL/al.h:548
alGetSourceiv = _lib.alGetSourceiv
alGetSourceiv.restype = None
alGetSourceiv.argtypes = [ALuint, ALenum, POINTER(ALint)]
# /usr/include/AL/al.h:556
alSourcePlayv = _lib.alSourcePlayv
alSourcePlayv.restype = None
alSourcePlayv.argtypes = [ALsizei, POINTER(ALuint)]
# /usr/include/AL/al.h:559
alSourceStopv = _lib.alSourceStopv
alSourceStopv.restype = None
alSourceStopv.argtypes = [ALsizei, POINTER(ALuint)]
# /usr/include/AL/al.h:562
alSourceRewindv = _lib.alSourceRewindv
alSourceRewindv.restype = None
alSourceRewindv.argtypes = [ALsizei, POINTER(ALuint)]
# /usr/include/AL/al.h:565
alSourcePausev = _lib.alSourcePausev
alSourcePausev.restype = None
alSourcePausev.argtypes = [ALsizei, POINTER(ALuint)]
# /usr/include/AL/al.h:572
alSourcePlay = _lib.alSourcePlay
alSourcePlay.restype = None
alSourcePlay.argtypes = [ALuint]
# /usr/include/AL/al.h:575
alSourceStop = _lib.alSourceStop
alSourceStop.restype = None
alSourceStop.argtypes = [ALuint]
# /usr/include/AL/al.h:578
alSourceRewind = _lib.alSourceRewind
alSourceRewind.restype = None
alSourceRewind.argtypes = [ALuint]
# /usr/include/AL/al.h:581
alSourcePause = _lib.alSourcePause
alSourcePause.restype = None
alSourcePause.argtypes = [ALuint]
# /usr/include/AL/al.h:586
alSourceQueueBuffers = _lib.alSourceQueueBuffers
alSourceQueueBuffers.restype = None
alSourceQueueBuffers.argtypes = [ALuint, ALsizei, POINTER(ALuint)]
# /usr/include/AL/al.h:588
alSourceUnqueueBuffers = _lib.alSourceUnqueueBuffers
alSourceUnqueueBuffers.restype = None
alSourceUnqueueBuffers.argtypes = [ALuint, ALsizei, POINTER(ALuint)]
# /usr/include/AL/al.h:606
alGenBuffers = _lib.alGenBuffers
alGenBuffers.restype = None
alGenBuffers.argtypes = [ALsizei, POINTER(ALuint)]
# /usr/include/AL/al.h:609
alDeleteBuffers = _lib.alDeleteBuffers
alDeleteBuffers.restype = None
alDeleteBuffers.argtypes = [ALsizei, POINTER(ALuint)]
# /usr/include/AL/al.h:612
alIsBuffer = _lib.alIsBuffer
alIsBuffer.restype = ALboolean
alIsBuffer.argtypes = [ALuint]
# /usr/include/AL/al.h:615
alBufferData = _lib.alBufferData
alBufferData.restype = None
alBufferData.argtypes = [ALuint, ALenum, POINTER(ALvoid), ALsizei, ALsizei]
# /usr/include/AL/al.h:620
alBufferf = _lib.alBufferf
alBufferf.restype = None
alBufferf.argtypes = [ALuint, ALenum, ALfloat]
# /usr/include/AL/al.h:622
alBuffer3f = _lib.alBuffer3f
alBuffer3f.restype = None
alBuffer3f.argtypes = [ALuint, ALenum, ALfloat, ALfloat, ALfloat]
# /usr/include/AL/al.h:624
alBufferfv = _lib.alBufferfv
alBufferfv.restype = None
alBufferfv.argtypes = [ALuint, ALenum, POINTER(ALfloat)]
# /usr/include/AL/al.h:626
alBufferi = _lib.alBufferi
alBufferi.restype = None
alBufferi.argtypes = [ALuint, ALenum, ALint]
# /usr/include/AL/al.h:628
alBuffer3i = _lib.alBuffer3i
alBuffer3i.restype = None
alBuffer3i.argtypes = [ALuint, ALenum, ALint, ALint, ALint]
# /usr/include/AL/al.h:630
alBufferiv = _lib.alBufferiv
alBufferiv.restype = None
alBufferiv.argtypes = [ALuint, ALenum, POINTER(ALint)]
# /usr/include/AL/al.h:635
alGetBufferf = _lib.alGetBufferf
alGetBufferf.restype = None
alGetBufferf.argtypes = [ALuint, ALenum, POINTER(ALfloat)]
# /usr/include/AL/al.h:637
alGetBuffer3f = _lib.alGetBuffer3f
alGetBuffer3f.restype = None
alGetBuffer3f.argtypes = [ALuint, ALenum, POINTER(ALfloat), POINTER(ALfloat), POINTER(ALfloat)]
# /usr/include/AL/al.h:639
alGetBufferfv = _lib.alGetBufferfv
alGetBufferfv.restype = None
alGetBufferfv.argtypes = [ALuint, ALenum, POINTER(ALfloat)]
# /usr/include/AL/al.h:641
alGetBufferi = _lib.alGetBufferi
alGetBufferi.restype = None
alGetBufferi.argtypes = [ALuint, ALenum, POINTER(ALint)]
# /usr/include/AL/al.h:643
alGetBuffer3i = _lib.alGetBuffer3i
alGetBuffer3i.restype = None
alGetBuffer3i.argtypes = [ALuint, ALenum, POINTER(ALint), POINTER(ALint), POINTER(ALint)]
# /usr/include/AL/al.h:645
alGetBufferiv = _lib.alGetBufferiv
alGetBufferiv.restype = None
alGetBufferiv.argtypes = [ALuint, ALenum, POINTER(ALint)]
# /usr/include/AL/al.h:651
alDopplerFactor = _lib.alDopplerFactor
alDopplerFactor.restype = None
alDopplerFactor.argtypes = [ALfloat]
# /usr/include/AL/al.h:653
alDopplerVelocity = _lib.alDopplerVelocity
alDopplerVelocity.restype = None
alDopplerVelocity.argtypes = [ALfloat]
# /usr/include/AL/al.h:655
alSpeedOfSound = _lib.alSpeedOfSound
alSpeedOfSound.restype = None
alSpeedOfSound.argtypes = [ALfloat]
# /usr/include/AL/al.h:657
alDistanceModel = _lib.alDistanceModel
alDistanceModel.restype = None
alDistanceModel.argtypes = [ALenum]
LPALENABLE = CFUNCTYPE(None, ALenum) # /usr/include/AL/al.h:662
LPALDISABLE = CFUNCTYPE(None, ALenum) # /usr/include/AL/al.h:663
LPALISENABLED = CFUNCTYPE(ALboolean, ALenum) # /usr/include/AL/al.h:664
LPALGETSTRING = CFUNCTYPE(POINTER(ALchar), ALenum) # /usr/include/AL/al.h:665
LPALGETBOOLEANV = CFUNCTYPE(None, ALenum, POINTER(ALboolean)) # /usr/include/AL/al.h:666
LPALGETINTEGERV = CFUNCTYPE(None, ALenum, POINTER(ALint)) # /usr/include/AL/al.h:667
LPALGETFLOATV = CFUNCTYPE(None, ALenum, POINTER(ALfloat)) # /usr/include/AL/al.h:668
LPALGETDOUBLEV = CFUNCTYPE(None, ALenum, POINTER(ALdouble)) # /usr/include/AL/al.h:669
LPALGETBOOLEAN = CFUNCTYPE(ALboolean, ALenum) # /usr/include/AL/al.h:670
LPALGETINTEGER = CFUNCTYPE(ALint, ALenum) # /usr/include/AL/al.h:671
LPALGETFLOAT = CFUNCTYPE(ALfloat, ALenum) # /usr/include/AL/al.h:672
LPALGETDOUBLE = CFUNCTYPE(ALdouble, ALenum) # /usr/include/AL/al.h:673
LPALGETERROR = CFUNCTYPE(ALenum) # /usr/include/AL/al.h:674
LPALISEXTENSIONPRESENT = CFUNCTYPE(ALboolean, POINTER(ALchar)) # /usr/include/AL/al.h:675
LPALGETPROCADDRESS = CFUNCTYPE(POINTER(c_void), POINTER(ALchar)) # /usr/include/AL/al.h:676
LPALGETENUMVALUE = CFUNCTYPE(ALenum, POINTER(ALchar)) # /usr/include/AL/al.h:677
LPALLISTENERF = CFUNCTYPE(None, ALenum, ALfloat) # /usr/include/AL/al.h:678
LPALLISTENER3F = CFUNCTYPE(None, ALenum, ALfloat, ALfloat, ALfloat) # /usr/include/AL/al.h:679
LPALLISTENERFV = CFUNCTYPE(None, ALenum, POINTER(ALfloat)) # /usr/include/AL/al.h:680
LPALLISTENERI = CFUNCTYPE(None, ALenum, ALint) # /usr/include/AL/al.h:681
LPALLISTENER3I = CFUNCTYPE(None, ALenum, ALint, ALint, ALint) # /usr/include/AL/al.h:682
LPALLISTENERIV = CFUNCTYPE(None, ALenum, POINTER(ALint)) # /usr/include/AL/al.h:683
LPALGETLISTENERF = CFUNCTYPE(None, ALenum, POINTER(ALfloat)) # /usr/include/AL/al.h:684
LPALGETLISTENER3F = CFUNCTYPE(None, ALenum, POINTER(ALfloat), POINTER(ALfloat), POINTER(ALfloat)) # /usr/include/AL/al.h:685
LPALGETLISTENERFV = CFUNCTYPE(None, ALenum, POINTER(ALfloat)) # /usr/include/AL/al.h:686
LPALGETLISTENERI = CFUNCTYPE(None, ALenum, POINTER(ALint)) # /usr/include/AL/al.h:687
LPALGETLISTENER3I = CFUNCTYPE(None, ALenum, POINTER(ALint), POINTER(ALint), POINTER(ALint)) # /usr/include/AL/al.h:688
LPALGETLISTENERIV = CFUNCTYPE(None, ALenum, POINTER(ALint)) # /usr/include/AL/al.h:689
LPALGENSOURCES = CFUNCTYPE(None, ALsizei, POINTER(ALuint)) # /usr/include/AL/al.h:690
LPALDELETESOURCES = CFUNCTYPE(None, ALsizei, POINTER(ALuint)) # /usr/include/AL/al.h:691
LPALISSOURCE = CFUNCTYPE(ALboolean, ALuint) # /usr/include/AL/al.h:692
LPALSOURCEF = CFUNCTYPE(None, ALuint, ALenum, ALfloat) # /usr/include/AL/al.h:693
LPALSOURCE3F = CFUNCTYPE(None, ALuint, ALenum, ALfloat, ALfloat, ALfloat) # /usr/include/AL/al.h:694
LPALSOURCEFV = CFUNCTYPE(None, ALuint, ALenum, POINTER(ALfloat)) # /usr/include/AL/al.h:695
LPALSOURCEI = CFUNCTYPE(None, ALuint, ALenum, ALint) # /usr/include/AL/al.h:696
LPALSOURCE3I = CFUNCTYPE(None, ALuint, ALenum, ALint, ALint, ALint) # /usr/include/AL/al.h:697
LPALSOURCEIV = CFUNCTYPE(None, ALuint, ALenum, POINTER(ALint)) # /usr/include/AL/al.h:698
LPALGETSOURCEF = CFUNCTYPE(None, ALuint, ALenum, POINTER(ALfloat)) # /usr/include/AL/al.h:699
LPALGETSOURCE3F = CFUNCTYPE(None, ALuint, ALenum, POINTER(ALfloat), POINTER(ALfloat), POINTER(ALfloat)) # /usr/include/AL/al.h:700
LPALGETSOURCEFV = CFUNCTYPE(None, ALuint, ALenum, POINTER(ALfloat)) # /usr/include/AL/al.h:701
LPALGETSOURCEI = CFUNCTYPE(None, ALuint, ALenum, POINTER(ALint)) # /usr/include/AL/al.h:702
LPALGETSOURCE3I = CFUNCTYPE(None, ALuint, ALenum, POINTER(ALint), POINTER(ALint), POINTER(ALint)) # /usr/include/AL/al.h:703
LPALGETSOURCEIV = CFUNCTYPE(None, ALuint, ALenum, POINTER(ALint)) # /usr/include/AL/al.h:704
LPALSOURCEPLAYV = CFUNCTYPE(None, ALsizei, POINTER(ALuint)) # /usr/include/AL/al.h:705
LPALSOURCESTOPV = CFUNCTYPE(None, ALsizei, POINTER(ALuint)) # /usr/include/AL/al.h:706
LPALSOURCEREWINDV = CFUNCTYPE(None, ALsizei, POINTER(ALuint)) # /usr/include/AL/al.h:707
LPALSOURCEPAUSEV = CFUNCTYPE(None, ALsizei, POINTER(ALuint)) # /usr/include/AL/al.h:708
LPALSOURCEPLAY = CFUNCTYPE(None, ALuint) # /usr/include/AL/al.h:709
LPALSOURCESTOP = CFUNCTYPE(None, ALuint) # /usr/include/AL/al.h:710
LPALSOURCEREWIND = CFUNCTYPE(None, ALuint) # /usr/include/AL/al.h:711
LPALSOURCEPAUSE = CFUNCTYPE(None, ALuint) # /usr/include/AL/al.h:712
LPALSOURCEQUEUEBUFFERS = CFUNCTYPE(None, ALuint, ALsizei, POINTER(ALuint)) # /usr/include/AL/al.h:713
LPALSOURCEUNQUEUEBUFFERS = CFUNCTYPE(None, ALuint, ALsizei, POINTER(ALuint)) # /usr/include/AL/al.h:714
LPALGENBUFFERS = CFUNCTYPE(None, ALsizei, POINTER(ALuint)) # /usr/include/AL/al.h:715
LPALDELETEBUFFERS = CFUNCTYPE(None, ALsizei, POINTER(ALuint)) # /usr/include/AL/al.h:716
LPALISBUFFER = CFUNCTYPE(ALboolean, ALuint) # /usr/include/AL/al.h:717
LPALBUFFERDATA = CFUNCTYPE(None, ALuint, ALenum, POINTER(ALvoid), ALsizei, ALsizei) # /usr/include/AL/al.h:718
LPALBUFFERF = CFUNCTYPE(None, ALuint, ALenum, ALfloat) # /usr/include/AL/al.h:719
LPALBUFFER3F = CFUNCTYPE(None, ALuint, ALenum, ALfloat, ALfloat, ALfloat) # /usr/include/AL/al.h:720
LPALBUFFERFV = CFUNCTYPE(None, ALuint, ALenum, POINTER(ALfloat)) # /usr/include/AL/al.h:721
LPALBUFFERI = CFUNCTYPE(None, ALuint, ALenum, ALint) # /usr/include/AL/al.h:722
LPALBUFFER3I = CFUNCTYPE(None, ALuint, ALenum, ALint, ALint, ALint) # /usr/include/AL/al.h:723
LPALBUFFERIV = CFUNCTYPE(None, ALuint, ALenum, POINTER(ALint)) # /usr/include/AL/al.h:724
LPALGETBUFFERF = CFUNCTYPE(None, ALuint, ALenum, POINTER(ALfloat)) # /usr/include/AL/al.h:725
LPALGETBUFFER3F = CFUNCTYPE(None, ALuint, ALenum, POINTER(ALfloat), POINTER(ALfloat), POINTER(ALfloat)) # /usr/include/AL/al.h:726
LPALGETBUFFERFV = CFUNCTYPE(None, ALuint, ALenum, POINTER(ALfloat)) # /usr/include/AL/al.h:727
LPALGETBUFFERI = CFUNCTYPE(None, ALuint, ALenum, POINTER(ALint)) # /usr/include/AL/al.h:728
LPALGETBUFFER3I = CFUNCTYPE(None, ALuint, ALenum, POINTER(ALint), POINTER(ALint), POINTER(ALint)) # /usr/include/AL/al.h:729
LPALGETBUFFERIV = CFUNCTYPE(None, ALuint, ALenum, POINTER(ALint)) # /usr/include/AL/al.h:730
LPALDOPPLERFACTOR = CFUNCTYPE(None, ALfloat) # /usr/include/AL/al.h:731
LPALDOPPLERVELOCITY = CFUNCTYPE(None, ALfloat) # /usr/include/AL/al.h:732
LPALSPEEDOFSOUND = CFUNCTYPE(None, ALfloat) # /usr/include/AL/al.h:733
LPALDISTANCEMODEL = CFUNCTYPE(None, ALenum) # /usr/include/AL/al.h:734
__all__ = ['AL_API', 'ALAPI', 'AL_INVALID', 'AL_ILLEGAL_ENUM',
'AL_ILLEGAL_COMMAND', 'ALboolean', 'ALchar', 'ALbyte', 'ALubyte', 'ALshort',
'ALushort', 'ALint', 'ALuint', 'ALsizei', 'ALenum', 'ALfloat', 'ALdouble',
'ALvoid', 'AL_NONE', 'AL_FALSE', 'AL_TRUE', 'AL_SOURCE_RELATIVE',
'AL_CONE_INNER_ANGLE', 'AL_CONE_OUTER_ANGLE', 'AL_PITCH', 'AL_POSITION',
'AL_DIRECTION', 'AL_VELOCITY', 'AL_LOOPING', 'AL_BUFFER', 'AL_GAIN',
'AL_MIN_GAIN', 'AL_MAX_GAIN', 'AL_ORIENTATION', 'AL_SOURCE_STATE',
'AL_INITIAL', 'AL_PLAYING', 'AL_PAUSED', 'AL_STOPPED', 'AL_BUFFERS_QUEUED',
'AL_BUFFERS_PROCESSED', 'AL_SEC_OFFSET', 'AL_SAMPLE_OFFSET', 'AL_BYTE_OFFSET',
'AL_SOURCE_TYPE', 'AL_STATIC', 'AL_STREAMING', 'AL_UNDETERMINED',
'AL_FORMAT_MONO8', 'AL_FORMAT_MONO16', 'AL_FORMAT_STEREO8',
'AL_FORMAT_STEREO16', 'AL_REFERENCE_DISTANCE', 'AL_ROLLOFF_FACTOR',
'AL_CONE_OUTER_GAIN', 'AL_MAX_DISTANCE', 'AL_FREQUENCY', 'AL_BITS',
'AL_CHANNELS', 'AL_SIZE', 'AL_UNUSED', 'AL_PENDING', 'AL_PROCESSED',
'AL_NO_ERROR', 'AL_INVALID_NAME', 'AL_INVALID_ENUM', 'AL_INVALID_VALUE',
'AL_INVALID_OPERATION', 'AL_OUT_OF_MEMORY', 'AL_VENDOR', 'AL_VERSION',
'AL_RENDERER', 'AL_EXTENSIONS', 'AL_DOPPLER_FACTOR', 'AL_DOPPLER_VELOCITY',
'AL_SPEED_OF_SOUND', 'AL_DISTANCE_MODEL', 'AL_INVERSE_DISTANCE',
'AL_INVERSE_DISTANCE_CLAMPED', 'AL_LINEAR_DISTANCE',
'AL_LINEAR_DISTANCE_CLAMPED', 'AL_EXPONENT_DISTANCE',
'AL_EXPONENT_DISTANCE_CLAMPED', 'alEnable', 'alDisable', 'alIsEnabled',
'alGetString', 'alGetBooleanv', 'alGetIntegerv', 'alGetFloatv',
'alGetDoublev', 'alGetBoolean', 'alGetInteger', 'alGetFloat', 'alGetDouble',
'alGetError', 'alIsExtensionPresent', 'alGetProcAddress', 'alGetEnumValue',
'alListenerf', 'alListener3f', 'alListenerfv', 'alListeneri', 'alListener3i',
'alListeneriv', 'alGetListenerf', 'alGetListener3f', 'alGetListenerfv',
'alGetListeneri', 'alGetListener3i', 'alGetListeneriv', 'alGenSources',
'alDeleteSources', 'alIsSource', 'alSourcef', 'alSource3f', 'alSourcefv',
'alSourcei', 'alSource3i', 'alSourceiv', 'alGetSourcef', 'alGetSource3f',
'alGetSourcefv', 'alGetSourcei', 'alGetSource3i', 'alGetSourceiv',
'alSourcePlayv', 'alSourceStopv', 'alSourceRewindv', 'alSourcePausev',
'alSourcePlay', 'alSourceStop', 'alSourceRewind', 'alSourcePause',
'alSourceQueueBuffers', 'alSourceUnqueueBuffers', 'alGenBuffers',
'alDeleteBuffers', 'alIsBuffer', 'alBufferData', 'alBufferf', 'alBuffer3f',
'alBufferfv', 'alBufferi', 'alBuffer3i', 'alBufferiv', 'alGetBufferf',
'alGetBuffer3f', 'alGetBufferfv', 'alGetBufferi', 'alGetBuffer3i',
'alGetBufferiv', 'alDopplerFactor', 'alDopplerVelocity', 'alSpeedOfSound',
'alDistanceModel', 'LPALENABLE', 'LPALDISABLE', 'LPALISENABLED',
'LPALGETSTRING', 'LPALGETBOOLEANV', 'LPALGETINTEGERV', 'LPALGETFLOATV',
'LPALGETDOUBLEV', 'LPALGETBOOLEAN', 'LPALGETINTEGER', 'LPALGETFLOAT',
'LPALGETDOUBLE', 'LPALGETERROR', 'LPALISEXTENSIONPRESENT',
'LPALGETPROCADDRESS', 'LPALGETENUMVALUE', 'LPALLISTENERF', 'LPALLISTENER3F',
'LPALLISTENERFV', 'LPALLISTENERI', 'LPALLISTENER3I', 'LPALLISTENERIV',
'LPALGETLISTENERF', 'LPALGETLISTENER3F', 'LPALGETLISTENERFV',
'LPALGETLISTENERI', 'LPALGETLISTENER3I', 'LPALGETLISTENERIV',
'LPALGENSOURCES', 'LPALDELETESOURCES', 'LPALISSOURCE', 'LPALSOURCEF',
'LPALSOURCE3F', 'LPALSOURCEFV', 'LPALSOURCEI', 'LPALSOURCE3I', 'LPALSOURCEIV',
'LPALGETSOURCEF', 'LPALGETSOURCE3F', 'LPALGETSOURCEFV', 'LPALGETSOURCEI',
'LPALGETSOURCE3I', 'LPALGETSOURCEIV', 'LPALSOURCEPLAYV', 'LPALSOURCESTOPV',
'LPALSOURCEREWINDV', 'LPALSOURCEPAUSEV', 'LPALSOURCEPLAY', 'LPALSOURCESTOP',
'LPALSOURCEREWIND', 'LPALSOURCEPAUSE', 'LPALSOURCEQUEUEBUFFERS',
'LPALSOURCEUNQUEUEBUFFERS', 'LPALGENBUFFERS', 'LPALDELETEBUFFERS',
'LPALISBUFFER', 'LPALBUFFERDATA', 'LPALBUFFERF', 'LPALBUFFER3F',
'LPALBUFFERFV', 'LPALBUFFERI', 'LPALBUFFER3I', 'LPALBUFFERIV',
'LPALGETBUFFERF', 'LPALGETBUFFER3F', 'LPALGETBUFFERFV', 'LPALGETBUFFERI',
'LPALGETBUFFER3I', 'LPALGETBUFFERIV', 'LPALDOPPLERFACTOR',
'LPALDOPPLERVELOCITY', 'LPALSPEEDOFSOUND', 'LPALDISTANCEMODEL']
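# Illustrative sketch only (not part of the generated wrapper): one possible way to
# drive these raw bindings directly. It assumes an OpenAL device/context has already
# been created through the ALC API (alc.h), which this module does not wrap; the tone
# parameters are arbitrary. Nothing below executes at import time.
def _example_play_tone(frequency=440, duration=0.5, sample_rate=11025):
    import math
    count = int(duration * sample_rate)
    # Build 16-bit mono PCM samples for a sine tone
    samples = (ALshort * count)(
        *(int(32000 * math.sin(2 * math.pi * frequency * i / sample_rate))
          for i in range(count)))
    buf = ALuint()
    alGenBuffers(1, buf)
    alBufferData(buf.value, AL_FORMAT_MONO16,
                 cast(samples, POINTER(ALvoid)), sizeof(samples), sample_rate)
    src = ALuint()
    alGenSources(1, src)
    alSourcei(src.value, AL_BUFFER, buf.value)
    alSourcePlay(src.value)
    # Caller can poll AL_SOURCE_STATE on src and delete the source/buffer afterwards
    return src.value, buf.value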
| bsd-3-clause | -4,812,014,104,058,694,000 | 41.512977 | 131 | 0.740465 | false |
sirex/Misago | misago/threads/views/generic/threads/actions.py | 8 | 1141 | from django.contrib import messages
from django.core.paginator import Paginator
from django.shortcuts import redirect
from django.utils.translation import ungettext, ugettext_lazy, ugettext as _
from misago.threads import moderation
from misago.threads.views.generic.actions import ActionsBase, ReloadAfterDelete
__all__ = ['Actions', 'ReloadAfterDelete']
class Actions(ActionsBase):
select_items_message = ugettext_lazy(
"You have to select at least one thread.")
is_mass_action = True
def redirect_after_deletion(self, request, queryset):
paginator = Paginator(queryset, 20, 10)
current_page = int(request.resolver_match.kwargs.get('page', 0))
if paginator.num_pages < current_page:
namespace = request.resolver_match.namespace
url_name = request.resolver_match.url_name
kwars = request.resolver_match.kwargs
kwars['page'] = paginator.num_pages
if kwars['page'] == 1:
del kwars['page']
return redirect('%s:%s' % (namespace, url_name), **kwars)
else:
return redirect(request.path)
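# Illustrative sketch only (not part of the original module): the page-clamping rule
# used by redirect_after_deletion above, stripped of the request/url-resolver
# machinery so it can be read or unit-tested in isolation. The helper name is made up.
def _clamped_page_after_deletion(current_page, num_pages):
    # After a mass deletion the page the user was on may no longer exist; fall back
    # to the last remaining page, and drop the page kwarg entirely when that page is 1.
    if num_pages < current_page:
        return None if num_pages == 1 else num_pages
    return current_page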
| gpl-2.0 | -4,070,534,243,884,151,000 | 35.806452 | 79 | 0.670465 | false |
TheBoegl/letsencrypt | letsencrypt-apache/letsencrypt_apache/augeas_configurator.py | 6 | 7387 | """Class of Augeas Configurators."""
import logging
import augeas
from letsencrypt import errors
from letsencrypt import reverter
from letsencrypt.plugins import common
from letsencrypt_apache import constants
logger = logging.getLogger(__name__)
class AugeasConfigurator(common.Plugin):
"""Base Augeas Configurator class.
:ivar config: Configuration.
:type config: :class:`~letsencrypt.interfaces.IConfig`
:ivar aug: Augeas object
:type aug: :class:`augeas.Augeas`
:ivar str save_notes: Human-readable configuration change notes
:ivar reverter: saves and reverts checkpoints
:type reverter: :class:`letsencrypt.reverter.Reverter`
"""
def __init__(self, *args, **kwargs):
super(AugeasConfigurator, self).__init__(*args, **kwargs)
self.aug = augeas.Augeas(
# specify a directory to load our preferred lens from
loadpath=constants.AUGEAS_LENS_DIR,
# Do not save backup (we do it ourselves), do not load
# anything by default
flags=(augeas.Augeas.NONE | augeas.Augeas.NO_MODL_AUTOLOAD))
self.save_notes = ""
# See if any temporary changes need to be recovered
# This needs to occur before VirtualHost objects are setup...
# because this will change the underlying configuration and potential
# vhosts
self.reverter = reverter.Reverter(self.config)
self.recovery_routine()
def check_parsing_errors(self, lens):
"""Verify Augeas can parse all of the lens files.
:param str lens: lens to check for errors
:raises .errors.PluginError: If there has been an error in parsing with
the specified lens.
"""
error_files = self.aug.match("/augeas//error")
for path in error_files:
# Check to see if it was an error resulting from the use of
# the httpd lens
lens_path = self.aug.get(path + "/lens")
# As aug.get may return null
if lens_path and lens in lens_path:
                msg = (
                    "There has been an error in parsing the file (%s): %s" %
                    # Strip off /augeas/files and /error
                    (path[13:len(path) - 6], self.aug.get(path + "/message")))
raise errors.PluginError(msg)
# TODO: Cleanup this function
def save(self, title=None, temporary=False):
"""Saves all changes to the configuration files.
This function first checks for save errors, if none are found,
all configuration changes made will be saved. According to the
function parameters. If an exception is raised, a new checkpoint
was not created.
:param str title: The title of the save. If a title is given, the
configuration will be saved as a new checkpoint and put in a
timestamped directory.
:param bool temporary: Indicates whether the changes made will
be quickly reversed in the future (ie. challenges)
:raises .errors.PluginError: If there was an error in Augeas, in
an attempt to save the configuration, or an error creating a
checkpoint
"""
save_state = self.aug.get("/augeas/save")
self.aug.set("/augeas/save", "noop")
# Existing Errors
ex_errs = self.aug.match("/augeas//error")
try:
# This is a noop save
self.aug.save()
except (RuntimeError, IOError):
self._log_save_errors(ex_errs)
# Erase Save Notes
self.save_notes = ""
raise errors.PluginError(
"Error saving files, check logs for more info.")
# Retrieve list of modified files
# Note: Noop saves can cause the file to be listed twice, I used a
# set to remove this possibility. This is a known augeas 0.10 error.
save_paths = self.aug.match("/augeas/events/saved")
# If the augeas tree didn't change, no files were saved and a backup
# should not be created
if save_paths:
save_files = set()
for path in save_paths:
save_files.add(self.aug.get(path)[6:])
try:
# Create Checkpoint
if temporary:
self.reverter.add_to_temp_checkpoint(
save_files, self.save_notes)
else:
self.reverter.add_to_checkpoint(save_files,
self.save_notes)
except errors.ReverterError as err:
raise errors.PluginError(str(err))
self.aug.set("/augeas/save", save_state)
self.save_notes = ""
self.aug.save()
if title and not temporary:
try:
self.reverter.finalize_checkpoint(title)
except errors.ReverterError as err:
raise errors.PluginError(str(err))
def _log_save_errors(self, ex_errs):
"""Log errors due to bad Augeas save.
:param list ex_errs: Existing errors before save
"""
# Check for the root of save problems
new_errs = self.aug.match("/augeas//error")
# logger.error("During Save - %s", mod_conf)
logger.error("Unable to save files: %s. Attempted Save Notes: %s",
", ".join(err[13:len(err) - 6] for err in new_errs
# Only new errors caused by recent save
if err not in ex_errs), self.save_notes)
# Wrapper functions for Reverter class
def recovery_routine(self):
"""Revert all previously modified files.
Reverts all modified files that have not been saved as a checkpoint
:raises .errors.PluginError: If unable to recover the configuration
"""
try:
self.reverter.recovery_routine()
except errors.ReverterError as err:
raise errors.PluginError(str(err))
# Need to reload configuration after these changes take effect
self.aug.load()
def revert_challenge_config(self):
"""Used to cleanup challenge configurations.
:raises .errors.PluginError: If unable to revert the challenge config.
"""
try:
self.reverter.revert_temporary_config()
except errors.ReverterError as err:
raise errors.PluginError(str(err))
self.aug.load()
def rollback_checkpoints(self, rollback=1):
"""Rollback saved checkpoints.
:param int rollback: Number of checkpoints to revert
:raises .errors.PluginError: If there is a problem with the input or
the function is unable to correctly revert the configuration
"""
try:
self.reverter.rollback_checkpoints(rollback)
except errors.ReverterError as err:
raise errors.PluginError(str(err))
self.aug.load()
def view_config_changes(self):
"""Show all of the configuration changes that have taken place.
:raises .errors.PluginError: If there is a problem while processing
the checkpoints directories.
"""
try:
self.reverter.view_config_changes()
except errors.ReverterError as err:
raise errors.PluginError(str(err))
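# Illustrative sketch only (not part of the original plugin): roughly how a concrete
# configurator built on this base class is expected to drive the checkpoint workflow --
# record notes, mutate the Augeas tree, persist, and optionally roll back. The helper
# name and the Augeas path mentioned in the comment are made-up examples.
def _example_checkpoint_workflow(configurator):
    configurator.save_notes += "example: toggling a directive\n"
    # ... mutate the parse tree here, e.g. configurator.aug.set("/files/...", "value") ...
    configurator.save(title="example-change")   # creates a finalized, titled checkpoint
    configurator.rollback_checkpoints(1)        # reverts that checkpoint again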
| apache-2.0 | -7,626,691,757,195,288,000 | 35.034146 | 79 | 0.600514 | false |
cedricporter/everlost | frameworks/cocos2d-x/tools/bindings-generator/backup/clang-llvm-3.3-pybinding/enumerations.py | 307 | 1077 | #===- enumerations.py - Python Enumerations ------------------*- python -*--===#
#
# The LLVM Compiler Infrastructure
#
# This file is distributed under the University of Illinois Open Source
# License. See LICENSE.TXT for details.
#
#===------------------------------------------------------------------------===#
"""
Clang Enumerations
==================
This module provides static definitions of enumerations that exist in libclang.
Enumerations are typically defined as a list of tuples. The exported values are
typically munged into other types or classes at module load time.
All enumerations are centrally defined in this file so they are all grouped
together and easier to audit. And, maybe even one day this file will be
automatically generated by scanning the libclang headers!
"""
# Maps to CXTokenKind. Note that libclang maintains a separate set of token
# enumerations from the C++ API.
TokenKinds = [
('PUNCTUATION', 0),
('KEYWORD', 1),
('IDENTIFIER', 2),
('LITERAL', 3),
('COMMENT', 4),
]
__all__ = ['TokenKinds']
| apache-2.0 | -1,907,653,916,701,922,300 | 30.676471 | 80 | 0.635097 | false |
sebastic/QGIS | python/plugins/processing/algs/qgis/ZonalStatistics.py | 2 | 10813 | # -*- coding: utf-8 -*-
"""
***************************************************************************
ZonalStatistics.py
---------------------
Date : August 2013
Copyright : (C) 2013 by Alexander Bruy
Email : alexander dot bruy at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Alexander Bruy'
__date__ = 'August 2013'
__copyright__ = '(C) 2013, Alexander Bruy'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
import numpy
try:
from scipy.stats.mstats import mode
hasSciPy = True
except:
hasSciPy = False
from osgeo import gdal, ogr, osr
from qgis.core import QgsRectangle, QgsGeometry, QgsFeature
from processing.core.GeoAlgorithm import GeoAlgorithm
from processing.core.parameters import ParameterVector
from processing.core.parameters import ParameterRaster
from processing.core.parameters import ParameterString
from processing.core.parameters import ParameterNumber
from processing.core.parameters import ParameterBoolean
from processing.core.outputs import OutputVector
from processing.tools.raster import mapToPixel
from processing.tools import dataobjects, vector
class ZonalStatistics(GeoAlgorithm):
INPUT_RASTER = 'INPUT_RASTER'
RASTER_BAND = 'RASTER_BAND'
INPUT_VECTOR = 'INPUT_VECTOR'
COLUMN_PREFIX = 'COLUMN_PREFIX'
GLOBAL_EXTENT = 'GLOBAL_EXTENT'
OUTPUT_LAYER = 'OUTPUT_LAYER'
def defineCharacteristics(self):
self.name, self.i18n_name = self.trAlgorithm('Zonal Statistics')
self.group, self.i18n_group = self.trAlgorithm('Raster tools')
self.addParameter(ParameterRaster(self.INPUT_RASTER,
self.tr('Raster layer')))
self.addParameter(ParameterNumber(self.RASTER_BAND,
self.tr('Raster band'), 1, 999, 1))
self.addParameter(ParameterVector(self.INPUT_VECTOR,
self.tr('Vector layer containing zones'),
[ParameterVector.VECTOR_TYPE_POLYGON]))
self.addParameter(ParameterString(self.COLUMN_PREFIX,
self.tr('Output column prefix'), '_'))
self.addParameter(ParameterBoolean(self.GLOBAL_EXTENT,
self.tr('Load whole raster in memory')))
self.addOutput(OutputVector(self.OUTPUT_LAYER, self.tr('Zonal statistics')))
def processAlgorithm(self, progress):
""" Based on code by Matthew Perry
https://gist.github.com/perrygeo/5667173
"""
layer = dataobjects.getObjectFromUri(self.getParameterValue(self.INPUT_VECTOR))
rasterPath = unicode(self.getParameterValue(self.INPUT_RASTER))
bandNumber = self.getParameterValue(self.RASTER_BAND)
columnPrefix = self.getParameterValue(self.COLUMN_PREFIX)
useGlobalExtent = self.getParameterValue(self.GLOBAL_EXTENT)
rasterDS = gdal.Open(rasterPath, gdal.GA_ReadOnly)
geoTransform = rasterDS.GetGeoTransform()
rasterBand = rasterDS.GetRasterBand(bandNumber)
noData = rasterBand.GetNoDataValue()
cellXSize = abs(geoTransform[1])
cellYSize = abs(geoTransform[5])
rasterXSize = rasterDS.RasterXSize
rasterYSize = rasterDS.RasterYSize
rasterBBox = QgsRectangle(geoTransform[0], geoTransform[3] - cellYSize
* rasterYSize, geoTransform[0] + cellXSize
* rasterXSize, geoTransform[3])
rasterGeom = QgsGeometry.fromRect(rasterBBox)
crs = osr.SpatialReference()
crs.ImportFromProj4(str(layer.crs().toProj4()))
if useGlobalExtent:
xMin = rasterBBox.xMinimum()
xMax = rasterBBox.xMaximum()
yMin = rasterBBox.yMinimum()
yMax = rasterBBox.yMaximum()
(startColumn, startRow) = mapToPixel(xMin, yMax, geoTransform)
(endColumn, endRow) = mapToPixel(xMax, yMin, geoTransform)
width = endColumn - startColumn
height = endRow - startRow
srcOffset = (startColumn, startRow, width, height)
srcArray = rasterBand.ReadAsArray(*srcOffset)
srcArray = srcArray * rasterBand.GetScale() + rasterBand.GetOffset()
newGeoTransform = (
geoTransform[0] + srcOffset[0] * geoTransform[1],
geoTransform[1],
0.0,
geoTransform[3] + srcOffset[1] * geoTransform[5],
0.0,
geoTransform[5],
)
memVectorDriver = ogr.GetDriverByName('Memory')
memRasterDriver = gdal.GetDriverByName('MEM')
fields = layer.pendingFields()
(idxMin, fields) = vector.findOrCreateField(layer, fields,
columnPrefix + 'min', 21, 6)
(idxMax, fields) = vector.findOrCreateField(layer, fields,
columnPrefix + 'max', 21, 6)
(idxSum, fields) = vector.findOrCreateField(layer, fields,
columnPrefix + 'sum', 21, 6)
(idxCount, fields) = vector.findOrCreateField(layer, fields,
columnPrefix + 'count', 21, 6)
(idxMean, fields) = vector.findOrCreateField(layer, fields,
columnPrefix + 'mean', 21, 6)
(idxStd, fields) = vector.findOrCreateField(layer, fields,
columnPrefix + 'std', 21, 6)
(idxUnique, fields) = vector.findOrCreateField(layer, fields,
columnPrefix + 'unique', 21, 6)
(idxRange, fields) = vector.findOrCreateField(layer, fields,
columnPrefix + 'range', 21, 6)
(idxVar, fields) = vector.findOrCreateField(layer, fields,
columnPrefix + 'var', 21, 6)
(idxMedian, fields) = vector.findOrCreateField(layer, fields,
columnPrefix + 'median', 21, 6)
if hasSciPy:
(idxMode, fields) = vector.findOrCreateField(layer, fields,
columnPrefix + 'mode', 21, 6)
writer = self.getOutputFromName(self.OUTPUT_LAYER).getVectorWriter(
fields.toList(), layer.dataProvider().geometryType(), layer.crs())
outFeat = QgsFeature()
outFeat.initAttributes(len(fields))
outFeat.setFields(fields)
features = vector.features(layer)
total = 100.0 / len(features)
for current, f in enumerate(features):
geom = f.geometry()
intersectedGeom = rasterGeom.intersection(geom)
ogrGeom = ogr.CreateGeometryFromWkt(intersectedGeom.exportToWkt())
if not useGlobalExtent:
bbox = intersectedGeom.boundingBox()
xMin = bbox.xMinimum()
xMax = bbox.xMaximum()
yMin = bbox.yMinimum()
yMax = bbox.yMaximum()
(startColumn, startRow) = mapToPixel(xMin, yMax, geoTransform)
(endColumn, endRow) = mapToPixel(xMax, yMin, geoTransform)
width = endColumn - startColumn
height = endRow - startRow
if width == 0 or height == 0:
continue
srcOffset = (startColumn, startRow, width, height)
srcArray = rasterBand.ReadAsArray(*srcOffset)
srcArray = srcArray * rasterBand.GetScale() + rasterBand.GetOffset()
newGeoTransform = (
geoTransform[0] + srcOffset[0] * geoTransform[1],
geoTransform[1],
0.0,
geoTransform[3] + srcOffset[1] * geoTransform[5],
0.0,
geoTransform[5],
)
# Create a temporary vector layer in memory
memVDS = memVectorDriver.CreateDataSource('out')
memLayer = memVDS.CreateLayer('poly', crs, ogr.wkbPolygon)
ft = ogr.Feature(memLayer.GetLayerDefn())
ft.SetGeometry(ogrGeom)
memLayer.CreateFeature(ft)
ft.Destroy()
# Rasterize it
rasterizedDS = memRasterDriver.Create('', srcOffset[2],
srcOffset[3], 1, gdal.GDT_Byte)
rasterizedDS.SetGeoTransform(newGeoTransform)
gdal.RasterizeLayer(rasterizedDS, [1], memLayer, burn_values=[1])
rasterizedArray = rasterizedDS.ReadAsArray()
srcArray = numpy.nan_to_num(srcArray)
masked = numpy.ma.MaskedArray(srcArray,
mask=numpy.logical_or(srcArray == noData,
numpy.logical_not(rasterizedArray)))
outFeat.setGeometry(geom)
attrs = f.attributes()
attrs.insert(idxMin, float(masked.min()))
attrs.insert(idxMax, float(masked.max()))
attrs.insert(idxSum, float(masked.sum()))
attrs.insert(idxCount, int(masked.count()))
attrs.insert(idxMean, float(masked.mean()))
attrs.insert(idxStd, float(masked.std()))
attrs.insert(idxUnique, numpy.unique(masked.compressed()).size)
attrs.insert(idxRange, float(masked.max()) - float(masked.min()))
attrs.insert(idxVar, float(masked.var()))
attrs.insert(idxMedian, float(numpy.ma.median(masked)))
if hasSciPy:
attrs.insert(idxMode, float(mode(masked, axis=None)[0][0]))
outFeat.setAttributes(attrs)
writer.addFeature(outFeat)
memVDS = None
rasterizedDS = None
progress.setPercentage(int(current * total))
rasterDS = None
del writer
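# Illustrative sketch only (not part of the algorithm): the masking rule used in
# processAlgorithm above, applied to a tiny hand-made array -- cells are ignored when
# they equal the nodata value or fall outside the rasterized polygon. Values are made up.
def _example_masked_stats():
    src = numpy.array([[1.0, 2.0, -9999.0], [3.0, 4.0, 5.0]])
    rasterized = numpy.array([[1, 1, 1], [0, 1, 1]])  # 1 = cell covered by the polygon
    masked = numpy.ma.MaskedArray(
        src, mask=numpy.logical_or(src == -9999.0, numpy.logical_not(rasterized)))
    # min/max/mean consider only the four unmasked cells: 1.0, 2.0, 4.0 and 5.0
    return float(masked.min()), float(masked.max()), float(masked.mean())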
| gpl-2.0 | 1,031,326,189,392,499,000 | 41.73913 | 100 | 0.544807 | false |
cognitiveclass/edx-platform | common/djangoapps/config_models/models.py | 38 | 7489 | """
Django Model baseclass for database-backed configuration.
"""
from django.db import connection, models
from django.contrib.auth.models import User
from django.core.cache import caches, InvalidCacheBackendError
from django.utils.translation import ugettext_lazy as _
try:
cache = caches['configuration'] # pylint: disable=invalid-name
except InvalidCacheBackendError:
from django.core.cache import cache
class ConfigurationModelManager(models.Manager):
"""
Query manager for ConfigurationModel
"""
def _current_ids_subquery(self):
"""
Internal helper method to return an SQL string that will get the IDs of
all the current entries (i.e. the most recent entry for each unique set
of key values). Only useful if KEY_FIELDS is set.
"""
key_fields_escaped = [connection.ops.quote_name(name) for name in self.model.KEY_FIELDS]
# The following assumes that the rows with the most recent date also have the highest IDs
return "SELECT MAX(id) FROM {table_name} GROUP BY {key_fields}".format(
key_fields=', '.join(key_fields_escaped),
table_name=self.model._meta.db_table # pylint: disable=protected-access
)
def current_set(self):
"""
A queryset for the active configuration entries only. Only useful if KEY_FIELDS is set.
Active means the means recent entries for each unique combination of keys. It does not
necessaryily mean enbled.
"""
assert self.model.KEY_FIELDS != (), "Just use model.current() if there are no KEY_FIELDS"
return self.get_queryset().extra( # pylint: disable=no-member
where=["id IN ({subquery})".format(subquery=self._current_ids_subquery())],
select={'is_active': 1}, # This annotation is used by the admin changelist. sqlite requires '1', not 'True'
)
def with_active_flag(self):
"""
A query set where each result is annotated with an 'is_active' field that indicates
if it's the most recent entry for that combination of keys.
"""
if self.model.KEY_FIELDS:
subquery = self._current_ids_subquery()
return self.get_queryset().extra( # pylint: disable=no-member
select={'is_active': "id IN ({subquery})".format(subquery=subquery)}
)
else:
return self.get_queryset().extra( # pylint: disable=no-member
select={'is_active': "id = {pk}".format(pk=self.model.current().pk)}
)
class ConfigurationModel(models.Model):
"""
Abstract base class for model-based configuration
Properties:
cache_timeout (int): The number of seconds that this configuration
should be cached
"""
class Meta(object):
abstract = True
ordering = ("-change_date", )
objects = ConfigurationModelManager()
KEY_FIELDS = ()
    # The number of seconds that a configuration entry is cached before being re-read from the database
cache_timeout = 600
change_date = models.DateTimeField(auto_now_add=True, verbose_name=_("Change date"))
changed_by = models.ForeignKey(
User,
editable=False,
null=True,
on_delete=models.PROTECT,
# Translators: this label indicates the name of the user who made this change:
verbose_name=_("Changed by"),
)
enabled = models.BooleanField(default=False, verbose_name=_("Enabled"))
def save(self, *args, **kwargs):
"""
Clear the cached value when saving a new configuration entry
"""
# Always create a new entry, instead of updating an existing model
self.pk = None # pylint: disable=invalid-name
super(ConfigurationModel, self).save(*args, **kwargs)
cache.delete(self.cache_key_name(*[getattr(self, key) for key in self.KEY_FIELDS]))
if self.KEY_FIELDS:
cache.delete(self.key_values_cache_key_name())
@classmethod
def cache_key_name(cls, *args):
"""Return the name of the key to use to cache the current configuration"""
if cls.KEY_FIELDS != ():
if len(args) != len(cls.KEY_FIELDS):
raise TypeError(
"cache_key_name() takes exactly {} arguments ({} given)".format(len(cls.KEY_FIELDS), len(args))
)
return u'configuration/{}/current/{}'.format(cls.__name__, u','.join(unicode(arg) for arg in args))
else:
return 'configuration/{}/current'.format(cls.__name__)
@classmethod
def current(cls, *args):
"""
Return the active configuration entry, either from cache,
from the database, or by creating a new empty entry (which is not
persisted).
"""
cached = cache.get(cls.cache_key_name(*args))
if cached is not None:
return cached
key_dict = dict(zip(cls.KEY_FIELDS, args))
try:
current = cls.objects.filter(**key_dict).order_by('-change_date')[0]
except IndexError:
current = cls(**key_dict)
cache.set(cls.cache_key_name(*args), current, cls.cache_timeout)
return current
@classmethod
def is_enabled(cls):
"""Returns True if this feature is configured as enabled, else False."""
return cls.current().enabled
@classmethod
def key_values_cache_key_name(cls, *key_fields):
""" Key for fetching unique key values from the cache """
key_fields = key_fields or cls.KEY_FIELDS
return 'configuration/{}/key_values/{}'.format(cls.__name__, ','.join(key_fields))
@classmethod
def key_values(cls, *key_fields, **kwargs):
"""
Get the set of unique values in the configuration table for the given
key[s]. Calling cls.current(*value) for each value in the resulting
list should always produce an entry, though any such entry may have
enabled=False.
Arguments:
key_fields: The positional arguments are the KEY_FIELDS to return. For example if
you had a course embargo configuration where each entry was keyed on (country,
course), then you might want to know "What countries have embargoes configured?"
with cls.key_values('country'), or "Which courses have country restrictions?"
with cls.key_values('course'). You can also leave this unspecified for the
default, which returns the distinct combinations of all keys.
flat: If you pass flat=True as a kwarg, it has the same effect as in Django's
'values_list' method: Instead of returning a list of lists, you'll get one list
of values. This makes sense to use whenever there is only one key being queried.
Return value:
List of lists of each combination of keys found in the database.
e.g. [("Italy", "course-v1:SomeX+some+2015"), ...] for the course embargo example
"""
flat = kwargs.pop('flat', False)
assert not kwargs, "'flat' is the only kwarg accepted"
key_fields = key_fields or cls.KEY_FIELDS
cache_key = cls.key_values_cache_key_name(*key_fields)
cached = cache.get(cache_key)
if cached is not None:
return cached
values = list(cls.objects.values_list(*key_fields, flat=flat).order_by().distinct())
cache.set(cache_key, values, cls.cache_timeout)
return values
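# Illustrative sketch only (not part of this app): how a concrete configuration model is
# typically declared and read. The model name and fields below are invented for the
# example and do not correspond to a real table.
class _ExampleRateLimitConfiguration(ConfigurationModel):
    """Example subclass keyed on course_id (for illustration only)."""
    KEY_FIELDS = ('course_id',)
    course_id = models.CharField(max_length=255, db_index=True)
    requests_per_minute = models.PositiveIntegerField(default=60)
# Typical reads (sketch):
#   _ExampleRateLimitConfiguration.current('course-v1:Demo+X+2015').requests_per_minute
#   _ExampleRateLimitConfiguration.key_values('course_id', flat=True)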
| agpl-3.0 | -3,632,492,992,637,380,600 | 41.073034 | 120 | 0.624249 | false |
telefonicaid/selenium | py/test/selenium/webdriver/common/window_tests.py | 15 | 1750 | # Copyright 2008-2009 WebDriver committers
# Copyright 2008-2009 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http:#www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import pytest
from selenium.webdriver.support.wait import WebDriverWait
class WindowTests(unittest.TestCase):
@pytest.mark.ignore_chrome
@pytest.mark.ignore_opera
@pytest.mark.ignore_ie
def testShouldMaximizeTheWindow(self):
resize_timeout = 5
wait = WebDriverWait(self.driver, resize_timeout)
old_size = self.driver.get_window_size()
self.driver.set_window_size(200, 200)
wait.until(
lambda dr: dr.get_window_size() != old_size if old_size["width"] != 200 and old_size["height"] != 200 \
else True)
size = self.driver.get_window_size()
self.driver.maximize_window()
wait.until(lambda dr: dr.get_window_size() != size)
new_size = self.driver.get_window_size()
assert new_size["width"] > size["width"]
assert new_size["height"] > size["height"]
def _pageURL(self, name):
return self.webserver.where_is(name + '.html')
def _loadSimplePage(self):
self._loadPage("simpleTest")
def _loadPage(self, name):
self.driver.get(self._pageURL(name))
| apache-2.0 | -6,501,804,740,573,229,000 | 34.714286 | 115 | 0.681714 | false |
gavin-feng/odoo | addons/contacts/__openerp__.py | 260 | 1594 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Address Book',
'version': '1.0',
'category': 'Tools',
'description': """
This module gives you a quick view of your address book, accessible from your home page.
You can track your suppliers, customers and other contacts.
""",
'author': 'OpenERP SA',
'website': 'https://www.odoo.com/page/crm',
'summary': 'Contacts, People and Companies',
'depends': [
'mail',
],
'data': [
'contacts_view.xml',
],
'installable': True,
'application': True,
'auto_install': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | 3,652,215,441,814,680,000 | 35.227273 | 88 | 0.602258 | false |
henzk/ape | ape/container_mode/tasks.py | 1 | 12347 | from __future__ import unicode_literals, print_function
import os
import sys
import subprocess
import json
from ape import feaquencer
from ape import tasks
from .exceptions import ContainerError, ContainerNotFound, ProductNotFound
class Config(object):
APE_ROOT = os.environ['APE_ROOT_DIR']
SOURCE_HEADER = '#please execute the following in your shell:\n'
introduce_conf = Config()
@tasks.register_helper
def get_container_dir(container_name):
return tasks.conf.APE_ROOT + '/' + container_name
@tasks.register_helper
def get_product_dir(container_name, product_name):
return tasks.get_container_dir(container_name) + '/products/' + product_name
@tasks.register_helper
def get_containers():
entries = os.listdir(tasks.conf.APE_ROOT)
containers = []
for entry in entries:
if os.path.isdir(tasks.get_container_dir(entry) + '/products'):
containers.append(entry)
return containers
@tasks.register_helper
def get_products(container_name):
products_dir = tasks.get_container_dir(container_name) + '/products'
if not os.path.isdir(products_dir):
return []
products = os.listdir(products_dir)
def is_product(p):
return not p.startswith('.') and not p.startswith('_')
return [p for p in products if is_product(p)]
@tasks.register
def info():
"""
List information about this productive environment
:return:
"""
print()
print('root directory :', tasks.conf.APE_ROOT)
print()
print('active container :', os.environ.get('CONTAINER_NAME', ''))
print()
print('active product :', os.environ.get('PRODUCT_NAME', ''))
print()
print('ape feature selection :', tasks.FEATURE_SELECTION)
print()
print('containers and products:')
print('-' * 30)
print()
for container_name in tasks.get_containers():
print(container_name)
for product_name in tasks.get_products(container_name):
print(' ' + product_name)
print()
@tasks.register
def cd(doi):
"""
cd to directory of interest(doi)
a doi can be:
herbert - the container named "herbert"
        sdox:dev - the product "dev" located in the container "sdox"
:param doi:
:return:
"""
parts = doi.split(':')
if len(parts) == 2:
container_name, product_name = parts[0], parts[1]
elif len(parts) == 1 and os.environ.get('CONTAINER_NAME'):
        # interpret doi as a product name if already zapped into a product, in order
# to enable simply switching products by doing ape zap prod.
product_name = parts[0]
container_name = os.environ.get('CONTAINER_NAME')
else:
print('unable to parse context - format: <container_name>:<product_name>')
sys.exit(1)
if container_name not in tasks.get_containers():
print('No such container')
else:
if product_name:
if product_name not in tasks.get_products(container_name):
print('No such product')
else:
print(tasks.conf.SOURCE_HEADER)
print('cd ' + tasks.get_product_dir(container_name, product_name))
else:
print(tasks.conf.SOURCE_HEADER)
print('cd ' + tasks.get_container_dir(container_name))
SWITCH_TEMPLATE = '''{source_header}
export CONTAINER_NAME={container_name}
export PRODUCT_NAME={product_name}
update_ape_env
'''
@tasks.register
def switch(poi):
"""
    Zaps into a specific product by switching the context to the product of interest (poi).
A poi is:
sdox:dev - for product "dev" located in container "sdox"
    If poi does not contain a ":" it is interpreted as a product name, implying that a product within this
    container is already active. So if this task is called with ape zap prod (and the corresponding container is
    already zapped in), then only the product is switched.
After the context has been switched to sdox:dev additional commands may be available
that are relevant to sdox:dev
:param poi: product of interest, string: <container_name>:<product_name> or <product_name>.
"""
parts = poi.split(':')
if len(parts) == 2:
container_name, product_name = parts
elif len(parts) == 1 and os.environ.get('CONTAINER_NAME'):
# interpret poi as product name if already zapped into a product in order
# to enable simply switching products by doing ape zap prod.
container_name = os.environ.get('CONTAINER_NAME')
product_name = parts[0]
else:
print('unable to find poi: ', poi)
sys.exit(1)
if container_name not in tasks.get_containers():
raise ContainerNotFound('No such container %s' % container_name)
elif product_name not in tasks.get_products(container_name):
raise ProductNotFound('No such product %s' % product_name)
else:
print(SWITCH_TEMPLATE.format(
source_header=tasks.conf.SOURCE_HEADER,
container_name=container_name,
product_name=product_name
))
@tasks.register
def teleport(poi):
"""
switch and cd in one operation
:param poi:
:return:
"""
tasks.switch(poi)
tasks.cd(poi)
@tasks.register
def zap(poi):
'''alias for "teleport"'''
tasks.teleport(poi)
@tasks.register
def install_container(container_name):
"""
Installs the container specified by container_name
:param container_name: string, name of the container
"""
container_dir = os.path.join(os.environ['APE_ROOT_DIR'], container_name)
if os.path.exists(container_dir):
os.environ['CONTAINER_DIR'] = container_dir
else:
raise ContainerNotFound('ERROR: container directory not found: %s' % container_dir)
install_script = os.path.join(container_dir, 'install.py')
if os.path.exists(install_script):
print('... running install.py for %s' % container_name)
subprocess.check_call(['python', install_script])
else:
raise ContainerError('ERROR: this container does not provide an install.py!')
@tasks.register_helper
def get_extra_pypath(container_name=None):
from ape.installtools import pypath
return pypath.get_extra_pypath()
@tasks.register_helper
def get_poi_tuple(poi=None):
"""
    Takes the poi or None and returns the container_dir and the product name, either from the passed poi
    (<container_name>:<product_name>) or from os.environ.
:param poi: optional; <container_name>: <product_name>
:return: tuple of the container directory and the product name
"""
if poi:
parts = poi.split(':')
if len(parts) == 2:
container_name, product_name = parts
if container_name not in tasks.get_containers():
print('No such container')
sys.exit(1)
elif product_name not in tasks.get_products(container_name):
print('No such product')
sys.exit(1)
else:
container_dir = tasks.get_container_dir(container_name)
else:
print('Please check your arguments: --poi <container>:<product>')
sys.exit(1)
else:
container_dir = os.environ.get('CONTAINER_DIR')
product_name = os.environ.get('PRODUCT_NAME')
return container_dir, product_name
@tasks.register
def validate_product_equation(poi=None):
"""
Validates the product equation.
* Validates the feature order
* Validates the product spec (mandatory functional features)
:param poi: optional product of interest
"""
from . import utils
from . import validators
container_dir, product_name = tasks.get_poi_tuple(poi=poi)
feature_list = utils.get_features_from_equation(container_dir, product_name)
ordering_constraints = utils.get_feature_order_constraints(container_dir)
spec_path = utils.get_feature_ide_paths(container_dir, product_name).product_spec_path
print('*** Starting product.equation validation')
# --------------------------------------------------------
# Validate the feature order
print('\tChecking feature order')
feature_order_validator = validators.FeatureOrderValidator(feature_list, ordering_constraints)
feature_order_validator.check_order()
if feature_order_validator.has_errors():
print('\t\txxx ERROR in your product.equation feature order xxx')
for error in feature_order_validator.get_violations():
print('\t\t\t', error[1])
else:
print('\t\tOK')
# --------------------------------------------------------
# Validate the functional product specification
print('\tChecking functional product spec')
if not os.path.exists(spec_path):
print(
'\t\tSkipped - No product spec exists.\n'
'\t\tYou may create a product spec if you want to ensure that\n'
'\t\trequired functional features are represented in the product equation\n'
'\t\t=> Create spec file featuremodel/productline/<container>/product_spec.json'
)
return
spec_validator = validators.ProductSpecValidator(spec_path, product_name, feature_list)
if not spec_validator.is_valid():
if spec_validator.get_errors_mandatory():
            print('\t\tERROR: The following features are missing', spec_validator.get_errors_mandatory())
if spec_validator.get_errors_never():
            print('\t\tERROR: The following features are not allowed', spec_validator.get_errors_never())
else:
print('\t\tOK')
if feature_order_validator.has_errors() or spec_validator.has_errors():
sys.exit(1)
@tasks.register_helper
def get_ordered_feature_list(info_object, feature_list):
"""
Orders the passed feature list by the given, json-formatted feature
dependency file using feaquencer's topsort algorithm.
:param feature_list:
:param info_object:
:return:
"""
feature_dependencies = json.load(open(info_object.feature_order_json))
feature_selection = [feature for feature in [feature.strip().replace('\n', '') for feature in feature_list]
if len(feature) > 0 and not feature.startswith('_') and not feature.startswith('#')]
return [feature + '\n' for feature in feaquencer.get_total_order(feature_selection, feature_dependencies)]
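# The helper above delegates the actual ordering to feaquencer.get_total_order.
# The sketch below is NOT the feaquencer API; it only illustrates, with a plain
# Kahn-style topological sort, what a "total order respecting the dependency
# constraints" means. The dependency mapping format (feature -> features it
# must come after) is an assumption made purely for illustration.
def _topological_order_sketch(selection, must_follow):
    """Return ``selection`` reordered so every feature appears after the
    selected features it depends on; raises on cyclic constraints."""
    remaining = set(selection)
    ordered = []
    while remaining:
        # features whose still-pending prerequisites have all been emitted already
        ready = [f for f in sorted(remaining)
                 if all(dep not in remaining for dep in must_follow.get(f, []))]
        if not ready:
            raise ValueError('cyclic feature dependencies among: %r' % sorted(remaining))
        for feature in ready:
            ordered.append(feature)
            remaining.discard(feature)
    return ordered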
@tasks.register
def config_to_equation(poi=None):
"""
Generates a product.equation file for the given product name.
It generates it from the <product_name>.config file in the products folder.
For that you need to have your project imported to featureIDE and set the correct settings.
"""
from . import utils
container_dir, product_name = tasks.get_poi_tuple(poi=poi)
info_object = utils.get_feature_ide_paths(container_dir, product_name)
feature_list = list()
try:
print('*** Processing ', info_object.config_file_path)
with open(info_object.config_file_path, 'r') as config_file:
config_file = config_file.readlines()
for line in config_file:
                # in FeatureIDE we can't use '.' for the paths to sub-features, so we used '__'
# e.g. django_productline__features__development
if len(line.split('__')) <= 2:
line = line
else:
line = line.replace('__', '.')
if line.startswith('abstract_'):
                    # we skip abstract features; this is a special case, as FeatureIDE does not work with abstract
                    # sub-trees / leaves.
line = ''
feature_list.append(line)
except IOError:
print('{} does not exist. Make sure your config file exists.'.format(info_object.config_file_path))
feature_list = tasks.get_ordered_feature_list(info_object, feature_list)
try:
with open(info_object.equation_file_path, 'w') as eq_file:
eq_file.writelines(feature_list)
print('*** Successfully generated product.equation')
except IOError:
print('product.equation file not found. Please make sure you have a valid product.equation in your chosen product')
# finally performing the validation of the product equation
tasks.validate_product_equation()
| mit | -6,217,605,871,756,093,000 | 33.392758 | 123 | 0.643152 | false |
bclau/nova | nova/tests/api/openstack/compute/test_limits.py | 7 | 36000 | # Copyright 2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Tests dealing with HTTP rate-limiting.
"""
import httplib
import StringIO
from xml.dom import minidom
from lxml import etree
import webob
from nova.api.openstack.compute import limits
from nova.api.openstack.compute import views
from nova.api.openstack import xmlutil
import nova.context
from nova.openstack.common import jsonutils
from nova import test
from nova.tests.api.openstack import fakes
from nova.tests import matchers
from nova import utils
TEST_LIMITS = [
limits.Limit("GET", "/delayed", "^/delayed", 1,
utils.TIME_UNITS['MINUTE']),
limits.Limit("POST", "*", ".*", 7, utils.TIME_UNITS['MINUTE']),
limits.Limit("POST", "/servers", "^/servers", 3,
utils.TIME_UNITS['MINUTE']),
limits.Limit("PUT", "*", "", 10, utils.TIME_UNITS['MINUTE']),
limits.Limit("PUT", "/servers", "^/servers", 5,
utils.TIME_UNITS['MINUTE']),
]
NS = {
'atom': 'http://www.w3.org/2005/Atom',
'ns': 'http://docs.openstack.org/common/api/v1.0'
}
class BaseLimitTestSuite(test.NoDBTestCase):
"""Base test suite which provides relevant stubs and time abstraction."""
def setUp(self):
super(BaseLimitTestSuite, self).setUp()
self.time = 0.0
self.stubs.Set(limits.Limit, "_get_time", self._get_time)
self.absolute_limits = {}
def stub_get_project_quotas(context, project_id, usages=True):
return dict((k, dict(limit=v))
for k, v in self.absolute_limits.items())
self.stubs.Set(nova.quota.QUOTAS, "get_project_quotas",
stub_get_project_quotas)
def _get_time(self):
"""Return the "time" according to this test suite."""
return self.time
class LimitsControllerTest(BaseLimitTestSuite):
"""
Tests for `limits.LimitsController` class.
"""
def setUp(self):
"""Run before each test."""
super(LimitsControllerTest, self).setUp()
self.controller = limits.create_resource()
self.ctrler = limits.LimitsController()
def _get_index_request(self, accept_header="application/json"):
"""Helper to set routing arguments."""
request = webob.Request.blank("/")
request.accept = accept_header
request.environ["wsgiorg.routing_args"] = (None, {
"action": "index",
"controller": "",
})
context = nova.context.RequestContext('testuser', 'testproject')
request.environ["nova.context"] = context
return request
def _populate_limits(self, request):
"""Put limit info into a request."""
_limits = [
limits.Limit("GET", "*", ".*", 10, 60).display(),
limits.Limit("POST", "*", ".*", 5, 60 * 60).display(),
limits.Limit("GET", "changes-since*", "changes-since",
5, 60).display(),
]
request.environ["nova.limits"] = _limits
return request
def test_empty_index_json(self):
# Test getting empty limit details in JSON.
request = self._get_index_request()
response = request.get_response(self.controller)
expected = {
"limits": {
"rate": [],
"absolute": {},
},
}
body = jsonutils.loads(response.body)
self.assertEqual(expected, body)
def test_index_json(self):
# Test getting limit details in JSON.
request = self._get_index_request()
request = self._populate_limits(request)
self.absolute_limits = {
'ram': 512,
'instances': 5,
'cores': 21,
'key_pairs': 10,
'floating_ips': 10,
'security_groups': 10,
'security_group_rules': 20,
}
response = request.get_response(self.controller)
expected = {
"limits": {
"rate": [
{
"regex": ".*",
"uri": "*",
"limit": [
{
"verb": "GET",
"next-available": "1970-01-01T00:00:00Z",
"unit": "MINUTE",
"value": 10,
"remaining": 10,
},
{
"verb": "POST",
"next-available": "1970-01-01T00:00:00Z",
"unit": "HOUR",
"value": 5,
"remaining": 5,
},
],
},
{
"regex": "changes-since",
"uri": "changes-since*",
"limit": [
{
"verb": "GET",
"next-available": "1970-01-01T00:00:00Z",
"unit": "MINUTE",
"value": 5,
"remaining": 5,
},
],
},
],
"absolute": {
"maxTotalRAMSize": 512,
"maxTotalInstances": 5,
"maxTotalCores": 21,
"maxTotalKeypairs": 10,
"maxTotalFloatingIps": 10,
"maxSecurityGroups": 10,
"maxSecurityGroupRules": 20,
},
},
}
body = jsonutils.loads(response.body)
self.assertEqual(expected, body)
def _populate_limits_diff_regex(self, request):
"""Put limit info into a request."""
_limits = [
limits.Limit("GET", "*", ".*", 10, 60).display(),
limits.Limit("GET", "*", "*.*", 10, 60).display(),
]
request.environ["nova.limits"] = _limits
return request
def test_index_diff_regex(self):
# Test getting limit details in JSON.
request = self._get_index_request()
request = self._populate_limits_diff_regex(request)
response = request.get_response(self.controller)
expected = {
"limits": {
"rate": [
{
"regex": ".*",
"uri": "*",
"limit": [
{
"verb": "GET",
"next-available": "1970-01-01T00:00:00Z",
"unit": "MINUTE",
"value": 10,
"remaining": 10,
},
],
},
{
"regex": "*.*",
"uri": "*",
"limit": [
{
"verb": "GET",
"next-available": "1970-01-01T00:00:00Z",
"unit": "MINUTE",
"value": 10,
"remaining": 10,
},
],
},
],
"absolute": {},
},
}
body = jsonutils.loads(response.body)
self.assertEqual(expected, body)
def _test_index_absolute_limits_json(self, expected):
request = self._get_index_request()
response = request.get_response(self.controller)
body = jsonutils.loads(response.body)
self.assertEqual(expected, body['limits']['absolute'])
def test_index_ignores_extra_absolute_limits_json(self):
self.absolute_limits = {'unknown_limit': 9001}
self._test_index_absolute_limits_json({})
def test_index_absolute_ram_json(self):
self.absolute_limits = {'ram': 1024}
self._test_index_absolute_limits_json({'maxTotalRAMSize': 1024})
def test_index_absolute_cores_json(self):
self.absolute_limits = {'cores': 17}
self._test_index_absolute_limits_json({'maxTotalCores': 17})
def test_index_absolute_instances_json(self):
self.absolute_limits = {'instances': 19}
self._test_index_absolute_limits_json({'maxTotalInstances': 19})
def test_index_absolute_metadata_json(self):
# NOTE: both server metadata and image metadata are overloaded
# into metadata_items
self.absolute_limits = {'metadata_items': 23}
expected = {
'maxServerMeta': 23,
'maxImageMeta': 23,
}
self._test_index_absolute_limits_json(expected)
def test_index_absolute_injected_files(self):
self.absolute_limits = {
'injected_files': 17,
'injected_file_content_bytes': 86753,
}
expected = {
'maxPersonality': 17,
'maxPersonalitySize': 86753,
}
self._test_index_absolute_limits_json(expected)
def test_index_absolute_security_groups(self):
self.absolute_limits = {
'security_groups': 8,
'security_group_rules': 16,
}
expected = {
'maxSecurityGroups': 8,
'maxSecurityGroupRules': 16,
}
self._test_index_absolute_limits_json(expected)
def test_limit_create(self):
req = fakes.HTTPRequest.blank('/v2/fake/limits')
self.assertRaises(webob.exc.HTTPNotImplemented, self.ctrler.create,
req, {})
def test_limit_delete(self):
req = fakes.HTTPRequest.blank('/v2/fake/limits')
self.assertRaises(webob.exc.HTTPNotImplemented, self.ctrler.delete,
req, 1)
def test_limit_detail(self):
req = fakes.HTTPRequest.blank('/v2/fake/limits')
self.assertRaises(webob.exc.HTTPNotImplemented, self.ctrler.detail,
req)
def test_limit_show(self):
req = fakes.HTTPRequest.blank('/v2/fake/limits')
self.assertRaises(webob.exc.HTTPNotImplemented, self.ctrler.show,
req, 1)
def test_limit_update(self):
req = fakes.HTTPRequest.blank('/v2/fake/limits')
self.assertRaises(webob.exc.HTTPNotImplemented, self.ctrler.update,
req, 1, {})
class MockLimiter(limits.Limiter):
pass
class LimitMiddlewareTest(BaseLimitTestSuite):
"""
Tests for the `limits.RateLimitingMiddleware` class.
"""
@webob.dec.wsgify
def _empty_app(self, request):
"""Do-nothing WSGI app."""
pass
def setUp(self):
"""Prepare middleware for use through fake WSGI app."""
super(LimitMiddlewareTest, self).setUp()
_limits = '(GET, *, .*, 1, MINUTE)'
self.app = limits.RateLimitingMiddleware(self._empty_app, _limits,
"%s.MockLimiter" %
self.__class__.__module__)
def test_limit_class(self):
# Test that middleware selected correct limiter class.
assert isinstance(self.app._limiter, MockLimiter)
def test_good_request(self):
# Test successful GET request through middleware.
request = webob.Request.blank("/")
response = request.get_response(self.app)
self.assertEqual(200, response.status_int)
def test_limited_request_json(self):
# Test a rate-limited (429) GET request through middleware.
request = webob.Request.blank("/")
response = request.get_response(self.app)
self.assertEqual(200, response.status_int)
request = webob.Request.blank("/")
response = request.get_response(self.app)
self.assertEqual(response.status_int, 429)
self.assertTrue('Retry-After' in response.headers)
retry_after = int(response.headers['Retry-After'])
self.assertAlmostEqual(retry_after, 60, 1)
body = jsonutils.loads(response.body)
expected = "Only 1 GET request(s) can be made to * every minute."
value = body["overLimit"]["details"].strip()
self.assertEqual(value, expected)
self.assertTrue("retryAfter" in body["overLimit"])
retryAfter = body["overLimit"]["retryAfter"]
self.assertEqual(retryAfter, "60")
def test_limited_request_xml(self):
# Test a rate-limited (429) response as XML.
request = webob.Request.blank("/")
response = request.get_response(self.app)
self.assertEqual(200, response.status_int)
request = webob.Request.blank("/")
request.accept = "application/xml"
response = request.get_response(self.app)
self.assertEqual(response.status_int, 429)
root = minidom.parseString(response.body).childNodes[0]
expected = "Only 1 GET request(s) can be made to * every minute."
self.assertNotEqual(root.attributes.getNamedItem("retryAfter"), None)
retryAfter = root.attributes.getNamedItem("retryAfter").value
self.assertEqual(retryAfter, "60")
details = root.getElementsByTagName("details")
self.assertEqual(details.length, 1)
value = details.item(0).firstChild.data.strip()
self.assertEqual(value, expected)
class LimitTest(BaseLimitTestSuite):
"""
Tests for the `limits.Limit` class.
"""
def test_GET_no_delay(self):
# Test a limit handles 1 GET per second.
limit = limits.Limit("GET", "*", ".*", 1, 1)
delay = limit("GET", "/anything")
self.assertEqual(None, delay)
self.assertEqual(0, limit.next_request)
self.assertEqual(0, limit.last_request)
def test_GET_delay(self):
# Test two calls to 1 GET per second limit.
limit = limits.Limit("GET", "*", ".*", 1, 1)
delay = limit("GET", "/anything")
self.assertEqual(None, delay)
delay = limit("GET", "/anything")
self.assertEqual(1, delay)
self.assertEqual(1, limit.next_request)
self.assertEqual(0, limit.last_request)
self.time += 4
delay = limit("GET", "/anything")
self.assertEqual(None, delay)
self.assertEqual(4, limit.next_request)
self.assertEqual(4, limit.last_request)
class ParseLimitsTest(BaseLimitTestSuite):
"""
Tests for the default limits parser in the in-memory
`limits.Limiter` class.
"""
def test_invalid(self):
# Test that parse_limits() handles invalid input correctly.
self.assertRaises(ValueError, limits.Limiter.parse_limits,
';;;;;')
def test_bad_rule(self):
# Test that parse_limits() handles bad rules correctly.
self.assertRaises(ValueError, limits.Limiter.parse_limits,
'GET, *, .*, 20, minute')
def test_missing_arg(self):
# Test that parse_limits() handles missing args correctly.
self.assertRaises(ValueError, limits.Limiter.parse_limits,
'(GET, *, .*, 20)')
def test_bad_value(self):
# Test that parse_limits() handles bad values correctly.
self.assertRaises(ValueError, limits.Limiter.parse_limits,
'(GET, *, .*, foo, minute)')
def test_bad_unit(self):
# Test that parse_limits() handles bad units correctly.
self.assertRaises(ValueError, limits.Limiter.parse_limits,
'(GET, *, .*, 20, lightyears)')
def test_multiple_rules(self):
# Test that parse_limits() handles multiple rules correctly.
try:
l = limits.Limiter.parse_limits('(get, *, .*, 20, minute);'
'(PUT, /foo*, /foo.*, 10, hour);'
'(POST, /bar*, /bar.*, 5, second);'
'(Say, /derp*, /derp.*, 1, day)')
except ValueError as e:
assert False, str(e)
# Make sure the number of returned limits are correct
self.assertEqual(len(l), 4)
# Check all the verbs...
expected = ['GET', 'PUT', 'POST', 'SAY']
self.assertEqual([t.verb for t in l], expected)
# ...the URIs...
expected = ['*', '/foo*', '/bar*', '/derp*']
self.assertEqual([t.uri for t in l], expected)
# ...the regexes...
expected = ['.*', '/foo.*', '/bar.*', '/derp.*']
self.assertEqual([t.regex for t in l], expected)
# ...the values...
expected = [20, 10, 5, 1]
self.assertEqual([t.value for t in l], expected)
# ...and the units...
expected = [utils.TIME_UNITS['MINUTE'], utils.TIME_UNITS['HOUR'],
utils.TIME_UNITS['SECOND'], utils.TIME_UNITS['DAY']]
self.assertEqual([t.unit for t in l], expected)
class LimiterTest(BaseLimitTestSuite):
"""
Tests for the in-memory `limits.Limiter` class.
"""
def setUp(self):
"""Run before each test."""
super(LimiterTest, self).setUp()
userlimits = {'limits.user3': '',
'limits.user0': '(get, *, .*, 4, minute);'
'(put, *, .*, 2, minute)'}
self.limiter = limits.Limiter(TEST_LIMITS, **userlimits)
def _check(self, num, verb, url, username=None):
"""Check and yield results from checks."""
for x in xrange(num):
yield self.limiter.check_for_delay(verb, url, username)[0]
def _check_sum(self, num, verb, url, username=None):
"""Check and sum results from checks."""
results = self._check(num, verb, url, username)
return sum(item for item in results if item)
def test_no_delay_GET(self):
"""
Simple test to ensure no delay on a single call for a limit verb we
didn"t set.
"""
delay = self.limiter.check_for_delay("GET", "/anything")
self.assertEqual(delay, (None, None))
def test_no_delay_PUT(self):
# Simple test to ensure no delay on a single call for a known limit.
delay = self.limiter.check_for_delay("PUT", "/anything")
self.assertEqual(delay, (None, None))
def test_delay_PUT(self):
"""
Ensure the 11th PUT will result in a delay of 6.0 seconds until
        the next request will be granted.
"""
expected = [None] * 10 + [6.0]
results = list(self._check(11, "PUT", "/anything"))
self.assertEqual(expected, results)
def test_delay_POST(self):
"""
Ensure the 8th POST will result in a delay of 6.0 seconds until
        the next request will be granted.
"""
expected = [None] * 7
results = list(self._check(7, "POST", "/anything"))
self.assertEqual(expected, results)
expected = 60.0 / 7.0
results = self._check_sum(1, "POST", "/anything")
self.failUnlessAlmostEqual(expected, results, 8)
def test_delay_GET(self):
# Ensure the 11th GET will result in NO delay.
expected = [None] * 11
results = list(self._check(11, "GET", "/anything"))
self.assertEqual(expected, results)
expected = [None] * 4 + [15.0]
results = list(self._check(5, "GET", "/foo", "user0"))
self.assertEqual(expected, results)
def test_delay_PUT_servers(self):
"""
Ensure PUT on /servers limits at 5 requests, and PUT elsewhere is still
OK after 5 requests...but then after 11 total requests, PUT limiting
kicks in.
"""
# First 6 requests on PUT /servers
expected = [None] * 5 + [12.0]
results = list(self._check(6, "PUT", "/servers"))
self.assertEqual(expected, results)
# Next 5 request on PUT /anything
expected = [None] * 4 + [6.0]
results = list(self._check(5, "PUT", "/anything"))
self.assertEqual(expected, results)
def test_delay_PUT_wait(self):
"""
Ensure after hitting the limit and then waiting for the correct
amount of time, the limit will be lifted.
"""
expected = [None] * 10 + [6.0]
results = list(self._check(11, "PUT", "/anything"))
self.assertEqual(expected, results)
# Advance time
self.time += 6.0
expected = [None, 6.0]
results = list(self._check(2, "PUT", "/anything"))
self.assertEqual(expected, results)
def test_multiple_delays(self):
# Ensure multiple requests still get a delay.
expected = [None] * 10 + [6.0] * 10
results = list(self._check(20, "PUT", "/anything"))
self.assertEqual(expected, results)
self.time += 1.0
expected = [5.0] * 10
results = list(self._check(10, "PUT", "/anything"))
self.assertEqual(expected, results)
expected = [None] * 2 + [30.0] * 8
results = list(self._check(10, "PUT", "/anything", "user0"))
self.assertEqual(expected, results)
def test_user_limit(self):
# Test user-specific limits.
self.assertEqual(self.limiter.levels['user3'], [])
self.assertEqual(len(self.limiter.levels['user0']), 2)
def test_multiple_users(self):
# Tests involving multiple users.
# User0
expected = [None] * 2 + [30.0] * 8
results = list(self._check(10, "PUT", "/anything", "user0"))
self.assertEqual(expected, results)
# User1
expected = [None] * 10 + [6.0] * 10
results = list(self._check(20, "PUT", "/anything", "user1"))
self.assertEqual(expected, results)
# User2
expected = [None] * 10 + [6.0] * 5
results = list(self._check(15, "PUT", "/anything", "user2"))
self.assertEqual(expected, results)
# User3
expected = [None] * 20
results = list(self._check(20, "PUT", "/anything", "user3"))
self.assertEqual(expected, results)
self.time += 1.0
# User1 again
expected = [5.0] * 10
results = list(self._check(10, "PUT", "/anything", "user1"))
self.assertEqual(expected, results)
self.time += 1.0
# User1 again
expected = [4.0] * 5
results = list(self._check(5, "PUT", "/anything", "user2"))
self.assertEqual(expected, results)
# User0 again
expected = [28.0]
results = list(self._check(1, "PUT", "/anything", "user0"))
self.assertEqual(expected, results)
self.time += 28.0
expected = [None, 30.0]
results = list(self._check(2, "PUT", "/anything", "user0"))
self.assertEqual(expected, results)
class WsgiLimiterTest(BaseLimitTestSuite):
"""
Tests for `limits.WsgiLimiter` class.
"""
def setUp(self):
"""Run before each test."""
super(WsgiLimiterTest, self).setUp()
self.app = limits.WsgiLimiter(TEST_LIMITS)
def _request_data(self, verb, path):
"""Get data describing a limit request verb/path."""
return jsonutils.dumps({"verb": verb, "path": path})
def _request(self, verb, url, username=None):
"""Make sure that POSTing to the given url causes the given username
to perform the given action. Make the internal rate limiter return
delay and make sure that the WSGI app returns the correct response.
"""
if username:
request = webob.Request.blank("/%s" % username)
else:
request = webob.Request.blank("/")
request.method = "POST"
request.body = self._request_data(verb, url)
response = request.get_response(self.app)
if "X-Wait-Seconds" in response.headers:
self.assertEqual(response.status_int, 403)
return response.headers["X-Wait-Seconds"]
self.assertEqual(response.status_int, 204)
def test_invalid_methods(self):
# Only POSTs should work.
requests = []
for method in ["GET", "PUT", "DELETE", "HEAD", "OPTIONS"]:
request = webob.Request.blank("/", method=method)
response = request.get_response(self.app)
self.assertEqual(response.status_int, 405)
def test_good_url(self):
delay = self._request("GET", "/something")
self.assertEqual(delay, None)
def test_escaping(self):
delay = self._request("GET", "/something/jump%20up")
self.assertEqual(delay, None)
def test_response_to_delays(self):
delay = self._request("GET", "/delayed")
self.assertEqual(delay, None)
delay = self._request("GET", "/delayed")
self.assertEqual(delay, '60.00')
def test_response_to_delays_usernames(self):
delay = self._request("GET", "/delayed", "user1")
self.assertEqual(delay, None)
delay = self._request("GET", "/delayed", "user2")
self.assertEqual(delay, None)
delay = self._request("GET", "/delayed", "user1")
self.assertEqual(delay, '60.00')
delay = self._request("GET", "/delayed", "user2")
self.assertEqual(delay, '60.00')
class FakeHttplibSocket(object):
"""
Fake `httplib.HTTPResponse` replacement.
"""
def __init__(self, response_string):
"""Initialize new `FakeHttplibSocket`."""
self._buffer = StringIO.StringIO(response_string)
def makefile(self, _mode, _other):
"""Returns the socket's internal buffer."""
return self._buffer
class FakeHttplibConnection(object):
"""
Fake `httplib.HTTPConnection`.
"""
def __init__(self, app, host):
"""
Initialize `FakeHttplibConnection`.
"""
self.app = app
self.host = host
def request(self, method, path, body="", headers=None):
"""
Requests made via this connection actually get translated and routed
        into our WSGI app; we then wait for the response and turn it back into
an `httplib.HTTPResponse`.
"""
if not headers:
headers = {}
req = webob.Request.blank(path)
req.method = method
req.headers = headers
req.host = self.host
req.body = body
resp = str(req.get_response(self.app))
resp = "HTTP/1.0 %s" % resp
sock = FakeHttplibSocket(resp)
self.http_response = httplib.HTTPResponse(sock)
self.http_response.begin()
def getresponse(self):
"""Return our generated response from the request."""
return self.http_response
def wire_HTTPConnection_to_WSGI(host, app):
"""Monkeypatches HTTPConnection so that if you try to connect to host, you
are instead routed straight to the given WSGI app.
After calling this method, when any code calls
httplib.HTTPConnection(host)
the connection object will be a fake. Its requests will be sent directly
to the given WSGI app rather than through a socket.
Code connecting to hosts other than host will not be affected.
This method may be called multiple times to map different hosts to
different apps.
This method returns the original HTTPConnection object, so that the caller
can restore the default HTTPConnection interface (for all hosts).
"""
class HTTPConnectionDecorator(object):
"""Wraps the real HTTPConnection class so that when you instantiate
the class you might instead get a fake instance.
"""
def __init__(self, wrapped):
self.wrapped = wrapped
def __call__(self, connection_host, *args, **kwargs):
if connection_host == host:
return FakeHttplibConnection(app, host)
else:
return self.wrapped(connection_host, *args, **kwargs)
oldHTTPConnection = httplib.HTTPConnection
httplib.HTTPConnection = HTTPConnectionDecorator(httplib.HTTPConnection)
return oldHTTPConnection
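# A minimal, hedged usage sketch for wire_HTTPConnection_to_WSGI; it mirrors the
# setUp/tearDown pattern used by WsgiLimiterProxyTest below. The host string and
# the `some_wsgi_app` argument are placeholders, not values used by these tests.
def _wire_httplib_sketch(some_wsgi_app):
    original = wire_HTTPConnection_to_WSGI("192.0.2.1:80", some_wsgi_app)
    try:
        # this connection is silently routed into some_wsgi_app
        conn = httplib.HTTPConnection("192.0.2.1:80")
        conn.request("POST", "/", body='{"verb": "GET", "path": "/anything"}')
        return conn.getresponse()
    finally:
        # restore the real HTTPConnection for all other hosts/tests
        httplib.HTTPConnection = original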
class WsgiLimiterProxyTest(BaseLimitTestSuite):
"""
Tests for the `limits.WsgiLimiterProxy` class.
"""
def setUp(self):
"""
Do some nifty HTTP/WSGI magic which allows for WSGI to be called
directly by something like the `httplib` library.
"""
super(WsgiLimiterProxyTest, self).setUp()
self.app = limits.WsgiLimiter(TEST_LIMITS)
self.oldHTTPConnection = (
wire_HTTPConnection_to_WSGI("169.254.0.1:80", self.app))
self.proxy = limits.WsgiLimiterProxy("169.254.0.1:80")
def test_200(self):
# Successful request test.
delay = self.proxy.check_for_delay("GET", "/anything")
self.assertEqual(delay, (None, None))
def test_403(self):
# Forbidden request test.
delay = self.proxy.check_for_delay("GET", "/delayed")
self.assertEqual(delay, (None, None))
delay, error = self.proxy.check_for_delay("GET", "/delayed")
error = error.strip()
expected = ("60.00", "403 Forbidden\n\nOnly 1 GET request(s) can be "
"made to /delayed every minute.")
self.assertEqual((delay, error), expected)
def tearDown(self):
# restore original HTTPConnection object
httplib.HTTPConnection = self.oldHTTPConnection
super(WsgiLimiterProxyTest, self).tearDown()
class LimitsViewBuilderTest(test.NoDBTestCase):
def setUp(self):
super(LimitsViewBuilderTest, self).setUp()
self.view_builder = views.limits.ViewBuilder()
self.rate_limits = [{"URI": "*",
"regex": ".*",
"value": 10,
"verb": "POST",
"remaining": 2,
"unit": "MINUTE",
"resetTime": 1311272226},
{"URI": "*/servers",
"regex": "^/servers",
"value": 50,
"verb": "POST",
"remaining": 10,
"unit": "DAY",
"resetTime": 1311272226}]
self.absolute_limits = {"metadata_items": 1,
"injected_files": 5,
"injected_file_content_bytes": 5}
def test_build_limits(self):
expected_limits = {"limits": {
"rate": [{
"uri": "*",
"regex": ".*",
"limit": [{"value": 10,
"verb": "POST",
"remaining": 2,
"unit": "MINUTE",
"next-available": "2011-07-21T18:17:06Z"}]},
{"uri": "*/servers",
"regex": "^/servers",
"limit": [{"value": 50,
"verb": "POST",
"remaining": 10,
"unit": "DAY",
"next-available": "2011-07-21T18:17:06Z"}]}],
"absolute": {"maxServerMeta": 1,
"maxImageMeta": 1,
"maxPersonality": 5,
"maxPersonalitySize": 5}}}
output = self.view_builder.build(self.rate_limits,
self.absolute_limits)
self.assertThat(output, matchers.DictMatches(expected_limits))
def test_build_limits_empty_limits(self):
expected_limits = {"limits": {"rate": [],
"absolute": {}}}
abs_limits = {}
rate_limits = []
output = self.view_builder.build(rate_limits, abs_limits)
self.assertThat(output, matchers.DictMatches(expected_limits))
class LimitsXMLSerializationTest(test.NoDBTestCase):
def test_xml_declaration(self):
serializer = limits.LimitsTemplate()
fixture = {"limits": {
"rate": [],
"absolute": {}}}
output = serializer.serialize(fixture)
has_dec = output.startswith("<?xml version='1.0' encoding='UTF-8'?>")
self.assertTrue(has_dec)
def test_index(self):
serializer = limits.LimitsTemplate()
fixture = {
"limits": {
"rate": [{
"uri": "*",
"regex": ".*",
"limit": [{
"value": 10,
"verb": "POST",
"remaining": 2,
"unit": "MINUTE",
"next-available": "2011-12-15T22:42:45Z"}]},
{"uri": "*/servers",
"regex": "^/servers",
"limit": [{
"value": 50,
"verb": "POST",
"remaining": 10,
"unit": "DAY",
"next-available": "2011-12-15T22:42:45Z"}]}],
"absolute": {"maxServerMeta": 1,
"maxImageMeta": 1,
"maxPersonality": 5,
"maxPersonalitySize": 10240}}}
output = serializer.serialize(fixture)
root = etree.XML(output)
xmlutil.validate_schema(root, 'limits')
#verify absolute limits
absolutes = root.xpath('ns:absolute/ns:limit', namespaces=NS)
self.assertEqual(len(absolutes), 4)
for limit in absolutes:
name = limit.get('name')
value = limit.get('value')
self.assertEqual(value, str(fixture['limits']['absolute'][name]))
#verify rate limits
rates = root.xpath('ns:rates/ns:rate', namespaces=NS)
self.assertEqual(len(rates), 2)
for i, rate in enumerate(rates):
for key in ['uri', 'regex']:
self.assertEqual(rate.get(key),
str(fixture['limits']['rate'][i][key]))
rate_limits = rate.xpath('ns:limit', namespaces=NS)
self.assertEqual(len(rate_limits), 1)
for j, limit in enumerate(rate_limits):
for key in ['verb', 'value', 'remaining', 'unit',
'next-available']:
self.assertEqual(limit.get(key),
str(fixture['limits']['rate'][i]['limit'][j][key]))
def test_index_no_limits(self):
serializer = limits.LimitsTemplate()
fixture = {"limits": {
"rate": [],
"absolute": {}}}
output = serializer.serialize(fixture)
root = etree.XML(output)
xmlutil.validate_schema(root, 'limits')
#verify absolute limits
absolutes = root.xpath('ns:absolute/ns:limit', namespaces=NS)
self.assertEqual(len(absolutes), 0)
#verify rate limits
rates = root.xpath('ns:rates/ns:rate', namespaces=NS)
self.assertEqual(len(rates), 0)
| apache-2.0 | 4,373,541,249,970,015,700 | 34.714286 | 79 | 0.530167 | false |
cmichal/python-social-auth | social/backends/strava.py | 70 | 1850 | """
Strava OAuth2 backend, docs at:
http://psa.matiasaguirre.net/docs/backends/strava.html
"""
from social.backends.oauth import BaseOAuth2
class StravaOAuth(BaseOAuth2):
name = 'strava'
AUTHORIZATION_URL = 'https://www.strava.com/oauth/authorize'
ACCESS_TOKEN_URL = 'https://www.strava.com/oauth/token'
ACCESS_TOKEN_METHOD = 'POST'
# Strava doesn't check for parameters in redirect_uri and directly appends
# the auth parameters to it, ending with an URL like:
# http://example.com/complete/strava?redirect_state=xxx?code=xxx&state=xxx
# Check issue #259 for details.
REDIRECT_STATE = False
REVOKE_TOKEN_URL = 'https://www.strava.com/oauth/deauthorize'
def get_user_id(self, details, response):
return response['athlete']['id']
def get_user_details(self, response):
"""Return user details from Strava account"""
# because there is no usernames on strava
username = response['athlete']['id']
email = response['athlete'].get('email', '')
fullname, first_name, last_name = self.get_user_names(
first_name=response['athlete'].get('firstname', ''),
last_name=response['athlete'].get('lastname', ''),
)
return {'username': str(username),
'fullname': fullname,
'first_name': first_name,
'last_name': last_name,
'email': email}
def user_data(self, access_token, *args, **kwargs):
"""Loads user data from service"""
return self.get_json('https://www.strava.com/api/v3/athlete',
params={'access_token': access_token})
def revoke_token_params(self, token, uid):
params = super(StravaOAuth, self).revoke_token_params(token, uid)
params['access_token'] = token
return params
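# Hedged configuration sketch (not part of the backend itself): python-social-auth
# backends are normally enabled via AUTHENTICATION_BACKENDS, and OAuth2 credentials
# follow the SOCIAL_AUTH_<BACKEND_NAME>_KEY / _SECRET naming convention -- for this
# backend ("strava") that would be:
#
#     AUTHENTICATION_BACKENDS = (
#         'social.backends.strava.StravaOAuth',
#         # ... other backends ...
#     )
#     SOCIAL_AUTH_STRAVA_KEY = '<client id>'
#     SOCIAL_AUTH_STRAVA_SECRET = '<client secret>'
#
# See the documentation URL at the top of this module for the authoritative details.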
| bsd-3-clause | 3,137,701,146,243,392,500 | 39.217391 | 78 | 0.622703 | false |
Voskrese/archlive.archldr | src/pypack/altgraph/Graph.py | 9 | 19562 | """
Base Graph class
#--Version 2.1
#--Bob Ippolito October, 2004
#--Version 2.0
#--Istvan Albert June, 2004
#--Version 1.0
#--Nathan Denny, May 27, 1999
"""
from altgraph import GraphError
from compat import *
class Graph(object):
"""
The Graph class represents a directed graph with C{N} nodes and C{E} edges.
Naming conventions:
    - the prefixes such as C{out}, C{inc} and C{all} will refer to methods
that operate on the outgoing, incoming or all edges of that node.
For example: L{inc_degree} will refer to the degree of the node
computed over the incoming edges (the number of neighbours linking to
the node).
- the prefixes such as C{forw} and C{back} will refer to the
orientation of the edges used in the method with respect to the node.
For example: L{forw_bfs} will start at the node then use the outgoing
edges to traverse the graph (goes forward).
"""
def __init__(self, edges=None):
"""
Initialization
"""
self.next_edge = 0
self.nodes, self.edges = {}, {}
self.hidden_edges, self.hidden_nodes = {}, {}
try:
# instantiate graph from iterable data
if edges:
cols = len(edges[0])
if cols == 2:
for head, tail in edges:
self.add_edge(head, tail)
elif cols == 3:
for head, tail, data in edges:
self.add_edge(head, tail, data)
except Exception, exc:
raise GraphError('%s -> Cannot create graph from edges=%s' %
(exc, edges))
def __repr__(self):
return '<Graph: %d nodes, %d edges>' % (
self.number_of_nodes(), self.number_of_edges())
def add_node(self, node, node_data=None):
"""
Creates a new node with a node. Arbitrary data can be attached to the
node via the node_data parameter. Adding the same node twice will be
silently ignored.
"""
#
# the nodes will contain tuples that will store incoming edges,
# outgoing edges and data
#
# index 0 -> incoming edges
# index 1 -> outgoing edges
if node not in self.nodes:
self.nodes[node] = ([], [], node_data)
def add_edge(self, head_id, tail_id, edge_data=1, create_nodes=True):
"""
Adds a directed edge going from head_id to tail_id.
Arbitrary data can be attached to the edge via edge_data.
        It may create the nodes if edges are added between non-existing ones.
@param head_id: head node
@param tail_id: tail node
@param edge_data: (optional) data attached to the edge
@param create_nodes: (optional) creates the head_id or tail_id node in case they did not exist
"""
        # shortcut
edge = self.next_edge
# add nodes if on automatic node creation
if create_nodes:
self.add_node(head_id)
self.add_node(tail_id)
# store edge information
self.edges[edge] = (head_id, tail_id, edge_data)
# update the corresponding incoming and outgoing lists in the nodes
# index 0 -> incoming edges
# index 1 -> outgoing edges
try:
self.nodes[tail_id][0].append(edge)
self.nodes[head_id][1].append(edge)
except KeyError:
raise GraphError('Invalid nodes %s -> %s' % (head_id, tail_id))
self.next_edge += 1
def hide_edge(self, edge):
"""
Hides an edge from the graph. The edge may be unhidden at some later
time.
"""
try:
head_id, tail_id, edge_data = self.hidden_edges[edge] = self.edges[edge]
self.nodes[tail_id][0].remove(edge)
self.nodes[head_id][1].remove(edge)
del self.edges[edge]
except KeyError:
raise GraphError('Invalid edge %s' % edge)
def hide_node(self, node):
"""
Hides a node from the graph. The incoming and outgoing edges of the
node will also be hidden. The node may be unhidden at some later time.
"""
try:
all_edges = self.all_edges(node)
self.hidden_nodes[node] = (self.nodes[node], all_edges)
for edge in all_edges:
self.hide_edge(edge)
del self.nodes[node]
except KeyError:
raise GraphError('Invalid node %s' % node)
def restore_node(self, node):
"""
Restores a previously hidden node back into the graph and restores
all of its incoming and outgoing edges.
"""
try:
self.nodes[node], all_edges = self.hidden_nodes[node]
for edge in all_edges:
self.restore_edge(edge)
del self.hidden_nodes[node]
except KeyError:
raise GraphError('Invalid node %s' % node)
def restore_edge(self, edge):
"""
Restores a previously hidden edge back into the graph.
"""
try:
self.edges[edge] = head_id, tail_id, data = self.hidden_edges[edge]
self.nodes[tail_id][0].append(edge)
self.nodes[head_id][1].append(edge)
del self.hidden_edges[edge]
except KeyError:
raise GraphError('Invalid edge %s' % edge)
def restore_all_edges(self):
"""
Restores all hidden edges.
"""
for edge in self.hidden_edges.keys():
self.restore_edge(edge)
def restore_all_nodes(self):
"""
Restores all hidden nodes.
"""
for node in self.hidden_nodes.keys():
self.restore_node(node)
def __contains__(self, node):
"""
Test whether a node is in the graph
"""
return node in self.nodes
def edge_by_id(self, edge):
"""
Returns the edge that connects the head_id and tail_id nodes
"""
try:
head, tail, data = self.edges[edge]
except KeyError:
head, tail = None, None
raise GraphError('Invalid edge %s' % edge)
return (head, tail)
def edge_by_node(self, head, tail):
"""
Returns the edge that connects the head_id and tail_id nodes
"""
for edge in self.out_edges(head):
if self.tail(edge) == tail:
return edge
return None
def number_of_nodes(self):
"""
Returns the number of nodes
"""
return len(self.nodes)
def number_of_edges(self):
"""
Returns the number of edges
"""
return len(self.edges)
def __iter__(self):
"""
Iterates over all nodes in the graph
"""
return iter(self.nodes)
def node_list(self):
"""
Return a list of the node ids for all visible nodes in the graph.
"""
return self.nodes.keys()
def edge_list(self):
"""
        Return a list of the edge ids for all visible edges in the graph.
"""
return self.edges.keys()
def number_of_hidden_edges(self):
"""
Returns the number of hidden edges
"""
return len(self.hidden_edges)
def number_of_hidden_nodes(self):
"""
Returns the number of hidden nodes
"""
return len(self.hidden_nodes)
def hidden_node_list(self):
"""
Returns the list with the hidden nodes
"""
return self.hidden_nodes.keys()
def hidden_edge_list(self):
"""
Returns a list with the hidden edges
"""
return self.hidden_edges.keys()
def describe_node(self, node):
"""
return node, node data, outgoing edges, incoming edges for node
"""
incoming, outgoing, data = self.nodes[node]
return node, data, outgoing, incoming
def describe_edge(self, edge):
"""
return edge, edge data, head, tail for edge
"""
head, tail, data = self.edges[edge]
return edge, data, head, tail
def node_data(self, node):
"""
Returns the data associated with a node
"""
return self.nodes[node][2]
def edge_data(self, edge):
"""
Returns the data associated with an edge
"""
return self.edges[edge][2]
def head(self, edge):
"""
Returns the node of the head of the edge.
"""
return self.edges[edge][0]
def tail(self, edge):
"""
Returns node of the tail of the edge.
"""
return self.edges[edge][1]
def out_nbrs(self, node):
"""
List of nodes connected by outgoing edges
"""
return map(self.tail, self.out_edges(node))
def inc_nbrs(self, node):
"""
List of nodes connected by incoming edges
"""
return map(self.head, self.inc_edges(node))
def all_nbrs(self, node):
"""
List of nodes connected by incoming and outgoing edges
"""
return self.inc_nbrs(node) + self.out_nbrs(node)
def out_edges(self, node):
"""
Returns a list of the outgoing edges
"""
try:
return list(self.nodes[node][1])
except KeyError:
raise GraphError('Invalid node %s' % node)
return None
def inc_edges(self, node):
"""
Returns a list of the incoming edges
"""
try:
return list(self.nodes[node][0])
except KeyError:
raise GraphError('Invalid node %s' % node)
return None
def all_edges(self, node):
"""
        Returns a set of the incoming and outgoing edges.
"""
return set(self.inc_edges(node) + self.out_edges(node))
def out_degree(self, node):
"""
Returns the number of outgoing edges
"""
return len(self.out_edges(node))
def inc_degree(self, node):
"""
Returns the number of incoming edges
"""
return len(self.inc_edges(node))
def all_degree(self, node):
"""
The total degree of a node
"""
return self.inc_degree(node) + self.out_degree(node)
def _topo_sort(self, forward=True):
"""
Topological sort.
Returns a list of nodes where the successors (based on outgoing and
incoming edges selected by the forward parameter) of any given node
appear in the sequence after that node.
"""
topo_list = []
queue = deque()
indeg = {}
# select the operation that will be performed
if forward:
get_edges = self.out_edges
get_degree = self.inc_degree
else:
get_edges = self.inc_edges
get_degree = self.out_degree
for node in self.node_list():
degree = get_degree(node)
if degree:
indeg[node] = degree
else:
queue.append(node)
while queue:
curr_node = queue.popleft()
topo_list.append(curr_node)
for edge in get_edges(curr_node):
tail_id = self.tail(edge)
indeg[tail_id] -= 1
if indeg[tail_id] == 0:
queue.append(tail_id)
if len(topo_list) == len(self.node_list()):
valid = True
else:
# the graph has cycles, invalid topological sort
valid = False
return (valid, topo_list)
def forw_topo_sort(self):
"""
Topological sort.
Returns a list of nodes where the successors (based on outgoing edges)
of any given node appear in the sequence after that node.
"""
return self._topo_sort(forward=True)
def back_topo_sort(self):
"""
Reverse topological sort.
Returns a list of nodes where the successors (based on incoming edges)
of any given node appear in the sequence after that node.
"""
return self._topo_sort(forward=False)
def _bfs_subgraph(self, start_id, forward=True):
"""
Private method creates a subgraph in a bfs order.
The forward parameter specifies whether it is a forward or backward
traversal.
"""
if forward:
get_bfs = self.forw_bfs
get_nbrs = self.out_nbrs
else:
get_bfs = self.back_bfs
get_nbrs = self.inc_nbrs
g = Graph()
bfs_list = get_bfs(start_id)
for (hop_num, node) in bfs_list:
g.add_node(node)
for (hop_num, node) in bfs_list:
for nbr_id in get_nbrs(node):
g.add_edge(node, nbr_id)
return g
def forw_bfs_subgraph(self, start_id):
"""
Creates and returns a subgraph consisting of the breadth first
reachable nodes based on their outgoing edges.
"""
return self._bfs_subgraph(start_id, forward=True)
def back_bfs_subgraph(self, start_id):
"""
Creates and returns a subgraph consisting of the breadth first
reachable nodes based on the incoming edges.
"""
        return self._bfs_subgraph(start_id, forward=False)
def iterdfs(self, start, end=None, forward=True):
"""
Collecting nodes in some depth first traversal.
The forward parameter specifies whether it is a forward or backward
traversal.
"""
visited, stack = set([start]), deque([start])
if forward:
get_edges = self.out_edges
else:
get_edges = self.inc_edges
while stack:
curr_node = stack.pop()
yield curr_node
if curr_node == end:
break
for edge in get_edges(curr_node):
tail = self.tail(edge)
if tail not in visited:
visited.add(tail)
stack.append(tail)
def iterdata(self, start, end=None, forward=True, condition=None):
visited, stack = set([start]), deque([start])
if forward:
get_edges = self.out_edges
else:
get_edges = self.inc_edges
get_data = self.node_data
while stack:
curr_node = stack.pop()
curr_data = get_data(curr_node)
if curr_data is not None:
if condition is not None and not condition(curr_data):
continue
yield curr_data
if curr_node == end:
break
for edge in get_edges(curr_node):
tail = self.tail(edge)
if tail not in visited:
visited.add(tail)
stack.append(tail)
def _dfs(self, start, end=None, forward=True):
return list(self.iterdfs(start, end=end, forward=forward))
def _iterbfs(self, start, end=None, forward=True):
"""
Private method, collecting nodes in some breadth first traversal.
The forward parameter specifies whether it is a forward or backward
traversal. Returns a list of tuples where the first value is the hop
value the second value is the node id.
"""
queue, visited = deque([(start, 0)]), set([start])
# the direction of the bfs depends on the edges that are sampled
if forward:
get_edges = self.out_edges
else:
get_edges = self.inc_edges
while queue:
curr_node, curr_step = queue.popleft()
yield (curr_node, curr_step)
if curr_node == end:
break
for edge in get_edges(curr_node):
tail = self.tail(edge)
if tail not in visited:
visited.add(tail)
queue.append((tail, curr_step + 1))
def _bfs(self, start, end=None, forward=True):
return list(self._iterbfs(start, end=end, forward=forward))
def forw_bfs(self, start, end=None):
"""
Returns a list of nodes in some forward BFS order.
Starting from the start node the breadth first search proceeds along
outgoing edges.
"""
return [node for node, step in self._bfs(start, end, forward=True)]
def back_bfs(self, start, end=None):
"""
Returns a list of nodes in some backward BFS order.
Starting from the start node the breadth first search proceeds along
incoming edges.
"""
return [node for node, step in self._bfs(start, end, forward=False)]
def forw_dfs(self, start, end=None):
"""
Returns a list of nodes in some forward DFS order.
Starting with the start node the depth first search proceeds along
outgoing edges.
"""
return self._dfs(start, end, forward=True)
def back_dfs(self, start, end=None):
"""
Returns a list of nodes in some backward DFS order.
Starting from the start node the depth first search proceeds along
incoming edges.
"""
return self._dfs(start, end, forward=False)
def connected(self):
"""
        Returns C{True} if every node in the graph can be reached from every
        other node.
"""
node_list = self.node_list()
for node in node_list:
bfs_list = self.forw_bfs(node)
if len(bfs_list) != len(node_list):
return False
return True
def clust_coef(self, node):
"""
        Computes and returns the clustering coefficient of node. The clustering
        coefficient is the number of edges that actually exist between the
        node's neighbours divided by the maximum possible number of such edges
        (C{k * (k - 1)} for C{k} neighbours), i.e. how close the neighbourhood
        is to being a clique.
"""
num = 0
nbr_set = set(self.out_nbrs(node))
        nbr_set.discard(node)  # loop defense: drop a self-loop if present
for nbr in nbr_set:
sec_set = set(self.out_nbrs(nbr))
            sec_set.discard(nbr)  # loop defense: drop a self-loop if present
num += len(nbr_set & sec_set)
nbr_num = len(nbr_set)
if nbr_num:
clust_coef = float(num) / (nbr_num * (nbr_num - 1))
else:
clust_coef = 0.0
return clust_coef
def get_hops(self, start, end=None, forward=True):
"""
Computes the hop distance to all nodes centered around a specified node.
First order neighbours are at hop 1, their neigbours are at hop 2 etc.
Uses L{forw_bfs} or L{back_bfs} depending on the value of the forward
parameter. If the distance between all neighbouring nodes is 1 the hop
number corresponds to the shortest distance between the nodes.
@param start: the starting node
@param end: ending node (optional). When not specified will search the whole graph.
@param forward: directionality parameter (optional). If C{True} (default) it uses L{forw_bfs} otherwise L{back_bfs}.
@return: returns a list of tuples where each tuple contains the node and the hop.
Typical usage::
>>> print graph.get_hops(1, 8)
>>> [(1, 0), (2, 1), (3, 1), (4, 2), (5, 3), (7, 4), (8, 5)]
# node 1 is at 0 hops
# node 2 is at 1 hop
# ...
# node 8 is at 5 hops
"""
if forward:
return self._bfs(start=start, end=end, forward=True)
else:
return self._bfs(start=start, end=end, forward=False)
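# Minimal usage sketch for the Graph class above: build a small directed graph
# from an edge list and traverse it in both directions. The exact BFS ordering
# among siblings may differ from the example values shown in the comments.
def _graph_usage_sketch():
    g = Graph([(1, 2), (2, 3), (2, 4), (4, 5)])
    assert g.number_of_nodes() == 5
    assert g.number_of_edges() == 4
    forward = g.forw_bfs(1)     # follows outgoing edges, e.g. [1, 2, 3, 4, 5]
    backward = g.back_bfs(5)    # follows incoming edges, e.g. [5, 4, 2, 1]
    hops = g.get_hops(1)        # [(node, hop), ...] centred on node 1
    return forward, backward, hops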
| gpl-2.0 | 4,889,670,197,461,690,000 | 30.249201 | 124 | 0.55025 | false |
smcantab/pele | pele/potentials/gminpotential.py | 5 | 1703 | from pele.potentials import BasePotential
import numpy as np
__all__ = ["GMINPotential"]
class GMINPotential(BasePotential): # pragma: no cover
"""
Interface to fortran GMIN potential
Potentials implemented in GMIN can be called from python if GMIN is compiled with the flag WITH_PYTHON enabled. This creates
python modules (dynamic libraries). However, the interface is still very rough and GMINPotential provides a wrapper for
easy access to GMIN.
The imported GMIN module requires a data file to be present in the current directory. All parameters except for the ones
    responsible for setting up the potential will be ignored and can be skipped. The first call after importing the module should be
initialize.
Attributes
----------
GMIN :
reference to the gmin module
Examples
--------
The following example imports the GMIN python interface and evaluates the energy
>>> import gmin_
>>>
>>> gmin_.initialize() # finish gmin initialization
>>> pot = GMINPotential(gmin_)
>>>
>>> coords = pot.getCoords()
>>> pot.getEnergy(coords)
"""
def __init__(self, GMIN):
"""
Constructor
"""
self.GMIN = GMIN
self.ncalls = 0
def getEnergy(self, coords):
self.ncalls += 1
return self.GMIN.getEnergy(coords)
def getEnergyGradient(self, coords):
self.ncalls += 1
grad = np.zeros(3 * self.GMIN.getNAtoms())
E = self.GMIN.getEnergyGradient(coords, grad)
return E, grad[0:coords.size]
def getCoords(self):
coords = np.zeros(self.GMIN.getDOF())
self.GMIN.getCoords(coords)
return coords
| gpl-3.0 | 3,580,945,908,551,981,000 | 27.864407 | 128 | 0.650029 | false |
Lilywei123/tempest | tempest/api/compute/admin/test_flavors_extra_specs.py | 3 | 5162 | # Copyright 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest.api.compute import base
from tempest.common.utils import data_utils
from tempest import test
class FlavorsExtraSpecsTestJSON(base.BaseV2ComputeAdminTest):
"""
Tests Flavor Extra Spec API extension.
SET, UNSET, UPDATE Flavor Extra specs require admin privileges.
GET Flavor Extra specs can be performed even by without admin privileges.
"""
@classmethod
def resource_setup(cls):
super(FlavorsExtraSpecsTestJSON, cls).resource_setup()
if not test.is_extension_enabled('OS-FLV-EXT-DATA', 'compute'):
msg = "OS-FLV-EXT-DATA extension not enabled."
raise cls.skipException(msg)
cls.client = cls.os_adm.flavors_client
flavor_name = data_utils.rand_name('test_flavor')
ram = 512
vcpus = 1
disk = 10
ephemeral = 10
cls.new_flavor_id = data_utils.rand_int_id(start=1000)
swap = 1024
rxtx = 1
# Create a flavor so as to set/get/unset extra specs
resp, cls.flavor = cls.client.create_flavor(flavor_name,
ram, vcpus,
disk,
cls.new_flavor_id,
ephemeral=ephemeral,
swap=swap, rxtx=rxtx)
@classmethod
def resource_cleanup(cls):
resp, body = cls.client.delete_flavor(cls.flavor['id'])
cls.client.wait_for_resource_deletion(cls.flavor['id'])
super(FlavorsExtraSpecsTestJSON, cls).resource_cleanup()
@test.attr(type='gate')
def test_flavor_set_get_update_show_unset_keys(self):
# Test to SET, GET, UPDATE, SHOW, UNSET flavor extra
# spec as a user with admin privileges.
# Assigning extra specs values that are to be set
specs = {"key1": "value1", "key2": "value2"}
# SET extra specs to the flavor created in setUp
set_resp, set_body = \
self.client.set_flavor_extra_spec(self.flavor['id'], specs)
self.assertEqual(set_resp.status, 200)
self.assertEqual(set_body, specs)
# GET extra specs and verify
get_resp, get_body = \
self.client.get_flavor_extra_spec(self.flavor['id'])
self.assertEqual(get_resp.status, 200)
self.assertEqual(get_body, specs)
# UPDATE the value of the extra specs key1
update_resp, update_body = \
self.client.update_flavor_extra_spec(self.flavor['id'],
"key1",
key1="value")
self.assertEqual(update_resp.status, 200)
self.assertEqual({"key1": "value"}, update_body)
# GET extra specs and verify the value of the key2
# is the same as before
get_resp, get_body = \
self.client.get_flavor_extra_spec(self.flavor['id'])
self.assertEqual(get_resp.status, 200)
self.assertEqual(get_body, {"key1": "value", "key2": "value2"})
# UNSET extra specs that were set in this test
unset_resp, _ = \
self.client.unset_flavor_extra_spec(self.flavor['id'], "key1")
self.assertEqual(unset_resp.status, 200)
unset_resp, _ = \
self.client.unset_flavor_extra_spec(self.flavor['id'], "key2")
self.assertEqual(unset_resp.status, 200)
@test.attr(type='gate')
def test_flavor_non_admin_get_all_keys(self):
specs = {"key1": "value1", "key2": "value2"}
set_resp, set_body = self.client.set_flavor_extra_spec(
self.flavor['id'], specs)
resp, body = self.flavors_client.get_flavor_extra_spec(
self.flavor['id'])
self.assertEqual(resp.status, 200)
for key in specs:
self.assertEqual(body[key], specs[key])
@test.attr(type='gate')
def test_flavor_non_admin_get_specific_key(self):
specs = {"key1": "value1", "key2": "value2"}
resp, body = self.client.set_flavor_extra_spec(
self.flavor['id'], specs)
self.assertEqual(resp.status, 200)
self.assertEqual(body['key1'], 'value1')
self.assertIn('key2', body)
resp, body = self.flavors_client.get_flavor_extra_spec_with_key(
self.flavor['id'], 'key1')
self.assertEqual(resp.status, 200)
self.assertEqual(body['key1'], 'value1')
self.assertNotIn('key2', body)
| apache-2.0 | -8,602,827,174,891,453,000 | 40.96748 | 78 | 0.5926 | false |
openstack/python-muranoclient | doc/source/conf.py | 1 | 2827 | # -*- coding: utf-8 -*-
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
# -- General configuration ----------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc',
'openstackdocstheme',]
# autodoc generation is a bit aggressive and a nuisance when doing heavy
# text edit cycles.
# execute "export SPHINX_DEBUG=1" in your terminal to disable
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# General information about the project.
copyright = u'OpenStack Foundation'
exclude_trees = ['api']
# If true, '()' will be appended to :func: etc. cross-reference text.
add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
add_module_names = True
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'native'
# -- Options for HTML output --------------------------------------------------
# The theme to use for HTML and HTML Help pages. Major themes that come with
# Sphinx are currently 'default' and 'sphinxdoc'.
html_theme = 'openstackdocs'
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = ['_theme']
#html_theme_path = [openstackdocstheme.get_html_theme_path()]
# openstackdocstheme options
openstackdocs_repo_name = 'openstack/python-muranoclient'
openstackdocs_bug_project = 'python-muranoclient'
openstackdocs_bug_tag = ''
# Output file base name for HTML help builder.
htmlhelp_basename = 'python-muranoclientdoc'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass
# [howto/manual]).
latex_documents = [
(
'index',
'python-muranoclient.tex',
u'python-muranoclient Documentation',
u'OpenStack Foundation',
'manual'
),
]
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'http://docs.python.org/': None}
| apache-2.0 | -908,016,960,662,793,100 | 33.060241 | 79 | 0.702158 | false |
betoesquivel/fil2014 | build/django/build/lib.linux-x86_64-2.7/django/templatetags/future.py | 130 | 1640 | from django.template import Library
from django.template import defaulttags
register = Library()
@register.tag
def ssi(parser, token):
# Used for deprecation path during 1.3/1.4, will be removed in 2.0
return defaulttags.ssi(parser, token)
@register.tag
def url(parser, token):
# Used for deprecation path during 1.3/1.4, will be removed in 2.0
return defaulttags.url(parser, token)
@register.tag
def cycle(parser, token):
"""
This is the future version of `cycle` with auto-escaping.
By default all strings are escaped.
If you want to disable auto-escaping of variables you can use::
{% autoescape off %}
{% cycle var1 var2 var3 as somecycle %}
{% autoescape %}
Or if only some variables should be escaped, you can use::
{% cycle var1 var2|safe var3|safe as somecycle %}
"""
return defaulttags.cycle(parser, token, escape=True)
@register.tag
def firstof(parser, token):
"""
This is the future version of `firstof` with auto-escaping.
This is equivalent to::
{% if var1 %}
{{ var1 }}
{% elif var2 %}
{{ var2 }}
{% elif var3 %}
{{ var3 }}
{% endif %}
If you want to disable auto-escaping of variables you can use::
{% autoescape off %}
{% firstof var1 var2 var3 "<strong>fallback value</strong>" %}
{% autoescape %}
Or if only some variables should be escaped, you can use::
{% firstof var1 var2|safe var3 "<strong>fallback value</strong>"|safe %}
"""
return defaulttags.firstof(parser, token, escape=True)
| mit | -4,415,674,575,184,406,500 | 24.230769 | 80 | 0.622561 | false |
joopert/home-assistant | homeassistant/components/brunt/cover.py | 4 | 5263 | """Support for Brunt Blind Engine covers."""
import logging
from brunt import BruntAPI
import voluptuous as vol
from homeassistant.components.cover import (
ATTR_POSITION,
PLATFORM_SCHEMA,
SUPPORT_CLOSE,
SUPPORT_OPEN,
SUPPORT_SET_POSITION,
CoverDevice,
)
from homeassistant.const import ATTR_ATTRIBUTION, CONF_PASSWORD, CONF_USERNAME
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
COVER_FEATURES = SUPPORT_OPEN | SUPPORT_CLOSE | SUPPORT_SET_POSITION
DEVICE_CLASS = "window"
ATTR_REQUEST_POSITION = "request_position"
NOTIFICATION_ID = "brunt_notification"
NOTIFICATION_TITLE = "Brunt Cover Setup"
ATTRIBUTION = "Based on an unofficial Brunt SDK."
CLOSED_POSITION = 0
OPEN_POSITION = 100
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{vol.Required(CONF_USERNAME): cv.string, vol.Required(CONF_PASSWORD): cv.string}
)
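# Configuration sketch matching the schema above; the values are placeholders
# and the snippet belongs in configuration.yaml rather than in this module.
#
# cover:
#   - platform: brunt
#     username: [email protected]
#     password: YOUR_PASSWORD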
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the brunt platform."""
username = config[CONF_USERNAME]
password = config[CONF_PASSWORD]
bapi = BruntAPI(username=username, password=password)
try:
things = bapi.getThings()["things"]
if not things:
_LOGGER.error("No things present in account.")
else:
add_entities(
[
BruntDevice(bapi, thing["NAME"], thing["thingUri"])
for thing in things
],
True,
)
except (TypeError, KeyError, NameError, ValueError) as ex:
_LOGGER.error("%s", ex)
hass.components.persistent_notification.create(
"Error: {}<br />"
"You will need to restart hass after fixing."
"".format(ex),
title=NOTIFICATION_TITLE,
notification_id=NOTIFICATION_ID,
)
class BruntDevice(CoverDevice):
"""
Representation of a Brunt cover device.
Contains the common logic for all Brunt devices.
"""
def __init__(self, bapi, name, thing_uri):
"""Init the Brunt device."""
self._bapi = bapi
self._name = name
self._thing_uri = thing_uri
self._state = {}
self._available = None
@property
def name(self):
"""Return the name of the device as reported by tellcore."""
return self._name
@property
def available(self):
"""Could the device be accessed during the last update call."""
return self._available
@property
def current_cover_position(self):
"""
Return current position of cover.
None is unknown, 0 is closed, 100 is fully open.
"""
pos = self._state.get("currentPosition")
return int(pos) if pos else None
@property
def request_cover_position(self):
"""
Return request position of cover.
        The request position is the position of the last request
        sent to Brunt; at times it differs from the current position by 1.
None is unknown, 0 is closed, 100 is fully open.
"""
pos = self._state.get("requestPosition")
return int(pos) if pos else None
@property
def move_state(self):
"""
Return current moving state of cover.
None is unknown, 0 when stopped, 1 when opening, 2 when closing
"""
mov = self._state.get("moveState")
return int(mov) if mov else None
@property
def is_opening(self):
"""Return if the cover is opening or not."""
return self.move_state == 1
@property
def is_closing(self):
"""Return if the cover is closing or not."""
return self.move_state == 2
@property
def device_state_attributes(self):
"""Return the detailed device state attributes."""
return {
ATTR_ATTRIBUTION: ATTRIBUTION,
ATTR_REQUEST_POSITION: self.request_cover_position,
}
@property
def device_class(self):
"""Return the class of this device, from component DEVICE_CLASSES."""
return DEVICE_CLASS
@property
def supported_features(self):
"""Flag supported features."""
return COVER_FEATURES
@property
def is_closed(self):
"""Return true if cover is closed, else False."""
return self.current_cover_position == CLOSED_POSITION
def update(self):
"""Poll the current state of the device."""
try:
self._state = self._bapi.getState(thingUri=self._thing_uri).get("thing")
self._available = True
except (TypeError, KeyError, NameError, ValueError) as ex:
_LOGGER.error("%s", ex)
self._available = False
def open_cover(self, **kwargs):
"""Set the cover to the open position."""
self._bapi.changeRequestPosition(OPEN_POSITION, thingUri=self._thing_uri)
def close_cover(self, **kwargs):
"""Set the cover to the closed position."""
self._bapi.changeRequestPosition(CLOSED_POSITION, thingUri=self._thing_uri)
def set_cover_position(self, **kwargs):
"""Set the cover to a specific position."""
self._bapi.changeRequestPosition(
kwargs[ATTR_POSITION], thingUri=self._thing_uri
)
| apache-2.0 | -705,078,675,644,631,000 | 28.402235 | 84 | 0.614668 | false |
StamusNetworks/scirius | setup.py | 1 | 1133 | import os
from setuptools import setup
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as readme:
README = readme.read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='scirius',
version='2.0',
packages=['scirius','rules','suricata', 'accounts', 'viz'],
scripts=['manage.py'],
include_package_data=True,
description='A web interface to manage Suricata rulesets',
long_description=README,
url='https://www.stamus-networks.com/open-source/#scirius',
author='Eric Leblond',
author_email='[email protected]',
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: GNU General Public License v3 (GPLv3)',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
)
| gpl-3.0 | 6,940,178,123,591,757,000 | 34.40625 | 78 | 0.639011 | false |
cfriedt/gnuradio | gr-digital/python/digital/ofdm_txrx.py | 27 | 20975 | #
# Copyright 2013 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
"""
OFDM Transmitter / Receiver hier blocks.
For simple configurations, no need to connect all the relevant OFDM blocks
to form an OFDM Tx/Rx--simply use these.
"""
# Reminder: All frequency-domain stuff is in shifted form, i.e. DC carrier
# in the middle!
import numpy
from gnuradio import gr
import digital_swig as digital
from utils import tagged_streams
try:
# This will work when feature #505 is added.
from gnuradio import fft
from gnuradio import blocks
from gnuradio import analog
except ImportError:
# Until then this will work.
import fft_swig as fft
import blocks_swig as blocks
import analog_swig as analog
_def_fft_len = 64
_def_cp_len = 16
_def_frame_length_tag_key = "frame_length"
_def_packet_length_tag_key = "packet_length"
_def_packet_num_tag_key = "packet_num"
# Data and pilot carriers are same as in 802.11a
_def_occupied_carriers = (range(-26, -21) + range(-20, -7) + range(-6, 0) + range(1, 7) + range(8, 21) + range(22, 27),)
_def_pilot_carriers=((-21, -7, 7, 21,),)
_pilot_sym_scramble_seq = (
1,1,1,1, -1,-1,-1,1, -1,-1,-1,-1, 1,1,-1,1, -1,-1,1,1, -1,1,1,-1, 1,1,1,1, 1,1,-1,1,
1,1,-1,1, 1,-1,-1,1, 1,1,-1,1, -1,-1,-1,1, -1,1,-1,-1, 1,-1,-1,1, 1,1,1,1, -1,-1,1,1,
-1,-1,1,-1, 1,-1,1,1, -1,-1,-1,1, 1,-1,-1,-1, -1,1,-1,-1, 1,-1,1,1, 1,1,-1,1, -1,1,-1,1,
-1,-1,-1,-1, -1,1,-1,1, 1,-1,1,-1, 1,1,1,-1, -1,1,-1,-1, -1,1,1,1, -1,-1,-1,-1, -1,-1,-1
)
_def_pilot_symbols= tuple([(x, x, x, -x) for x in _pilot_sym_scramble_seq])
_seq_seed = 42
def _get_active_carriers(fft_len, occupied_carriers, pilot_carriers):
""" Returns a list of all carriers that at some point carry data or pilots. """
active_carriers = list()
for carrier in list(occupied_carriers[0]) + list(pilot_carriers[0]):
if carrier < 0:
carrier += fft_len
active_carriers.append(carrier)
return active_carriers
def _make_sync_word1(fft_len, occupied_carriers, pilot_carriers):
""" Creates a random sync sequence for fine frequency offset and timing
estimation. This is the first of typically two sync preamble symbols
for the Schmidl & Cox sync algorithm.
    The relevant feature of this symbol is that every second sub-carrier
    is zero. In the time domain, this results in two identical halves of
    the OFDM symbol.
Symbols are always BPSK symbols. Carriers are scaled by sqrt(2) to keep
total energy constant.
Carrier 0 (DC carrier) is always zero. If used, carrier 1 is non-zero.
This means the sync algorithm has to check on odd carriers!
"""
active_carriers = _get_active_carriers(fft_len, occupied_carriers, pilot_carriers)
numpy.random.seed(_seq_seed)
bpsk = {0: numpy.sqrt(2), 1: -numpy.sqrt(2)}
sw1 = [bpsk[numpy.random.randint(2)] if x in active_carriers and x % 2 else 0 for x in range(fft_len)]
return numpy.fft.fftshift(sw1)
def _make_sync_word2(fft_len, occupied_carriers, pilot_carriers):
""" Creates a random sync sequence for coarse frequency offset and channel
estimation. This is the second of typically two sync preamble symbols
for the Schmidl & Cox sync algorithm.
Symbols are always BPSK symbols.
"""
active_carriers = _get_active_carriers(fft_len, occupied_carriers, pilot_carriers)
numpy.random.seed(_seq_seed)
bpsk = {0: 1, 1: -1}
sw2 = [bpsk[numpy.random.randint(2)] if x in active_carriers else 0 for x in range(fft_len)]
sw2[0] = 0j
return numpy.fft.fftshift(sw2)
def _get_constellation(bps):
""" Returns a modulator block for a given number of bits per symbol """
constellation = {
1: digital.constellation_bpsk(),
2: digital.constellation_qpsk(),
3: digital.constellation_8psk()
}
try:
return constellation[bps]
except KeyError:
print 'Modulation not supported.'
exit(1)
class ofdm_tx(gr.hier_block2):
"""Hierarchical block for OFDM modulation.
The input is a byte stream (unsigned char) and the
output is the complex modulated signal at baseband.
Args:
fft_len: The length of FFT (integer).
cp_len: The length of cyclic prefix in total samples (integer).
packet_length_tag_key: The name of the tag giving packet length at the input.
occupied_carriers: A vector of vectors describing which OFDM carriers are occupied.
pilot_carriers: A vector of vectors describing which OFDM carriers are occupied with pilot symbols.
pilot_symbols: The pilot symbols.
bps_header: Bits per symbol (header).
bps_payload: Bits per symbol (payload).
sync_word1: The first sync preamble symbol. This has to be with
| zeros on alternating carriers. Used for fine and
| coarse frequency offset and timing estimation.
sync_word2: The second sync preamble symbol. This has to be filled
| entirely. Also used for coarse frequency offset and
| channel estimation.
rolloff: The rolloff length in samples. Must be smaller than the CP.
debug_log: Write output into log files (Warning: creates lots of data!)
scramble_bits: Activates the scramblers (set this to True unless debugging)
"""
def __init__(self, fft_len=_def_fft_len, cp_len=_def_cp_len,
packet_length_tag_key=_def_packet_length_tag_key,
occupied_carriers=_def_occupied_carriers,
pilot_carriers=_def_pilot_carriers,
pilot_symbols=_def_pilot_symbols,
bps_header=1,
bps_payload=1,
sync_word1=None,
sync_word2=None,
rolloff=0,
debug_log=False,
scramble_bits=False
):
gr.hier_block2.__init__(self, "ofdm_tx",
gr.io_signature(1, 1, gr.sizeof_char),
gr.io_signature(1, 1, gr.sizeof_gr_complex))
### Param init / sanity check ########################################
self.fft_len = fft_len
self.cp_len = cp_len
self.packet_length_tag_key = packet_length_tag_key
self.occupied_carriers = occupied_carriers
self.pilot_carriers = pilot_carriers
self.pilot_symbols = pilot_symbols
self.bps_header = bps_header
self.bps_payload = bps_payload
self.sync_word1 = sync_word1
if sync_word1 is None:
self.sync_word1 = _make_sync_word1(fft_len, occupied_carriers, pilot_carriers)
else:
if len(sync_word1) != self.fft_len:
raise ValueError("Length of sync sequence(s) must be FFT length.")
self.sync_words = [self.sync_word1,]
if sync_word2 is None:
self.sync_word2 = _make_sync_word2(fft_len, occupied_carriers, pilot_carriers)
else:
self.sync_word2 = sync_word2
if len(self.sync_word2):
if len(self.sync_word2) != fft_len:
raise ValueError("Length of sync sequence(s) must be FFT length.")
self.sync_word2 = list(self.sync_word2)
self.sync_words.append(self.sync_word2)
if scramble_bits:
self.scramble_seed = 0x7f
else:
self.scramble_seed = 0x00 # We deactivate the scrambler by init'ing it with zeros
### Header modulation ################################################
crc = digital.crc32_bb(False, self.packet_length_tag_key)
header_constellation = _get_constellation(bps_header)
header_mod = digital.chunks_to_symbols_bc(header_constellation.points())
formatter_object = digital.packet_header_ofdm(
occupied_carriers=occupied_carriers, n_syms=1,
bits_per_header_sym=self.bps_header,
bits_per_payload_sym=self.bps_payload,
scramble_header=scramble_bits
)
header_gen = digital.packet_headergenerator_bb(formatter_object.base(), self.packet_length_tag_key)
header_payload_mux = blocks.tagged_stream_mux(
itemsize=gr.sizeof_gr_complex*1,
lengthtagname=self.packet_length_tag_key,
tag_preserve_head_pos=1 # Head tags on the payload stream stay on the head
)
self.connect(
self,
crc,
header_gen,
header_mod,
(header_payload_mux, 0)
)
if debug_log:
self.connect(header_gen, blocks.file_sink(1, 'tx-hdr.dat'))
### Payload modulation ###############################################
payload_constellation = _get_constellation(bps_payload)
payload_mod = digital.chunks_to_symbols_bc(payload_constellation.points())
payload_scrambler = digital.additive_scrambler_bb(
0x8a,
self.scramble_seed,
7,
0, # Don't reset after fixed length (let the reset tag do that)
bits_per_byte=8, # This is before unpacking
reset_tag_key=self.packet_length_tag_key
)
payload_unpack = blocks.repack_bits_bb(
8, # Unpack 8 bits per byte
bps_payload,
self.packet_length_tag_key
)
self.connect(
crc,
payload_scrambler,
payload_unpack,
payload_mod,
(header_payload_mux, 1)
)
### Create OFDM frame ################################################
allocator = digital.ofdm_carrier_allocator_cvc(
self.fft_len,
occupied_carriers=self.occupied_carriers,
pilot_carriers=self.pilot_carriers,
pilot_symbols=self.pilot_symbols,
sync_words=self.sync_words,
len_tag_key=self.packet_length_tag_key
)
ffter = fft.fft_vcc(
self.fft_len,
False, # Inverse FFT
(), # No window
True # Shift
)
cyclic_prefixer = digital.ofdm_cyclic_prefixer(
self.fft_len,
self.fft_len+self.cp_len,
rolloff,
self.packet_length_tag_key
)
self.connect(header_payload_mux, allocator, ffter, cyclic_prefixer, self)
if debug_log:
self.connect(allocator, blocks.file_sink(gr.sizeof_gr_complex * fft_len, 'tx-post-allocator.dat'))
self.connect(cyclic_prefixer, blocks.file_sink(gr.sizeof_gr_complex, 'tx-signal.dat'))
class ofdm_rx(gr.hier_block2):
"""Hierarchical block for OFDM demodulation.
The input is a complex baseband signal (e.g. from a UHD source).
The detected packets are output as a stream of packed bits on the output.
Args:
fft_len: The length of FFT (integer).
cp_len: The length of cyclic prefix in total samples (integer).
frame_length_tag_key: Used internally to tag the length of the OFDM frame.
packet_length_tag_key: The name of the tag giving packet length at the input.
occupied_carriers: A vector of vectors describing which OFDM carriers are occupied.
pilot_carriers: A vector of vectors describing which OFDM carriers are occupied with pilot symbols.
pilot_symbols: The pilot symbols.
bps_header: Bits per symbol (header).
bps_payload: Bits per symbol (payload).
sync_word1: The first sync preamble symbol. This has to be with
| zeros on alternating carriers. Used for fine and
| coarse frequency offset and timing estimation.
sync_word2: The second sync preamble symbol. This has to be filled
| entirely. Also used for coarse frequency offset and
| channel estimation.
"""
def __init__(self, fft_len=_def_fft_len, cp_len=_def_cp_len,
frame_length_tag_key=_def_frame_length_tag_key,
packet_length_tag_key=_def_packet_length_tag_key,
packet_num_tag_key=_def_packet_num_tag_key,
occupied_carriers=_def_occupied_carriers,
pilot_carriers=_def_pilot_carriers,
pilot_symbols=_def_pilot_symbols,
bps_header=1,
bps_payload=1,
sync_word1=None,
sync_word2=None,
debug_log=False,
scramble_bits=False
):
gr.hier_block2.__init__(self, "ofdm_rx",
gr.io_signature(1, 1, gr.sizeof_gr_complex),
gr.io_signature(1, 1, gr.sizeof_char))
### Param init / sanity check ########################################
self.fft_len = fft_len
self.cp_len = cp_len
self.frame_length_tag_key = frame_length_tag_key
self.packet_length_tag_key = packet_length_tag_key
self.occupied_carriers = occupied_carriers
self.bps_header = bps_header
self.bps_payload = bps_payload
n_sync_words = 1
if sync_word1 is None:
self.sync_word1 = _make_sync_word1(fft_len, occupied_carriers, pilot_carriers)
else:
if len(sync_word1) != self.fft_len:
raise ValueError("Length of sync sequence(s) must be FFT length.")
self.sync_word1 = sync_word1
self.sync_word2 = ()
if sync_word2 is None:
self.sync_word2 = _make_sync_word2(fft_len, occupied_carriers, pilot_carriers)
n_sync_words = 2
elif len(sync_word2):
if len(sync_word2) != fft_len:
raise ValueError("Length of sync sequence(s) must be FFT length.")
self.sync_word2 = sync_word2
n_sync_words = 2
if scramble_bits:
self.scramble_seed = 0x7f
else:
self.scramble_seed = 0x00 # We deactivate the scrambler by init'ing it with zeros
### Sync ############################################################
sync_detect = digital.ofdm_sync_sc_cfb(fft_len, cp_len)
delay = blocks.delay(gr.sizeof_gr_complex, fft_len+cp_len)
oscillator = analog.frequency_modulator_fc(-2.0 / fft_len)
mixer = blocks.multiply_cc()
hpd = digital.header_payload_demux(
n_sync_words+1, # Number of OFDM symbols before payload (sync + 1 sym header)
fft_len, cp_len, # FFT length, guard interval
frame_length_tag_key, # Frame length tag key
"", # We're not using trigger tags
True # One output item is one OFDM symbol (False would output complex scalars)
)
self.connect(self, sync_detect)
self.connect(self, delay, (mixer, 0), (hpd, 0))
self.connect((sync_detect, 0), oscillator, (mixer, 1))
self.connect((sync_detect, 1), (hpd, 1))
if debug_log:
self.connect((sync_detect, 0), blocks.file_sink(gr.sizeof_float, 'freq-offset.dat'))
self.connect((sync_detect, 1), blocks.file_sink(gr.sizeof_char, 'sync-detect.dat'))
### Header demodulation ##############################################
header_fft = fft.fft_vcc(self.fft_len, True, (), True)
chanest = digital.ofdm_chanest_vcvc(self.sync_word1, self.sync_word2, 1)
header_constellation = _get_constellation(bps_header)
header_equalizer = digital.ofdm_equalizer_simpledfe(
fft_len,
header_constellation.base(),
occupied_carriers,
pilot_carriers,
pilot_symbols,
symbols_skipped=0,
)
header_eq = digital.ofdm_frame_equalizer_vcvc(
header_equalizer.base(),
cp_len,
self.frame_length_tag_key,
True,
1 # Header is 1 symbol long
)
header_serializer = digital.ofdm_serializer_vcc(
fft_len, occupied_carriers,
self.frame_length_tag_key
)
header_demod = digital.constellation_decoder_cb(header_constellation.base())
header_formatter = digital.packet_header_ofdm(
occupied_carriers, 1,
packet_length_tag_key,
frame_length_tag_key,
packet_num_tag_key,
bps_header,
bps_payload,
scramble_header=scramble_bits
)
header_parser = digital.packet_headerparser_b(header_formatter.formatter())
self.connect(
(hpd, 0),
header_fft,
chanest,
header_eq,
header_serializer,
header_demod,
header_parser
)
self.msg_connect(header_parser, "header_data", hpd, "header_data")
if debug_log:
self.connect((chanest, 1), blocks.file_sink(gr.sizeof_gr_complex * fft_len, 'channel-estimate.dat'))
self.connect((chanest, 0), blocks.file_sink(gr.sizeof_gr_complex * fft_len, 'post-hdr-chanest.dat'))
self.connect((chanest, 0), blocks.tag_debug(gr.sizeof_gr_complex * fft_len, 'post-hdr-chanest'))
self.connect(header_eq, blocks.file_sink(gr.sizeof_gr_complex * fft_len, 'post-hdr-eq.dat'))
self.connect(header_serializer, blocks.file_sink(gr.sizeof_gr_complex, 'post-hdr-serializer.dat'))
            self.connect(header_demod, blocks.file_sink(1, 'post-hdr-demod.dat'))
### Payload demod ####################################################
payload_fft = fft.fft_vcc(self.fft_len, True, (), True)
payload_constellation = _get_constellation(bps_payload)
payload_equalizer = digital.ofdm_equalizer_simpledfe(
fft_len,
payload_constellation.base(),
occupied_carriers,
pilot_carriers,
pilot_symbols,
symbols_skipped=1, # (that was already in the header)
alpha=0.1
)
payload_eq = digital.ofdm_frame_equalizer_vcvc(
payload_equalizer.base(),
cp_len,
self.frame_length_tag_key
)
payload_serializer = digital.ofdm_serializer_vcc(
fft_len, occupied_carriers,
self.frame_length_tag_key,
self.packet_length_tag_key,
1 # Skip 1 symbol (that was already in the header)
)
payload_demod = digital.constellation_decoder_cb(payload_constellation.base())
self.payload_descrambler = digital.additive_scrambler_bb(
0x8a,
self.scramble_seed,
7,
0, # Don't reset after fixed length
bits_per_byte=8, # This is after packing
reset_tag_key=self.packet_length_tag_key
)
payload_pack = blocks.repack_bits_bb(bps_payload, 8, self.packet_length_tag_key, True)
self.crc = digital.crc32_bb(True, self.packet_length_tag_key)
self.connect(
(hpd, 1),
payload_fft,
payload_eq,
payload_serializer,
payload_demod,
payload_pack,
self.payload_descrambler,
self.crc,
self
)
if debug_log:
self.connect((hpd, 1), blocks.tag_debug(gr.sizeof_gr_complex*fft_len, 'post-hpd'))
self.connect(payload_fft, blocks.file_sink(gr.sizeof_gr_complex*fft_len, 'post-payload-fft.dat'))
self.connect(payload_eq, blocks.file_sink(gr.sizeof_gr_complex*fft_len, 'post-payload-eq.dat'))
self.connect(payload_serializer, blocks.file_sink(gr.sizeof_gr_complex, 'post-payload-serializer.dat'))
self.connect(payload_demod, blocks.file_sink(1, 'post-payload-demod.dat'))
self.connect(payload_pack, blocks.file_sink(1, 'post-payload-pack.dat'))
            self.connect(self.crc, blocks.file_sink(1, 'post-payload-crc.dat'))
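# Usage sketch: the two hier blocks above can be connected back to back inside
# a top block. `src`, `channel` and `sink` are placeholders for whatever
# produces the tagged byte stream, models the channel and consumes the output.
#
#     tb = gr.top_block()
#     tx = ofdm_tx(fft_len=64, cp_len=16, packet_length_tag_key="packet_length")
#     rx = ofdm_rx(fft_len=64, cp_len=16, packet_length_tag_key="packet_length")
#     tb.connect(src, tx, channel, rx, sink)
#     tb.run()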
| gpl-3.0 | 5,155,114,331,590,833,000 | 45.200441 | 123 | 0.581359 | false |
wooyek/nuntio | web/plebe/template.py | 1 | 3070 | # -*- coding: utf-8 -*-
# Copyright 2008 Janusz Skonieczny
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
This is a set of utilities for faster development with Django templates.
Template loaders that load templates from an application's ``templates`` folder
(and its per-model subfolders) when you specify an app prefix ('app/template.html').
It's possible to register global template libraries by adding this to your
settings:
GLOBALTAGS = (
'myapp.templatetags.cooltags',
)
"""
from django.conf import settings
from django.http import HttpResponse
from django.template import RequestContext, add_to_builtins, loader, TemplateDoesNotExist
from django.utils.functional import Promise
from django.utils.encoding import force_unicode
from django.utils import simplejson
from ragendja.apputils import get_app_dirs
import os, logging
def get_template_sources(template_name, template_dirs=None):
""" Returs a collection of paths used to load templates in this module """
packed = template_name.split('/', 1)
if len(packed) == 2 and packed[0] in app_template_dirs:
model_prefixed = packed[1].split('_',1)
generic_path = os.path.join(app_template_dirs[packed[0]], model_prefixed[1])
model_prefixed = os.path.join(*model_prefixed)
model_path = os.path.join(app_template_dirs[packed[0]], model_prefixed)
return [model_path, generic_path]
return []
def app_model_templates_loader(template_name, template_dirs=None):
"""
Loader for model dependent templates stored in model named
directories, app/templates/<model_name>/form.html and generic
templates fallback app/templates/form.html.
The following defines a template loader that loads templates from a specific
app based on the prefix of the template path:
get_template("app/<model_name>_template.html") => app/templates/<model_name>/template.html
if not found, will try generic template
get_template("app/<model_name>_template.html") => app/templates/template.html
This keeps the code DRY and prevents name clashes.
"""
for path in get_template_sources(template_name, template_dirs):
        logging.debug("Looking for template: %s" % path)
try:
return (open(path).read().decode(settings.FILE_CHARSET), path)
except IOError:
pass
raise TemplateDoesNotExist, template_name
app_model_templates_loader.is_usable = True
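# Registration sketch: Django picks the loader up from TEMPLATE_LOADERS in
# settings; the dotted path below is a placeholder for wherever this module
# actually lives on the Python path.
#
# TEMPLATE_LOADERS = (
#     'web.plebe.template.app_model_templates_loader',
#     'django.template.loaders.filesystem.load_template_source',
#     'django.template.loaders.app_directories.load_template_source',
# )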
# This is needed by app_prefixed_loader.
app_template_dirs = get_app_dirs('templates') | mit | -4,725,231,425,481,565,000 | 38.421053 | 94 | 0.711401 | false |
zmarvel/playground | sound/testplay.py | 1 | 3152 | import alsaaudio
from math import pi, sin, pow
import getch
SAMPLE_RATE = 44100
FORMAT = alsaaudio.PCM_FORMAT_U8
PERIOD_SIZE = 512
N_SAMPLES = 1024
notes = "abcdefg"
frequencies = {}
for i, note in enumerate(notes):
frequencies[note] = 440 * pow(pow(2, 1/2), i)
# Generate one cycle of a sine wave with 1024 samples (values in -127..127; shifted into the unsigned 8-bit range at playback)
sine_wave = [int(sin(x * 2*pi/N_SAMPLES) * 127) for x in range(0, N_SAMPLES)]
square_wave = []
sawtooth_wave = []
triangle_wave = []
for i in range(0, N_SAMPLES):
    phase = (i * 2*pi / N_SAMPLES) % (2 * pi)
if phase < pi:
square_wave.append(127)
else:
square_wave.append(-128)
sawtooth_wave.append(int(127 - (127 // pi * phase)))
if phase < pi:
triangle_wave.append(int(-127 + (2 * 127 * phase // pi)))
else:
triangle_wave.append(int(3 * 127 - (2 * 127 * phase // pi)))
def main():
buf = bytearray(PERIOD_SIZE)
# alsaaudio setup
dev = alsaaudio.PCM(type=alsaaudio.PCM_PLAYBACK)
dev.setchannels(1)
dev.setrate(SAMPLE_RATE)
dev.setformat(FORMAT)
dev.setperiodsize(PERIOD_SIZE)
#load_buf(buf, 440)
f = 440
w_half = [x//2 + 128 for x in make_wave(sine_wave, f)]
#w_o1 = [x//4 for x in make_wave(f*2)]
#w_o2 = [x//6 for x in make_wave(f*3)]
#w_o3 = [x//8 for x in make_wave(f*4)]
#w_o4 = [x//10 for x in make_wave(f*5)]
#w_o4 = [x//12 for x in make_wave(f*6)]
#w_o5 = [x//14 for x in make_wave(f*7)]
#w_o6 = [x//16 for x in make_wave(f*8)]
#for i, samp in enumerate(w_o1):
# w[i] += samp + w_o2[i] + w_o3[i] + w_o4[i] + w_o5[i] + w_o6[i] + 128
# print(w[i])
#buf = bytearray(w)
#for i, samp in enumerate(w):
# if samp > 0:
# samp = 127
# else:
# samp = -128
w = [x + 128 for x in make_wave(square_wave, 440)]
buf = bytearray(w)
char = getch.getch()
last = 'q'
while char != 'q':
if char != last:
if char == '1':
w = [x//2 + 128 for x in make_wave(sine_wave, 440)]
buf = bytearray(w)
elif char == '2':
w = [x//2 + 128 for x in make_wave(square_wave, 440)]
buf = bytearray(w)
elif char == '3':
w = [x//2 + 128 for x in make_wave(sawtooth_wave, 440)]
buf = bytearray(w)
elif char == '4':
w = [x//2 + 128 for x in make_wave(triangle_wave, 440)]
buf = bytearray(w)
elif char == '5':
buf = bytearray(w_half)
dev.write(buf)
dev.write(buf)
dev.write(buf)
last = char
char = getch.getch()
return 0
#def load_buf(buf, frequency):
# step = N_SAMPLES * frequency // SAMPLE_RATE
# for i in range(0, PERIOD_SIZE):
# buf[i] = wave[(step * i * N_SAMPLES // PERIOD_SIZE) % N_SAMPLES]
# return buf
def make_wave(wave, frequency):
step = N_SAMPLES * frequency // SAMPLE_RATE
w = []
for i in range(0, PERIOD_SIZE):
w.append(wave[(step * i * N_SAMPLES // PERIOD_SIZE) % N_SAMPLES])
return w
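# Sketch: the `frequencies` table defined near the top (otherwise unused here)
# could feed make_wave to build lookup tables for other notes, e.g.:
#
#     c_wave = [x // 2 + 128 for x in make_wave(sine_wave, int(frequencies['c']))]
#     buf = bytearray(c_wave)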
if __name__ == '__main__':
main()
| mit | 8,533,461,279,944,520,000 | 26.649123 | 77 | 0.527602 | false |
imageboards/Orphereus | Orphereus/controllers/Orphie_Public.py | 1 | 12982 | # -*- coding: utf-8 -*-
################################################################################
# Copyright (C) 2009 Johan Liebert, Mantycore, Hedger, Rusanon #
# < [email protected] ; http://orphereus.anoma.ch > #
# #
# This file is part of Orphereus, an imageboard engine. #
# #
# This program is free software; you can redistribute it and/or #
# modify it under the terms of the GNU General Public License #
# as published by the Free Software Foundation; either version 2 #
# of the License, or (at your option) any later version. #
# #
# This program is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU General Public License for more details. #
# #
# You should have received a copy of the GNU General Public License #
# along with this program; if not, write to the Free Software #
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. #
################################################################################
import logging
from Orphereus.lib.base import *
from Orphereus.model import *
from sqlalchemy.orm import eagerload
import os
import datetime
from Orphereus.lib.miscUtils import *
from Orphereus.lib.constantValues import *
from OrphieBaseController import OrphieBaseController
log = logging.getLogger(__name__)
class OrphiePublicController(OrphieBaseController):
def __before__(self):
OrphieBaseController.__before__(self)
c.title = g.OPT.title
if g.OPT.refControlEnabled:
ref = request.headers.get('REFERER', False)
if ref:
ref = filterText(ref)
if ref:
rickroll = True
for rc in g.OPT.refControlList:
if rc in ref:
rickroll = False
if (rickroll):
redir = g.OPT.fakeLinks[random.randint(0, len(g.OPT.fakeLinks) - 1)]
toLog(LOG_EVENT_RICKROLLD, "Request rickrolld. Referer: %s, Redir: %s, IP: %s, User-Agent: %s" % (ref, redir, getUserIp(), filterText(request.headers.get('User-Agent', '?'))))
redirect_to(str(redir))
if (self.userInst and self.userInst.isValid()) or g.OPT.allowAnonymous:
self.initEnvironment()
else:
self.setCookie()
def ipBanned(self):
if c.ban:
return self.error(_('You are banned on %s for %s days for the following reason:<br/>%s') % (c.ban.date, c.ban.period, c.ban.reason))
else:
return self.error(_("ORLY?"))
def login(self, user):
if g.OPT.allowLogin:
session['uidNumber'] = user.uidNumber
session.save()
else:
self.logout()
def logout(self):
session.clear()
session.save()
session.delete()
redirect_to('boardBase')
def captchaPic(self, cid):
# TODO: fix shitty code
#log.debug('user cap lang: %s' %c.userInst.cLang)
self.setLang(True)
"""
sessionCid = None
if session.has_key('anonCaptId'):
sessionCid = session['anonCaptId']
if session.has_key('cid'):
sessionCid = session['cid']
"""
pic = Captcha.picture(cid, g.OPT.captchaFont)
"""
if sessionCid:
log.debug("%s:%s" % (str(cid), str(sessionCid)))
if (str(cid) != str(sessionCid)):
redirect_to('captcha', cid = sessionCid)
"""
if ("Wrong ID" == pic):
newCaptcha = Captcha.create()
session['anonCaptId'] = newCaptcha.id
session.save()
redirect_to('captcha', cid = newCaptcha.id)
response.headers['Content-Length'] = len(pic)
response.headers['Content-Type'] = 'image/png'
return str(pic)
def authorize(self, url):
if url:
c.currentURL = u'/%s' % url #.encode('utf-8')
else:
c.currentURL = u''
if not g.OPT.allowLogin:
return self.error(_("Authorization disabled"))
ip = getUserIp()
tracker = LoginTracker.getTracker(ip)
captchaOk = True
captcha = False
if tracker.attempts >= 2:
if session and session.has_key('anonCaptId'):
anonCapt = Captcha.getCaptcha(session['anonCaptId'])
if tracker.cid and (str(tracker.cid) != str(anonCapt.id)):
trackerCapt = Captcha.getCaptcha(tracker.cid)
if trackerCapt:
trackerCapt.delete()
tracker.cid = anonCapt.id
meta.Session.commit()
c.showCaptcha = True
captchaOk = False
if tracker.cid:
captcha = Captcha.getCaptcha(tracker.cid)
if not captcha:
if c.userInst.isValid():
oldLang = h.setLang(self.userInst.cLang)
captcha = Captcha.create()
if c.userInst.isValid():
h.setLang(oldLang)
tracker.cid = captcha.id
meta.Session.commit()
c.captcha = Captcha.getCaptcha(tracker.cid)
if request.POST.get('code', False):
code = User.genUid(request.POST['code'].encode('utf-8'))
user = User.getByUid(code)
#log.debug("code: %s user: %s",code,str(user))
captid = request.POST.get('captid', False)
captval = request.POST.get('captcha', False)
#log.debug("got: %s:%s" %(captid, captval))
if (not captchaOk) and captid and captval and isNumber(captid):
if captcha and int(captid) == captcha.id:
captchaOk = captcha.test(captval)
captcha = False
if not captchaOk:
if c.userInst.isValid():
oldLang = h.setLang(self.userInst.cLang)
captcha = Captcha.create()
if c.userInst.isValid():
h.setLang(oldLang)
tracker.cid = captcha.id
if user and captchaOk:
if tracker:
tracker.delete()
if captcha:
captcha.delete()
self.login(user)
c.loginSuccessful = True
else:
tracker.attempts += 1
tracker.lastAttempt = datetime.datetime.now()
meta.Session.commit()
#log.debug("redir: %s" % c.currentURL)
if (not g.OPT.framedMain or (user and not(user.useFrame))): # (1) frame turned off
if (g.OPT.allowAnonymous): # (1.1) remove navigation frame if exists
c.proceedRedirect = True
c.frameEnabled = False
return self.render('loginRedirect')
else: # (1.2) frame is impossible
return redirect_to('boardBase', board = c.currentURL)
else: # (2) frame turned on
if (g.OPT.allowAnonymous and not g.OPT.obligatoryFrameCreation):
# (2.1) change navigation frame location if exists. DON'T create frame!
c.proceedRedirect = True
c.frameEnabled = True
return self.render('loginRedirect')
else: # (2.2) create new frame with correct target.
if c.currentURL:
return redirect_to('boardBase', frameTarget = c.currentURL)
else:
return redirect_to('boardBase')
c.boardName = _('Login')
return self.render('login')
def register(self, invite):
if 'invite' not in session:
iid = Invite.getId(invite)
if iid:
session['invite'] = invite
session['iid'] = iid
session['openReg'] = False
session.save()
elif g.OPT.allowRegistration:
session['invite'] = invite
session['iid'] = False
session['openReg'] = True
session.save()
else:
c.currentURL = u''
return self.render('login')
c.openReg = session['openReg']
c.captcha = None
captchaOk = True
if session['openReg']:
captchaOk = False
if session.get('cid', False):
captcha = Captcha.getCaptcha(session['cid'])
if captcha:
captchaOk = captcha.test(request.POST.get('captcha', False))
session['cid'] = None
session.save()
if not captchaOk:
captcha = Captcha.create()
session['cid'] = captcha.id
session.save()
c.captcha = captcha
key = request.POST.get('key', '').encode('utf-8')
key2 = request.POST.get('key2', '').encode('utf-8')
if key and captchaOk:
if len(key) >= g.OPT.minPassLength and key == key2:
uid = User.genUid(key)
user = User.getByUid(uid)
if user:
                    user.ban(7777, _("Your Security Code was used during registration by another user. Please contact the administrator immediately."), -1)
del session['invite']
del session['iid']
                    return self.error(_("You entered an already existing password. The previous account has been banned. Please contact the administrator."))
user = User.create(uid)
regId = user.secid() * user.secid() - user.secid()
toLog(LOG_EVENT_INVITE_USED, _("Utilized invite #%d [RID:%d]") % (session['iid'], regId))
del session['invite']
del session['iid']
session.save()
self.login(user)
redirect_to('boardBase', board = '!')
c.boardName = _('Register')
return self.render('register')
def banned(self):
c.userInst = self.userInst
if self.userInst.isValid() and self.userInst.isBanned():
c.boardName = _('Banned')
return self.render('banned')
else:
return self.error(_("ORLY?"))
def UnknownAction(self):
c.userInst = self.userInst
return self.error(_("Excuse me, WTF are you?"))
def saveUploaded(self, expandedName, content):
localFilePath = os.path.join(g.OPT.uploadPath, expandedName)
targetDir = os.path.dirname(localFilePath)
if not os.path.exists(targetDir):
os.makedirs(targetDir)
localFile = open(localFilePath, 'wb')
localFile.write(content)
localFile.close()
def oekakiSave(self, environ, start_response, url, tempid):
start_response('200 OK', [('Content-Type', 'text/plain'), ('Content-Length', '2')])
oekaki = Oekaki.get(tempid)
cl = int(request.environ['CONTENT_LENGTH'])
if oekaki and cl:
id = request.environ['wsgi.input'].read(1)
if id == 'S':
headerLength = int(request.environ['wsgi.input'].read(8))
header = request.environ['wsgi.input'].read(headerLength)
bodyLength = int(request.environ['wsgi.input'].read(8))
request.environ['wsgi.input'].read(2)
body = request.environ['wsgi.input'].read(bodyLength)
headers = header.split('&')
type = filterText(headers[0].split('=')[1])
time = headers[1].split('=')[1]
savedOekakiPath = h.expandName('%s.%s' % (tempid, type))
self.saveUploaded(savedOekakiPath, body)
animPath = None
animLength = request.environ['wsgi.input'].read(8)
if animLength:
animLength = int(animLength)
anim = request.environ['wsgi.input'].read(animLength)
animPath = h.expandName('%s.%s' % (tempid, 'pch'))
self.saveUploaded(animPath, anim)
oekaki.setPathsAndTime(savedOekakiPath, animPath, time)
return ['ok']
| gpl-2.0 | 9,192,122,475,054,785,000 | 39.952681 | 195 | 0.505469 | false |
Teamxrtc/webrtc-streaming-node | third_party/webrtc/src/chromium/src/build/android/pylib/instrumentation/test_package.py | 7 | 1335 | # Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Class representing instrumentation test apk and jar."""
import os
from devil.android import apk_helper
from pylib.instrumentation import test_jar
class TestPackage(test_jar.TestJar):
def __init__(self, apk_path, jar_path, test_support_apk_path):
test_jar.TestJar.__init__(self, jar_path)
if not os.path.exists(apk_path):
raise Exception('%s not found, please build it' % apk_path)
self._apk_path = apk_path
self._apk_name = os.path.splitext(os.path.basename(apk_path))[0]
self._package_name = apk_helper.GetPackageName(self._apk_path)
self._test_support_apk_path = test_support_apk_path
def GetApkPath(self):
"""Returns the absolute path to the APK."""
return self._apk_path
def GetApkName(self):
"""Returns the name of the apk without the suffix."""
return self._apk_name
def GetPackageName(self):
"""Returns the package name of this APK."""
return self._package_name
# Override.
def Install(self, device):
device.Install(self.GetApkPath())
if (self._test_support_apk_path and
os.path.exists(self._test_support_apk_path)):
device.Install(self._test_support_apk_path)
| mit | 7,392,824,088,950,520,000 | 31.560976 | 72 | 0.69588 | false |
jamestwebber/scipy | scipy/linalg/tests/test_solvers.py | 2 | 31084 | from __future__ import division, print_function, absolute_import
import os
import numpy as np
from numpy.testing import assert_array_almost_equal
import pytest
from pytest import raises as assert_raises
from scipy.linalg import solve_sylvester
from scipy.linalg import solve_continuous_lyapunov, solve_discrete_lyapunov
from scipy.linalg import solve_continuous_are, solve_discrete_are
from scipy.linalg import block_diag, solve, LinAlgError
from scipy.sparse.sputils import matrix
def _load_data(name):
"""
Load npz data file under data/
Returns a copy of the data, rather than keeping the npz file open.
"""
filename = os.path.join(os.path.abspath(os.path.dirname(__file__)),
'data', name)
with np.load(filename) as f:
return dict(f.items())
class TestSolveLyapunov(object):
cases = [
(np.array([[1, 2], [3, 4]]),
np.array([[9, 10], [11, 12]])),
# a, q all complex.
(np.array([[1.0+1j, 2.0], [3.0-4.0j, 5.0]]),
np.array([[2.0-2j, 2.0+2j], [-1.0-1j, 2.0]])),
# a real; q complex.
(np.array([[1.0, 2.0], [3.0, 5.0]]),
np.array([[2.0-2j, 2.0+2j], [-1.0-1j, 2.0]])),
# a complex; q real.
(np.array([[1.0+1j, 2.0], [3.0-4.0j, 5.0]]),
np.array([[2.0, 2.0], [-1.0, 2.0]])),
# An example from Kitagawa, 1977
(np.array([[3, 9, 5, 1, 4], [1, 2, 3, 8, 4], [4, 6, 6, 6, 3],
[1, 5, 2, 0, 7], [5, 3, 3, 1, 5]]),
np.array([[2, 4, 1, 0, 1], [4, 1, 0, 2, 0], [1, 0, 3, 0, 3],
[0, 2, 0, 1, 0], [1, 0, 3, 0, 4]])),
# Companion matrix example. a complex; q real; a.shape[0] = 11
(np.array([[0.100+0.j, 0.091+0.j, 0.082+0.j, 0.073+0.j, 0.064+0.j,
0.055+0.j, 0.046+0.j, 0.037+0.j, 0.028+0.j, 0.019+0.j,
0.010+0.j],
[1.000+0.j, 0.000+0.j, 0.000+0.j, 0.000+0.j, 0.000+0.j,
0.000+0.j, 0.000+0.j, 0.000+0.j, 0.000+0.j, 0.000+0.j,
0.000+0.j],
[0.000+0.j, 1.000+0.j, 0.000+0.j, 0.000+0.j, 0.000+0.j,
0.000+0.j, 0.000+0.j, 0.000+0.j, 0.000+0.j, 0.000+0.j,
0.000+0.j],
[0.000+0.j, 0.000+0.j, 1.000+0.j, 0.000+0.j, 0.000+0.j,
0.000+0.j, 0.000+0.j, 0.000+0.j, 0.000+0.j, 0.000+0.j,
0.000+0.j],
[0.000+0.j, 0.000+0.j, 0.000+0.j, 1.000+0.j, 0.000+0.j,
0.000+0.j, 0.000+0.j, 0.000+0.j, 0.000+0.j, 0.000+0.j,
0.000+0.j],
[0.000+0.j, 0.000+0.j, 0.000+0.j, 0.000+0.j, 1.000+0.j,
0.000+0.j, 0.000+0.j, 0.000+0.j, 0.000+0.j, 0.000+0.j,
0.000+0.j],
[0.000+0.j, 0.000+0.j, 0.000+0.j, 0.000+0.j, 0.000+0.j,
1.000+0.j, 0.000+0.j, 0.000+0.j, 0.000+0.j, 0.000+0.j,
0.000+0.j],
[0.000+0.j, 0.000+0.j, 0.000+0.j, 0.000+0.j, 0.000+0.j,
0.000+0.j, 1.000+0.j, 0.000+0.j, 0.000+0.j, 0.000+0.j,
0.000+0.j],
[0.000+0.j, 0.000+0.j, 0.000+0.j, 0.000+0.j, 0.000+0.j,
0.000+0.j, 0.000+0.j, 1.000+0.j, 0.000+0.j, 0.000+0.j,
0.000+0.j],
[0.000+0.j, 0.000+0.j, 0.000+0.j, 0.000+0.j, 0.000+0.j,
0.000+0.j, 0.000+0.j, 0.000+0.j, 1.000+0.j, 0.000+0.j,
0.000+0.j],
[0.000+0.j, 0.000+0.j, 0.000+0.j, 0.000+0.j, 0.000+0.j,
0.000+0.j, 0.000+0.j, 0.000+0.j, 0.000+0.j, 1.000+0.j,
0.000+0.j]]),
np.eye(11)),
# https://github.com/scipy/scipy/issues/4176
(matrix([[0, 1], [-1/2, -1]]),
(matrix([0, 3]).T * matrix([0, 3]).T.T)),
# https://github.com/scipy/scipy/issues/4176
(matrix([[0, 1], [-1/2, -1]]),
(np.array(matrix([0, 3]).T * matrix([0, 3]).T.T))),
]
def test_continuous_squareness_and_shape(self):
nsq = np.ones((3, 2))
sq = np.eye(3)
assert_raises(ValueError, solve_continuous_lyapunov, nsq, sq)
assert_raises(ValueError, solve_continuous_lyapunov, sq, nsq)
assert_raises(ValueError, solve_continuous_lyapunov, sq, np.eye(2))
def check_continuous_case(self, a, q):
x = solve_continuous_lyapunov(a, q)
assert_array_almost_equal(
np.dot(a, x) + np.dot(x, a.conj().transpose()), q)
def check_discrete_case(self, a, q, method=None):
x = solve_discrete_lyapunov(a, q, method=method)
assert_array_almost_equal(
np.dot(np.dot(a, x), a.conj().transpose()) - x, -1.0*q)
def test_cases(self):
for case in self.cases:
self.check_continuous_case(case[0], case[1])
self.check_discrete_case(case[0], case[1])
self.check_discrete_case(case[0], case[1], method='direct')
self.check_discrete_case(case[0], case[1], method='bilinear')
def test_solve_continuous_are():
mat6 = _load_data('carex_6_data.npz')
mat15 = _load_data('carex_15_data.npz')
mat18 = _load_data('carex_18_data.npz')
mat19 = _load_data('carex_19_data.npz')
mat20 = _load_data('carex_20_data.npz')
cases = [
# Carex examples taken from (with default parameters):
# [1] P.BENNER, A.J. LAUB, V. MEHRMANN: 'A Collection of Benchmark
# Examples for the Numerical Solution of Algebraic Riccati
# Equations II: Continuous-Time Case', Tech. Report SPC 95_23,
# Fak. f. Mathematik, TU Chemnitz-Zwickau (Germany), 1995.
#
# The format of the data is (a, b, q, r, knownfailure), where
# knownfailure is None if the test passes or a string
# indicating the reason for failure.
#
# Test Case 0: carex #1
(np.diag([1.], 1),
np.array([[0], [1]]),
block_diag(1., 2.),
1,
None),
# Test Case 1: carex #2
(np.array([[4, 3], [-4.5, -3.5]]),
np.array([[1], [-1]]),
np.array([[9, 6], [6, 4.]]),
1,
None),
# Test Case 2: carex #3
(np.array([[0, 1, 0, 0],
[0, -1.89, 0.39, -5.53],
[0, -0.034, -2.98, 2.43],
[0.034, -0.0011, -0.99, -0.21]]),
np.array([[0, 0], [0.36, -1.6], [-0.95, -0.032], [0.03, 0]]),
np.array([[2.313, 2.727, 0.688, 0.023],
[2.727, 4.271, 1.148, 0.323],
[0.688, 1.148, 0.313, 0.102],
[0.023, 0.323, 0.102, 0.083]]),
np.eye(2),
None),
# Test Case 3: carex #4
(np.array([[-0.991, 0.529, 0, 0, 0, 0, 0, 0],
[0.522, -1.051, 0.596, 0, 0, 0, 0, 0],
[0, 0.522, -1.118, 0.596, 0, 0, 0, 0],
[0, 0, 0.522, -1.548, 0.718, 0, 0, 0],
[0, 0, 0, 0.922, -1.64, 0.799, 0, 0],
[0, 0, 0, 0, 0.922, -1.721, 0.901, 0],
[0, 0, 0, 0, 0, 0.922, -1.823, 1.021],
[0, 0, 0, 0, 0, 0, 0.922, -1.943]]),
np.array([[3.84, 4.00, 37.60, 3.08, 2.36, 2.88, 3.08, 3.00],
[-2.88, -3.04, -2.80, -2.32, -3.32, -3.82, -4.12, -3.96]]
).T * 0.001,
np.array([[1.0, 0.0, 0.0, 0.0, 0.5, 0.0, 0.0, 0.1],
[0.0, 1.0, 0.0, 0.0, 0.1, 0.0, 0.0, 0.0],
[0.0, 0.0, 1.0, 0.0, 0.0, 0.5, 0.0, 0.0],
[0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0],
[0.5, 0.1, 0.0, 0.0, 0.1, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.5, 0.0, 0.0, 0.1, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.1, 0.0],
[0.1, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.1]]),
np.eye(2),
None),
# Test Case 4: carex #5
(np.array(
[[-4.019, 5.120, 0., 0., -2.082, 0., 0., 0., 0.870],
[-0.346, 0.986, 0., 0., -2.340, 0., 0., 0., 0.970],
[-7.909, 15.407, -4.069, 0., -6.450, 0., 0., 0., 2.680],
[-21.816, 35.606, -0.339, -3.870, -17.800, 0., 0., 0., 7.390],
[-60.196, 98.188, -7.907, 0.340, -53.008, 0., 0., 0., 20.400],
[0, 0, 0, 0, 94.000, -147.200, 0., 53.200, 0.],
[0, 0, 0, 0, 0, 94.000, -147.200, 0, 0],
[0, 0, 0, 0, 0, 12.800, 0.000, -31.600, 0],
[0, 0, 0, 0, 12.800, 0.000, 0.000, 18.800, -31.600]]),
np.array([[0.010, -0.011, -0.151],
[0.003, -0.021, 0.000],
[0.009, -0.059, 0.000],
[0.024, -0.162, 0.000],
[0.068, -0.445, 0.000],
[0.000, 0.000, 0.000],
[0.000, 0.000, 0.000],
[0.000, 0.000, 0.000],
[0.000, 0.000, 0.000]]),
np.eye(9),
np.eye(3),
None),
# Test Case 5: carex #6
(mat6['A'], mat6['B'], mat6['Q'], mat6['R'], None),
# Test Case 6: carex #7
(np.array([[1, 0], [0, -2.]]),
np.array([[1e-6], [0]]),
np.ones((2, 2)),
1.,
'Bad residual accuracy'),
# Test Case 7: carex #8
(block_diag(-0.1, -0.02),
np.array([[0.100, 0.000], [0.001, 0.010]]),
np.array([[100, 1000], [1000, 10000]]),
np.ones((2, 2)) + block_diag(1e-6, 0),
None),
# Test Case 8: carex #9
(np.array([[0, 1e6], [0, 0]]),
np.array([[0], [1.]]),
np.eye(2),
1.,
None),
# Test Case 9: carex #10
(np.array([[1.0000001, 1], [1., 1.0000001]]),
np.eye(2),
np.eye(2),
np.eye(2),
None),
# Test Case 10: carex #11
(np.array([[3, 1.], [4, 2]]),
np.array([[1], [1]]),
np.array([[-11, -5], [-5, -2.]]),
1.,
None),
# Test Case 11: carex #12
(np.array([[7000000., 2000000., -0.],
[2000000., 6000000., -2000000.],
[0., -2000000., 5000000.]]) / 3,
np.eye(3),
np.array([[1., -2., -2.], [-2., 1., -2.], [-2., -2., 1.]]).dot(
np.diag([1e-6, 1, 1e6])).dot(
np.array([[1., -2., -2.], [-2., 1., -2.], [-2., -2., 1.]])) / 9,
np.eye(3) * 1e6,
'Bad Residual Accuracy'),
# Test Case 12: carex #13
(np.array([[0, 0.4, 0, 0],
[0, 0, 0.345, 0],
[0, -0.524e6, -0.465e6, 0.262e6],
[0, 0, 0, -1e6]]),
np.array([[0, 0, 0, 1e6]]).T,
np.diag([1, 0, 1, 0]),
1.,
None),
# Test Case 13: carex #14
(np.array([[-1e-6, 1, 0, 0],
[-1, -1e-6, 0, 0],
[0, 0, 1e-6, 1],
[0, 0, -1, 1e-6]]),
np.ones((4, 1)),
np.ones((4, 4)),
1.,
None),
# Test Case 14: carex #15
(mat15['A'], mat15['B'], mat15['Q'], mat15['R'], None),
# Test Case 15: carex #16
(np.eye(64, 64, k=-1) + np.eye(64, 64)*(-2.) + np.rot90(
block_diag(1, np.zeros((62, 62)), 1)) + np.eye(64, 64, k=1),
np.eye(64),
np.eye(64),
np.eye(64),
None),
# Test Case 16: carex #17
(np.diag(np.ones((20, )), 1),
np.flipud(np.eye(21, 1)),
np.eye(21, 1) * np.eye(21, 1).T,
1,
'Bad Residual Accuracy'),
# Test Case 17: carex #18
(mat18['A'], mat18['B'], mat18['Q'], mat18['R'], None),
# Test Case 18: carex #19
(mat19['A'], mat19['B'], mat19['Q'], mat19['R'],
'Bad Residual Accuracy'),
# Test Case 19: carex #20
(mat20['A'], mat20['B'], mat20['Q'], mat20['R'],
'Bad Residual Accuracy')
]
    # Sets the minimum precision requirement for each test case.
    # Each number is the number of decimal places that agree with the zero
    # matrix when the solution x is plugged into the equation.
#
# res = array([[8e-3,1e-16],[1e-16,1e-20]]) --> min_decimal[k] = 2
#
# If the test is failing use "None" for that entry.
#
min_decimal = (14, 12, 13, 14, 11, 6, None, 5, 7, 14, 14,
None, 9, 14, 13, 14, None, 12, None, None)
def _test_factory(case, dec):
"""Checks if 0 = XA + A'X - XB(R)^{-1} B'X + Q is true"""
a, b, q, r, knownfailure = case
if knownfailure:
pytest.xfail(reason=knownfailure)
x = solve_continuous_are(a, b, q, r)
res = x.dot(a) + a.conj().T.dot(x) + q
out_fact = x.dot(b)
res -= out_fact.dot(solve(np.atleast_2d(r), out_fact.conj().T))
assert_array_almost_equal(res, np.zeros_like(res), decimal=dec)
for ind, case in enumerate(cases):
_test_factory(case, min_decimal[ind])
def test_solve_discrete_are():
cases = [
# Darex examples taken from (with default parameters):
# [1] P.BENNER, A.J. LAUB, V. MEHRMANN: 'A Collection of Benchmark
# Examples for the Numerical Solution of Algebraic Riccati
# Equations II: Discrete-Time Case', Tech. Report SPC 95_23,
# Fak. f. Mathematik, TU Chemnitz-Zwickau (Germany), 1995.
# [2] T. GUDMUNDSSON, C. KENNEY, A.J. LAUB: 'Scaling of the
# Discrete-Time Algebraic Riccati Equation to Enhance Stability
# of the Schur Solution Method', IEEE Trans.Aut.Cont., vol.37(4)
#
# The format of the data is (a, b, q, r, knownfailure), where
# knownfailure is None if the test passes or a string
# indicating the reason for failure.
#
# TEST CASE 0 : Complex a; real b, q, r
(np.array([[2, 1-2j], [0, -3j]]),
np.array([[0], [1]]),
np.array([[1, 0], [0, 2]]),
np.array([[1]]),
None),
# TEST CASE 1 :Real a, q, r; complex b
(np.array([[2, 1], [0, -1]]),
np.array([[-2j], [1j]]),
np.array([[1, 0], [0, 2]]),
np.array([[1]]),
None),
# TEST CASE 2 : Real a, b; complex q, r
(np.array([[3, 1], [0, -1]]),
np.array([[1, 2], [1, 3]]),
np.array([[1, 1+1j], [1-1j, 2]]),
np.array([[2, -2j], [2j, 3]]),
None),
# TEST CASE 3 : User-reported gh-2251 (Trac #1732)
(np.array([[0.63399379, 0.54906824, 0.76253406],
[0.5404729, 0.53745766, 0.08731853],
[0.27524045, 0.84922129, 0.4681622]]),
np.array([[0.96861695], [0.05532739], [0.78934047]]),
np.eye(3),
np.eye(1),
None),
# TEST CASE 4 : darex #1
(np.array([[4, 3], [-4.5, -3.5]]),
np.array([[1], [-1]]),
np.array([[9, 6], [6, 4]]),
np.array([[1]]),
None),
# TEST CASE 5 : darex #2
(np.array([[0.9512, 0], [0, 0.9048]]),
np.array([[4.877, 4.877], [-1.1895, 3.569]]),
np.array([[0.005, 0], [0, 0.02]]),
np.array([[1/3, 0], [0, 3]]),
None),
# TEST CASE 6 : darex #3
(np.array([[2, -1], [1, 0]]),
np.array([[1], [0]]),
np.array([[0, 0], [0, 1]]),
np.array([[0]]),
None),
# TEST CASE 7 : darex #4 (skipped the gen. Ric. term S)
(np.array([[0, 1], [0, -1]]),
np.array([[1, 0], [2, 1]]),
np.array([[-4, -4], [-4, 7]]) * (1/11),
np.array([[9, 3], [3, 1]]),
None),
# TEST CASE 8 : darex #5
(np.array([[0, 1], [0, 0]]),
np.array([[0], [1]]),
np.array([[1, 2], [2, 4]]),
np.array([[1]]),
None),
# TEST CASE 9 : darex #6
(np.array([[0.998, 0.067, 0, 0],
[-.067, 0.998, 0, 0],
[0, 0, 0.998, 0.153],
[0, 0, -.153, 0.998]]),
np.array([[0.0033, 0.0200],
[0.1000, -.0007],
[0.0400, 0.0073],
[-.0028, 0.1000]]),
np.array([[1.87, 0, 0, -0.244],
[0, 0.744, 0.205, 0],
[0, 0.205, 0.589, 0],
[-0.244, 0, 0, 1.048]]),
np.eye(2),
None),
# TEST CASE 10 : darex #7
(np.array([[0.984750, -.079903, 0.0009054, -.0010765],
[0.041588, 0.998990, -.0358550, 0.0126840],
[-.546620, 0.044916, -.3299100, 0.1931800],
[2.662400, -.100450, -.9245500, -.2632500]]),
np.array([[0.0037112, 0.0007361],
[-.0870510, 9.3411e-6],
[-1.198440, -4.1378e-4],
[-3.192700, 9.2535e-4]]),
np.eye(4)*1e-2,
np.eye(2),
None),
# TEST CASE 11 : darex #8
(np.array([[-0.6000000, -2.2000000, -3.6000000, -5.4000180],
[1.0000000, 0.6000000, 0.8000000, 3.3999820],
[0.0000000, 1.0000000, 1.8000000, 3.7999820],
[0.0000000, 0.0000000, 0.0000000, -0.9999820]]),
np.array([[1.0, -1.0, -1.0, -1.0],
[0.0, 1.0, -1.0, -1.0],
[0.0, 0.0, 1.0, -1.0],
[0.0, 0.0, 0.0, 1.0]]),
np.array([[2, 1, 3, 6],
[1, 2, 2, 5],
[3, 2, 6, 11],
[6, 5, 11, 22]]),
np.eye(4),
None),
# TEST CASE 12 : darex #9
(np.array([[95.4070, 1.9643, 0.3597, 0.0673, 0.0190],
[40.8490, 41.3170, 16.0840, 4.4679, 1.1971],
[12.2170, 26.3260, 36.1490, 15.9300, 12.3830],
[4.1118, 12.8580, 27.2090, 21.4420, 40.9760],
[0.1305, 0.5808, 1.8750, 3.6162, 94.2800]]) * 0.01,
np.array([[0.0434, -0.0122],
[2.6606, -1.0453],
[3.7530, -5.5100],
[3.6076, -6.6000],
[0.4617, -0.9148]]) * 0.01,
np.eye(5),
np.eye(2),
None),
# TEST CASE 13 : darex #10
(np.kron(np.eye(2), np.diag([1, 1], k=1)),
np.kron(np.eye(2), np.array([[0], [0], [1]])),
np.array([[1, 1, 0, 0, 0, 0],
[1, 1, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0],
[0, 0, 0, 1, -1, 0],
[0, 0, 0, -1, 1, 0],
[0, 0, 0, 0, 0, 0]]),
np.array([[3, 0], [0, 1]]),
None),
# TEST CASE 14 : darex #11
(0.001 * np.array(
[[870.1, 135.0, 11.59, .5014, -37.22, .3484, 0, 4.242, 7.249],
[76.55, 897.4, 12.72, 0.5504, -40.16, .3743, 0, 4.53, 7.499],
[-127.2, 357.5, 817, 1.455, -102.8, .987, 0, 11.85, 18.72],
[-363.5, 633.9, 74.91, 796.6, -273.5, 2.653, 0, 31.72, 48.82],
[-960, 1645.9, -128.9, -5.597, 71.42, 7.108, 0, 84.52, 125.9],
[-664.4, 112.96, -88.89, -3.854, 84.47, 13.6, 0, 144.3, 101.6],
[-410.2, 693, -54.71, -2.371, 66.49, 12.49, .1063, 99.97, 69.67],
[-179.9, 301.7, -23.93, -1.035, 60.59, 22.16, 0, 213.9, 35.54],
[-345.1, 580.4, -45.96, -1.989, 105.6, 19.86, 0, 219.1, 215.2]]),
np.array([[4.7600, -0.5701, -83.6800],
[0.8790, -4.7730, -2.7300],
[1.4820, -13.1200, 8.8760],
[3.8920, -35.1300, 24.8000],
[10.3400, -92.7500, 66.8000],
[7.2030, -61.5900, 38.3400],
[4.4540, -36.8300, 20.2900],
[1.9710, -15.5400, 6.9370],
[3.7730, -30.2800, 14.6900]]) * 0.001,
np.diag([50, 0, 0, 0, 50, 0, 0, 0, 0]),
np.eye(3),
None),
# TEST CASE 15 : darex #12 - numerically least accurate example
(np.array([[0, 1e6], [0, 0]]),
np.array([[0], [1]]),
np.eye(2),
np.array([[1]]),
None),
# TEST CASE 16 : darex #13
(np.array([[16, 10, -2],
[10, 13, -8],
[-2, -8, 7]]) * (1/9),
np.eye(3),
1e6 * np.eye(3),
1e6 * np.eye(3),
None),
# TEST CASE 17 : darex #14
(np.array([[1 - 1/1e8, 0, 0, 0],
[1, 0, 0, 0],
[0, 1, 0, 0],
[0, 0, 1, 0]]),
np.array([[1e-08], [0], [0], [0]]),
np.diag([0, 0, 0, 1]),
np.array([[0.25]]),
None),
# TEST CASE 18 : darex #15
(np.eye(100, k=1),
np.flipud(np.eye(100, 1)),
np.eye(100),
np.array([[1]]),
None)
]
# Makes the minimum precision requirements customized to the test.
# Here numbers represent the number of decimals that agrees with zero
# matrix when the solution x is plugged in to the equation.
#
# res = array([[8e-3,1e-16],[1e-16,1e-20]]) --> min_decimal[k] = 2
#
# If the test is failing use "None" for that entry.
#
min_decimal = (12, 14, 13, 14, 13, 16, 18, 14, 14, 13,
14, 13, 13, 14, 12, 2, 5, 6, 10)
def _test_factory(case, dec):
"""Checks if X = A'XA-(A'XB)(R+B'XB)^-1(B'XA)+Q) is true"""
a, b, q, r, knownfailure = case
if knownfailure:
pytest.xfail(reason=knownfailure)
x = solve_discrete_are(a, b, q, r)
res = a.conj().T.dot(x.dot(a)) - x + q
res -= a.conj().T.dot(x.dot(b)).dot(
solve(r+b.conj().T.dot(x.dot(b)), b.conj().T).dot(x.dot(a))
)
assert_array_almost_equal(res, np.zeros_like(res), decimal=dec)
for ind, case in enumerate(cases):
_test_factory(case, min_decimal[ind])
# An infeasible example taken from https://arxiv.org/abs/1505.04861v1
A = np.triu(np.ones((3, 3)))
A[0, 1] = -1
B = np.array([[1, 1, 0], [0, 0, 1]]).T
Q = np.full_like(A, -2) + np.diag([8, -1, -1.9])
R = np.diag([-10, 0.1])
assert_raises(LinAlgError, solve_continuous_are, A, B, Q, R)
def test_solve_generalized_continuous_are():
cases = [
# Two random examples differ by s term
# in the absence of any literature for demanding examples.
(np.array([[2.769230e-01, 8.234578e-01, 9.502220e-01],
[4.617139e-02, 6.948286e-01, 3.444608e-02],
[9.713178e-02, 3.170995e-01, 4.387444e-01]]),
np.array([[3.815585e-01, 1.868726e-01],
[7.655168e-01, 4.897644e-01],
[7.951999e-01, 4.455862e-01]]),
np.eye(3),
np.eye(2),
np.array([[6.463130e-01, 2.760251e-01, 1.626117e-01],
[7.093648e-01, 6.797027e-01, 1.189977e-01],
[7.546867e-01, 6.550980e-01, 4.983641e-01]]),
np.zeros((3, 2)),
None),
(np.array([[2.769230e-01, 8.234578e-01, 9.502220e-01],
[4.617139e-02, 6.948286e-01, 3.444608e-02],
[9.713178e-02, 3.170995e-01, 4.387444e-01]]),
np.array([[3.815585e-01, 1.868726e-01],
[7.655168e-01, 4.897644e-01],
[7.951999e-01, 4.455862e-01]]),
np.eye(3),
np.eye(2),
np.array([[6.463130e-01, 2.760251e-01, 1.626117e-01],
[7.093648e-01, 6.797027e-01, 1.189977e-01],
[7.546867e-01, 6.550980e-01, 4.983641e-01]]),
np.ones((3, 2)),
None)
]
min_decimal = (10, 10)
def _test_factory(case, dec):
"""Checks if X = A'XA-(A'XB)(R+B'XB)^-1(B'XA)+Q) is true"""
a, b, q, r, e, s, knownfailure = case
if knownfailure:
pytest.xfail(reason=knownfailure)
x = solve_continuous_are(a, b, q, r, e, s)
res = a.conj().T.dot(x.dot(e)) + e.conj().T.dot(x.dot(a)) + q
out_fact = e.conj().T.dot(x).dot(b) + s
res -= out_fact.dot(solve(np.atleast_2d(r), out_fact.conj().T))
assert_array_almost_equal(res, np.zeros_like(res), decimal=dec)
for ind, case in enumerate(cases):
_test_factory(case, min_decimal[ind])
def test_solve_generalized_discrete_are():
mat20170120 = _load_data('gendare_20170120_data.npz')
cases = [
# Two random examples differ by s term
# in the absence of any literature for demanding examples.
(np.array([[2.769230e-01, 8.234578e-01, 9.502220e-01],
[4.617139e-02, 6.948286e-01, 3.444608e-02],
[9.713178e-02, 3.170995e-01, 4.387444e-01]]),
np.array([[3.815585e-01, 1.868726e-01],
[7.655168e-01, 4.897644e-01],
[7.951999e-01, 4.455862e-01]]),
np.eye(3),
np.eye(2),
np.array([[6.463130e-01, 2.760251e-01, 1.626117e-01],
[7.093648e-01, 6.797027e-01, 1.189977e-01],
[7.546867e-01, 6.550980e-01, 4.983641e-01]]),
np.zeros((3, 2)),
None),
(np.array([[2.769230e-01, 8.234578e-01, 9.502220e-01],
[4.617139e-02, 6.948286e-01, 3.444608e-02],
[9.713178e-02, 3.170995e-01, 4.387444e-01]]),
np.array([[3.815585e-01, 1.868726e-01],
[7.655168e-01, 4.897644e-01],
[7.951999e-01, 4.455862e-01]]),
np.eye(3),
np.eye(2),
np.array([[6.463130e-01, 2.760251e-01, 1.626117e-01],
[7.093648e-01, 6.797027e-01, 1.189977e-01],
[7.546867e-01, 6.550980e-01, 4.983641e-01]]),
np.ones((3, 2)),
None),
# user-reported (under PR-6616) 20-Jan-2017
# tests against the case where E is None but S is provided
(mat20170120['A'],
mat20170120['B'],
mat20170120['Q'],
mat20170120['R'],
None,
mat20170120['S'],
None),
]
min_decimal = (11, 11, 16)
def _test_factory(case, dec):
"""Checks if X = A'XA-(A'XB)(R+B'XB)^-1(B'XA)+Q) is true"""
a, b, q, r, e, s, knownfailure = case
if knownfailure:
pytest.xfail(reason=knownfailure)
x = solve_discrete_are(a, b, q, r, e, s)
if e is None:
e = np.eye(a.shape[0])
if s is None:
s = np.zeros_like(b)
res = a.conj().T.dot(x.dot(a)) - e.conj().T.dot(x.dot(e)) + q
res -= (a.conj().T.dot(x.dot(b)) + s).dot(
solve(r+b.conj().T.dot(x.dot(b)),
(b.conj().T.dot(x.dot(a)) + s.conj().T)
)
)
assert_array_almost_equal(res, np.zeros_like(res), decimal=dec)
for ind, case in enumerate(cases):
_test_factory(case, min_decimal[ind])
def test_are_validate_args():
def test_square_shape():
nsq = np.ones((3, 2))
sq = np.eye(3)
for x in (solve_continuous_are, solve_discrete_are):
assert_raises(ValueError, x, nsq, 1, 1, 1)
assert_raises(ValueError, x, sq, sq, nsq, 1)
assert_raises(ValueError, x, sq, sq, sq, nsq)
assert_raises(ValueError, x, sq, sq, sq, sq, nsq)
def test_compatible_sizes():
nsq = np.ones((3, 2))
sq = np.eye(4)
for x in (solve_continuous_are, solve_discrete_are):
assert_raises(ValueError, x, sq, nsq, 1, 1)
assert_raises(ValueError, x, sq, sq, sq, sq, sq, nsq)
assert_raises(ValueError, x, sq, sq, np.eye(3), sq)
assert_raises(ValueError, x, sq, sq, sq, np.eye(3))
assert_raises(ValueError, x, sq, sq, sq, sq, np.eye(3))
def test_symmetry():
nsym = np.arange(9).reshape(3, 3)
sym = np.eye(3)
for x in (solve_continuous_are, solve_discrete_are):
assert_raises(ValueError, x, sym, sym, nsym, sym)
assert_raises(ValueError, x, sym, sym, sym, nsym)
def test_singularity():
sing = np.full((3, 3), 1e12)
sing[2, 2] -= 1
sq = np.eye(3)
for x in (solve_continuous_are, solve_discrete_are):
assert_raises(ValueError, x, sq, sq, sq, sq, sing)
assert_raises(ValueError, solve_continuous_are, sq, sq, sq, sing)
def test_finiteness():
nm = np.full((2, 2), np.nan)
sq = np.eye(2)
for x in (solve_continuous_are, solve_discrete_are):
assert_raises(ValueError, x, nm, sq, sq, sq)
assert_raises(ValueError, x, sq, nm, sq, sq)
assert_raises(ValueError, x, sq, sq, nm, sq)
assert_raises(ValueError, x, sq, sq, sq, nm)
assert_raises(ValueError, x, sq, sq, sq, sq, nm)
assert_raises(ValueError, x, sq, sq, sq, sq, sq, nm)
class TestSolveSylvester(object):
cases = [
# a, b, c all real.
(np.array([[1, 2], [0, 4]]),
np.array([[5, 6], [0, 8]]),
np.array([[9, 10], [11, 12]])),
    # a, b, c all real, 4x4. a and b have non-trivial 2x2 blocks in their
# quasi-triangular form.
(np.array([[1.0, 0, 0, 0],
[0, 1.0, 2.0, 0.0],
[0, 0, 3.0, -4],
[0, 0, 2, 5]]),
np.array([[2.0, 0, 0, 1.0],
[0, 1.0, 0.0, 0.0],
[0, 0, 1.0, -1],
[0, 0, 1, 1]]),
np.array([[1.0, 0, 0, 0],
[0, 1.0, 0, 0],
[0, 0, 1.0, 0],
[0, 0, 0, 1.0]])),
# a, b, c all complex.
(np.array([[1.0+1j, 2.0], [3.0-4.0j, 5.0]]),
np.array([[-1.0, 2j], [3.0, 4.0]]),
np.array([[2.0-2j, 2.0+2j], [-1.0-1j, 2.0]])),
# a and b real; c complex.
(np.array([[1.0, 2.0], [3.0, 5.0]]),
np.array([[-1.0, 0], [3.0, 4.0]]),
np.array([[2.0-2j, 2.0+2j], [-1.0-1j, 2.0]])),
# a and c complex; b real.
(np.array([[1.0+1j, 2.0], [3.0-4.0j, 5.0]]),
np.array([[-1.0, 0], [3.0, 4.0]]),
np.array([[2.0-2j, 2.0+2j], [-1.0-1j, 2.0]])),
# a complex; b and c real.
(np.array([[1.0+1j, 2.0], [3.0-4.0j, 5.0]]),
np.array([[-1.0, 0], [3.0, 4.0]]),
np.array([[2.0, 2.0], [-1.0, 2.0]])),
# not square matrices, real
(np.array([[8, 1, 6], [3, 5, 7], [4, 9, 2]]),
np.array([[2, 3], [4, 5]]),
np.array([[1, 2], [3, 4], [5, 6]])),
# not square matrices, complex
(np.array([[8, 1j, 6+2j], [3, 5, 7], [4, 9, 2]]),
np.array([[2, 3], [4, 5-1j]]),
np.array([[1, 2j], [3, 4j], [5j, 6+7j]])),
]
def check_case(self, a, b, c):
x = solve_sylvester(a, b, c)
assert_array_almost_equal(np.dot(a, x) + np.dot(x, b), c)
def test_cases(self):
for case in self.cases:
self.check_case(case[0], case[1], case[2])
def test_trivial(self):
a = np.array([[1.0, 0.0], [0.0, 1.0]])
b = np.array([[1.0]])
c = np.array([2.0, 2.0]).reshape(-1, 1)
x = solve_sylvester(a, b, c)
assert_array_almost_equal(x, np.array([1.0, 1.0]).reshape(-1, 1))
| bsd-3-clause | -1,425,860,346,977,748,000 | 39.473958 | 79 | 0.439873 | false |
shikhardb/scikit-learn | examples/covariance/plot_mahalanobis_distances.py | 348 | 6232 | r"""
================================================================
Robust covariance estimation and Mahalanobis distances relevance
================================================================
An example to show covariance estimation with the Mahalanobis
distances on Gaussian distributed data.
For Gaussian distributed data, the distance of an observation
:math:`x_i` to the mode of the distribution can be computed using its
Mahalanobis distance: :math:`d_{(\mu,\Sigma)}(x_i)^2 = (x_i -
\mu)'\Sigma^{-1}(x_i - \mu)` where :math:`\mu` and :math:`\Sigma` are
the location and the covariance of the underlying Gaussian
distribution.
In practice, :math:`\mu` and :math:`\Sigma` are replaced by some
estimates. The usual covariance maximum likelihood estimate is very
sensitive to the presence of outliers in the data set, and therefore so
are the corresponding Mahalanobis distances. It is better to use a
robust estimator of covariance to guarantee that the estimation is
resistant to "erroneous" observations in the data set and that the
associated Mahalanobis distances accurately reflect the true
organisation of the observations.
The Minimum Covariance Determinant estimator is a robust,
high-breakdown point (i.e. it can be used to estimate the covariance
matrix of highly contaminated datasets, up to
:math:`\frac{n_\text{samples}-n_\text{features}-1}{2}` outliers)
estimator of covariance. The idea is to find
:math:`\frac{n_\text{samples}+n_\text{features}+1}{2}`
observations whose empirical covariance has the smallest determinant,
yielding a "pure" subset of observations from which to compute
standard estimates of location and covariance.
The Minimum Covariance Determinant estimator (MCD) has been introduced
by P. J. Rousseeuw in [1].
This example illustrates how the Mahalanobis distances are affected by
outlying data: observations drawn from a contaminating distribution
are not distinguishable from the observations coming from the real,
Gaussian distribution that one may want to work with. Using MCD-based
Mahalanobis distances, the two populations become
distinguishable. Associated applications include outlier detection,
observation ranking, clustering, ...
For visualization purposes, the cube root of the Mahalanobis distances
is represented in the boxplot, as Wilson and Hilferty suggest [2].
[1] P. J. Rousseeuw. Least median of squares regression. J. Am
Stat Ass, 79:871, 1984.
[2] Wilson, E. B., & Hilferty, M. M. (1931). The distribution of chi-square.
Proceedings of the National Academy of Sciences of the United States
of America, 17, 684-688.
"""
print(__doc__)
import numpy as np
import matplotlib.pyplot as plt
from sklearn.covariance import EmpiricalCovariance, MinCovDet
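# Illustrative helper, not used by the plot below: the squared Mahalanobis
# distance of a point ``x`` to a location ``mu`` under a covariance ``sigma``,
# mirroring the formula quoted in the module docstring. The fitted estimators
# used later expose the same computation through their ``mahalanobis`` method.
def _squared_mahalanobis(x, mu, sigma):
    # d^2 = (x - mu)' Sigma^{-1} (x - mu)
    centered = np.asarray(x, dtype=float) - np.asarray(mu, dtype=float)
    return float(centered.dot(np.linalg.solve(np.asarray(sigma, dtype=float),
                                              centered)))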
n_samples = 125
n_outliers = 25
n_features = 2
# generate data
gen_cov = np.eye(n_features)
gen_cov[0, 0] = 2.
X = np.dot(np.random.randn(n_samples, n_features), gen_cov)
# add some outliers
outliers_cov = np.eye(n_features)
outliers_cov[np.arange(1, n_features), np.arange(1, n_features)] = 7.
X[-n_outliers:] = np.dot(np.random.randn(n_outliers, n_features), outliers_cov)
# fit a Minimum Covariance Determinant (MCD) robust estimator to data
robust_cov = MinCovDet().fit(X)
# compare estimators learnt from the full data set with true parameters
emp_cov = EmpiricalCovariance().fit(X)
###############################################################################
# Display results
fig = plt.figure()
plt.subplots_adjust(hspace=-.1, wspace=.4, top=.95, bottom=.05)
# Show data set
subfig1 = plt.subplot(3, 1, 1)
inlier_plot = subfig1.scatter(X[:, 0], X[:, 1],
color='black', label='inliers')
outlier_plot = subfig1.scatter(X[:, 0][-n_outliers:], X[:, 1][-n_outliers:],
color='red', label='outliers')
subfig1.set_xlim(subfig1.get_xlim()[0], 11.)
subfig1.set_title("Mahalanobis distances of a contaminated data set:")
# Show contours of the distance functions
xx, yy = np.meshgrid(np.linspace(plt.xlim()[0], plt.xlim()[1], 100),
np.linspace(plt.ylim()[0], plt.ylim()[1], 100))
zz = np.c_[xx.ravel(), yy.ravel()]
mahal_emp_cov = emp_cov.mahalanobis(zz)
mahal_emp_cov = mahal_emp_cov.reshape(xx.shape)
emp_cov_contour = subfig1.contour(xx, yy, np.sqrt(mahal_emp_cov),
cmap=plt.cm.PuBu_r,
linestyles='dashed')
mahal_robust_cov = robust_cov.mahalanobis(zz)
mahal_robust_cov = mahal_robust_cov.reshape(xx.shape)
robust_contour = subfig1.contour(xx, yy, np.sqrt(mahal_robust_cov),
cmap=plt.cm.YlOrBr_r, linestyles='dotted')
subfig1.legend([emp_cov_contour.collections[1], robust_contour.collections[1],
inlier_plot, outlier_plot],
['MLE dist', 'robust dist', 'inliers', 'outliers'],
loc="upper right", borderaxespad=0)
plt.xticks(())
plt.yticks(())
# Plot the scores for each point
emp_mahal = emp_cov.mahalanobis(X - np.mean(X, 0)) ** (0.33)
subfig2 = plt.subplot(2, 2, 3)
subfig2.boxplot([emp_mahal[:-n_outliers], emp_mahal[-n_outliers:]], widths=.25)
subfig2.plot(1.26 * np.ones(n_samples - n_outliers),
emp_mahal[:-n_outliers], '+k', markeredgewidth=1)
subfig2.plot(2.26 * np.ones(n_outliers),
emp_mahal[-n_outliers:], '+k', markeredgewidth=1)
subfig2.axes.set_xticklabels(('inliers', 'outliers'), size=15)
subfig2.set_ylabel(r"$\sqrt[3]{\rm{(Mahal. dist.)}}$", size=16)
subfig2.set_title("1. from non-robust estimates\n(Maximum Likelihood)")
plt.yticks(())
robust_mahal = robust_cov.mahalanobis(X - robust_cov.location_) ** (0.33)
subfig3 = plt.subplot(2, 2, 4)
subfig3.boxplot([robust_mahal[:-n_outliers], robust_mahal[-n_outliers:]],
widths=.25)
subfig3.plot(1.26 * np.ones(n_samples - n_outliers),
robust_mahal[:-n_outliers], '+k', markeredgewidth=1)
subfig3.plot(2.26 * np.ones(n_outliers),
robust_mahal[-n_outliers:], '+k', markeredgewidth=1)
subfig3.axes.set_xticklabels(('inliers', 'outliers'), size=15)
subfig3.set_ylabel(r"$\sqrt[3]{\rm{(Mahal. dist.)}}$", size=16)
subfig3.set_title("2. from robust estimates\n(Minimum Covariance Determinant)")
plt.yticks(())
plt.show()
| bsd-3-clause | 2,299,898,999,090,624,500 | 42.277778 | 79 | 0.679718 | false |
5y/kivy | kivy/tools/report.py | 17 | 3660 | '''
Report tool
===========
This tool is a helper for users. It can be used to dump environment
information that helps during the debugging process.
'''
import os
import sys
import time
from time import ctime
from configparser import ConfigParser
from io import StringIO
from xmlrpc.client import ServerProxy
import kivy
report = []
def title(t):
report.append('')
report.append('=' * 80)
report.append(t)
report.append('=' * 80)
report.append('')
# ----------------------------------------------------------
# Start output debugging
# ----------------------------------------------------------
title('Global')
report.append('OS platform : %s' % sys.platform)
report.append('Python EXE : %s' % sys.executable)
report.append('Python Version : %s' % sys.version)
report.append('Python API : %s' % sys.api_version)
report.append('Kivy Version : %s' % kivy.__version__)
report.append('Install path : %s' % os.path.dirname(kivy.__file__))
report.append('Install date : %s' % ctime(os.path.getctime(kivy.__file__)))
title('OpenGL')
from kivy.core import gl
from kivy.core.window import Window
report.append('GL Vendor: %s' % gl.glGetString(gl.GL_VENDOR))
report.append('GL Renderer: %s' % gl.glGetString(gl.GL_RENDERER))
report.append('GL Version: %s' % gl.glGetString(gl.GL_VERSION))
ext = gl.glGetString(gl.GL_EXTENSIONS)
if ext is None:
report.append('GL Extensions: %s' % ext)
else:
report.append('GL Extensions:')
for x in ext.split():
report.append('\t%s' % x)
Window.close()
title('Core selection')
from kivy.core.audio import SoundLoader
report.append('Audio = %s' % SoundLoader._classes)
from kivy.core.camera import Camera
report.append('Camera = %s' % Camera)
from kivy.core.image import ImageLoader
report.append('Image = %s' % ImageLoader.loaders)
from kivy.core.text import Label
report.append('Text = %s' % Label)
from kivy.core.video import Video
report.append('Video = %s' % Video)
report.append('Window = %s' % Window)
title('Libraries')
def testimport(libname):
try:
l = __import__(libname)
report.append('%-20s exist at %s' % (libname, l.__file__))
except ImportError:
report.append('%-20s is missing' % libname)
for x in (
'gst',
'pygame',
'pygame.midi',
'pyglet',
'videocapture',
'squirtle',
'PIL',
'opencv',
'opencv.cv',
'opencv.highgui',
'cython'):
testimport(x)
title('Configuration')
s = StringIO()
from kivy.config import Config
ConfigParser.write(Config, s)
report.extend(s.getvalue().split('\n'))
title('Input availability')
from kivy.input.factory import MotionEventFactory
for x in MotionEventFactory.list():
report.append(x)
'''
title('Log')
for x in pymt_logger_history.history:
report.append(x.message)
'''
title('Environ')
for k, v in os.environ.items():
report.append('%s = %s' % (k, v))
title('Options')
for k, v in kivy.kivy_options.items():
report.append('%s = %s' % (k, v))
report = '\n'.join(report)
print(report)
print()
print()
try:
reply = input(
        'Do you want to send this report to paste.pocoo.org? (Y/n) : ')
except EOFError:
sys.exit(0)
if reply.lower().strip() in ('', 'y'):
print('Please wait while sending the report...')
s = ServerProxy('http://paste.pocoo.org/xmlrpc/')
r = s.pastes.newPaste('text', report)
print()
print()
print('REPORT posted at http://paste.pocoo.org/show/%s/' % r)
print()
print()
else:
print('No report posted.')
# On Windows systems, the console closes as soon as the dump ends.
# That's inconvenient if we want to read the report URL.
input('Enter any key to leave.')
| mit | -4,001,076,655,932,923,400 | 23.563758 | 78 | 0.635792 | false |
SchoolIdolTomodachi/CinderellaProducers | cpro/filters.py | 1 | 10802 | from django.db.models import Q
from django.core.exceptions import PermissionDenied
from cpro import models
############################################################
# Cards
def filterCards(queryset, parameters, request):
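    # ``parameters`` is expected to be a dict of request filters (typically
    # request.GET); the keys handled below are the full supported set. Purely
    # illustrative example (values are made up):
    #   {'search': 'uzuki', 'i_rarity': '4', 'type': '1', 'is_event': '2'}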
if request.user.is_authenticated():
request.user.all_accounts = request.user.accounts.all()
accounts_pks = ','.join([str(account.pk) for account in request.user.all_accounts])
if accounts_pks:
queryset = queryset.extra(select={
'total_owned': 'SELECT COUNT(*) FROM cpro_ownedcard WHERE card_id = cpro_card.id AND account_id IN ({})'.format(accounts_pks),
'favorited': 'SELECT COUNT(*) FROM cpro_favoritecard WHERE card_id = cpro_card.id AND owner_id IN ({})'.format(request.user.id),
})
if 'favorite_of' in parameters and parameters['favorite_of']:
queryset = queryset.filter(fans__owner_id=parameters['favorite_of'])
if 'ids' in parameters and parameters['ids']:
queryset = queryset.filter(id__in=parameters['ids'].split(','))
if 'search' in parameters and parameters['search']:
terms = parameters['search'].split(' ')
for term in terms:
queryset = queryset.filter(Q(idol__name__icontains=term)
| Q(idol__japanese_name__icontains=term)
| Q(title__icontains=term)
| Q(translated_title__icontains=term)
| Q(skill_name__icontains=term)
| Q(translated_skill_name__icontains=term)
)
if 'i_rarity' in parameters and parameters['i_rarity']:
queryset = queryset.filter(i_rarity=parameters['i_rarity'])
if 'type' in parameters and parameters['type']:
queryset = queryset.filter(idol__i_type=parameters['type'])
if 'is_event' in parameters and parameters['is_event']:
if parameters['is_event'] == '2':
queryset = queryset.filter(event__isnull=False)
elif parameters['is_event'] == '3':
queryset = queryset.filter(event__isnull=True)
if 'is_limited' in parameters and parameters['is_limited']:
if parameters['is_limited'] == '2':
queryset = queryset.filter(is_limited=True)
elif parameters['is_limited'] == '3':
queryset = queryset.filter(is_limited=False)
if 'has_art' in parameters and parameters['has_art']:
if parameters['has_art'] == '2':
queryset = queryset.filter(art__isnull=False).exclude(art='')
elif parameters['has_art'] == '3':
queryset = queryset.filter(Q(art__isnull=True) | Q(art=''))
if 'has_art_hd' in parameters and parameters['has_art_hd']:
if parameters['has_art_hd'] == '2':
queryset = queryset.filter(art_hd__isnull=False).exclude(art_hd='')
elif parameters['has_art_hd'] == '3':
queryset = queryset.filter(Q(art_hd__isnull=True) | Q(art_hd=''))
if 'i_skill' in parameters and parameters['i_skill']:
queryset = queryset.filter(i_skill=parameters['i_skill'])
if 'leader_skill' in parameters and parameters['leader_skill']:
value = parameters['leader_skill']
if value.startswith('type-'):
queryset = queryset.filter(leader_skill_type=int(value[5:]))
elif value.startswith('apply-'):
queryset = queryset.filter(leader_skill_apply=int(value[6:]))
if 'idol' in parameters and parameters['idol']:
queryset = queryset.filter(idol=parameters['idol'])
if 'event' in parameters and parameters['event']:
queryset = queryset.filter(event=parameters['event'])
return queryset
def filterCard(queryset, parameters, request):
queryset = filterCards(queryset, parameters, request)
return queryset
############################################################
# Idols
def filterIdols(queryset, parameters, request):
if 'search' in parameters and parameters['search']:
terms = parameters['search'].split(' ')
for term in terms:
queryset = queryset.filter(Q(name__icontains=term)
| Q(japanese_name__icontains=term)
| Q(romaji_hometown__icontains=term)
| Q(hometown__icontains=term)
| Q(hobbies__icontains=term)
| Q(CV__icontains=term)
| Q(romaji_CV__icontains=term)
)
if 'type' in parameters and parameters['type']:
queryset = queryset.filter(i_type=parameters['type'])
if 'i_blood_type' in parameters and parameters['i_blood_type']:
queryset = queryset.filter(i_blood_type=parameters['i_blood_type'])
if 'i_writing_hand' in parameters and parameters['i_writing_hand']:
queryset = queryset.filter(i_writing_hand=parameters['i_writing_hand'])
if 'i_astrological_sign' in parameters and parameters['i_astrological_sign']:
queryset = queryset.filter(i_astrological_sign=parameters['i_astrological_sign'])
if 'has_signature' in parameters and parameters['has_signature']:
if parameters['has_signature'] == '2':
queryset = queryset.filter(signature__isnull=False).exclude(signature='')
elif parameters['has_signature'] == '3':
queryset = queryset.filter(Q(signature__isnull=True) | Q(signature=''))
return queryset
############################################################
# Accounts
def filterAccounts(queryset, parameters, request):
if 'search' in parameters and parameters['search']:
terms = parameters['search'].split(' ')
for term in terms:
queryset = queryset.filter(Q(owner__username__icontains=term)
| Q(owner__email__iexact=term)
| Q(nickname__icontains=term)
| Q(device__icontains=term)
| Q(owner__preferences__description__icontains=term)
| Q(owner__preferences__location__icontains=term)
)
if 'own_card' in parameters and parameters['own_card']:
queryset = queryset.filter(ownedcards__card__id=parameters['own_card'])
if 'favorite_card' in parameters and parameters['favorite_card']:
queryset = queryset.filter(owner__favoritecards__card__id=parameters['favorite_card'])
if 'user_type' in parameters and parameters['user_type']:
queryset = queryset.filter(owner__preferences__color=unicode(parameters['user_type']))
if 'game_id' in parameters and parameters['game_id']:
queryset = queryset.filter(game_id=parameters['game_id'])
if 'favorite_character' in parameters and parameters['favorite_character']:
queryset = queryset.filter(Q(owner__preferences__favorite_character1=parameters['favorite_character'])
| Q(owner__preferences__favorite_character2=parameters['favorite_character'])
| Q(owner__preferences__favorite_character3=parameters['favorite_character'])
)
if 'starter_id' in parameters and parameters['starter_id']:
queryset = queryset.filter(starter_id=parameters['starter_id'])
if 'center_type' in parameters and parameters['center_type']:
queryset = queryset.filter(center__card__idol__i_type=parameters['center_type'])
if 'center_rarity' in parameters and parameters['center_rarity']:
queryset = queryset.filter(center__card__i_rarity=parameters['center_rarity'])
if 'accept_friend_requests' in parameters and parameters['accept_friend_requests']:
if parameters['accept_friend_requests'] == '2':
queryset = queryset.filter(accept_friend_requests=True)
elif parameters['accept_friend_requests'] == '3':
queryset = queryset.filter(accept_friend_requests=False)
if 'ordering' in parameters:
if parameters['ordering'] == 'level':
queryset = queryset.exclude(level=0).exclude(level=None)
if parameters['ordering'] == 'start_date':
queryset = queryset.exclude(start_date=None)
return queryset
############################################################
# Events
def filterEvents(queryset, parameters, request):
if 'search' in parameters and parameters['search']:
terms = parameters['search'].split(' ')
for term in terms:
queryset = queryset.filter(Q(name__icontains=term)
| Q(translated_name__icontains=term)
)
if 'i_kind' in parameters and parameters['i_kind']:
queryset = queryset.filter(i_kind=parameters['i_kind'])
if 'idol' in parameters and parameters['idol']:
queryset = queryset.filter(cards__idol=parameters['idol'])
return queryset
############################################################
# Owned Cards
def filterFavoriteCards(queryset, parameters, request):
if 'owner' in parameters:
queryset = queryset.filter(owner_id=parameters['owner'])
else:
raise PermissionDenied()
return queryset
############################################################
# Owned Cards
def filterOwnedCards(queryset, parameters, request):
if 'account' in parameters:
queryset = queryset.filter(account_id=parameters['account'])
elif 'ids' in parameters and parameters['ids']:
queryset = queryset.filter(id__in=parameters['ids'].split(','))
else:
raise PermissionDenied()
if 'search' in parameters and parameters['search']:
terms = parameters['search'].split(' ')
for term in terms:
queryset = queryset.filter(Q(card__title__icontains=term)
| Q(card__idol__name__icontains=term)
)
if 'i_rarity' in parameters and parameters['i_rarity']:
queryset = queryset.filter(card__i_rarity=parameters['i_rarity'])
if 'is_event' in parameters and parameters['is_event']:
if parameters['is_event'] == '2':
queryset = queryset.filter(card__event__isnull=False)
elif parameters['is_event'] == '3':
queryset = queryset.filter(card__event__isnull=True)
if 'type' in parameters and parameters['type']:
queryset = queryset.filter(card__idol__i_type=parameters['type'])
if 'i_skill' in parameters and parameters['i_skill']:
queryset = queryset.filter(card__i_skill=parameters['i_skill'])
return queryset
| apache-2.0 | 6,829,232,682,938,441,000 | 52.741294 | 144 | 0.583873 | false |
virajs/selenium-1 | py/test/selenium/webdriver/common/cookie_tests.py | 28 | 3282 | import calendar
import time
import unittest
import random
import pytest
from selenium.test.selenium.webdriver.common import utils
class CookieTest(unittest.TestCase):
def setUp(self):
self._loadPage("simpleTest")
# Set the cookie to expire in 30 minutes
timestamp = calendar.timegm(time.gmtime()) + (30 * 60)
self.COOKIE_A = {"name": "foo",
"value": "bar",
"path": "/",
"secure": False}
def tearDown(self):
self.driver.delete_all_cookies()
def testAddCookie(self):
self.driver.execute_script("return document.cookie")
self.driver.add_cookie(self.COOKIE_A)
cookie_returned = str(self.driver.execute_script("return document.cookie"))
self.assertTrue(self.COOKIE_A["name"] in cookie_returned)
def testAddingACookieThatExpiredInThePast(self):
if self.driver.name == 'internet explorer':
pytest.skip("Issue needs investigating")
cookie = self.COOKIE_A.copy()
cookie["expiry"] = calendar.timegm(time.gmtime()) - 1
self.driver.add_cookie(cookie)
cookies = self.driver.get_cookies()
self.assertEquals(0, len(cookies))
def testDeleteAllCookie(self):
self.driver.add_cookie(utils.convert_cookie_to_json(self.COOKIE_A))
self.driver.delete_all_cookies()
self.assertFalse(self.driver.get_cookies())
def testDeleteCookie(self):
self.driver.add_cookie(utils.convert_cookie_to_json(self.COOKIE_A))
self.driver.delete_cookie("foo")
self.assertFalse(self.driver.get_cookies())
def testShouldGetCookieByName(self):
key = "key_%d" % int(random.random()*10000000)
self.driver.execute_script("document.cookie = arguments[0] + '=set';", key)
cookie = self.driver.get_cookie(key)
self.assertEquals("set", cookie["value"])
def testGetAllCookies(self):
key1 = "key_%d" % int(random.random()*10000000)
key2 = "key_%d" % int(random.random()*10000000)
cookies = self.driver.get_cookies()
count = len(cookies)
one = {"name" :key1,
"value": "value"}
two = {"name":key2,
"value": "value"}
self.driver.add_cookie(one)
self.driver.add_cookie(two)
self._loadPage("simpleTest")
cookies = self.driver.get_cookies()
self.assertEquals(count + 2, len(cookies))
def testShouldNotDeleteCookiesWithASimilarName(self):
cookieOneName = "fish"
cookie1 = {"name" :cookieOneName,
"value":"cod"}
cookie2 = {"name" :cookieOneName + "x",
"value": "earth"}
self.driver.add_cookie(cookie1)
self.driver.add_cookie(cookie2)
self.driver.delete_cookie(cookieOneName)
cookies = self.driver.get_cookies()
self.assertFalse(cookie1["name"] == cookies[0]["name"], msg=str(cookies))
self.assertEquals(cookie2["name"] , cookies[0]["name"], msg=str(cookies))
def _loadPage(self, name):
self.driver.get(self._pageURL(name))
def _pageURL(self, name):
return "http://localhost:%d/%s.html" % (self.webserver.port, name)
| apache-2.0 | -5,163,137,454,098,457,000 | 33.547368 | 83 | 0.602681 | false |
theju/safebrowsing-python | safebrowsing/backend.py | 2 | 5534 | import conf
from base import BaseDbObj
class SqliteDbObj(BaseDbObj):
def __init__(self):
try:
import sqlite3 as sqlite
except ImportError:
from pysqlite2 import dbapi2 as sqlite
self.connection = sqlite.connect(self.db_name)
self.cursor = self.connection.cursor()
def get_version(self, badware_type):
self.cursor.execute("select * from %s_version;" %(badware_type))
row = self.cursor.fetchall()
if not row:
return None
return row[0][0]
def insert_version_row(self, badware_type, version_number):
self.cursor.execute("INSERT INTO %s_version (version_number) VALUES "
"('%s');" %(badware_type, version_number))
def update_version_row(self, badware_type, new_version_number, version_number):
self.cursor.execute("UPDATE %s_version SET version_number='%s' WHERE "
"version_number='%s';" %(badware_type, new_version_number,
version_number))
def insert_rows(self, url_hash_dict):
for (url_hash, badware_code) in url_hash_dict.items():
self.cursor.execute("INSERT INTO url_hashes_table (badware_type,url_hash) "
"VALUES ('%s','%s');" %(badware_code, url_hash))
self.connection.commit()
self.connection.close()
def delete_rows(self, url_hash_dict):
for (url_hash, badware_code) in url_hash_dict.items():
self.cursor.execute("DELETE FROM url_hashes_table WHERE badware_type='%s' "
"AND url_hash='%s';" %(badware_code, url_hash))
def lookup_by_md5(self, md5_hash_list):
for md5_hash in md5_hash_list:
self.cursor.execute("SELECT * FROM url_hashes_table WHERE url_hash='%s';" %(md5_hash))
row = self.cursor.fetchall()
if not row:
continue
            # If row is non-empty then the URL is in the database;
            # stop and return its badware type.
return row[0][0]
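# Note: SqliteDbObj assumes its tables already exist -- one
# "<badware_type>_version" table holding a single version_number value per
# badware type, plus a "url_hashes_table(badware_type, url_hash)" table.
# Creating that schema is not handled in this module.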
class MySqlDbObj(SqliteDbObj):
def __init__(self):
try:
            import MySQLdb
        except ImportError:
            raise Exception("Python DB library (MySQLdb) not found.")
kwargs = {}
if self.db_user:
kwargs['user'] = self.db_user
if self.db_name:
            kwargs['db'] = self.db_name
if self.db_password:
kwargs['passwd'] = self.db_password
if self.db_host.startswith('/'):
kwargs['unix_socket'] = self.db_host
elif self.db_host:
kwargs['host'] = self.db_host
if self.db_port:
kwargs['port'] = int(self.db_port)
        self.connection = MySQLdb.connect(**kwargs)
self.cursor = self.connection.cursor()
class PostgresqlDbObj(SqliteDbObj):
def __init__(self):
try:
import psycopg2 as Database
except ImportError:
try:
import psycopg as Database
except ImportError:
raise Exception("Libraries psycopg2/psycopg not found.")
conn_string = ""
if not self.db_name:
raise Exception("Database name not specified.")
conn_string += "dbname=%s" %self.db_name
if self.db_user:
conn_string += " user=%s %s" %(self.db_user, conn_string)
if self.db_password:
conn_string += " password='%s'" %self.db_password
if self.db_host:
conn_string += " host=%s" %self.db_host
if self.db_port:
conn_string += " port=%s" % self.db_port
self.connection = Database.connect(conn_string)
self.cursor = self.connection.cursor()
class MemcachedDbObj(BaseDbObj):
def __init__(self):
try:
import memcache
except ImportError:
raise Exception("Could not find the memcached module.")
if isinstance(self.db_host, (str, unicode)):
self.db_host = [self.db_host,]
if isinstance(self.db_port, (int, str, unicode)):
self.db_port = [self.db_port, ]
servers = ["%s:%s" %(ii[0], ii[1]) for ii in zip(self.db_host, self.db_port)]
self.client = memcache.Client(servers)
def get_version(self, badware_type):
return self.client.get("%s_version" %(badware_type))
def insert_version_row(self, badware_type, version_number):
self.client.set("%s_version" %badware_type, version_number)
def update_version_row(self, badware_type, new_version_number, version_number):
self.client.set("%s_version" %badware_type, version_number)
def insert_rows(self, url_hash_dict):
self.client.set_multi(url_hash_dict)
def delete_rows(self, url_hash_dict):
self.client.delete_multi(url_hash_dict.keys())
def lookup_by_md5(self, md5_hash_list):
hash_row = self.client.get_multi(md5_hash_list)
if not hash_row:
return None
return hash_row.values()[0]
DB_BACKENDS = {'sqlite3' : SqliteDbObj,
'mysql' : MySqlDbObj,
'postgresql' : PostgresqlDbObj,
'memcached' : MemcachedDbObj,}
class DbObj(object):
def __init__(self):
backend = getattr(conf, 'DATABASE_ENGINE')
if not backend in DB_BACKENDS:
raise Exception("The DATABASE_ENGINE is not among the supported backends.")
self.backend = DB_BACKENDS[backend]()
| mit | 8,084,922,264,472,806,000 | 36.646259 | 98 | 0.572461 | false |
Changaco/oh-mainline | vendor/packages/mechanize/test/test_pickle.py | 22 | 1042 | import cPickle
import cStringIO as StringIO
import pickle
import mechanize
import mechanize._response
import mechanize._testcase
def pickle_and_unpickle(obj, implementation):
return implementation.loads(implementation.dumps(obj))
def test_pickling(obj, check=lambda unpickled: None):
check(pickle_and_unpickle(obj, cPickle))
check(pickle_and_unpickle(obj, pickle))
class PickleTest(mechanize._testcase.TestCase):
def test_pickle_cookie(self):
cookiejar = mechanize.CookieJar()
url = "http://example.com/"
request = mechanize.Request(url)
response = mechanize._response.test_response(
headers=[("Set-Cookie", "spam=eggs")],
url=url)
[cookie] = cookiejar.make_cookies(response, request)
check_equality = lambda unpickled: self.assertEqual(unpickled, cookie)
test_pickling(cookie, check_equality)
def test_pickle_cookiejar(self):
test_pickling(mechanize.CookieJar())
if __name__ == "__main__":
mechanize._testcase.main()
| agpl-3.0 | 6,419,300,007,856,486,000 | 27.162162 | 78 | 0.6881 | false |
lmorchard/django | tests/null_queries/tests.py | 290 | 2928 | from __future__ import unicode_literals
from django.core.exceptions import FieldError
from django.test import TestCase
from .models import Choice, Inner, OuterA, OuterB, Poll
class NullQueriesTests(TestCase):
def test_none_as_null(self):
"""
Regression test for the use of None as a query value.
None is interpreted as an SQL NULL, but only in __exact and __iexact
queries.
Set up some initial polls and choices
"""
p1 = Poll(question='Why?')
p1.save()
c1 = Choice(poll=p1, choice='Because.')
c1.save()
c2 = Choice(poll=p1, choice='Why Not?')
c2.save()
# Exact query with value None returns nothing ("is NULL" in sql,
# but every 'id' field has a value).
self.assertQuerysetEqual(Choice.objects.filter(choice__exact=None), [])
# The same behavior for iexact query.
self.assertQuerysetEqual(Choice.objects.filter(choice__iexact=None), [])
# Excluding the previous result returns everything.
self.assertQuerysetEqual(
Choice.objects.exclude(choice=None).order_by('id'),
[
'<Choice: Choice: Because. in poll Q: Why? >',
'<Choice: Choice: Why Not? in poll Q: Why? >'
]
)
# Valid query, but fails because foo isn't a keyword
self.assertRaises(FieldError, Choice.objects.filter, foo__exact=None)
# Can't use None on anything other than __exact and __iexact
self.assertRaises(ValueError, Choice.objects.filter, id__gt=None)
# Related managers use __exact=None implicitly if the object hasn't been saved.
p2 = Poll(question="How?")
self.assertEqual(repr(p2.choice_set.all()), '[]')
def test_reverse_relations(self):
"""
Querying across reverse relations and then another relation should
insert outer joins correctly so as not to exclude results.
"""
obj = OuterA.objects.create()
self.assertQuerysetEqual(
OuterA.objects.filter(inner__third=None),
['<OuterA: OuterA object>']
)
self.assertQuerysetEqual(
OuterA.objects.filter(inner__third__data=None),
['<OuterA: OuterA object>']
)
Inner.objects.create(first=obj)
self.assertQuerysetEqual(
Inner.objects.filter(first__inner__third=None),
['<Inner: Inner object>']
)
# Ticket #13815: check if <reverse>_isnull=False does not produce
# faulty empty lists
OuterB.objects.create(data="reverse")
self.assertQuerysetEqual(
OuterB.objects.filter(inner__isnull=False),
[]
)
Inner.objects.create(first=obj)
self.assertQuerysetEqual(
OuterB.objects.exclude(inner__isnull=False),
['<OuterB: OuterB object>']
)
| bsd-3-clause | -5,441,647,390,344,643,000 | 33.857143 | 87 | 0.601434 | false |
michael-yin/scrapy | scrapy/core/scraper.py | 6 | 8972 | """This module implements the Scraper component which parses responses and
extracts information from them"""
from collections import deque
from twisted.python.failure import Failure
from twisted.internet import defer
from scrapy.utils.defer import defer_result, defer_succeed, parallel, iter_errback
from scrapy.utils.spider import iterate_spider_output
from scrapy.utils.misc import load_object
from scrapy.exceptions import CloseSpider, DropItem, IgnoreRequest
from scrapy import signals
from scrapy.http import Request, Response
from scrapy.item import BaseItem
from scrapy.core.spidermw import SpiderMiddlewareManager
from scrapy import log
class Slot(object):
"""Scraper slot (one per running spider)"""
MIN_RESPONSE_SIZE = 1024
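    # ``active_size`` keeps a rough byte count of the responses currently
    # being scraped (each counted as at least MIN_RESPONSE_SIZE); the engine
    # checks ``needs_backout`` and stops feeding new responses to the scraper
    # once ``max_active_size`` is exceeded.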
def __init__(self, max_active_size=5000000):
self.max_active_size = max_active_size
self.queue = deque()
self.active = set()
self.active_size = 0
self.itemproc_size = 0
self.closing = None
def add_response_request(self, response, request):
deferred = defer.Deferred()
self.queue.append((response, request, deferred))
if isinstance(response, Response):
self.active_size += max(len(response.body), self.MIN_RESPONSE_SIZE)
else:
self.active_size += self.MIN_RESPONSE_SIZE
return deferred
def next_response_request_deferred(self):
response, request, deferred = self.queue.popleft()
self.active.add(request)
return response, request, deferred
def finish_response(self, response, request):
self.active.remove(request)
if isinstance(response, Response):
self.active_size -= max(len(response.body), self.MIN_RESPONSE_SIZE)
else:
self.active_size -= self.MIN_RESPONSE_SIZE
def is_idle(self):
return not (self.queue or self.active)
def needs_backout(self):
return self.active_size > self.max_active_size
class Scraper(object):
def __init__(self, crawler):
self.slot = None
self.spidermw = SpiderMiddlewareManager.from_crawler(crawler)
itemproc_cls = load_object(crawler.settings['ITEM_PROCESSOR'])
self.itemproc = itemproc_cls.from_crawler(crawler)
self.concurrent_items = crawler.settings.getint('CONCURRENT_ITEMS')
self.crawler = crawler
self.signals = crawler.signals
self.logformatter = crawler.logformatter
@defer.inlineCallbacks
def open_spider(self, spider):
"""Open the given spider for scraping and allocate resources for it"""
self.slot = Slot()
yield self.itemproc.open_spider(spider)
def close_spider(self, spider):
"""Close a spider being scraped and release its resources"""
slot = self.slot
slot.closing = defer.Deferred()
slot.closing.addCallback(self.itemproc.close_spider)
self._check_if_closing(spider, slot)
return slot.closing
def is_idle(self):
"""Return True if there isn't any more spiders to process"""
return not self.slot
def _check_if_closing(self, spider, slot):
if slot.closing and slot.is_idle():
slot.closing.callback(spider)
def enqueue_scrape(self, response, request, spider):
slot = self.slot
dfd = slot.add_response_request(response, request)
def finish_scraping(_):
slot.finish_response(response, request)
self._check_if_closing(spider, slot)
self._scrape_next(spider, slot)
return _
dfd.addBoth(finish_scraping)
dfd.addErrback(log.err, 'Scraper bug processing %s' % request, \
spider=spider)
self._scrape_next(spider, slot)
return dfd
def _scrape_next(self, spider, slot):
while slot.queue:
response, request, deferred = slot.next_response_request_deferred()
self._scrape(response, request, spider).chainDeferred(deferred)
def _scrape(self, response, request, spider):
"""Handle the downloaded response or failure trough the spider
callback/errback"""
assert isinstance(response, (Response, Failure))
dfd = self._scrape2(response, request, spider) # returns spiders processed output
dfd.addErrback(self.handle_spider_error, request, response, spider)
dfd.addCallback(self.handle_spider_output, request, response, spider)
return dfd
def _scrape2(self, request_result, request, spider):
"""Handle the diferent cases of request's result been a Response or a
Failure"""
if not isinstance(request_result, Failure):
return self.spidermw.scrape_response(self.call_spider, \
request_result, request, spider)
else:
# FIXME: don't ignore errors in spider middleware
dfd = self.call_spider(request_result, request, spider)
return dfd.addErrback(self._log_download_errors, \
request_result, request, spider)
def call_spider(self, result, request, spider):
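        # ``result`` is either a Response (successful download) or a Failure;
        # defer_result routes it to the request's callback (defaulting to
        # spider.parse) or to its errback, and the spider's return value is
        # then normalised into an iterable.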
result.request = request
dfd = defer_result(result)
dfd.addCallbacks(request.callback or spider.parse, request.errback)
return dfd.addCallback(iterate_spider_output)
def handle_spider_error(self, _failure, request, response, spider):
exc = _failure.value
if isinstance(exc, CloseSpider):
self.crawler.engine.close_spider(spider, exc.reason or 'cancelled')
return
log.err(_failure, "Spider error processing %s" % request, spider=spider)
self.signals.send_catch_log(signal=signals.spider_error, failure=_failure, response=response, \
spider=spider)
self.crawler.stats.inc_value("spider_exceptions/%s" % _failure.value.__class__.__name__, \
spider=spider)
def handle_spider_output(self, result, request, response, spider):
if not result:
return defer_succeed(None)
it = iter_errback(result, self.handle_spider_error, request, response, spider)
dfd = parallel(it, self.concurrent_items,
self._process_spidermw_output, request, response, spider)
return dfd
def _process_spidermw_output(self, output, request, response, spider):
"""Process each Request/Item (given in the output parameter) returned
from the given spider
"""
if isinstance(output, Request):
self.crawler.engine.crawl(request=output, spider=spider)
elif isinstance(output, BaseItem):
self.slot.itemproc_size += 1
dfd = self.itemproc.process_item(output, spider)
dfd.addBoth(self._itemproc_finished, output, response, spider)
return dfd
elif output is None:
pass
else:
typename = type(output).__name__
log.msg(format='Spider must return Request, BaseItem or None, '
'got %(typename)r in %(request)s',
level=log.ERROR, spider=spider, request=request, typename=typename)
def _log_download_errors(self, spider_failure, download_failure, request, spider):
"""Log and silence errors that come from the engine (typically download
        errors that got propagated through here)
"""
if isinstance(download_failure, Failure) \
and not download_failure.check(IgnoreRequest):
if download_failure.frames:
log.err(download_failure, 'Error downloading %s' % request,
spider=spider)
else:
errmsg = download_failure.getErrorMessage()
if errmsg:
log.msg(format='Error downloading %(request)s: %(errmsg)s',
level=log.ERROR, spider=spider, request=request,
errmsg=errmsg)
if spider_failure is not download_failure:
return spider_failure
def _itemproc_finished(self, output, item, response, spider):
"""ItemProcessor finished for the given ``item`` and returned ``output``
"""
self.slot.itemproc_size -= 1
if isinstance(output, Failure):
ex = output.value
if isinstance(ex, DropItem):
logkws = self.logformatter.dropped(item, ex, response, spider)
log.msg(spider=spider, **logkws)
return self.signals.send_catch_log_deferred(signal=signals.item_dropped, \
item=item, spider=spider, exception=output.value)
else:
log.err(output, 'Error processing %s' % item, spider=spider)
else:
logkws = self.logformatter.scraped(output, response, spider)
log.msg(spider=spider, **logkws)
return self.signals.send_catch_log_deferred(signal=signals.item_scraped, \
item=output, response=response, spider=spider)
| bsd-3-clause | 5,703,035,545,897,099,000 | 40.537037 | 103 | 0.636313 | false |
jeremyh/agdc | contrib/agdc_workshop_exercises/ndvi_analysis_stacker-finished.py | 5 | 7285 | '''
Created on 21/02/2013
@author: u76345
'''
import os
import sys
import logging
import re
import numpy
from datetime import datetime, time
from osgeo import gdal
from agdc.stacker import Stacker
from EOtools.utils import log_multiline
from EOtools.stats import temporal_stats
SCALE_FACTOR = 10000
# Set top level standard output
console_handler = logging.StreamHandler(sys.stdout)
console_handler.setLevel(logging.INFO)
console_formatter = logging.Formatter('%(message)s')
console_handler.setFormatter(console_formatter)
logger = logging.getLogger(__name__)
if not logger.level:
logger.setLevel(logging.DEBUG) # Default logging level for all modules
logger.addHandler(console_handler)
class NDVIStacker(Stacker):
""" Subclass of Stacker
Used to implement specific functionality to create stacks of derived datasets.
"""
def derive_datasets(self, input_dataset_dict, stack_output_info, tile_type_info):
assert type(input_dataset_dict) == dict, 'input_dataset_dict must be a dict'
log_multiline(logger.debug, input_dataset_dict, 'input_dataset_dict', '\t')
output_dataset_dict = {}
nbar_dataset_info = input_dataset_dict['NBAR'] # Only need NBAR data for NDVI
nbar_dataset_path = nbar_dataset_info['tile_pathname']
# Get a boolean mask from the PQA dataset (use default parameters for mask and dilation)
pqa_mask = self.get_pqa_mask(input_dataset_dict['PQA']['tile_pathname'])
nbar_dataset = gdal.Open(nbar_dataset_path)
assert nbar_dataset, 'Unable to open dataset %s' % nbar_dataset
logger.debug('Opened NBAR dataset %s', nbar_dataset_path)
#no_data_value = nbar_dataset_info['nodata_value']
no_data_value = -32767 # Need a value outside the scaled range -10000 - +10000
output_stack_path = os.path.join(self.output_dir, 'NDVI_pqa_masked.vrt')
output_tile_path = os.path.join(self.output_dir, re.sub('\.\w+$',
'_NDVI%s' % (tile_type_info['file_extension']),
os.path.basename(nbar_dataset_path)
)
)
# Copy metadata for eventual inclusion in stack file output
# This could also be written to the output tile if required
output_dataset_info = dict(nbar_dataset_info)
        output_dataset_info['tile_pathname'] = output_tile_path # This is the most important modification - used to locate the output tile when building the stack
output_dataset_info['band_name'] = 'NDVI with PQA mask applied'
output_dataset_info['band_tag'] = 'NDVI-PQA'
output_dataset_info['tile_layer'] = 1
        # Read the NBAR bands into 2D NumPy arrays.
near_ir_band_data = nbar_dataset.GetRasterBand(4).ReadAsArray() # Near Infrared light
visible_band_data = nbar_dataset.GetRasterBand(3).ReadAsArray() # Red Visible Light
logger.debug('near_ir_band_data = %s', near_ir_band_data)
logger.debug('visible_band_data = %s', visible_band_data)
logger.debug('SCALE_FACTOR = %s', SCALE_FACTOR)
# Calculate NDVI for every element in the array using
# ((NIR - VIS) / (NIR + VIS)) * SCALE_FACTOR
# HINT - Use numpy.true_divide(numerator, denominator) to avoid divide by 0 errors
data_array = numpy.true_divide(near_ir_band_data - visible_band_data, (near_ir_band_data + visible_band_data)) * SCALE_FACTOR
self.apply_pqa_mask(data_array, pqa_mask, no_data_value)
# Create our output file
gdal_driver = gdal.GetDriverByName(tile_type_info['file_format'])
output_dataset = gdal_driver.Create(output_tile_path,
nbar_dataset.RasterXSize, nbar_dataset.RasterYSize,
1, nbar_dataset.GetRasterBand(1).DataType,
tile_type_info['format_options'].split(','))
assert output_dataset, 'Unable to open output dataset %s'% output_dataset
output_dataset.SetGeoTransform(nbar_dataset.GetGeoTransform())
output_dataset.SetProjection(nbar_dataset.GetProjection())
output_band = output_dataset.GetRasterBand(1)
output_band.WriteArray(data_array)
output_band.SetNoDataValue(no_data_value)
output_band.FlushCache()
# This is not strictly necessary - copy metadata to output dataset
output_dataset_metadata = nbar_dataset.GetMetadata()
if output_dataset_metadata:
output_dataset.SetMetadata(output_dataset_metadata)
log_multiline(logger.debug, output_dataset_metadata, 'output_dataset_metadata', '\t')
output_dataset.FlushCache()
logger.info('Finished writing %s', output_tile_path)
output_dataset_dict[output_stack_path] = output_dataset_info
# NDVI dataset processed - return info
return output_dataset_dict
if __name__ == '__main__':
def date2datetime(input_date, time_offset=time.min):
if not input_date:
return None
return datetime.combine(input_date, time_offset)
# Stacker class takes care of command line parameters
ndvi_stacker = NDVIStacker()
if ndvi_stacker.debug:
console_handler.setLevel(logging.DEBUG)
# Check for required command line parameters
assert (ndvi_stacker.x_index and ndvi_stacker.y_index), 'You must specify Tile X/Y-index (-x/-y or --x_index/--y_index)'
assert ndvi_stacker.output_dir, 'Output directory not specified (-o or --output)'
# Create derived datasets
stack_info_dict = ndvi_stacker.stack_derived(x_index=ndvi_stacker.x_index,
y_index=ndvi_stacker.y_index,
stack_output_dir=ndvi_stacker.output_dir,
start_datetime=date2datetime(ndvi_stacker.start_date, time.min),
end_datetime=date2datetime(ndvi_stacker.end_date, time.max),
satellite=ndvi_stacker.satellite,
sensor=ndvi_stacker.sensor)
log_multiline(logger.debug, stack_info_dict, 'stack_info_dict', '\t')
logger.info('Finished creating %d temporal stack files in %s.', len(stack_info_dict), ndvi_stacker.output_dir)
# Create statistics on derived datasets
logger.info('Beginning creation of statistics')
for vrt_stack_path in stack_info_dict:
# Find a place to write the stats
stats_dataset_path = vrt_stack_path.replace('.vrt', '_stats_envi')
# Calculate and write the stats
            temporal_stats.main(vrt_stack_path, stats_dataset_path,
                                noData=stack_info_dict[vrt_stack_path][0]['nodata_value'],
                                provenance=True)
logger.info('Finished creating stats file %s', stats_dataset_path)
| bsd-3-clause | -5,477,643,518,214,314,000 | 45.698718 | 133 | 0.607275 | false |
ehazlett/sensu-py | examples/mail.py | 1 | 2439 | #!/usr/bin/env python
import sys
import smtplib
from optparse import OptionParser
from email.mime.text import MIMEText
import json
from datetime import datetime
try:
from sensu import Handler
except ImportError:
print('You must have the sensu Python module i.e.: pip install sensu')
sys.exit(1)
class MailHandler(Handler):
def handle(self):
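        # Mail parameters are read from the handler's settings; a minimal
        # illustrative settings snippet (values are assumptions, not shipped
        # with this module) would look like:
        # {"mail": {"to": "ops@example.com", "from": "sensu@example.com",
        #           "host": "localhost", "port": 25}}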
subj = self.settings.get('mail', {}).get('subject', 'Sensu Alert')
to = self.settings.get('mail', {}).get('to', 'root@localhost')
from_addr = self.settings.get('mail', {}).get('from', 'sensu@localhost')
host = self.settings.get('mail', {}).get('host', 'localhost')
port = self.settings.get('mail', {}).get('port', 25)
user = self.settings.get('mail', {}).get('user', None)
password = self.settings.get('mail', {}).get('password', None)
self.send(subj, to, from_addr, host, port, user, password)
def send(self, subj=None, to_addr=None, from_addr=None, host='localhost',
port=25, user=None, password=None):
# attempt to parse sensu message
try:
data = self.event
client_host = data.get('client', {}).get('name')
check_name = data.get('check', {}).get('name')
check_action = data.get('action')
timestamp = data.get('check', {}).get('issued')
check_date = datetime.fromtimestamp(int(timestamp)).strftime('%Y-%m-%d %H:%M:%S')
parts = (
'Date: {0}'.format(check_date),
'Host: {0}'.format(client_host),
'Address: {0}'.format(data.get('client', {}).get('address')),
'Action: {0}'.format(check_action),
'Name: {0}'.format(check_name),
'Command: {0}'.format(data.get('check', {}).get('command')),
'Output: {0}'.format(data.get('check', {}).get('output')),
)
text = '\n'.join(parts)
subj = '{0} [{1}: {2} ({3})]'.format(subj, client_host, check_name, check_action)
except Exception, e:
text = str(e)
msg = MIMEText(text)
msg['Subject'] = subj
msg['To'] = to_addr
msg['From'] = from_addr
s = smtplib.SMTP(host, int(port))
if user:
s.login(user, password)
s.sendmail(from_addr, [to_addr], msg.as_string())
s.quit()
if __name__=='__main__':
m = MailHandler()
sys.exit(0)
| mit | 4,495,367,844,926,648,300 | 38.983607 | 93 | 0.547765 | false |
stingaci/heat-tutorial | partVI/lib/elements/heat-config-salt/install.d/hook-salt.py | 4 | 3759 | #!/usr/bin/env python
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import json
import logging
import os
import sys
import salt.cli.caller
import salt.config
from salt import exceptions
import yaml
WORKING_DIR = os.environ.get('HEAT_SALT_WORKING',
'/var/lib/heat-config/heat-config-salt')
SALT_MINION_CONFIG = os.environ.get('SALT_MINION_CONFIG',
'/etc/salt/minion')
def prepare_dir(path):
if not os.path.isdir(path):
os.makedirs(path, 0o700)
def main(argv=sys.argv):
log = logging.getLogger('heat-config')
handler = logging.StreamHandler(sys.stderr)
handler.setFormatter(
logging.Formatter(
'[%(asctime)s] (%(name)s) [%(levelname)s] %(message)s'))
log.addHandler(handler)
log.setLevel('DEBUG')
prepare_dir(WORKING_DIR)
os.chdir(WORKING_DIR)
c = json.load(sys.stdin)
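    # Configure a masterless (local) salt-call run: serve state files from
    # WORKING_DIR and apply the state named after the config id via state.sls.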
opts = salt.config.minion_config(SALT_MINION_CONFIG)
opts['file_roots'] = {'base': [WORKING_DIR]}
opts['file_client'] = 'local'
opts['local'] = 'local'
opts['fun'] = 'state.sls'
opts['arg'] = [c['id']]
for input in c['inputs']:
key = input['name']
opts[key] = input.get('value', '')
state_file = '%s.sls' % c['id']
config = c.get('config', '')
if isinstance(config, dict):
yaml_config = yaml.safe_dump(config, default_flow_style=False)
else:
yaml_config = config
fn = os.path.join(WORKING_DIR, state_file)
with os.fdopen(os.open(fn, os.O_CREAT | os.O_WRONLY, 0o700), 'w') as f:
f.write(yaml_config.encode('utf-8'))
caller = salt.cli.caller.Caller.factory(opts)
log.debug('Applying Salt state %s' % state_file)
stdout, stderr = None, None
ret = {}
try:
ret = caller.call()
except exceptions.SaltInvocationError as err:
log.error(
            'Salt invocation error while applying Salt state %s' % state_file)
stderr = err
if ret:
log.info('Results: %s' % ret)
output = yaml.safe_dump(ret['return'])
# returncode of 0 means there were successful changes
if ret['retcode'] == 0:
log.info('Completed applying salt state %s' % state_file)
stdout = output
else:
# Salt doesn't always return sane return codes so we have to check
# individual results
runfailed = False
for state, data in ret['return'].items():
if not data['result']:
runfailed = True
break
if runfailed:
log.error('Error applying Salt state %s. [%s]\n'
% (state_file, ret['retcode']))
stderr = output
else:
ret['retcode'] = 0
stdout = output
response = {}
for output in c.get('outputs', []):
output_name = output['name']
response[output_name] = ret.get(output_name)
response.update({
'deploy_stdout': stdout,
'deploy_stderr': stderr,
'deploy_status_code': ret['retcode'],
})
json.dump(response, sys.stdout)
if __name__ == '__main__':
sys.exit(main(sys.argv))
| apache-2.0 | -4,892,159,450,856,662,000 | 28.367188 | 78 | 0.58606 | false |
veger/ansible | lib/ansible/modules/network/f5/bigip_wait.py | 21 | 11508 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright: (c) 2017, F5 Networks Inc.
# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['stableinterface'],
'supported_by': 'certified'}
DOCUMENTATION = r'''
---
module: bigip_wait
short_description: Wait for a BIG-IP condition before continuing
description:
- You can wait for BIG-IP to be "ready". By "ready", we mean that BIG-IP is ready
to accept configuration.
- This module can take into account situations where the device is in the middle
of rebooting due to a configuration change.
version_added: 2.5
options:
timeout:
description:
- Maximum number of seconds to wait for.
      - When used without other conditions it is equivalent to just sleeping.
      - The default timeout is deliberately set to 2 hours because no individual
        REST API call is expected to take that long.
default: 7200
delay:
description:
- Number of seconds to wait before starting to poll.
default: 0
sleep:
default: 1
description:
      - Number of seconds to sleep between checks; before 2.3 this was hardcoded to 1 second.
msg:
description:
- This overrides the normal error message from a failure to meet the required conditions.
extends_documentation_fragment: f5
author:
- Tim Rupp (@caphrim007)
'''
EXAMPLES = r'''
- name: Wait for BIG-IP to be ready to take configuration
bigip_wait:
provider:
password: secret
server: lb.mydomain.com
user: admin
delegate_to: localhost
- name: Wait a maximum of 300 seconds for BIG-IP to be ready to take configuration
bigip_wait:
timeout: 300
provider:
password: secret
server: lb.mydomain.com
user: admin
delegate_to: localhost
- name: Wait for BIG-IP to be ready, don't start checking for 10 seconds
bigip_wait:
delay: 10
provider:
password: secret
server: lb.mydomain.com
user: admin
delegate_to: localhost
'''
RETURN = r'''
# only common fields returned
'''
import datetime
import signal
import time
from ansible.module_utils.basic import AnsibleModule
try:
from library.module_utils.network.f5.bigip import F5RestClient
from library.module_utils.network.f5.common import F5ModuleError
from library.module_utils.network.f5.common import AnsibleF5Parameters
from library.module_utils.network.f5.common import f5_argument_spec
from library.module_utils.network.f5.common import exit_json
from library.module_utils.network.f5.common import fail_json
except ImportError:
from ansible.module_utils.network.f5.bigip import F5RestClient
from ansible.module_utils.network.f5.common import F5ModuleError
from ansible.module_utils.network.f5.common import AnsibleF5Parameters
from ansible.module_utils.network.f5.common import f5_argument_spec
from ansible.module_utils.network.f5.common import exit_json
from ansible.module_utils.network.f5.common import fail_json
def hard_timeout(module, want, start):
elapsed = datetime.datetime.utcnow() - start
module.fail_json(
msg=want.msg or "Timeout when waiting for BIG-IP", elapsed=elapsed.seconds
)
class Parameters(AnsibleF5Parameters):
returnables = [
'elapsed'
]
def to_return(self):
result = {}
try:
for returnable in self.returnables:
result[returnable] = getattr(self, returnable)
result = self._filter_params(result)
except Exception:
pass
return result
@property
def delay(self):
if self._values['delay'] is None:
return None
return int(self._values['delay'])
@property
def timeout(self):
if self._values['timeout'] is None:
return None
return int(self._values['timeout'])
@property
def sleep(self):
if self._values['sleep'] is None:
return None
return int(self._values['sleep'])
class Changes(Parameters):
pass
class ModuleManager(object):
def __init__(self, *args, **kwargs):
self.module = kwargs.get('module', None)
self.client = kwargs.get('client', None)
self.have = None
self.want = Parameters(params=self.module.params)
self.changes = Parameters()
def exec_module(self):
result = dict()
changed = self.execute()
changes = self.changes.to_return()
result.update(**changes)
result.update(dict(changed=changed))
self._announce_deprecations(result)
return result
def _announce_deprecations(self, result):
warnings = result.pop('__warnings', [])
for warning in warnings:
self.module.deprecate(
msg=warning['msg'],
version=warning['version']
)
def _get_client_connection(self):
return F5RestClient(**self.module.params)
def execute(self):
signal.signal(
signal.SIGALRM,
lambda sig, frame: hard_timeout(self.module, self.want, start)
)
# setup handler before scheduling signal, to eliminate a race
signal.alarm(int(self.want.timeout))
start = datetime.datetime.utcnow()
if self.want.delay:
time.sleep(float(self.want.delay))
end = start + datetime.timedelta(seconds=int(self.want.timeout))
while datetime.datetime.utcnow() < end:
time.sleep(int(self.want.sleep))
try:
# The first test verifies that the REST API is available; this is done
# by repeatedly trying to login to it.
self.client = self._get_client_connection()
if not self.client:
continue
if self._device_is_rebooting():
# Wait for the reboot to happen and then start from the beginning
# of the waiting.
continue
if self._is_mprov_running_on_device():
self._wait_for_module_provisioning()
break
except Exception as ex:
if 'Failed to validate the SSL' in str(ex):
raise F5ModuleError(str(ex))
# The types of exception's we're handling here are "REST API is not
# ready" exceptions.
#
# For example,
#
# Typically caused by device starting up:
#
# icontrol.exceptions.iControlUnexpectedHTTPError: 404 Unexpected Error:
# Not Found for uri: https://localhost:10443/mgmt/tm/sys/
# icontrol.exceptions.iControlUnexpectedHTTPError: 503 Unexpected Error:
# Service Temporarily Unavailable for uri: https://localhost:10443/mgmt/tm/sys/
#
#
# Typically caused by a device being down
#
# requests.exceptions.SSLError: HTTPSConnectionPool(host='localhost', port=10443):
# Max retries exceeded with url: /mgmt/tm/sys/ (Caused by SSLError(
# SSLError("bad handshake: SysCallError(-1, 'Unexpected EOF')",),))
#
#
# Typically caused by device still booting
#
# raise SSLError(e, request=request)\nrequests.exceptions.SSLError:
# HTTPSConnectionPool(host='localhost', port=10443): Max retries
# exceeded with url: /mgmt/shared/authn/login (Caused by
# SSLError(SSLError(\"bad handshake: SysCallError(-1, 'Unexpected EOF')\",),)),
continue
else:
elapsed = datetime.datetime.utcnow() - start
self.module.fail_json(
msg=self.want.msg or "Timeout when waiting for BIG-IP", elapsed=elapsed.seconds
)
elapsed = datetime.datetime.utcnow() - start
self.changes.update({'elapsed': elapsed.seconds})
return False
def _device_is_rebooting(self):
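        # Ask the device for its runlevel via the REST bash endpoint;
        # runlevel 6 indicates the system is rebooting.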
params = {
"command": "run",
"utilCmdArgs": '-c "runlevel"'
}
uri = "https://{0}:{1}/mgmt/tm/util/bash".format(
self.client.provider['server'],
self.client.provider['server_port']
)
resp = self.client.api.post(uri, json=params)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if 'code' in response and response['code'] in [400, 403]:
if 'message' in response:
raise F5ModuleError(response['message'])
else:
raise F5ModuleError(resp.content)
if 'commandResult' in response and '6' in response['commandResult']:
return True
return False
def _wait_for_module_provisioning(self):
# To prevent things from running forever, the hack is to check
# for mprov's status twice. If mprov is finished, then in most
# cases (not ASM) the provisioning is probably ready.
nops = 0
# Sleep a little to let provisioning settle and begin properly
time.sleep(5)
while nops < 4:
try:
if not self._is_mprov_running_on_device():
nops += 1
else:
nops = 0
except Exception as ex:
# This can be caused by restjavad restarting.
pass
time.sleep(10)
def _is_mprov_running_on_device(self):
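        # Look for a running 'mprov' process; if one is found, module
        # provisioning is still in progress.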
params = {
"command": "run",
"utilCmdArgs": '-c "ps aux | grep \'[m]prov\'"'
}
uri = "https://{0}:{1}/mgmt/tm/util/bash".format(
self.client.provider['server'],
self.client.provider['server_port']
)
resp = self.client.api.post(uri, json=params)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if 'code' in response and response['code'] in [400, 403]:
if 'message' in response:
raise F5ModuleError(response['message'])
else:
raise F5ModuleError(resp.content)
if 'commandResult' in response:
return True
return False
class ArgumentSpec(object):
def __init__(self):
self.supports_check_mode = True
argument_spec = dict(
timeout=dict(default=7200, type='int'),
delay=dict(default=0, type='int'),
sleep=dict(default=1, type='int'),
msg=dict()
)
self.argument_spec = {}
self.argument_spec.update(f5_argument_spec)
self.argument_spec.update(argument_spec)
def main():
spec = ArgumentSpec()
module = AnsibleModule(
argument_spec=spec.argument_spec,
supports_check_mode=spec.supports_check_mode
)
client = F5RestClient(**module.params)
try:
mm = ModuleManager(module=module, client=client)
results = mm.exec_module()
exit_json(module, results, client)
except F5ModuleError as ex:
fail_json(module, ex, client)
if __name__ == '__main__':
main()
| gpl-3.0 | -3,129,279,440,215,894,000 | 32.068966 | 101 | 0.592718 | false |
creative-workflow/pi-setup | services/webiopi/src/python/webiopi/__init__.py | 5 | 1082 | # Copyright 2012-2013 Eric Ptak - trouch.com
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from time import sleep
from webiopi.utils.version import BOARD_REVISION, VERSION
from webiopi.utils.logger import setInfo, setDebug, info, debug, warn, error, exception
from webiopi.utils.thread import runLoop
from webiopi.server import Server
from webiopi.devices.instance import deviceInstance
from webiopi.decorators.rest import macro
from webiopi.devices import bus as _bus
try:
import _webiopi.GPIO as GPIO
except:
pass
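# Set the default log level and probe the available buses when the package is imported.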
setInfo()
_bus.checkAllBus()
| mit | 8,976,172,486,218,438,000 | 29.914286 | 87 | 0.760628 | false |