repo_name (stringlengths 5-92) | path (stringlengths 4-232) | copies (stringclasses, 19 values) | size (stringlengths 4-7) | content (stringlengths 721-1.04M) | license (stringclasses, 15 values) | hash (int64, -9,223,277,421,539,062,000 to 9,223,102,107B) | line_mean (float64, 6.51-99.9) | line_max (int64, 15-997) | alpha_frac (float64, 0.25-0.97) | autogenerated (bool, 1 class)
---|---|---|---|---|---|---|---|---|---|---
laichunpongben/machine_learning | linear_regression_csv.py | 1 | 1550 | import csv
import numpy as np
class TestCase:
def __init__(self):
dataset_file = "dataset.csv"
self.col_count = 47
self.training_data_count = 10000
self.training_set = np.loadtxt(open(dataset_file,"rb"),delimiter=",",skiprows=1)
self.test_set = np.loadtxt(open(dataset_file,"rb"),delimiter=",",skiprows=self.training_data_count+1)
self.X = self.training_set[:self.training_data_count,1:self.col_count-1]
self.y = self.training_set[:self.training_data_count,self.col_count-1]
self.X_pinv = np.linalg.pinv(self.X)
self.w = np.dot(self.X_pinv, self.y)
self.header = next(csv.reader(open(dataset_file)))[1:self.col_count-1]
def g(self, x):
return np.dot(x, self.w)
def estimate_training_error(self):
error = 0
for i in range(len(self.y)):
error += (self.y[i] - self.g(self.X[i]))**2
return (error / len(self.y))**0.5
def estimate_test_error(self):
error = 0
X = self.test_set[:,1:self.col_count-1]
y = self.test_set[:,self.col_count-1]
for i in range(len(y)):
error += (y[i] - self.g(X[i]))**2
return (error / len(y))**0.5
test_case = TestCase()
result = [h + ': ' + '%.4f' % w for h,w in zip(test_case.header, test_case.w)]
print('Result hypothesis weights:')
for x in result: print(x)
print('-'*60)
print('Training error: ' + '%.4f' % test_case.estimate_training_error())
print('Test error: ' + '%.4f' % test_case.estimate_test_error())
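# A minimal, self-contained sketch of the same least-squares idea used above:
# fit weights with the Moore-Penrose pseudoinverse, here on a tiny synthetic
# dataset invented purely for illustration (it does not come from dataset.csv).
import numpy as np

def fit_least_squares(X, y):
    # w = pinv(X) . y minimises ||Xw - y||^2, exactly as TestCase does above
    return np.dot(np.linalg.pinv(X), y)

if __name__ == '__main__':
    X_demo = np.array([[1.0, 2.0], [2.0, 1.0], [3.0, 4.0], [4.0, 3.0]])
    y_demo = np.array([5.0, 4.0, 11.0, 10.0])
    w_demo = fit_least_squares(X_demo, y_demo)
    rmse = (np.mean((y_demo - X_demo.dot(w_demo)) ** 2)) ** 0.5
    print('demo weights: %s, demo RMSE: %.4f' % (w_demo, rmse))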
| apache-2.0 | 4,191,865,069,148,466,000 | 35.904762 | 109 | 0.585806 | false |
who-emro/meerkat_api | meerkat_api/resources/epi_week.py | 1 | 1398 | """
Resource to deal with epi-weeks
"""
import datetime
from dateutil.parser import parse
from flask import jsonify
from flask_restful import Resource
from meerkat_api.extensions import api
import meerkat_abacus.util.epi_week as epi_week_util
class EpiWeek(Resource):
"""
    Get epi week of a date (defaults to today)
Args:\n
date: date to get epi-week\n
Returns:\n
epi-week: epi-week\n
"""
def get(self, date=None):
if date:
date = parse(date)
else:
date = datetime.datetime.today()
_epi_year, _epi_week_number = epi_week_util.epi_week_for_date(date)
_epi_year_start_day_weekday = epi_week_util.epi_year_start_date(date).weekday()
return jsonify(epi_week=_epi_week_number,
year=_epi_year,
offset=_epi_year_start_day_weekday)
class EpiWeekStart(Resource):
"""
Return the start date of an epi week in the given year
Args:\n
epi-week: epi week\n
year: year\n
Returns:
start-date: start-date\n
"""
def get(self, year, epi_week):
_epi_week_start_date = epi_week_util.epi_week_start_date(year, epi_week)
return jsonify(start_date=_epi_week_start_date)
api.add_resource(EpiWeek, "/epi_week", "/epi_week/<date>")
api.add_resource(EpiWeekStart, "/epi_week_start/<year>/<epi_week>")
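# Illustrative only (not part of the original module): a rough sketch of how
# these two endpoints could be called over HTTP once the meerkat_api app is
# running. The base URL below is an assumption invented for this example; the
# JSON keys match the jsonify() calls above.
if __name__ == '__main__':
    import requests  # assumed to be installed alongside the API

    base = 'http://localhost:5000'  # hypothetical local deployment
    print(requests.get(base + '/epi_week/2017-03-15').json())
    # expected keys: 'epi_week', 'year', 'offset'
    print(requests.get(base + '/epi_week_start/2017/11').json())
    # expected key: 'start_date'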
| mit | 5,886,391,900,665,190,000 | 25.377358 | 87 | 0.620887 | false |
aftersight/After-Sight-Model-1 | keyPress.py | 1 | 13557 | class _Getch:
"""Gets a single character from standard input. Does not echo to the
screen. From http://code.activestate.com/recipes/134892/"""
def __init__(self):
try:
self.impl = _GetchWindows()
except ImportError:
try:
self.impl = _GetchMacCarbon()
except(AttributeError, ImportError):
self.impl = _GetchUnix()
def __call__(self): return self.impl()
class _GetchUnix:
def __init__(self):
import tty, sys, termios # import termios now or else you'll get the Unix version on the Mac
def __call__(self):
import sys, tty, termios
fd = sys.stdin.fileno()
old_settings = termios.tcgetattr(fd)
try:
tty.setraw(sys.stdin.fileno())
ch = sys.stdin.read(1)
finally:
termios.tcsetattr(fd, termios.TCSADRAIN, old_settings)
return ch
class _GetchWindows:
def __init__(self):
import msvcrt
def __call__(self):
import msvcrt
return msvcrt.getch()
class _GetchMacCarbon:
"""
A function which returns the current ASCII key that is down;
if no ASCII key is down, the null string is returned. The
page http://www.mactech.com/macintosh-c/chap02-1.html was
very helpful in figuring out how to do this.
"""
def __init__(self):
import Carbon
Carbon.Evt #see if it has this (in Unix, it doesn't)
def __call__(self):
import Carbon
if Carbon.Evt.EventAvail(0x0008)[0]==0: # 0x0008 is the keyDownMask
return ''
else:
#
# The event contains the following info:
# (what,msg,when,where,mod)=Carbon.Evt.GetNextEvent(0x0008)[1]
#
# The message (msg) contains the ASCII char which is
# extracted with the 0x000000FF charCodeMask; this
# number is converted to an ASCII character with chr() and
# returned
#
(what,msg,when,where,mod)=Carbon.Evt.GetNextEvent(0x0008)[1]
return chr(msg & 0x000000FF)
import threading
# From http://stackoverflow.com/a/2022629/2924421
class Event(list):
def __call__(self, *args, **kwargs):
for f in self:
f(*args, **kwargs)
def __repr__(self):
return "Event(%s)" % list.__repr__(self)
def getKey():
inkey = _Getch()
import sys
for i in xrange(sys.maxint):
k=inkey()
        if k != '': break
return k
class KeyCallbackFunction():
callbackParam = None
actualFunction = None
def __init__(self, actualFunction, callbackParam):
self.actualFunction = actualFunction
self.callbackParam = callbackParam
def doCallback(self, inputKey):
if not self.actualFunction is None:
if self.callbackParam is None:
callbackFunctionThread = threading.Thread(target=self.actualFunction, args=(inputKey,))
else:
callbackFunctionThread = threading.Thread(target=self.actualFunction, args=(inputKey,self.callbackParam))
callbackFunctionThread.daemon = True
callbackFunctionThread.start()
class KeyCapture():
gotKeyLock = threading.Lock()
gotKeys = []
gotKeyEvent = threading.Event()
keyBlockingSetKeyLock = threading.Lock()
addingEventsLock = threading.Lock()
keyReceiveEvents = Event()
keysGotLock = threading.Lock()
keysGot = []
keyBlockingKeyLockLossy = threading.Lock()
keyBlockingKeyLossy = None
keyBlockingEventLossy = threading.Event()
keysBlockingGotLock = threading.Lock()
keysBlockingGot = []
keyBlockingGotEvent = threading.Event()
wantToStopLock = threading.Lock()
wantToStop = False
stoppedLock = threading.Lock()
stopped = True
isRunningEvent = False
getKeyThread = None
keyFunction = None
keyArgs = None
    # Begin capturing keys. A separate thread is launched that
    # captures key presses, and then these can be received via get,
    # getAsync, and by adding an event via addEvent. Note that this
    # will prevent the system from accepting keys as normal (say, if
    # you are in a python shell) because it overrides that key
    # capturing behavior.
    # If you start capture when it's already been started, an
    # InterruptedError("Keys are still being captured")
    # will be thrown
# Note that get(), getAsync() and events are independent, so if a key is pressed:
#
# 1: Any calls to get() that are waiting, with lossy on, will return
# that key
# 2: It will be stored in the queue of get keys, so that get() with lossy
# off will return the oldest key pressed not returned by get() yet.
# 3: All events will be fired with that key as their input
    # 4: It will be stored in the list of getAsync() keys, where that list
    # will be returned and set to an empty list on the next call to getAsync().
def startCapture(self, keyFunction=None, args=None):
# Make sure we aren't already capturing keys
self.stoppedLock.acquire()
if not self.stopped:
self.stoppedLock.release()
raise InterruptedError("Keys are still being captured")
return
self.stopped = False
self.stoppedLock.release()
# If we have captured before, we need to allow the get() calls to actually
# wait for key presses now by clearing the event
if self.keyBlockingEventLossy.is_set():
self.keyBlockingEventLossy.clear()
# Have one function that we call every time a key is captured, intended for stopping capture
# as desired
self.keyFunction = keyFunction
self.keyArgs = args
# Begin capturing keys (in a seperate thread)
self.getKeyThread = threading.Thread(target=self._threadProcessKeyPresses)
self.getKeyThread.daemon = True
self.getKeyThread.start()
# Process key captures (in a seperate thread)
self.getKeyThread = threading.Thread(target=self._threadStoreKeyPresses)
self.getKeyThread.daemon = True
self.getKeyThread.start()
def capturing(self):
self.stoppedLock.acquire()
isCapturing = not self.stopped
self.stoppedLock.release()
return isCapturing
    # Stops the thread that is capturing keys at the first opportunity
    # it has to do so. It usually can't stop immediately because getting a key
# is a blocking process, so this will probably stop capturing after the
# next key is pressed.
#
    # However, sometimes if you call stopCapture it will stop before starting to capture the
# next key, due to multithreading race conditions. So if you want to stop capturing
# reliably, call stopCapture in a function added via addEvent. Then you are
# guaranteed that capturing will stop immediately after the rest of the callback
# functions are called (before starting to capture the next key).
def stopCapture(self):
self.wantToStopLock.acquire()
self.wantToStop = True
self.wantToStopLock.release()
# Takes in a function that will be called every time a key is pressed (with that
    # key passed in as the first parameter in that function)
def addEvent(self, keyPressEventFunction, args=None):
self.addingEventsLock.acquire()
callbackHolder = KeyCallbackFunction(keyPressEventFunction, args)
self.keyReceiveEvents.append(callbackHolder.doCallback)
self.addingEventsLock.release()
def clearEvents(self):
self.addingEventsLock.acquire()
self.keyReceiveEvents = Event()
self.addingEventsLock.release()
# Gets a key captured by this KeyCapture, blocking until a key is pressed.
    # There is an optional lossy parameter:
# If True all keys before this call are ignored, and the next pressed key
# will be returned.
# If False this will return the oldest key captured that hasn't
# been returned by get yet. False is the default.
def get(self, lossy=False):
if lossy:
# Wait for the next key to be pressed
self.keyBlockingEventLossy.wait()
self.keyBlockingKeyLockLossy.acquire()
keyReceived = self.keyBlockingKeyLossy
self.keyBlockingKeyLockLossy.release()
return keyReceived
else:
while True:
# Wait until a key is pressed
self.keyBlockingGotEvent.wait()
# Get the key pressed
readKey = None
self.keysBlockingGotLock.acquire()
# Get a key if it exists
if len(self.keysBlockingGot) != 0:
readKey = self.keysBlockingGot.pop(0)
# If we got the last one, tell us to wait
if len(self.keysBlockingGot) == 0:
self.keyBlockingGotEvent.clear()
self.keysBlockingGotLock.release()
# Process the key (if it actually exists)
if not readKey is None:
return readKey
# Exit if we are stopping
self.wantToStopLock.acquire()
if self.wantToStop:
self.wantToStopLock.release()
return None
self.wantToStopLock.release()
def clearGetList(self):
self.keysBlockingGotLock.acquire()
self.keysBlockingGot = []
self.keysBlockingGotLock.release()
# Gets a list of all keys pressed since the last call to getAsync, in order
# from first pressed, second pressed, .., most recent pressed
def getAsync(self):
self.keysGotLock.acquire();
keysPressedList = list(self.keysGot)
self.keysGot = []
self.keysGotLock.release()
return keysPressedList
def clearAsyncList(self):
self.keysGotLock.acquire();
self.keysGot = []
self.keysGotLock.release();
def _processKey(self, readKey):
# Append to list for GetKeyAsync
self.keysGotLock.acquire()
self.keysGot.append(readKey)
self.keysGotLock.release()
# Call lossy blocking key events
self.keyBlockingKeyLockLossy.acquire()
self.keyBlockingKeyLossy = readKey
self.keyBlockingEventLossy.set()
self.keyBlockingEventLossy.clear()
self.keyBlockingKeyLockLossy.release()
# Call non-lossy blocking key events
self.keysBlockingGotLock.acquire()
self.keysBlockingGot.append(readKey)
if len(self.keysBlockingGot) == 1:
self.keyBlockingGotEvent.set()
self.keysBlockingGotLock.release()
# Call events added by AddEvent
self.addingEventsLock.acquire()
self.keyReceiveEvents(readKey)
self.addingEventsLock.release()
def _threadProcessKeyPresses(self):
while True:
# Wait until a key is pressed
self.gotKeyEvent.wait()
# Get the key pressed
readKey = None
self.gotKeyLock.acquire()
# Get a key if it exists
if len(self.gotKeys) != 0:
readKey = self.gotKeys.pop(0)
# If we got the last one, tell us to wait
if len(self.gotKeys) == 0:
self.gotKeyEvent.clear()
self.gotKeyLock.release()
# Process the key (if it actually exists)
if not readKey is None:
self._processKey(readKey)
# Exit if we are stopping
self.wantToStopLock.acquire()
if self.wantToStop:
self.wantToStopLock.release()
break
self.wantToStopLock.release()
def _threadStoreKeyPresses(self):
while True:
# Get a key
readKey = getKey()
# Run the potential shut down function
if not self.keyFunction is None:
self.keyFunction(readKey, self.keyArgs)
# Add the key to the list of pressed keys
self.gotKeyLock.acquire()
self.gotKeys.append(readKey)
if len(self.gotKeys) == 1:
self.gotKeyEvent.set()
self.gotKeyLock.release()
# Exit if we are stopping
self.wantToStopLock.acquire()
if self.wantToStop:
self.wantToStopLock.release()
self.gotKeyEvent.set()
break
self.wantToStopLock.release()
# If we have reached here we stopped capturing
# All we need to do to clean up is ensure that
# all the calls to .get() now return None.
# To ensure no calls are stuck never returning,
# we will leave the event set so any tasks waiting
# for it immediately exit. This will be unset upon
# starting key capturing again.
self.stoppedLock.acquire()
# We also need to set this to True so we can start up
# capturing again.
        self.stopped = True
self.keyBlockingKeyLockLossy.acquire()
self.keyBlockingKeyLossy = None
self.keyBlockingEventLossy.set()
self.keyBlockingKeyLockLossy.release()
self.keysBlockingGotLock.acquire()
self.keyBlockingGotEvent.set()
self.keysBlockingGotLock.release()
self.stoppedLock.release()
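# A small usage sketch (added for illustration, not part of the original
# module): capture keys until 'q' is pressed, using only the public API of
# KeyCapture defined above. Written in the same Python 2 style as this file.
if __name__ == '__main__':
    keys = KeyCapture()

    def on_key(key):
        print('got key: %r' % key)
        if key == 'q':
            # stopping from inside an event callback is the reliable way to
            # stop, as the comments on stopCapture() explain
            keys.stopCapture()

    keys.addEvent(on_key)
    keys.startCapture()
    # get() blocks until the next key arrives and returns None once capture stops
    while True:
        pressed = keys.get()
        if pressed is None:
            break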
| cc0-1.0 | -7,249,865,575,136,879,000 | 33.321519 | 121 | 0.619827 | false |
rpedde/python-carbon-buildpkg | lib/carbon/writer.py | 1 | 5802 | """Copyright 2009 Chris Davis
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License."""
import os
import time
from os.path import join, exists, dirname, basename
import whisper
from carbon import state
from carbon.cache import MetricCache
from carbon.storage import getFilesystemPath, loadStorageSchemas, loadAggregationSchemas
from carbon.conf import settings
from carbon import log, events, instrumentation
from twisted.internet import reactor
from twisted.internet.task import LoopingCall
from twisted.application.service import Service
lastCreateInterval = 0
createCount = 0
schemas = loadStorageSchemas()
agg_schemas = loadAggregationSchemas()
CACHE_SIZE_LOW_WATERMARK = settings.MAX_CACHE_SIZE * 0.95
def optimalWriteOrder():
"Generates metrics with the most cached values first and applies a soft rate limit on new metrics"
global lastCreateInterval
global createCount
metrics = MetricCache.counts()
t = time.time()
metrics.sort(key=lambda item: item[1], reverse=True) # by queue size, descending
log.msg("Sorted %d cache queues in %.6f seconds" % (len(metrics), time.time() - t))
if state.cacheTooFull and MetricCache.size < CACHE_SIZE_LOW_WATERMARK:
events.cacheSpaceAvailable()
for metric, queueSize in metrics:
dbFilePath = getFilesystemPath(metric)
dbFileExists = exists(dbFilePath)
if not dbFileExists:
createCount += 1
now = time.time()
if now - lastCreateInterval >= 60:
lastCreateInterval = now
createCount = 1
elif createCount >= settings.MAX_CREATES_PER_MINUTE:
# dropping queued up datapoints for new metrics prevents filling up the entire cache
# when a bunch of new metrics are received.
try:
MetricCache.pop(metric)
except KeyError:
pass
continue
try: # metrics can momentarily disappear from the MetricCache due to the implementation of MetricCache.store()
datapoints = MetricCache.pop(metric)
except KeyError:
log.msg("MetricCache contention, skipping %s update for now" % metric)
continue # we simply move on to the next metric when this race condition occurs
yield (metric, datapoints, dbFilePath, dbFileExists)
def writeCachedDataPoints():
"Write datapoints until the MetricCache is completely empty"
updates = 0
lastSecond = 0
while MetricCache:
dataWritten = False
for (metric, datapoints, dbFilePath, dbFileExists) in optimalWriteOrder():
dataWritten = True
if not dbFileExists:
archiveConfig = None
xFilesFactor, aggregationMethod = None, None
for schema in schemas:
if schema.matches(metric):
log.creates('new metric %s matched schema %s' % (metric, schema.name))
archiveConfig = [archive.getTuple() for archive in schema.archives]
break
for schema in agg_schemas:
if schema.matches(metric):
log.creates('new metric %s matched aggregation schema %s' % (metric, schema.name))
xFilesFactor, aggregationMethod = schema.archives
break
if not archiveConfig:
raise Exception("No storage schema matched the metric '%s', check your storage-schemas.conf file." % metric)
dbDir = dirname(dbFilePath)
os.system("mkdir -p -m 755 '%s'" % dbDir)
log.creates("creating database file %s (archive=%s xff=%s agg=%s)" %
(dbFilePath, archiveConfig, xFilesFactor, aggregationMethod))
whisper.create(dbFilePath, archiveConfig, xFilesFactor, aggregationMethod)
os.chmod(dbFilePath, 0755)
instrumentation.increment('creates')
try:
t1 = time.time()
whisper.update_many(dbFilePath, datapoints)
t2 = time.time()
updateTime = t2 - t1
except:
log.err()
instrumentation.increment('errors')
else:
pointCount = len(datapoints)
instrumentation.increment('committedPoints', pointCount)
instrumentation.append('updateTimes', updateTime)
if settings.LOG_UPDATES:
log.updates("wrote %d datapoints for %s in %.5f seconds" % (pointCount, metric, updateTime))
# Rate limit update operations
thisSecond = int(t2)
if thisSecond != lastSecond:
lastSecond = thisSecond
updates = 0
else:
updates += 1
if updates >= settings.MAX_UPDATES_PER_SECOND:
time.sleep( int(t2 + 1) - t2 )
# Avoid churning CPU when only new metrics are in the cache
if not dataWritten:
time.sleep(0.1)
def writeForever():
while reactor.running:
try:
writeCachedDataPoints()
except:
log.err()
time.sleep(1) # The writer thread only sleeps when the cache is empty or an error occurs
def reloadStorageSchemas():
global schemas
try:
schemas = loadStorageSchemas()
except:
log.msg("Failed to reload storage schemas")
log.err()
class WriterService(Service):
def __init__(self):
self.reload_task = LoopingCall(reloadStorageSchemas)
def startService(self):
self.reload_task.start(60, False)
reactor.callInThread(writeForever)
Service.startService(self)
def stopService(self):
self.reload_task.stop()
Service.stopService(self)
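# Illustrative sketch (not part of carbon): the two whisper calls that
# writeCachedDataPoints() relies on, exercised directly against a throwaway
# file. The archive layout and datapoint values are made up for this example.
if __name__ == '__main__':
    import tempfile
    demo_path = os.path.join(tempfile.mkdtemp(), 'demo.wsp')
    # one archive: 60-second resolution kept for 1440 points (one day)
    whisper.create(demo_path, [(60, 1440)], 0.5, 'average')
    now = int(time.time())
    # update_many takes (timestamp, value) pairs, as the cache drain above does
    whisper.update_many(demo_path, [(now - 120, 1.0), (now - 60, 2.0), (now, 3.0)])
    print(whisper.info(demo_path)['maxRetention'])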
| apache-2.0 | 4,351,749,073,916,351,000 | 30.362162 | 118 | 0.685453 | false |
trustcircleglobal/tcg-gae | tcg_gae/tests/meta.py | 1 | 2487 | import unittest
class MetaTestCase(type):
def __init__(cls, name, bases, classdict):
super(MetaTestCase, cls).__init__(name, bases, classdict)
cls._auto_create_methods(bases)
class ObjectSchema(object):
def __init__(self, *args, **kwargs):
self.fields = args
self.nested_schemas = kwargs.get('nested_schemas', [])
def _is_object(schema):
def wrapper(self, data):
return self.assert_object(data, schema)
return wrapper
def _is_response_object(schema):
def wrapper(self, res):
return self.assert_response_object(res, schema)
return wrapper
def _is_object_list(schema):
def wrapper(self, data, size):
return self.assert_object_list(data, schema, size)
return wrapper
def _is_response_object_list(schema):
def wrapper(self, res, size):
return self.assert_response_object_list(res, schema, size)
return wrapper
class BaseTestCase(unittest.TestCase):
@classmethod
def _find_object_schemas(cls, bases):
# get schema from object schema fields
cls._schemas = {
name: value
for name, value in cls.__dict__.items()
if isinstance(value, ObjectSchema)
}
# get schema from super classes
for base in bases:
cls._schemas.update(getattr(base, '_schemas', {}))
@classmethod
def _auto_create_is_object_methods(cls):
for name, schema in cls._schemas.items():
setattr(
cls, 'is_%s' % name,
_is_object(schema))
setattr(
cls, '_is_%s' % name,
_is_object(schema))
setattr(
cls, 'is_%s_list' % name,
_is_object_list(schema))
setattr(
cls, '_is_%s_list' % name,
_is_object_list(schema))
setattr(
cls, 'is_response_%s' % name,
_is_response_object(schema))
setattr(
cls, '_is_response_%s' % name,
_is_response_object(schema))
setattr(
cls, 'is_response_%s_list' % name,
_is_response_object_list(schema))
setattr(
cls, '_is_response_%s_list' % name,
_is_response_object_list(schema))
@classmethod
def _auto_create_methods(cls, bases):
cls._find_object_schemas(bases)
cls._auto_create_is_object_methods()
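# Hypothetical usage sketch, not taken from the original package: a test case
# that declares an ObjectSchema and relies on MetaTestCase to generate
# is_user(), is_user_list(), etc. The assert_object/assert_object_list helpers
# are assumed to live elsewhere in the real code base, so minimal stand-ins are
# defined here only to keep the sketch self-contained (Python 2 metaclass style).
class _DemoSchemaBase(BaseTestCase):
    __metaclass__ = MetaTestCase

    user = ObjectSchema('id', 'name', 'email')

    def assert_object(self, data, schema):
        for field in schema.fields:
            self.assertIn(field, data)

    def assert_object_list(self, data, schema, size):
        self.assertEqual(len(data), size)
        for item in data:
            self.assert_object(item, schema)


class DemoMetaTestCase(_DemoSchemaBase):
    def test_generated_assertions(self):
        self.is_user({'id': 1, 'name': 'a', 'email': 'a@example.com'})
        self.is_user_list([{'id': 1, 'name': 'a', 'email': 'a@example.com'}], 1)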
| isc | 8,509,413,952,663,443,000 | 27.261364 | 66 | 0.548854 | false |
shellderp/sublime-robot-plugin | lib/robot/utils/argumentparser.py | 1 | 15983 | # Copyright 2008-2012 Nokia Siemens Networks Oyj
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import with_statement
import getopt # optparse was not supported by Jython 2.2
import os
import re
import sys
import glob
import string
import codecs
import textwrap
from robot.errors import DataError, Information, FrameworkError
from robot.version import get_full_version
from .misc import plural_or_not
from .encoding import decode_output, decode_from_system, utf8open
ESCAPES = dict(
space = ' ', apos = "'", quot = '"', lt = '<', gt = '>',
pipe = '|', star = '*', comma = ',', slash = '/', semic = ';',
colon = ':', quest = '?', hash = '#', amp = '&', dollar = '$',
percent = '%', at = '@', exclam = '!', paren1 = '(', paren2 = ')',
square1 = '[', square2 = ']', curly1 = '{', curly2 = '}', bslash = '\\'
)
class ArgumentParser:
_opt_line_re = re.compile('''
^\s{1,4} # 1-4 spaces in the beginning of the line
((-\S\s)*) # all possible short options incl. spaces (group 1)
--(\S{2,}) # required long option (group 3)
(\s\S+)? # optional value (group 4)
(\s\*)? # optional '*' telling option allowed multiple times (group 5)
''', re.VERBOSE)
def __init__(self, usage, name=None, version=None, arg_limits=None,
validator=None, auto_help=True, auto_version=True,
auto_escape=True, auto_pythonpath=True, auto_argumentfile=True):
"""Available options and tool name are read from the usage.
        The tool name is taken from the first row of the usage. It is either the
        whole row or anything before the first ' -- '.
"""
if not usage:
raise FrameworkError('Usage cannot be empty')
self.name = name or usage.splitlines()[0].split(' -- ')[0].strip()
self.version = version or get_full_version()
self._usage = usage
self._arg_limit_validator = ArgLimitValidator(arg_limits)
self._validator = validator
self._auto_help = auto_help
self._auto_version = auto_version
self._auto_escape = auto_escape
self._auto_pythonpath = auto_pythonpath
self._auto_argumentfile = auto_argumentfile
self._short_opts = ''
self._long_opts = []
self._multi_opts = []
self._toggle_opts = []
self._names = []
self._short_to_long = {}
self._expected_args = ()
self._create_options(usage)
def parse_args(self, args_list):
"""Parse given arguments and return options and positional arguments.
Arguments must be given as a list and are typically sys.argv[1:].
        Options are returned as a dictionary where long options are keys. Value
        is a string for those options that can be given only one time (if they
        are given multiple times the last value is used) or None if the option
        is not used at all. Value for options that can be given multiple times
        (denoted with '*' in the usage) is a list which contains all the given
        values and is empty if options are not used. Options not taking
        arguments have value False when they are not set and True otherwise.
Positional arguments are returned as a list in the order they are given.
If 'check_args' is True, this method will automatically check that
correct number of arguments, as parsed from the usage line, are given.
If the last argument in the usage line ends with the character 's',
the maximum number of arguments is infinite.
Possible errors in processing arguments are reported using DataError.
Some options have a special meaning and are handled automatically
if defined in the usage and given from the command line:
--escape option can be used to automatically unescape problematic
characters given in an escaped format.
--argumentfile can be used to automatically read arguments from
a specified file. When --argumentfile is used, the parser always
allows using it multiple times. Adding '*' to denote that is thus
        recommended. A special value 'stdin' can be used to read arguments from
stdin instead of a file.
--pythonpath can be used to add extra path(s) to sys.path.
--help and --version automatically generate help and version messages.
Version is generated based on the tool name and version -- see __init__
        for information on how to set them. Help contains the whole usage given to
__init__. Possible <VERSION> text in the usage is replaced with the
given version. Possible <--ESCAPES--> is replaced with available
escapes so that they are wrapped to multiple lines but take the same
amount of horizontal space as <---ESCAPES--->. Both help and version
are wrapped to Information exception.
"""
args_list = [decode_from_system(a) for a in args_list]
if self._auto_argumentfile:
args_list = self._process_possible_argfile(args_list)
opts, args = self._parse_args(args_list)
opts, args = self._handle_special_options(opts, args)
self._arg_limit_validator(args)
if self._validator:
opts, args = self._validator(opts, args)
return opts, args
def _handle_special_options(self, opts, args):
if self._auto_escape and opts.get('escape'):
opts, args = self._unescape_opts_and_args(opts, args)
if self._auto_help and opts.get('help'):
self._raise_help()
if self._auto_version and opts.get('version'):
self._raise_version()
if self._auto_pythonpath and opts.get('pythonpath'):
sys.path = self._get_pythonpath(opts['pythonpath']) + sys.path
for auto, opt in [(self._auto_help, 'help'),
(self._auto_version, 'version'),
(self._auto_escape, 'escape'),
(self._auto_pythonpath, 'pythonpath'),
(self._auto_argumentfile, 'argumentfile')]:
if auto and opt in opts:
opts.pop(opt)
return opts, args
def _parse_args(self, args):
args = [self._lowercase_long_option(a) for a in args]
try:
opts, args = getopt.getopt(args, self._short_opts, self._long_opts)
except getopt.GetoptError, err:
raise DataError(err.msg)
return self._process_opts(opts), self._glob_args(args)
def _lowercase_long_option(self, opt):
if not opt.startswith('--'):
return opt
if '=' not in opt:
return opt.lower()
opt, value = opt.split('=', 1)
return '%s=%s' % (opt.lower(), value)
def _unescape_opts_and_args(self, opts, args):
try:
escape_strings = opts['escape']
except KeyError:
raise FrameworkError("No 'escape' in options")
escapes = self._get_escapes(escape_strings)
for name, value in opts.items():
if name != 'escape':
opts[name] = self._unescape(value, escapes)
return opts, [self._unescape(arg, escapes) for arg in args]
def _process_possible_argfile(self, args):
argfile_opts = ['--argumentfile']
for sopt, lopt in self._short_to_long.items():
if lopt == 'argumentfile':
argfile_opts.append('-'+sopt)
while True:
try:
index = self._get_argfile_index(args, argfile_opts)
path = args[index+1]
except IndexError:
break
args[index:index+2] = self._get_args_from_file(path)
return args
def _get_argfile_index(self, args, argfile_opts):
for opt in argfile_opts:
if opt in args:
return args.index(opt)
raise IndexError
def _get_args_from_file(self, path):
if path.upper() != 'STDIN':
content = self._read_argfile(path)
else:
content = self._read_argfile_from_stdin()
return self._process_argfile(content)
def _read_argfile(self, path):
try:
with utf8open(path) as f:
content = f.read()
except (IOError, UnicodeError), err:
raise DataError("Opening argument file '%s' failed: %s"
% (path, err))
if content.startswith(codecs.BOM_UTF8.decode('UTF-8')):
content = content[1:]
return content
def _read_argfile_from_stdin(self):
content = sys.__stdin__.read()
if sys.platform != 'cli':
content = decode_output(content)
return content
def _process_argfile(self, content):
args = []
for line in content.splitlines():
line = line.strip()
if line.startswith('-'):
args.extend(line.split(' ', 1))
elif line and not line.startswith('#'):
args.append(line)
return args
def _get_escapes(self, escape_strings):
escapes = {}
for estr in escape_strings:
try:
name, value = estr.split(':', 1)
except ValueError:
raise DataError("Invalid escape string syntax '%s'. "
"Expected: what:with" % estr)
try:
escapes[value] = ESCAPES[name.lower()]
except KeyError:
raise DataError("Invalid escape '%s'. Available: %s"
% (name, self._get_available_escapes()))
return escapes
def _unescape(self, value, escapes):
if value in [None, True, False]:
return value
if isinstance(value, list):
return [self._unescape(item, escapes) for item in value]
for esc_name, esc_value in escapes.items():
if esc_name in value:
value = value.replace(esc_name, esc_value)
return value
def _process_opts(self, opt_tuple):
opts = self._init_opts()
for name, value in opt_tuple:
name = self._get_name(name)
if name in self._multi_opts:
opts[name].append(value)
elif name in self._toggle_opts:
opts[name] = not opts[name]
else:
opts[name] = value
return opts
def _glob_args(self, args):
temp = []
for path in args:
paths = sorted(glob.glob(path))
if paths:
temp.extend(paths)
else:
temp.append(path)
return temp
def _init_opts(self):
opts = {}
for name in self._names:
if name in self._multi_opts:
opts[name] = []
elif name in self._toggle_opts:
opts[name] = False
else:
opts[name] = None
return opts
def _get_name(self, name):
name = name.lstrip('-')
try:
return self._short_to_long[name]
except KeyError:
return name
def _create_options(self, usage):
for line in usage.splitlines():
res = self._opt_line_re.match(line)
if res:
self._create_option(short_opts=[o[1] for o in res.group(1).split()],
long_opt=res.group(3).lower(),
takes_arg=bool(res.group(4)),
is_multi=bool(res.group(5)))
def _create_option(self, short_opts, long_opt, takes_arg, is_multi):
if long_opt in self._names:
self._raise_option_multiple_times_in_usage('--' + long_opt)
self._names.append(long_opt)
for sopt in short_opts:
if self._short_to_long.has_key(sopt):
self._raise_option_multiple_times_in_usage('-' + sopt)
self._short_to_long[sopt] = long_opt
if is_multi:
self._multi_opts.append(long_opt)
if takes_arg:
long_opt += '='
short_opts = [sopt+':' for sopt in short_opts]
else:
self._toggle_opts.append(long_opt)
self._long_opts.append(long_opt)
self._short_opts += (''.join(short_opts))
def _get_pythonpath(self, paths):
if isinstance(paths, basestring):
paths = [paths]
temp = []
for path in self._split_pythonpath(paths):
temp.extend(glob.glob(path))
return [os.path.abspath(path) for path in temp if path]
def _split_pythonpath(self, paths):
# paths may already contain ':' as separator
tokens = ':'.join(paths).split(':')
if os.sep == '/':
return tokens
# Fix paths split like 'c:\temp' -> 'c', '\temp'
ret = []
drive = ''
for item in tokens:
item = item.replace('/', '\\')
if drive and item.startswith('\\'):
ret.append('%s:%s' % (drive, item))
drive = ''
continue
if drive:
ret.append(drive)
drive = ''
if len(item) == 1 and item in string.letters:
drive = item
else:
ret.append(item)
if drive:
ret.append(drive)
return ret
def _get_available_escapes(self):
names = sorted(ESCAPES.keys(), key=str.lower)
return ', '.join('%s (%s)' % (n, ESCAPES[n]) for n in names)
def _raise_help(self):
msg = self._usage
if self.version:
msg = msg.replace('<VERSION>', self.version)
def replace_escapes(res):
escapes = 'Available escapes: ' + self._get_available_escapes()
lines = textwrap.wrap(escapes, width=len(res.group(2)))
indent = ' ' * len(res.group(1))
return '\n'.join(indent + line for line in lines)
msg = re.sub('( *)(<-+ESCAPES-+>)', replace_escapes, msg)
raise Information(msg)
def _raise_version(self):
raise Information('%s %s' % (self.name, self.version))
def _raise_option_multiple_times_in_usage(self, opt):
raise FrameworkError("Option '%s' multiple times in usage" % opt)
class ArgLimitValidator(object):
def __init__(self, arg_limits):
self._min_args, self._max_args = self._parse_arg_limits(arg_limits)
def _parse_arg_limits(self, arg_limits):
if arg_limits is None:
return 0, sys.maxint
if isinstance(arg_limits, int):
return arg_limits, arg_limits
if len(arg_limits) == 1:
return arg_limits[0], sys.maxint
return arg_limits[0], arg_limits[1]
def __call__(self, args):
if not (self._min_args <= len(args) <= self._max_args):
self._raise_invalid_args(self._min_args, self._max_args, len(args))
def _raise_invalid_args(self, min_args, max_args, arg_count):
min_end = plural_or_not(min_args)
if min_args == max_args:
expectation = "%d argument%s" % (min_args, min_end)
elif max_args != sys.maxint:
expectation = "%d to %d arguments" % (min_args, max_args)
else:
expectation = "at least %d argument%s" % (min_args, min_end)
raise DataError("Expected %s, got %d." % (expectation, arg_count))
| apache-2.0 | 2,738,208,641,721,807,400 | 38.27027 | 84 | 0.565789 | false |
googleapis/googleapis-gen | google/devtools/clouderrorreporting/v1beta1/devtools-clouderrorreporting-v1beta1-py/tests/unit/gapic/errorreporting_v1beta1/test_report_errors_service.py | 1 | 48720 | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
import mock
import packaging.version
import grpc
from grpc.experimental import aio
import math
import pytest
from proto.marshal.rules.dates import DurationRule, TimestampRule
from google.api_core import client_options
from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1
from google.api_core import grpc_helpers
from google.api_core import grpc_helpers_async
from google.auth import credentials as ga_credentials
from google.auth.exceptions import MutualTLSChannelError
from google.cloud.errorreporting_v1beta1.services.report_errors_service import ReportErrorsServiceAsyncClient
from google.cloud.errorreporting_v1beta1.services.report_errors_service import ReportErrorsServiceClient
from google.cloud.errorreporting_v1beta1.services.report_errors_service import transports
from google.cloud.errorreporting_v1beta1.services.report_errors_service.transports.base import _GOOGLE_AUTH_VERSION
from google.cloud.errorreporting_v1beta1.types import common
from google.cloud.errorreporting_v1beta1.types import report_errors_service
from google.oauth2 import service_account
from google.protobuf import timestamp_pb2 # type: ignore
import google.auth
# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively
# through google-api-core:
# - Delete the auth "less than" test cases
# - Delete these pytest markers (Make the "greater than or equal to" tests the default).
requires_google_auth_lt_1_25_0 = pytest.mark.skipif(
packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"),
reason="This test requires google-auth < 1.25.0",
)
requires_google_auth_gte_1_25_0 = pytest.mark.skipif(
packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"),
reason="This test requires google-auth >= 1.25.0",
)
def client_cert_source_callback():
return b"cert bytes", b"key bytes"
# If default endpoint is localhost, then default mtls endpoint will be the same.
# This method modifies the default endpoint so the client can produce a different
# mtls endpoint for endpoint testing purposes.
def modify_default_endpoint(client):
return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT
def test__get_default_mtls_endpoint():
api_endpoint = "example.googleapis.com"
api_mtls_endpoint = "example.mtls.googleapis.com"
sandbox_endpoint = "example.sandbox.googleapis.com"
sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com"
non_googleapi = "api.example.com"
assert ReportErrorsServiceClient._get_default_mtls_endpoint(None) is None
assert ReportErrorsServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint
assert ReportErrorsServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint
assert ReportErrorsServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint
assert ReportErrorsServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint
assert ReportErrorsServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi
@pytest.mark.parametrize("client_class", [
ReportErrorsServiceClient,
ReportErrorsServiceAsyncClient,
])
def test_report_errors_service_client_from_service_account_info(client_class):
creds = ga_credentials.AnonymousCredentials()
with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory:
factory.return_value = creds
info = {"valid": True}
client = client_class.from_service_account_info(info)
assert client.transport._credentials == creds
assert isinstance(client, client_class)
assert client.transport._host == 'clouderrorreporting.googleapis.com:443'
@pytest.mark.parametrize("client_class", [
ReportErrorsServiceClient,
ReportErrorsServiceAsyncClient,
])
def test_report_errors_service_client_service_account_always_use_jwt(client_class):
with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt:
creds = service_account.Credentials(None, None, None)
client = client_class(credentials=creds)
use_jwt.assert_not_called()
@pytest.mark.parametrize("transport_class,transport_name", [
(transports.ReportErrorsServiceGrpcTransport, "grpc"),
(transports.ReportErrorsServiceGrpcAsyncIOTransport, "grpc_asyncio"),
])
def test_report_errors_service_client_service_account_always_use_jwt_true(transport_class, transport_name):
with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt:
creds = service_account.Credentials(None, None, None)
transport = transport_class(credentials=creds, always_use_jwt_access=True)
use_jwt.assert_called_once_with(True)
@pytest.mark.parametrize("client_class", [
ReportErrorsServiceClient,
ReportErrorsServiceAsyncClient,
])
def test_report_errors_service_client_from_service_account_file(client_class):
creds = ga_credentials.AnonymousCredentials()
with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory:
factory.return_value = creds
client = client_class.from_service_account_file("dummy/file/path.json")
assert client.transport._credentials == creds
assert isinstance(client, client_class)
client = client_class.from_service_account_json("dummy/file/path.json")
assert client.transport._credentials == creds
assert isinstance(client, client_class)
assert client.transport._host == 'clouderrorreporting.googleapis.com:443'
def test_report_errors_service_client_get_transport_class():
transport = ReportErrorsServiceClient.get_transport_class()
available_transports = [
transports.ReportErrorsServiceGrpcTransport,
]
assert transport in available_transports
transport = ReportErrorsServiceClient.get_transport_class("grpc")
assert transport == transports.ReportErrorsServiceGrpcTransport
@pytest.mark.parametrize("client_class,transport_class,transport_name", [
(ReportErrorsServiceClient, transports.ReportErrorsServiceGrpcTransport, "grpc"),
(ReportErrorsServiceAsyncClient, transports.ReportErrorsServiceGrpcAsyncIOTransport, "grpc_asyncio"),
])
@mock.patch.object(ReportErrorsServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ReportErrorsServiceClient))
@mock.patch.object(ReportErrorsServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ReportErrorsServiceAsyncClient))
def test_report_errors_service_client_client_options(client_class, transport_class, transport_name):
# Check that if channel is provided we won't create a new one.
with mock.patch.object(ReportErrorsServiceClient, 'get_transport_class') as gtc:
transport = transport_class(
credentials=ga_credentials.AnonymousCredentials()
)
client = client_class(transport=transport)
gtc.assert_not_called()
# Check that if channel is provided via str we will create a new one.
with mock.patch.object(ReportErrorsServiceClient, 'get_transport_class') as gtc:
client = client_class(transport=transport_name)
gtc.assert_called()
# Check the case api_endpoint is provided.
options = client_options.ClientOptions(api_endpoint="squid.clam.whelk")
with mock.patch.object(transport_class, '__init__') as patched:
patched.return_value = None
client = client_class(client_options=options)
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host="squid.clam.whelk",
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
# Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
# "never".
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
with mock.patch.object(transport_class, '__init__') as patched:
patched.return_value = None
client = client_class()
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
# Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
# "always".
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
with mock.patch.object(transport_class, '__init__') as patched:
patched.return_value = None
client = client_class()
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=client.DEFAULT_MTLS_ENDPOINT,
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
# Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
# unsupported value.
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
with pytest.raises(MutualTLSChannelError):
client = client_class()
# Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}):
with pytest.raises(ValueError):
client = client_class()
# Check the case quota_project_id is provided
options = client_options.ClientOptions(quota_project_id="octopus")
with mock.patch.object(transport_class, '__init__') as patched:
patched.return_value = None
client = client_class(client_options=options)
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id="octopus",
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [
(ReportErrorsServiceClient, transports.ReportErrorsServiceGrpcTransport, "grpc", "true"),
(ReportErrorsServiceAsyncClient, transports.ReportErrorsServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"),
(ReportErrorsServiceClient, transports.ReportErrorsServiceGrpcTransport, "grpc", "false"),
(ReportErrorsServiceAsyncClient, transports.ReportErrorsServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"),
])
@mock.patch.object(ReportErrorsServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ReportErrorsServiceClient))
@mock.patch.object(ReportErrorsServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ReportErrorsServiceAsyncClient))
@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
def test_report_errors_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env):
# This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default
# mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists.
# Check the case client_cert_source is provided. Whether client cert is used depends on
# GOOGLE_API_USE_CLIENT_CERTIFICATE value.
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}):
options = client_options.ClientOptions(client_cert_source=client_cert_source_callback)
with mock.patch.object(transport_class, '__init__') as patched:
patched.return_value = None
client = client_class(client_options=options)
if use_client_cert_env == "false":
expected_client_cert_source = None
expected_host = client.DEFAULT_ENDPOINT
else:
expected_client_cert_source = client_cert_source_callback
expected_host = client.DEFAULT_MTLS_ENDPOINT
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=expected_host,
scopes=None,
client_cert_source_for_mtls=expected_client_cert_source,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
# Check the case ADC client cert is provided. Whether client cert is used depends on
# GOOGLE_API_USE_CLIENT_CERTIFICATE value.
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}):
with mock.patch.object(transport_class, '__init__') as patched:
with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True):
with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback):
if use_client_cert_env == "false":
expected_host = client.DEFAULT_ENDPOINT
expected_client_cert_source = None
else:
expected_host = client.DEFAULT_MTLS_ENDPOINT
expected_client_cert_source = client_cert_source_callback
patched.return_value = None
client = client_class()
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=expected_host,
scopes=None,
client_cert_source_for_mtls=expected_client_cert_source,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
# Check the case client_cert_source and ADC client cert are not provided.
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}):
with mock.patch.object(transport_class, '__init__') as patched:
with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False):
patched.return_value = None
client = client_class()
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@pytest.mark.parametrize("client_class,transport_class,transport_name", [
(ReportErrorsServiceClient, transports.ReportErrorsServiceGrpcTransport, "grpc"),
(ReportErrorsServiceAsyncClient, transports.ReportErrorsServiceGrpcAsyncIOTransport, "grpc_asyncio"),
])
def test_report_errors_service_client_client_options_scopes(client_class, transport_class, transport_name):
# Check the case scopes are provided.
options = client_options.ClientOptions(
scopes=["1", "2"],
)
with mock.patch.object(transport_class, '__init__') as patched:
patched.return_value = None
client = client_class(client_options=options)
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=["1", "2"],
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@pytest.mark.parametrize("client_class,transport_class,transport_name", [
(ReportErrorsServiceClient, transports.ReportErrorsServiceGrpcTransport, "grpc"),
(ReportErrorsServiceAsyncClient, transports.ReportErrorsServiceGrpcAsyncIOTransport, "grpc_asyncio"),
])
def test_report_errors_service_client_client_options_credentials_file(client_class, transport_class, transport_name):
# Check the case credentials file is provided.
options = client_options.ClientOptions(
credentials_file="credentials.json"
)
with mock.patch.object(transport_class, '__init__') as patched:
patched.return_value = None
client = client_class(client_options=options)
patched.assert_called_once_with(
credentials=None,
credentials_file="credentials.json",
host=client.DEFAULT_ENDPOINT,
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
def test_report_errors_service_client_client_options_from_dict():
with mock.patch('google.cloud.errorreporting_v1beta1.services.report_errors_service.transports.ReportErrorsServiceGrpcTransport.__init__') as grpc_transport:
grpc_transport.return_value = None
client = ReportErrorsServiceClient(
client_options={'api_endpoint': 'squid.clam.whelk'}
)
grpc_transport.assert_called_once_with(
credentials=None,
credentials_file=None,
host="squid.clam.whelk",
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
def test_report_error_event(transport: str = 'grpc', request_type=report_errors_service.ReportErrorEventRequest):
client = ReportErrorsServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.report_error_event),
'__call__') as call:
# Designate an appropriate return value for the call.
call.return_value = report_errors_service.ReportErrorEventResponse(
)
response = client.report_error_event(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == report_errors_service.ReportErrorEventRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, report_errors_service.ReportErrorEventResponse)
def test_report_error_event_from_dict():
test_report_error_event(request_type=dict)
def test_report_error_event_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = ReportErrorsServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport='grpc',
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.report_error_event),
'__call__') as call:
client.report_error_event()
call.assert_called()
_, args, _ = call.mock_calls[0]
assert args[0] == report_errors_service.ReportErrorEventRequest()
@pytest.mark.asyncio
async def test_report_error_event_async(transport: str = 'grpc_asyncio', request_type=report_errors_service.ReportErrorEventRequest):
client = ReportErrorsServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.report_error_event),
'__call__') as call:
# Designate an appropriate return value for the call.
call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(report_errors_service.ReportErrorEventResponse(
))
response = await client.report_error_event(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == report_errors_service.ReportErrorEventRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, report_errors_service.ReportErrorEventResponse)
@pytest.mark.asyncio
async def test_report_error_event_async_from_dict():
await test_report_error_event_async(request_type=dict)
def test_report_error_event_field_headers():
client = ReportErrorsServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = report_errors_service.ReportErrorEventRequest()
request.project_name = 'project_name/value'
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.report_error_event),
'__call__') as call:
call.return_value = report_errors_service.ReportErrorEventResponse()
client.report_error_event(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
'x-goog-request-params',
'project_name=project_name/value',
) in kw['metadata']
@pytest.mark.asyncio
async def test_report_error_event_field_headers_async():
client = ReportErrorsServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = report_errors_service.ReportErrorEventRequest()
request.project_name = 'project_name/value'
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.report_error_event),
'__call__') as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(report_errors_service.ReportErrorEventResponse())
await client.report_error_event(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
'x-goog-request-params',
'project_name=project_name/value',
) in kw['metadata']
def test_report_error_event_flattened():
client = ReportErrorsServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.report_error_event),
'__call__') as call:
# Designate an appropriate return value for the call.
call.return_value = report_errors_service.ReportErrorEventResponse()
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.report_error_event(
project_name='project_name_value',
event=report_errors_service.ReportedErrorEvent(event_time=timestamp_pb2.Timestamp(seconds=751)),
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0].project_name == 'project_name_value'
assert args[0].event == report_errors_service.ReportedErrorEvent(event_time=timestamp_pb2.Timestamp(seconds=751))
def test_report_error_event_flattened_error():
client = ReportErrorsServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.report_error_event(
report_errors_service.ReportErrorEventRequest(),
project_name='project_name_value',
event=report_errors_service.ReportedErrorEvent(event_time=timestamp_pb2.Timestamp(seconds=751)),
)
@pytest.mark.asyncio
async def test_report_error_event_flattened_async():
client = ReportErrorsServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.report_error_event),
'__call__') as call:
# Designate an appropriate return value for the call.
call.return_value = report_errors_service.ReportErrorEventResponse()
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(report_errors_service.ReportErrorEventResponse())
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.report_error_event(
project_name='project_name_value',
event=report_errors_service.ReportedErrorEvent(event_time=timestamp_pb2.Timestamp(seconds=751)),
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0].project_name == 'project_name_value'
assert args[0].event == report_errors_service.ReportedErrorEvent(event_time=timestamp_pb2.Timestamp(seconds=751))
@pytest.mark.asyncio
async def test_report_error_event_flattened_error_async():
client = ReportErrorsServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.report_error_event(
report_errors_service.ReportErrorEventRequest(),
project_name='project_name_value',
event=report_errors_service.ReportedErrorEvent(event_time=timestamp_pb2.Timestamp(seconds=751)),
)
def test_credentials_transport_error():
# It is an error to provide credentials and a transport instance.
transport = transports.ReportErrorsServiceGrpcTransport(
credentials=ga_credentials.AnonymousCredentials(),
)
with pytest.raises(ValueError):
client = ReportErrorsServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# It is an error to provide a credentials file and a transport instance.
transport = transports.ReportErrorsServiceGrpcTransport(
credentials=ga_credentials.AnonymousCredentials(),
)
with pytest.raises(ValueError):
client = ReportErrorsServiceClient(
client_options={"credentials_file": "credentials.json"},
transport=transport,
)
# It is an error to provide scopes and a transport instance.
transport = transports.ReportErrorsServiceGrpcTransport(
credentials=ga_credentials.AnonymousCredentials(),
)
with pytest.raises(ValueError):
client = ReportErrorsServiceClient(
client_options={"scopes": ["1", "2"]},
transport=transport,
)
def test_transport_instance():
# A client may be instantiated with a custom transport instance.
transport = transports.ReportErrorsServiceGrpcTransport(
credentials=ga_credentials.AnonymousCredentials(),
)
client = ReportErrorsServiceClient(transport=transport)
assert client.transport is transport
def test_transport_get_channel():
# A client may be instantiated with a custom transport instance.
transport = transports.ReportErrorsServiceGrpcTransport(
credentials=ga_credentials.AnonymousCredentials(),
)
channel = transport.grpc_channel
assert channel
transport = transports.ReportErrorsServiceGrpcAsyncIOTransport(
credentials=ga_credentials.AnonymousCredentials(),
)
channel = transport.grpc_channel
assert channel
@pytest.mark.parametrize("transport_class", [
transports.ReportErrorsServiceGrpcTransport,
transports.ReportErrorsServiceGrpcAsyncIOTransport,
])
def test_transport_adc(transport_class):
# Test default credentials are used if not provided.
with mock.patch.object(google.auth, 'default') as adc:
adc.return_value = (ga_credentials.AnonymousCredentials(), None)
transport_class()
adc.assert_called_once()
def test_transport_grpc_default():
# A client should use the gRPC transport by default.
client = ReportErrorsServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
assert isinstance(
client.transport,
transports.ReportErrorsServiceGrpcTransport,
)
def test_report_errors_service_base_transport_error():
# Passing both a credentials object and credentials_file should raise an error
with pytest.raises(core_exceptions.DuplicateCredentialArgs):
transport = transports.ReportErrorsServiceTransport(
credentials=ga_credentials.AnonymousCredentials(),
credentials_file="credentials.json"
)
def test_report_errors_service_base_transport():
# Instantiate the base transport.
with mock.patch('google.cloud.errorreporting_v1beta1.services.report_errors_service.transports.ReportErrorsServiceTransport.__init__') as Transport:
Transport.return_value = None
transport = transports.ReportErrorsServiceTransport(
credentials=ga_credentials.AnonymousCredentials(),
)
# Every method on the transport should just blindly
# raise NotImplementedError.
methods = (
'report_error_event',
)
for method in methods:
with pytest.raises(NotImplementedError):
getattr(transport, method)(request=object())
@requires_google_auth_gte_1_25_0
def test_report_errors_service_base_transport_with_credentials_file():
# Instantiate the base transport with a credentials file
with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.errorreporting_v1beta1.services.report_errors_service.transports.ReportErrorsServiceTransport._prep_wrapped_messages') as Transport:
Transport.return_value = None
load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
transport = transports.ReportErrorsServiceTransport(
credentials_file="credentials.json",
quota_project_id="octopus",
)
load_creds.assert_called_once_with("credentials.json",
scopes=None,
default_scopes=(
'https://www.googleapis.com/auth/cloud-platform',
),
quota_project_id="octopus",
)
@requires_google_auth_lt_1_25_0
def test_report_errors_service_base_transport_with_credentials_file_old_google_auth():
# Instantiate the base transport with a credentials file
with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.errorreporting_v1beta1.services.report_errors_service.transports.ReportErrorsServiceTransport._prep_wrapped_messages') as Transport:
Transport.return_value = None
load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
transport = transports.ReportErrorsServiceTransport(
credentials_file="credentials.json",
quota_project_id="octopus",
)
load_creds.assert_called_once_with("credentials.json", scopes=(
'https://www.googleapis.com/auth/cloud-platform',
),
quota_project_id="octopus",
)
def test_report_errors_service_base_transport_with_adc():
# Test the default credentials are used if credentials and credentials_file are None.
with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.errorreporting_v1beta1.services.report_errors_service.transports.ReportErrorsServiceTransport._prep_wrapped_messages') as Transport:
Transport.return_value = None
adc.return_value = (ga_credentials.AnonymousCredentials(), None)
transport = transports.ReportErrorsServiceTransport()
adc.assert_called_once()
@requires_google_auth_gte_1_25_0
def test_report_errors_service_auth_adc():
# If no credentials are provided, we should use ADC credentials.
with mock.patch.object(google.auth, 'default', autospec=True) as adc:
adc.return_value = (ga_credentials.AnonymousCredentials(), None)
ReportErrorsServiceClient()
adc.assert_called_once_with(
scopes=None,
default_scopes=(
'https://www.googleapis.com/auth/cloud-platform',
),
quota_project_id=None,
)
@requires_google_auth_lt_1_25_0
def test_report_errors_service_auth_adc_old_google_auth():
# If no credentials are provided, we should use ADC credentials.
with mock.patch.object(google.auth, 'default', autospec=True) as adc:
adc.return_value = (ga_credentials.AnonymousCredentials(), None)
ReportErrorsServiceClient()
adc.assert_called_once_with(
scopes=( 'https://www.googleapis.com/auth/cloud-platform',),
quota_project_id=None,
)
@pytest.mark.parametrize(
"transport_class",
[
transports.ReportErrorsServiceGrpcTransport,
transports.ReportErrorsServiceGrpcAsyncIOTransport,
],
)
@requires_google_auth_gte_1_25_0
def test_report_errors_service_transport_auth_adc(transport_class):
# If credentials and host are not provided, the transport class should use
# ADC credentials.
with mock.patch.object(google.auth, 'default', autospec=True) as adc:
adc.return_value = (ga_credentials.AnonymousCredentials(), None)
transport_class(quota_project_id="octopus", scopes=["1", "2"])
adc.assert_called_once_with(
scopes=["1", "2"],
default_scopes=( 'https://www.googleapis.com/auth/cloud-platform',),
quota_project_id="octopus",
)
@pytest.mark.parametrize(
"transport_class",
[
transports.ReportErrorsServiceGrpcTransport,
transports.ReportErrorsServiceGrpcAsyncIOTransport,
],
)
@requires_google_auth_lt_1_25_0
def test_report_errors_service_transport_auth_adc_old_google_auth(transport_class):
# If credentials and host are not provided, the transport class should use
# ADC credentials.
with mock.patch.object(google.auth, "default", autospec=True) as adc:
adc.return_value = (ga_credentials.AnonymousCredentials(), None)
transport_class(quota_project_id="octopus")
adc.assert_called_once_with(scopes=(
'https://www.googleapis.com/auth/cloud-platform',
),
quota_project_id="octopus",
)
@pytest.mark.parametrize(
"transport_class,grpc_helpers",
[
(transports.ReportErrorsServiceGrpcTransport, grpc_helpers),
(transports.ReportErrorsServiceGrpcAsyncIOTransport, grpc_helpers_async)
],
)
def test_report_errors_service_transport_create_channel(transport_class, grpc_helpers):
# If credentials and host are not provided, the transport class should use
# ADC credentials.
with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object(
grpc_helpers, "create_channel", autospec=True
) as create_channel:
creds = ga_credentials.AnonymousCredentials()
adc.return_value = (creds, None)
transport_class(
quota_project_id="octopus",
scopes=["1", "2"]
)
create_channel.assert_called_with(
"clouderrorreporting.googleapis.com:443",
credentials=creds,
credentials_file=None,
quota_project_id="octopus",
default_scopes=(
'https://www.googleapis.com/auth/cloud-platform',
),
scopes=["1", "2"],
default_host="clouderrorreporting.googleapis.com",
ssl_credentials=None,
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
],
)
@pytest.mark.parametrize("transport_class", [transports.ReportErrorsServiceGrpcTransport, transports.ReportErrorsServiceGrpcAsyncIOTransport])
def test_report_errors_service_grpc_transport_client_cert_source_for_mtls(
transport_class
):
cred = ga_credentials.AnonymousCredentials()
# Check ssl_channel_credentials is used if provided.
with mock.patch.object(transport_class, "create_channel") as mock_create_channel:
mock_ssl_channel_creds = mock.Mock()
transport_class(
host="squid.clam.whelk",
credentials=cred,
ssl_channel_credentials=mock_ssl_channel_creds
)
mock_create_channel.assert_called_once_with(
"squid.clam.whelk:443",
credentials=cred,
credentials_file=None,
scopes=None,
ssl_credentials=mock_ssl_channel_creds,
quota_project_id=None,
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
],
)
# Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls
# is used.
with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
transport_class(
credentials=cred,
client_cert_source_for_mtls=client_cert_source_callback
)
expected_cert, expected_key = client_cert_source_callback()
mock_ssl_cred.assert_called_once_with(
certificate_chain=expected_cert,
private_key=expected_key
)
def test_report_errors_service_host_no_port():
client = ReportErrorsServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
client_options=client_options.ClientOptions(api_endpoint='clouderrorreporting.googleapis.com'),
)
assert client.transport._host == 'clouderrorreporting.googleapis.com:443'
def test_report_errors_service_host_with_port():
client = ReportErrorsServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
client_options=client_options.ClientOptions(api_endpoint='clouderrorreporting.googleapis.com:8000'),
)
assert client.transport._host == 'clouderrorreporting.googleapis.com:8000'
def test_report_errors_service_grpc_transport_channel():
channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials())
# Check that channel is used if provided.
transport = transports.ReportErrorsServiceGrpcTransport(
host="squid.clam.whelk",
channel=channel,
)
assert transport.grpc_channel == channel
assert transport._host == "squid.clam.whelk:443"
assert transport._ssl_channel_credentials == None
def test_report_errors_service_grpc_asyncio_transport_channel():
channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials())
# Check that channel is used if provided.
transport = transports.ReportErrorsServiceGrpcAsyncIOTransport(
host="squid.clam.whelk",
channel=channel,
)
assert transport.grpc_channel == channel
assert transport._host == "squid.clam.whelk:443"
assert transport._ssl_channel_credentials == None
# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
# removed from grpc/grpc_asyncio transport constructor.
@pytest.mark.parametrize("transport_class", [transports.ReportErrorsServiceGrpcTransport, transports.ReportErrorsServiceGrpcAsyncIOTransport])
def test_report_errors_service_transport_channel_mtls_with_client_cert_source(
transport_class
):
with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred:
with mock.patch.object(transport_class, "create_channel") as grpc_create_channel:
mock_ssl_cred = mock.Mock()
grpc_ssl_channel_cred.return_value = mock_ssl_cred
mock_grpc_channel = mock.Mock()
grpc_create_channel.return_value = mock_grpc_channel
cred = ga_credentials.AnonymousCredentials()
with pytest.warns(DeprecationWarning):
with mock.patch.object(google.auth, 'default') as adc:
adc.return_value = (cred, None)
transport = transport_class(
host="squid.clam.whelk",
api_mtls_endpoint="mtls.squid.clam.whelk",
client_cert_source=client_cert_source_callback,
)
adc.assert_called_once()
grpc_ssl_channel_cred.assert_called_once_with(
certificate_chain=b"cert bytes", private_key=b"key bytes"
)
grpc_create_channel.assert_called_once_with(
"mtls.squid.clam.whelk:443",
credentials=cred,
credentials_file=None,
scopes=None,
ssl_credentials=mock_ssl_cred,
quota_project_id=None,
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
],
)
assert transport.grpc_channel == mock_grpc_channel
assert transport._ssl_channel_credentials == mock_ssl_cred
# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
# removed from grpc/grpc_asyncio transport constructor.
@pytest.mark.parametrize("transport_class", [transports.ReportErrorsServiceGrpcTransport, transports.ReportErrorsServiceGrpcAsyncIOTransport])
def test_report_errors_service_transport_channel_mtls_with_adc(
transport_class
):
mock_ssl_cred = mock.Mock()
with mock.patch.multiple(
"google.auth.transport.grpc.SslCredentials",
__init__=mock.Mock(return_value=None),
ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
):
with mock.patch.object(transport_class, "create_channel") as grpc_create_channel:
mock_grpc_channel = mock.Mock()
grpc_create_channel.return_value = mock_grpc_channel
mock_cred = mock.Mock()
with pytest.warns(DeprecationWarning):
transport = transport_class(
host="squid.clam.whelk",
credentials=mock_cred,
api_mtls_endpoint="mtls.squid.clam.whelk",
client_cert_source=None,
)
grpc_create_channel.assert_called_once_with(
"mtls.squid.clam.whelk:443",
credentials=mock_cred,
credentials_file=None,
scopes=None,
ssl_credentials=mock_ssl_cred,
quota_project_id=None,
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
],
)
assert transport.grpc_channel == mock_grpc_channel
def test_common_billing_account_path():
billing_account = "squid"
expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, )
actual = ReportErrorsServiceClient.common_billing_account_path(billing_account)
assert expected == actual
def test_parse_common_billing_account_path():
expected = {
"billing_account": "clam",
}
path = ReportErrorsServiceClient.common_billing_account_path(**expected)
# Check that the path construction is reversible.
actual = ReportErrorsServiceClient.parse_common_billing_account_path(path)
assert expected == actual
def test_common_folder_path():
folder = "whelk"
expected = "folders/{folder}".format(folder=folder, )
actual = ReportErrorsServiceClient.common_folder_path(folder)
assert expected == actual
def test_parse_common_folder_path():
expected = {
"folder": "octopus",
}
path = ReportErrorsServiceClient.common_folder_path(**expected)
# Check that the path construction is reversible.
actual = ReportErrorsServiceClient.parse_common_folder_path(path)
assert expected == actual
def test_common_organization_path():
organization = "oyster"
expected = "organizations/{organization}".format(organization=organization, )
actual = ReportErrorsServiceClient.common_organization_path(organization)
assert expected == actual
def test_parse_common_organization_path():
expected = {
"organization": "nudibranch",
}
path = ReportErrorsServiceClient.common_organization_path(**expected)
# Check that the path construction is reversible.
actual = ReportErrorsServiceClient.parse_common_organization_path(path)
assert expected == actual
def test_common_project_path():
project = "cuttlefish"
expected = "projects/{project}".format(project=project, )
actual = ReportErrorsServiceClient.common_project_path(project)
assert expected == actual
def test_parse_common_project_path():
expected = {
"project": "mussel",
}
path = ReportErrorsServiceClient.common_project_path(**expected)
# Check that the path construction is reversible.
actual = ReportErrorsServiceClient.parse_common_project_path(path)
assert expected == actual
def test_common_location_path():
project = "winkle"
location = "nautilus"
expected = "projects/{project}/locations/{location}".format(project=project, location=location, )
actual = ReportErrorsServiceClient.common_location_path(project, location)
assert expected == actual
def test_parse_common_location_path():
expected = {
"project": "scallop",
"location": "abalone",
}
path = ReportErrorsServiceClient.common_location_path(**expected)
# Check that the path construction is reversible.
actual = ReportErrorsServiceClient.parse_common_location_path(path)
assert expected == actual
def test_client_withDEFAULT_CLIENT_INFO():
client_info = gapic_v1.client_info.ClientInfo()
with mock.patch.object(transports.ReportErrorsServiceTransport, '_prep_wrapped_messages') as prep:
client = ReportErrorsServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
client_info=client_info,
)
prep.assert_called_once_with(client_info)
with mock.patch.object(transports.ReportErrorsServiceTransport, '_prep_wrapped_messages') as prep:
transport_class = ReportErrorsServiceClient.get_transport_class()
transport = transport_class(
credentials=ga_credentials.AnonymousCredentials(),
client_info=client_info,
)
prep.assert_called_once_with(client_info)
| apache-2.0 | -5,023,281,741,818,850,000 | 41.550218 | 257 | 0.680398 | false |
Yelp/paasta | paasta_tools/contrib/utilization_check.py | 1 | 2057 | #!/usr/bin/env python
"""Reads a list of hosts to stdin and produces
a utilization report for those hosts.
"""
import functools
import json
import sys
from typing import Sequence
from a_sync import block
from paasta_tools.mesos.exceptions import MasterNotAvailableException
from paasta_tools.mesos_tools import get_mesos_master
from paasta_tools.metrics.metastatus_lib import (
calculate_resource_utilization_for_slaves,
)
from paasta_tools.metrics.metastatus_lib import filter_tasks_for_slaves
from paasta_tools.metrics.metastatus_lib import get_all_tasks_from_state
from paasta_tools.metrics.metastatus_lib import (
resource_utillizations_from_resource_info,
)
from paasta_tools.utils import PaastaColors
def main(hostnames: Sequence[str]) -> None:
master = get_mesos_master()
try:
mesos_state = block(master.state)
except MasterNotAvailableException as e:
print(PaastaColors.red("CRITICAL: %s" % e.message))
sys.exit(2)
slaves = [
slave
for slave in mesos_state.get("slaves", [])
if slave["hostname"] in hostnames
]
tasks = get_all_tasks_from_state(mesos_state, include_orphans=True)
filtered_tasks = filter_tasks_for_slaves(slaves, tasks)
resource_info_dict = calculate_resource_utilization_for_slaves(
slaves, filtered_tasks
)
resource_utilizations = resource_utillizations_from_resource_info(
total=resource_info_dict["total"], free=resource_info_dict["free"]
)
output = {}
for metric in resource_utilizations:
utilization = metric.total - metric.free
if int(metric.total) == 0:
utilization_perc = 100
else:
utilization_perc = utilization / float(metric.total) * 100
output[metric.metric] = {
"total": metric.total,
"used": utilization,
"perc": utilization_perc,
}
print(json.dumps(output))
if __name__ == "__main__":
hostnames = functools.reduce(lambda x, y: x + [y.strip()], sys.stdin, [])
main(hostnames)
| apache-2.0 | 2,211,955,582,909,433,000 | 32.177419 | 77 | 0.679144 | false |
indictranstech/osmosis-erpnext | erpnext/manufacturing/doctype/production_planning_tool/production_planning_tool.py | 1 | 17555 | # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
from frappe.utils import cstr, flt, cint, nowdate, add_days, comma_and
from frappe import msgprint, _
from frappe.model.document import Document
from erpnext.manufacturing.doctype.bom.bom import validate_bom_no
from erpnext.manufacturing.doctype.production_order.production_order import get_item_details
class ProductionPlanningTool(Document):
def __init__(self, arg1, arg2=None):
super(ProductionPlanningTool, self).__init__(arg1, arg2)
self.item_dict = {}
def clear_table(self, table_name):
self.set(table_name, [])
def validate_company(self):
if not self.company:
frappe.throw(_("Please enter Company"))
def get_open_sales_orders(self):
""" Pull sales orders which are pending to deliver based on criteria selected"""
so_filter = item_filter = ""
if self.from_date:
so_filter += " and so.transaction_date >= %(from_date)s"
if self.to_date:
so_filter += " and so.transaction_date <= %(to_date)s"
if self.customer:
so_filter += " and so.customer = %(customer)s"
if self.fg_item:
item_filter += " and item.name = %(item)s"
open_so = frappe.db.sql("""
select distinct so.name, so.transaction_date, so.customer, so.base_grand_total
from `tabSales Order` so, `tabSales Order Item` so_item
where so_item.parent = so.name
and so.docstatus = 1 and so.status != "Stopped"
and so.company = %(company)s
and so_item.qty > so_item.delivered_qty {0}
and (exists (select name from `tabItem` item where item.name=so_item.item_code
and ((item.is_pro_applicable = 1 or item.is_sub_contracted_item = 1) {1}))
or exists (select name from `tabPacked Item` pi
where pi.parent = so.name and pi.parent_item = so_item.item_code
and exists (select name from `tabItem` item where item.name=pi.item_code
and (item.is_pro_applicable = 1 or item.is_sub_contracted_item = 1) {2})))
""".format(so_filter, item_filter, item_filter), {
"from_date": self.from_date,
"to_date": self.to_date,
"customer": self.customer,
"item": self.fg_item,
"company": self.company
}, as_dict=1)
self.add_so_in_table(open_so)
def add_so_in_table(self, open_so):
""" Add sales orders in the table"""
self.clear_table("sales_orders")
so_list = []
for r in open_so:
if cstr(r['name']) not in so_list:
pp_so = self.append('sales_orders', {})
pp_so.sales_order = r['name']
pp_so.sales_order_date = cstr(r['transaction_date'])
pp_so.customer = cstr(r['customer'])
pp_so.grand_total = flt(r['base_grand_total'])
def get_pending_material_requests(self):
""" Pull Material Requests that are pending based on criteria selected"""
mr_filter = item_filter = ""
if self.from_date:
mr_filter += " and mr.transaction_date >= %(from_date)s"
if self.to_date:
mr_filter += " and mr.transaction_date <= %(to_date)s"
if self.warehouse:
mr_filter += " and mr_item.warehouse = %(warehouse)s"
if self.fg_item:
item_filter += " and item.name = %(item)s"
pending_mr = frappe.db.sql("""
select distinct mr.name, mr.transaction_date
from `tabMaterial Request` mr, `tabMaterial Request Item` mr_item
where mr_item.parent = mr.name
and mr.material_request_type = "Manufacture"
and mr.docstatus = 1
and mr_item.qty > mr_item.ordered_qty {0}
and (exists (select name from `tabItem` item where item.name=mr_item.item_code
and (item.is_pro_applicable = 1 or item.is_sub_contracted_item = 1 {1})))
""".format(mr_filter, item_filter), {
"from_date": self.from_date,
"to_date": self.to_date,
"warehouse": self.warehouse,
"item": self.fg_item
}, as_dict=1)
self.add_mr_in_table(pending_mr)
def add_mr_in_table(self, pending_mr):
""" Add Material Requests in the table"""
self.clear_table("material_requests")
mr_list = []
for r in pending_mr:
if cstr(r['name']) not in mr_list:
mr = self.append('material_requests', {})
mr.material_request = r['name']
mr.material_request_date = cstr(r['transaction_date'])
def get_items(self):
if self.get_items_from == "Sales Order":
self.get_so_items()
elif self.get_items_from == "Material Request":
self.get_mr_items()
def get_so_items(self):
so_list = [d.sales_order for d in self.get('sales_orders') if d.sales_order]
if not so_list:
msgprint(_("Please enter Sales Orders in the above table"))
return []
item_condition = ""
if self.fg_item:
item_condition = ' and so_item.item_code = "{0}"'.format(frappe.db.escape(self.fg_item))
items = frappe.db.sql("""select distinct parent, item_code, warehouse,
(qty - delivered_qty) as pending_qty
from `tabSales Order Item` so_item
where parent in (%s) and docstatus = 1 and qty > delivered_qty
and exists (select * from `tabItem` item where item.name=so_item.item_code
and item.is_pro_applicable = 1) %s""" % \
(", ".join(["%s"] * len(so_list)), item_condition), tuple(so_list), as_dict=1)
if self.fg_item:
item_condition = ' and pi.item_code = "{0}"'.format(frappe.db.escape(self.fg_item))
packed_items = frappe.db.sql("""select distinct pi.parent, pi.item_code, pi.warehouse as warehouse,
(((so_item.qty - so_item.delivered_qty) * pi.qty) / so_item.qty)
as pending_qty
from `tabSales Order Item` so_item, `tabPacked Item` pi
where so_item.parent = pi.parent and so_item.docstatus = 1
and pi.parent_item = so_item.item_code
and so_item.parent in (%s) and so_item.qty > so_item.delivered_qty
and exists (select * from `tabItem` item where item.name=pi.item_code
and item.is_pro_applicable = 1) %s""" % \
(", ".join(["%s"] * len(so_list)), item_condition), tuple(so_list), as_dict=1)
self.add_items(items + packed_items)
def get_mr_items(self):
mr_list = [d.material_request for d in self.get('material_requests') if d.material_request]
if not mr_list:
msgprint(_("Please enter Material Requests in the above table"))
return []
item_condition = ""
if self.fg_item:
item_condition = ' and mr_item.item_code = "' + frappe.db.escape(self.fg_item, percent=False) + '"'
items = frappe.db.sql("""select distinct parent, name, item_code, warehouse,
(qty - ordered_qty) as pending_qty
from `tabMaterial Request Item` mr_item
where parent in (%s) and docstatus = 1 and qty > ordered_qty
and exists (select * from `tabItem` item where item.name=mr_item.item_code
and item.is_pro_applicable = 1) %s""" % \
(", ".join(["%s"] * len(mr_list)), item_condition), tuple(mr_list), as_dict=1)
self.add_items(items)
def add_items(self, items):
self.clear_table("items")
for p in items:
item_details = get_item_details(p['item_code'])
pi = self.append('items', {})
pi.warehouse = p['warehouse']
pi.item_code = p['item_code']
pi.description = item_details and item_details.description or ''
pi.stock_uom = item_details and item_details.stock_uom or ''
pi.bom_no = item_details and item_details.bom_no or ''
pi.planned_qty = flt(p['pending_qty'])
pi.pending_qty = flt(p['pending_qty'])
if self.get_items_from == "Sales Order":
pi.sales_order = p['parent']
elif self.get_items_from == "Material Request":
pi.material_request = p['parent']
pi.material_request_item = p['name']
def validate_data(self):
self.validate_company()
for d in self.get('items'):
if not d.bom_no:
frappe.throw(_("Please select BOM for Item in Row {0}".format(d.idx)))
else:
validate_bom_no(d.item_code, d.bom_no)
if not flt(d.planned_qty):
frappe.throw(_("Please enter Planned Qty for Item {0} at row {1}").format(d.item_code, d.idx))
def raise_production_orders(self):
"""It will raise production order (Draft) for all distinct FG items"""
self.validate_data()
from erpnext.utilities.transaction_base import validate_uom_is_integer
validate_uom_is_integer(self, "stock_uom", "planned_qty")
items = self.get_production_items()
pro_list = []
frappe.flags.mute_messages = True
for key in items:
production_order = self.create_production_order(items[key])
if production_order:
pro_list.append(production_order)
frappe.flags.mute_messages = False
if pro_list:
pro_list = ["""<a href="#Form/Production Order/%s" target="_blank">%s</a>""" % \
(p, p) for p in pro_list]
msgprint(_("{0} created").format(comma_and(pro_list)))
else:
msgprint(_("No Production Orders created"))
def get_production_items(self):
item_dict = {}
for d in self.get("items"):
item_details= {
"production_item" : d.item_code,
"sales_order" : d.sales_order,
"material_request" : d.material_request,
"material_request_item" : d.material_request_item,
"bom_no" : d.bom_no,
"description" : d.description,
"stock_uom" : d.stock_uom,
"company" : self.company,
"wip_warehouse" : "",
"fg_warehouse" : d.warehouse,
"status" : "Draft",
}
""" Club similar BOM and item for processing in case of Sales Orders """
if self.get_items_from == "Material Request":
item_details.update({
"qty": d.planned_qty
})
item_dict[(d.item_code, d.material_request_item, d.warehouse)] = item_details
else:
item_details.update({
"qty":flt(item_dict.get((d.item_code, d.sales_order, d.warehouse),{})
.get("qty")) + flt(d.planned_qty)
})
item_dict[(d.item_code, d.sales_order, d.warehouse)] = item_details
return item_dict
def create_production_order(self, item_dict):
"""Create production order. Called from Production Planning Tool"""
from erpnext.manufacturing.doctype.production_order.production_order import OverProductionError, get_default_warehouse
warehouse = get_default_warehouse()
pro = frappe.new_doc("Production Order")
pro.update(item_dict)
pro.set_production_order_operations()
if warehouse:
pro.wip_warehouse = warehouse.get('wip_warehouse')
if not pro.fg_warehouse:
pro.fg_warehouse = warehouse.get('fg_warehouse')
try:
pro.insert()
return pro.name
except OverProductionError:
pass
def get_so_wise_planned_qty(self):
"""
bom_dict {
bom_no: ['sales_order', 'qty']
}
"""
bom_dict = {}
for d in self.get("items"):
if self.get_items_from == "Material Request":
bom_dict.setdefault(d.bom_no, []).append([d.material_request_item, flt(d.planned_qty)])
else:
bom_dict.setdefault(d.bom_no, []).append([d.sales_order, flt(d.planned_qty)])
return bom_dict
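# Illustrative shape of the returned mapping (document names are hypothetical):
#   {"BOM-00001": [["SO-00042", 10.0], ["SO-00043", 5.0]]}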
def download_raw_materials(self):
""" Create csv data for required raw material to produce finished goods"""
self.validate_data()
bom_dict = self.get_so_wise_planned_qty()
self.get_raw_materials(bom_dict)
return self.get_csv()
def get_raw_materials(self, bom_dict):
""" Get raw materials considering sub-assembly items
{
"item_code": [qty_required, description, stock_uom, min_order_qty]
}
"""
item_list = []
for bom, so_wise_qty in bom_dict.items():
bom_wise_item_details = {}
if self.use_multi_level_bom:
# get all raw materials including children of sub-assembly items
# Did not use qty_consumed_per_unit in the query, as it leads to rounding loss
for d in frappe.db.sql("""select fb.item_code,
ifnull(sum(fb.qty/ifnull(bom.quantity, 1)), 0) as qty,
fb.description, fb.stock_uom, it.min_order_qty
from `tabBOM Explosion Item` fb, `tabBOM` bom, `tabItem` it
where bom.name = fb.parent and it.name = fb.item_code
and (is_pro_applicable = 0 or ifnull(default_bom, "")="")
and (is_sub_contracted_item = 0 or ifnull(default_bom, "")="")
and is_stock_item = 1
and fb.docstatus<2 and bom.name=%s
group by item_code, stock_uom""", bom, as_dict=1):
bom_wise_item_details.setdefault(d.item_code, d)
else:
# Get all raw materials considering SA items as raw materials,
# so no children of SA items
for d in frappe.db.sql("""select bom_item.item_code,
ifnull(sum(bom_item.qty/ifnull(bom.quantity, 1)), 0) as qty,
bom_item.description, bom_item.stock_uom, item.min_order_qty
from `tabBOM Item` bom_item, `tabBOM` bom, tabItem item
where bom.name = bom_item.parent and bom.name = %s and bom_item.docstatus < 2
and bom_item.item_code = item.name
and item.is_stock_item = 1
group by item_code""", bom, as_dict=1):
bom_wise_item_details.setdefault(d.item_code, d)
for item, item_details in bom_wise_item_details.items():
for so_qty in so_wise_qty:
item_list.append([item, flt(item_details.qty) * so_qty[1], item_details.description,
item_details.stock_uom, item_details.min_order_qty, so_qty[0]])
self.make_items_dict(item_list)
def make_items_dict(self, item_list):
for i in item_list:
self.item_dict.setdefault(i[0], []).append([flt(i[1]), i[2], i[3], i[4], i[5]])
def get_csv(self):
item_list = [['Item Code', 'Description', 'Stock UOM', 'Required Qty', 'Warehouse',
'Quantity Requested for Purchase', 'Ordered Qty', 'Actual Qty']]
for item in self.item_dict:
total_qty = sum([flt(d[0]) for d in self.item_dict[item]])
item_list.append([item, self.item_dict[item][0][1], self.item_dict[item][0][2], total_qty])
item_qty = frappe.db.sql("""select warehouse, indented_qty, ordered_qty, actual_qty
from `tabBin` where item_code = %s""", item, as_dict=1)
i_qty, o_qty, a_qty = 0, 0, 0
for w in item_qty:
i_qty, o_qty, a_qty = i_qty + flt(w.indented_qty), o_qty + flt(w.ordered_qty), a_qty + flt(w.actual_qty)
item_list.append(['', '', '', '', w.warehouse, flt(w.indented_qty),
flt(w.ordered_qty), flt(w.actual_qty)])
if item_qty:
item_list.append(['', '', '', '', 'Total', i_qty, o_qty, a_qty])
return item_list
def raise_material_requests(self):
"""
Raise Material Request if projected qty is less than qty required
Requested qty should be shortage qty considering minimum order qty
"""
self.validate_data()
if not self.purchase_request_for_warehouse:
frappe.throw(_("Please enter Warehouse for which Material Request will be raised"))
bom_dict = self.get_so_wise_planned_qty()
self.get_raw_materials(bom_dict)
if self.item_dict:
self.create_material_request()
def get_requested_items(self):
item_projected_qty = self.get_projected_qty()
items_to_be_requested = frappe._dict()
for item, so_item_qty in self.item_dict.items():
requested_qty = 0
total_qty = sum([flt(d[0]) for d in so_item_qty])
if total_qty > item_projected_qty.get(item, 0):
# shortage
requested_qty = total_qty - flt(item_projected_qty.get(item))
# consider minimum order qty
if requested_qty < flt(so_item_qty[0][3]):
requested_qty = flt(so_item_qty[0][3])
# distribute requested qty SO wise
for item_details in so_item_qty:
if requested_qty:
sales_order = item_details[4] or "No Sales Order"
if self.get_items_from == "Material Request":
sales_order = "No Sales Order"
if requested_qty <= item_details[0]:
adjusted_qty = requested_qty
else:
adjusted_qty = item_details[0]
items_to_be_requested.setdefault(item, {}).setdefault(sales_order, 0)
items_to_be_requested[item][sales_order] += adjusted_qty
requested_qty -= adjusted_qty
else:
break
# requested qty >= total so qty, due to minimum order qty
if requested_qty:
items_to_be_requested.setdefault(item, {}).setdefault("No Sales Order", 0)
items_to_be_requested[item]["No Sales Order"] += requested_qty
return items_to_be_requested
def get_projected_qty(self):
items = self.item_dict.keys()
item_projected_qty = frappe.db.sql("""select item_code, sum(projected_qty)
from `tabBin` where item_code in (%s) and warehouse=%s group by item_code""" %
(", ".join(["%s"]*len(items)), '%s'), tuple(items + [self.purchase_request_for_warehouse]))
return dict(item_projected_qty)
def create_material_request(self):
items_to_be_requested = self.get_requested_items()
material_request_list = []
if items_to_be_requested:
for item in items_to_be_requested:
item_wrapper = frappe.get_doc("Item", item)
material_request = frappe.new_doc("Material Request")
material_request.update({
"transaction_date": nowdate(),
"status": "Draft",
"company": self.company,
"requested_by": frappe.session.user,
"material_request_type": "Purchase"
})
for sales_order, requested_qty in items_to_be_requested[item].items():
material_request.append("items", {
"doctype": "Material Request Item",
"__islocal": 1,
"item_code": item,
"item_name": item_wrapper.item_name,
"description": item_wrapper.description,
"uom": item_wrapper.stock_uom,
"item_group": item_wrapper.item_group,
"brand": item_wrapper.brand,
"qty": requested_qty,
"schedule_date": add_days(nowdate(), cint(item_wrapper.lead_time_days)),
"warehouse": self.purchase_request_for_warehouse,
"sales_order": sales_order if sales_order!="No Sales Order" else None
})
material_request.flags.ignore_permissions = 1
material_request.submit()
material_request_list.append(material_request.name)
if material_request_list:
message = ["""<a href="#Form/Material Request/%s" target="_blank">%s</a>""" % \
(p, p) for p in material_request_list]
msgprint(_("Material Requests {0} created").format(comma_and(message)))
else:
msgprint(_("Nothing to request"))
| agpl-3.0 | -528,266,665,888,626,050 | 36.114165 | 120 | 0.659641 | false |
simotek/tanko-bot | src/test-client.py | 1 | 1418 |
# test client - Simon Lees [email protected]
# Copyright (C) 2015 Simon Lees
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
from PyLibs.uiclient import UiClient, UiClientCallbacks
import time
if __name__ == '__main__':
clientCallbacks = UiClientCallbacks()
uiClient = UiClient(clientCallbacks)
count = 0
# Main app event loop
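# Exercise pattern implied by the 10 ms sleep and the count thresholds below:
# each ~20 s cycle drives stop -> forward -> reverse -> opposite-track turn
# (speed pairs (0,0), (60,60), (-60,-60), (60,-60)).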
while True:
uiClient.processMessages()
time.sleep(0.01)
count = count+1
if count > 2000:
count = 0
uiClient.sendDriveMotorSpeed(0,0)
elif count == 500:
uiClient.sendDriveMotorSpeed(60,60)
elif count == 1000:
uiClient.sendDriveMotorSpeed(-60,-60)
elif count == 1500:
uiClient.sendDriveMotorSpeed(60,-60)
| lgpl-2.1 | -5,472,748,171,853,558,000 | 28.541667 | 80 | 0.708745 | false |
adamwulf/verlet-nn | neuralnetTests/keras-linear.py | 1 | 1816 | # begin
from __future__ import print_function
import numpy as np
np.random.seed(1337) # for reproducibility
from tensorflow import set_random_seed
set_random_seed(1337)
from sys import exit
from keras.preprocessing import sequence
from keras.models import Sequential
from keras.layers import Input, Reshape, Dense, Activation, Embedding
from keras.layers import LSTM
from keras.optimizers import SGD
from keras.datasets import imdb
from keras.models import model_from_json
print('Loading data...')
X_train = np.array([[ 1, 0, 1 ]])
y_train = np.array([[ 1 ]])
X_test = X_train
y_test = y_train
print(len(X_train), 'train sequences')
print(len(X_test), 'test sequences')
print('X_train shape:', X_train.shape)
print('X_test shape:', X_test.shape)
print('Build model...')
model = Sequential()
model.add(Dense(3, input_shape=(3,), activation='linear', bias=False))
model.add(Dense(1, activation='linear', bias=False))
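# The two stacked layers compute y = W2.dot(W1.dot(x)) with no biases or
# nonlinearities, i.e. a purely linear map from R^3 to R, fitted by SGD on
# the single training sample defined above.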
print('Compile...')
sgd = SGD(lr=0.1, momentum=0.0, decay=0.0, nesterov=False)
model.compile(loss='mean_squared_error', optimizer=sgd)
print('Evaluate 1...')
score = model.evaluate(X_test, y_test, batch_size=1, verbose=2)
print(model.metrics_names[0], ':', score)
prediction = model.predict(X_test, batch_size=1, verbose=2)
print('prediction:', prediction)
print('Pre-Train Weights...')
for layer in model.layers:
weights = layer.get_weights()
print(weights)
print('Train...')
model.fit(X_train, y_train, nb_epoch=1, batch_size=1, verbose=2)
print('Post-Train Weights...')
for layer in model.layers:
weights = layer.get_weights()
print(weights)
print('Evaluate 2...')
score = model.evaluate(X_test, y_test, batch_size=1, verbose=2)
print(model.metrics_names[0], ':', score)
prediction = model.predict(X_test, batch_size=1, verbose=2)
print('prediction:', prediction)
| mit | -474,862,961,147,950,500 | 26.104478 | 70 | 0.715859 | false |
google-coral/pycoral | tests/imprinting_engine_test.py | 1 | 6413 | # Lint as: python3
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import collections
from PIL import Image
from pycoral.adapters import classify
from pycoral.adapters import common
from pycoral.learn.imprinting.engine import ImprintingEngine
from pycoral.utils.edgetpu import make_interpreter
from tests import test_utils
import unittest
_MODEL_LIST = [
'mobilenet_v1_1.0_224_l2norm_quant.tflite',
'mobilenet_v1_1.0_224_l2norm_quant_edgetpu.tflite'
]
TrainPoint = collections.namedtuple('TrainPoint', ['images', 'class_id'])
TestPoint = collections.namedtuple('TestPoint', ['image', 'class_id', 'score'])
def set_input(interpreter, image):
size = common.input_size(interpreter)
common.set_input(interpreter, image.resize(size, Image.NEAREST))
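# Helper: resize each PIL image to the interpreter's expected input size
# (nearest-neighbour) before copying it into the input tensor.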
class TestImprintingEnginePythonAPI(unittest.TestCase):
def _train_and_test(self, model_path, train_points, test_points,
keep_classes):
# Train.
engine = ImprintingEngine(model_path, keep_classes)
extractor = make_interpreter(
engine.serialize_extractor_model(), device=':0')
extractor.allocate_tensors()
for point in train_points:
for image in point.images:
with test_utils.test_image('imprinting', image) as img:
set_input(extractor, img)
extractor.invoke()
embedding = classify.get_scores(extractor)
self.assertEqual(len(embedding), engine.embedding_dim)
engine.train(embedding, point.class_id)
# Test.
trained_model = engine.serialize_model()
classifier = make_interpreter(trained_model, device=':0')
classifier.allocate_tensors()
self.assertEqual(len(classifier.get_output_details()), 1)
if not keep_classes:
self.assertEqual(len(train_points), classify.num_classes(classifier))
for point in test_points:
with test_utils.test_image('imprinting', point.image) as img:
set_input(classifier, img)
classifier.invoke()
top = classify.get_classes(classifier, top_k=1)[0]
self.assertEqual(top.id, point.class_id)
self.assertGreater(top.score, point.score)
return trained_model
# Test full model, not keeping base model classes.
def test_training_l2_norm_model_not_keep_classes(self):
train_points = [
TrainPoint(images=['cat_train_0.bmp'], class_id=0),
TrainPoint(images=['dog_train_0.bmp'], class_id=1),
TrainPoint(
images=['hotdog_train_0.bmp', 'hotdog_train_1.bmp'], class_id=2),
]
test_points = [
TestPoint(image='cat_test_0.bmp', class_id=0, score=0.99),
TestPoint(image='dog_test_0.bmp', class_id=1, score=0.99),
TestPoint(image='hotdog_test_0.bmp', class_id=2, score=0.99)
]
for model_path in _MODEL_LIST:
with self.subTest(model_path=model_path):
self._train_and_test(
test_utils.test_data_path(model_path),
train_points,
test_points,
keep_classes=False)
# Test full model, keeping base model classes.
def test_training_l2_norm_model_keep_classes(self):
train_points = [
TrainPoint(images=['cat_train_0.bmp'], class_id=1001),
TrainPoint(images=['dog_train_0.bmp'], class_id=1002),
TrainPoint(
images=['hotdog_train_0.bmp', 'hotdog_train_1.bmp'], class_id=1003)
]
test_points = [
TestPoint(image='cat_test_0.bmp', class_id=1001, score=0.99),
TestPoint(image='hotdog_test_0.bmp', class_id=1003, score=0.92)
]
for model_path in _MODEL_LIST:
with self.subTest(model_path=model_path):
self._train_and_test(
test_utils.test_data_path(model_path),
train_points,
test_points,
keep_classes=True)
def test_incremental_training(self):
train_points = [TrainPoint(images=['cat_train_0.bmp'], class_id=0)]
retrain_points = [
TrainPoint(images=['dog_train_0.bmp'], class_id=1),
TrainPoint(
images=['hotdog_train_0.bmp', 'hotdog_train_1.bmp'], class_id=2)
]
test_points = [
TestPoint(image='cat_test_0.bmp', class_id=0, score=0.99),
TestPoint(image='dog_test_0.bmp', class_id=1, score=0.99),
TestPoint(image='hotdog_test_0.bmp', class_id=2, score=0.99)
]
for model_path in _MODEL_LIST:
with self.subTest(model_path=model_path):
model = self._train_and_test(
test_utils.test_data_path(model_path),
train_points, [],
keep_classes=False)
with test_utils.temporary_file(suffix='.tflite') as new_model_file:
new_model_file.write(model)
# Retrain based on cat only model.
self._train_and_test(
new_model_file.name,
retrain_points,
test_points,
keep_classes=True)
def test_imprinting_engine_saving_without_training(self):
model_list = [
'mobilenet_v1_1.0_224_l2norm_quant.tflite',
'mobilenet_v1_1.0_224_l2norm_quant_edgetpu.tflite'
]
for model in model_list:
engine = ImprintingEngine(
test_utils.test_data_path(model), keep_classes=False)
with self.assertRaisesRegex(RuntimeError, 'Model is not trained.'):
engine.serialize_model()
def test_imprinting_engine_invalid_model_path(self):
with self.assertRaisesRegex(
ValueError, 'Failed to open file: invalid_model_path.tflite'):
ImprintingEngine('invalid_model_path.tflite')
def test_imprinting_engine_load_extractor_with_wrong_format(self):
expected_message = ('Unsupported model architecture. Input model must have '
'an L2Norm layer.')
with self.assertRaisesRegex(ValueError, expected_message):
ImprintingEngine(
test_utils.test_data_path('mobilenet_v1_1.0_224_quant.tflite'))
if __name__ == '__main__':
test_utils.coral_test_main()
| apache-2.0 | -5,368,133,874,632,009,000 | 35.856322 | 80 | 0.655076 | false |
hanya/BookmarksMenu | pythonpath/bookmarks/tools.py | 1 | 14635 | # Copyright 2012 Tsutomu Uchino
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import uno
import unohelper
from com.sun.star.beans import PropertyValue, StringPair
from com.sun.star.lang import Locale
from com.sun.star.task import XInteractionHandler
def create_service(ctx, name, args=None):
""" Create service with args if required. """
smgr = ctx.getServiceManager()
if args:
return smgr.createInstanceWithArgumentsAndContext(name, args, ctx)
else:
return smgr.createInstanceWithContext(name, ctx)
def get_desktop(ctx):
""" Get instance of css.frame.Destkop"""
return create_service(ctx, "com.sun.star.frame.Desktop")
def get_config(ctx, nodepath, modifiable=False):
""" Get configuration node. """
cp = create_service(ctx, "com.sun.star.configuration.ConfigurationProvider")
node = PropertyValue("nodepath", -1, nodepath, 0)
if modifiable:
name = "com.sun.star.configuration.ConfigurationUpdateAccess"
else:
name = "com.sun.star.configuration.ConfigurationAccess"
return cp.createInstanceWithArguments(name, (node,))
def get_config_value(ctx, nodepath, name):
""" Get value from specific configuration node. """
config = get_config(ctx, nodepath)
return config.getPropertyValue(name)
def get_current_locale(ctx):
""" Get current locale. """
config = get_config(ctx, "/org.openoffice.Setup/L10N")
locale = config.getPropertyValue("ooLocale")
parts = locale.split("-")
lang = parts[0]
country = ""
if len(parts) == 2:
country = parts[1]
return Locale(lang, country, "")
def get_user_config(ctx):
""" Get writable user's config. """
return create_service(ctx, "com.sun.star.util.PathSettings").UserConfig_writable
def get_user_backup(ctx):
""" Get writable user's backup. """
return create_service(ctx, "com.sun.star.util.PathSettings").Backup_writable
def get_extension_dirurl(ctx, extid):
"""Get extension directory url from the extension id."""
pip_name = "/singletons/com.sun.star.deployment.PackageInformationProvider"
if ctx.hasByName(pip_name):
pip = ctx.getByName(pip_name)
try:
return pip.getPackageLocation(extid)
except:
pass # ubuntu-2d
return ""
def load_resource(ctx, dir_url, file_name, locale, read_only=True):
""" Load resource file. """
class DummyHandler(unohelper.Base, XInteractionHandler):
def handle(self, request): pass
res = create_service(ctx,
"com.sun.star.resource.StringResourceWithLocation")
res.initialize((dir_url, read_only, locale, file_name, "", DummyHandler()))
return res
def load_resource_as_dict(ctx, dir_url, file_name, locale, include_id=False):
""" Load resource as dict. """
res = load_resource(ctx, dir_url, file_name, locale)
strings = {}
default_locale = res.getDefaultLocale()
for id in res.getResourceIDs():
str_id = res.resolveStringForLocale(id, default_locale)
resolved = res.resolveString(id)
strings[str_id] = resolved
if include_id:
strings[id] = resolved
return strings
def get_current_resource(ctx, dir_url, file_name):
""" Get resource for current locale. """
return load_resource_as_dict(ctx, dir_url, file_name, get_current_locale(ctx))
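# Illustrative call (extension id and resource basename are made up):
#   res_dir = get_extension_dirurl(ctx, "com.example.bookmarks")
#   strings = get_current_resource(ctx, res_dir, "strings")
#   label = strings.get("some.resource.id", "fallback")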
from com.sun.star.awt import Rectangle
def show_message(ctx, frame, message, title="", type="messbox", buttons=1, labels=None):
""" Show text in message box. """
try:
peer = frame.getContainerWindow()
except:
peer = frame
older_imple = check_method_parameter(
ctx, "com.sun.star.awt.XMessageBoxFactory",
"createMessageBox", 1, "com.sun.star.awt.Rectangle")
if older_imple:
box = peer.getToolkit().createMessageBox(
peer, Rectangle(), type, buttons, title, message)
else:
if type == "messbox":
name = "MESSAGEBOX"
elif type == "infobox":
name = "INFOBOX"
elif type == "warningbox":
name = "WARNINGBOX"
elif type == "errorbox":
name = "ERRORBOX"
elif type == "querybox":
name = "QUERYBOX"
type = uno.getConstantByName("com.sun.star.awt.MessageBoxType." + name)
box = peer.getToolkit().createMessageBox(
peer, type, buttons, title, message)
ws = box.getWindows()
if labels and len(ws) == len(labels):
for label, w in zip(labels, ws):
w.Label = label
n = box.execute()
box.dispose()
return n
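# Illustrative use (frame is an existing document frame):
#   n = show_message(ctx, frame, "Remove bookmark?", title="Bookmarks",
#                    type="querybox", buttons=3)  # buttons as in css.awt.MessageBoxButtons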
def create_script(ctx, uri):
""" Create script object. """
return ctx.getValueByName(
"/singletons/com.sun.star.script.provider.theMasterScriptProviderFactory").\
createScriptProvider("").getScript(uri)
def get_module_name(ctx, obj):
""" Get document module name. """
try:
return create_service(ctx, "com.sun.star.frame.ModuleManger").identify(obj)
except:
pass
return ""
# VclResourceLoader is gone on LibreOffice 3.5.
#def get_resource(ctx, module, method, id):
# """ Load something from resource file. """
# # helper basic code required, because of VclResourceLoader has problem with PyUNO
# RES_LOADER_URI = "vnd.sun.star.script:mytools_bookmarks.Res.LoadResource?language=Basic&location=application"
# script = create_script(ctx, RES_LOADER_URI)
# resources, dummy, dummy = script.invoke((module, method, id), (), ())
# return resources
def get_popup_names(ctx):
""" Get list of popup menu controller names. """
config = get_config(ctx,
"/org.openoffice.Office.UI.Controller/Registered/PopupMenu")
popup_menus = {}
for name in config.getElementNames():
item = config.getByName(name)
popup_menus[item.Command] = item.Controller
return popup_menus
def create_graphic(ctx, url):
""" Create graphic instance for image URL. """
return create_service(
ctx, "com.sun.star.graphic.GraphicProvider").\
queryGraphic((PropertyValue("URL", -1, url, 0),))
def join_url(dir_url, *names):
""" Append names with directory URL. """
if dir_url.endswith("/"):
return dir_url + "/".join(names)
else:
return dir_url + "/" + "/".join(names)
def dir_url(file_url):
""" Get directory URL. """
n = file_url.rfind("/")
if n > -1:
return file_url[0:n]
return file_url
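# e.g. join_url("file:///tmp", "a", "b") -> "file:///tmp/a/b"
#      dir_url("file:///tmp/a/b.txt")    -> "file:///tmp/a"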
def copy_file(ctx, source, dest, overwrite=False):
""" Copy files to destination. """
try:
sfa = create_service(ctx, "com.sun.star.ucb.SimpleFileAccess")
if sfa.exists(dest):
if not overwrite:
return
if sfa.exists(source):
sfa.copy(source, dest)
except Exception as e:
if not sfa.exists(dir_url(dest)):
sfa.createFolder(dir_url(dest))
if sfa.exists(source):
sfa.copy(source, dest)
def get_text_content(ctx, file_url, encoding="utf-8"):
sfa = create_service(ctx, "com.sun.star.ucb.SimpleFileAccess")
if sfa.exists(file_url):
textio = create_service(ctx, "com.sun.star.io.TextInputStream")
try:
io = sfa.openFileRead(file_url)
textio.setInputStream(io)
textio.setEncoding(encoding)
lines = []
while not textio.isEOF():
lines.append(textio.readLine())
io.closeInput()
return "\n".join(lines)
except:
pass
return None
def check_interface(ctx, interface_name, method_names):
""" Check the interface is implemented or methods are implemented. """
cr = create_service(ctx, "com.sun.star.reflection.CoreReflection")
try:
idl = cr.forName(interface_name)
for name in method_names:
r = idl.getMethod(name)
if r is None:
return False
except:
return False
return True
def check_method_parameter(ctx, interface_name, method_name, param_index, param_type):
""" Check the method has specific type parameter at the specific position. """
cr = create_service(ctx, "com.sun.star.reflection.CoreReflection")
try:
idl = cr.forName(interface_name)
m = idl.getMethod(method_name)
if m:
info = m.getParameterInfos()[param_index]
return info.aType.getName() == param_type
except:
pass
return False
def get_extension_package(ctx, ext_id):
""" Get extension package for extension id. """
repositories = ("user", "shared", "bundle")
manager_name = "/singletons/com.sun.star.deployment.ExtensionManager"
manager = None
if ctx.hasByName(manager_name):
# 3.3 is required
manager = ctx.getByName(manager_name)
else:
return None
package = None
for repository in repositories:
package = manager.getDeployedExtension(repository, ext_id, "", None)
if package:
break
return package
def get_package_info(ctx, ext_id):
""" Returns package name and version. """
package = get_extension_package(ctx, ext_id)
if package:
return package.getDisplayName(), package.getVersion()
return "", ""
class FileFilterManager(object):
""" Generate list of filters and fills file picker with it. """
FILTER_QUERY = "getSortedFilterList():module=:iflags=1:eflags=266248"
TYPES = "/org.openoffice.TypeDetection.Types/Types"
CLASSIFICATION = "/org.openoffice.Office.UI/FilterClassification"
LOCAL_CATEGORIES = "/org.openoffice.Office.UI/FilterClassification/LocalFilters/Classes"
FORMULA_NAME1 = "com.sun.star.formula.FormularProperties"
FORMULA_NAME2 = "com.sun.star.formula.FormulaProperties"
def __init__(self, ctx, all_files="All files (*.*)"):
self.ctx = ctx
self.all_files = all_files
self.filter_groups = None
def set_filters(self, fp):
""" Fill list of file type of file picker dialog. """
if not self.filter_groups:
self._init()
sp = StringPair
fp.appendFilterGroup("all", (sp(self.all_files, "*.*"),))
for group in self.filter_groups:
fp.appendFilterGroup(group[0], tuple([sp(uiname, filter)
for uiname, name, filter in group[1]]))
def get_internal_name(self, uiname):
""" Get internal name of the filter from its UI name. """
if not self.filter_groups:
self._init()
if uiname == self.all_files:
return ""
for group in self.filter_groups:
for f in group[1]:
if f[0] == uiname:
return f[1]
return None
def get_ui_name(self, name):
""" Get UI name from its internal name. """
if not self.filter_groups:
self._init()
if name == "":
return self.all_files
for group in self.filter_groups:
for f in group[1]:
if f[1] == name:
return f[0]
return None
def _init(self):
if not self.filter_groups:
self._init_filters()
def _init_filters(self):
def get_values(item):
name = ""
uiname = ""
type = ""
service = ""
for i in item:
if i.Name == "Name":
name = i.Value
elif i.Name == "UIName":
uiname = i.Value
elif i.Name == "Type":
type = i.Value
elif i.Name == "DocumentService":
service = i.Value
return name, uiname, type, service
ctx = self.ctx
ff = ctx.getServiceManager().createInstanceWithContext(
"com.sun.star.document.FilterFactory", ctx)
filters_enume = ff.createSubSetEnumerationByQuery(self.FILTER_QUERY)
types = get_config(ctx, self.TYPES)
ordered_filter_names = []
filters = {}
while filters_enume.hasMoreElements():
f = filters_enume.nextElement()
name, uiname, type, service = get_values(f)
try:
ext = ";".join(["*." + ext
for ext in types.getByName(type).Extensions])
except:
                ext = ""  # keep the fallback a string, like the joined value above
filters[name] = (uiname, ext, service)
ordered_filter_names.append(name)
classification = get_config(ctx, self.CLASSIFICATION)
# order to show filters in the listbox
module_order = list(classification.getHierarchicalPropertyValue("GlobalFilters/Order"))
try:
module_order[module_order.index(self.FORMULA_NAME1)] = self.FORMULA_NAME2
except:
pass
modules = {}
for name in module_order:
modules[name] = []
modules["other"] = []
for name in ordered_filter_names:
try:
v = filters[name]
except:
continue
try:
mod = modules[v[2]]
except:
mod = modules["other"]
uiname = v[0]
file_filter = v[1]
if file_filter:
uiname = v[0] + (" (%s)" % file_filter)
mod.append((uiname, name, file_filter))
# categories
classify = classification.getHierarchicalPropertyValue("GlobalFilters/Classes")
group_names = {}
for name in classify.getElementNames():
if name == self.FORMULA_NAME1:
_name = self.FORMULA_NAME2
else:
_name = name
group_names[_name] = classify.getByName(name).DisplayName
filter_groups = [(group_names[name], modules[name])
for name in module_order]
if modules["other"]:
filter_groups.append(("other", modules["other"]))
self.filter_groups = filter_groups
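# Illustrative usage sketch (the picker variables below are assumptions, not part
# of these helpers): FileFilterManager can populate a UNO file picker and map the
# selected UI filter name back to its internal name, e.g.:
#     ffm = FileFilterManager(ctx)
#     fp = create_service(ctx, "com.sun.star.ui.dialogs.FilePicker")
#     ffm.set_filters(fp)
#     if fp.execute():
#         internal_name = ffm.get_internal_name(fp.getCurrentFilter())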
| apache-2.0 | 2,302,242,109,002,232,800 | 32.261364 | 114 | 0.59672 | false |
fritz0705/lglass | lglass/object.py | 1 | 14874 | class Object(object):
def __init__(self, data=None):
self._data = []
if data is not None:
self.extend(data)
@property
def data(self):
"""List of key-value-tuples."""
return self._data
@property
def object_class(self):
"""Object class of this object."""
return self.data[0][0]
@object_class.setter
def object_class(self, new_class):
"""Set object class to new value."""
self.data[0] = (new_class, self.object_key)
@property
def object_key(self):
"""Object key of this object."""
return self.data[0][1]
@object_key.setter
def object_key(self, new_key):
"""Set object key to new value."""
self.data[0] = (self.object_class, new_key)
@property
def type(self):
"""Alias of `object_class`."""
return self.object_class
@property
def key(self):
"""Alias of `object_key`."""
return self.object_key
@property
def primary_key(self):
"""Primary key of this object. This is the concatenation of all
primary key field values."""
return "".join(self[k] for k in self.primary_key_fields)
@property
def primary_key_fields(self):
"""List of primary key fields."""
return [self.object_class]
def primary_key_object(self):
"""Return object which consists only of the primary key fields."""
return self.__class__(
[(k, v) for k, v in self.data if k in self.primary_key_fields])
def extend(self, ex, append_group=False):
"""Extend object with another object or list."""
if isinstance(ex, str):
ex = parse_object(ex.splitlines())
self._data.extend(map(tuple, ex))
def __getitem__(self, key):
if isinstance(key, str):
key = key.replace("_", "-")
try:
return list(self.get(key))[0]
except IndexError:
raise KeyError(repr(key))
elif isinstance(key, (int, slice)):
return self.data[key]
raise TypeError(
"Expected key to be str or int, got {}".format(
type(key)))
def __setitem__(self, key, value):
if isinstance(value, (list, slice, set)):
for val in value:
self.append(key, val)
return
if isinstance(key, (int, slice)):
self.data[key] = value
elif isinstance(key, str):
key = key.replace("_", "-")
if key not in self:
self.append(key, value)
else:
index = self.indices(key)[0]
self.remove(key)
self.insert(index, key, value)
    def __delitem__(self, key):
        if isinstance(key, (int, slice)):
            del self.data[key]
        else:
            # normalise attribute-style keys ("mnt_by" -> "mnt-by"), as __getitem__ does
            key = key.replace("_", "-")
            self.remove(key)
def __contains__(self, key):
""" Checks whether a given key is contained in the object instance. """
return key in set(self.keys())
def __len__(self):
return len(self.data)
def get(self, key):
"""Return a list of values for a given key."""
return [v for k, v in self._data if k == key]
def getitems(self, key):
"""Returns a list of key-value-tuples for a given key."""
return [kv for kv in self._data if kv[0] == key]
def getfirst(self, key, default=None):
"""Returns the first occurence of a field with matching key. Supports
the `default` keyword."""
try:
return self.get(key)[0]
except IndexError:
return default
def add(self, key, value, index=None):
"""Append or insert a new field."""
value = str(value)
if index is not None:
self._data.insert(index, (key, value))
else:
self._data.append((key, value))
def append(self, key, value):
return self.add(key, value)
def append_group(self, key, value):
"""Appends a field to the last group of fields of the same key."""
try:
idx = self.indices(key)[-1] + 1
return self.insert(idx, key, value)
except IndexError:
return self.append(key, value)
def insert(self, index, key, value):
return self.add(key, value, index)
def indices(self, key):
"""Returns a list of indices of fields with a given key."""
return [i for i, (k, v) in enumerate(self.data) if k == key]
def remove(self, key):
"""Remove all occurences of a key or remove a field with a given
index."""
if isinstance(key, int):
del self._data[key]
return
self._data = [kvpair for kvpair in self._data if kvpair[0] != key]
def items(self):
"""Returns an iterator of key-value-tuples."""
return iter(self.data)
def keys(self):
"""Returns an iterator of field keys."""
return (key for key, _ in self.items())
def values(self):
"""Returns an iterator of field values."""
return (value for _, value in self.items())
def pretty_print(self, min_padding=0, add_padding=8):
"""Generates a pretty-printed version of the object serialization."""
padding = max(max((len(k) for k in self.keys()),
default=0), min_padding) + add_padding
for key, value in self:
value_lines = value.splitlines() or [""]
record = "{key}:{pad}{value}\n".format(
key=key,
pad=" " * (padding - len(key)),
value=value_lines[0])
for line in value_lines[1:]:
if not line:
record += "+\n"
continue
record += "{pad}{value}\n".format(
pad=" " * (padding + 1),
value=line)
yield record
def __str__(self):
return "".join(self.pretty_print())
def __repr__(self):
return "<{module_name}.{class_name} {object_class}: {object_key}>".format(
module_name=type(self).__module__,
class_name=type(self).__name__,
object_class=self.object_class,
object_key=self.object_key)
def __eq__(self, other):
if not isinstance(other, Object):
return NotImplemented
return self.data == other.data
def __ne__(self, other):
return not self == other
def __bool__(self):
return bool(self.data)
def copy(self):
"""Creates new object with same content."""
return self.__class__(self.data)
def to_json(self):
return list(map(list, self.data))
@classmethod
def from_file(cls, fh):
"""Creates an object from a file stream."""
return cls(fh.read())
@classmethod
def from_str(cls, string):
"""Creates an object from a string representation."""
return cls(string)
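# Minimal usage sketch for Object (example data only, not part of the original module):
#     obj = Object([("person", "John Doe"), ("address", "Example Street 1")])
#     obj.append("phone", "+49 1234567")
#     obj["nic-hdl"] = "JD1-EXAMPLE"
#     obj.object_class                    # -> "person"
#     print("".join(obj.pretty_print()))  # RPSL-style pretty-printed output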
def parse_objects(lines, pragmas={}):
lines_iter = iter(lines)
obj = []
for line in lines_iter:
if not line.strip() and obj:
obj = parse_object(obj, pragmas=pragmas)
if obj:
yield obj
obj = []
else:
obj.append(line)
if obj:
obj = parse_object(obj, pragmas=pragmas)
if obj:
yield obj
# TODO rewrite object parser
def parse_object(lines, pragmas={}):
r'''This is a simple RPSL parser which expects an iterable which yields lines.
This parser processes the object format, not the policy format. The object
format used by this parser is similar to the format described by the RFC:
Each line consists of key and value, which are separated by a colon ':'.
The ':' can be surrounded by whitespace characters including line breaks,
because this parser doesn't split the input into lines; it's newline unaware.
The format also supports line continuations by beginning a new line of input
with a whitespace character. This whitespace character is stripped, but the
parser will produce a '\n' in the resulting value. Line continuations are
only possible for the value part, which means, that the key and ':' must be
on the same line of input.
We also support an extended format using pragmas, which can define the
processing rules like line-break type, and whitespace preservation. Pragmas
are on their own line, which must begin with "%!", followed by any
amount of whitespace, "pragma", at least one whitespace, followed by the
pragma-specific part.
The following pragmas are supported:
``%! pragma whitespace-preserve [on|off]``
Preserve any whitespace of input in keys and values and don't strip
whitespace.
``%! pragma newline-type [cr|lf|crlf|none]``
Define type of newline by choosing between cr "Mac OS 9", lf "Unix",
crlf "Windows" and none.
``%! pragma rfc``
Reset all pragmas to the RFC-conform values.
``%! pragma stop-at-empty-line [on|off]``
Enforces the parser to stop at an empty line
``%! pragma condense-whitespace [on|off]``
Replace any sequence of whitespace characters with simple space (' ')
``%! pragma strict-ripe [on|off]``
        Do completely RIPE database compliant parsing, e.g. don't allow any
space between key and the colon.
``%! pragma hash-comment [on|off]``
Recognize hash '#' as beginning of comment
'''
result = []
default_pragmas = {
"whitespace-preserve": False,
"newline-type": "lf",
"stop-at-empty-line": False,
"condense-whitespace": False,
"strict-ripe": False,
"hash-comment": False
}
_pragmas = dict(default_pragmas)
_pragmas.update(pragmas)
pragmas = _pragmas
for line in lines:
if line.startswith("%!"):
# this line defines a parser instruction, which should be a pragma
values = line[2:].strip().split()
if len(values) <= 1:
raise ValueError(
"Syntax error: Expected pragma type after 'pragma'")
if values[0] != "pragma":
raise ValueError(
"Syntax error: Only pragmas are allowed as parser instructions")
if values[1] == "rfc":
pragmas.update(default_pragmas)
elif values[1] in {"whitespace-preserve", "stop-at-empty-line",
"condense-whitespace", "strict-ripe", "hash-comment"}:
try:
if values[2] not in {"on", "off"}:
raise ValueError(
"Syntax error: Expected 'on' or 'off' as value for '{}' pragma".format(
values[1]))
pragmas[values[1]] = True if values[2] == "on" else False
except IndexError:
raise ValueError(
"Syntax error: Expected value after '{}'".format(
values[1]))
elif values[1] == "newline-type":
try:
if values[2] not in ["cr", "lf", "crlf", "none"]:
raise ValueError(
"Syntax error: Expected 'cr', 'lf', 'crlf' or 'none' as value for 'newline-type' pragma")
pragmas["newline-type"] = values[2]
except IndexError:
raise ValueError(
"Syntax error: Expected value after 'newline-type'")
else:
raise ValueError(
"Syntax error: Unknown pragma: {}".format(values))
continue
# continue if line is empty
if not line.strip():
if pragmas["stop-at-empty-line"]:
break
continue
# remove any comments (text after % and #)
line = line.split("%")[0]
if pragmas["hash-comment"]:
line = line.split("#")[0]
if not line.strip():
continue
# check for line continuations
if line[0] in [' ', '\t', "+"]:
line = line[1:]
if not pragmas["whitespace-preserve"]:
line = line.strip()
entry = result.pop()
value = ({
"cr": "\r",
"lf": "\n",
"crlf": "\r\n",
"none": ""
}[pragmas["newline-type"]]).join([entry[1], line])
result.append((entry[0], value))
continue
try:
key, value = line.split(":", 1)
except ValueError:
raise ValueError("Syntax error: Missing value")
if pragmas["strict-ripe"]:
import re
if not re.match("^[a-zA-Z0-9-]+$", key):
raise ValueError(
"Syntax error: Key doesn't match RIPE database requirements")
if not pragmas["whitespace-preserve"]:
key = key.strip()
value = value.strip()
if pragmas["condense-whitespace"]:
import re
value = re.sub(r"[\s]+", " ", value, flags=re.M | re.S)
result.append((key, value))
return result
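# Example sketch (illustrative input only): pragma lines are consumed by the
# parser and the remaining lines become key-value pairs, e.g.
#     parse_object([
#         "%! pragma hash-comment on",
#         "route: 192.0.2.0/24   # documentation prefix",
#         "origin: AS64496",
#     ])
#     # -> [("route", "192.0.2.0/24"), ("origin", "AS64496")]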
def main():
import argparse
import sys
argparser = argparse.ArgumentParser(description="Pretty-print objects")
argparser.add_argument(
"--min-padding",
help="Minimal padding between key and value",
type=int,
default=0)
argparser.add_argument(
"--add-padding",
help="Additional padding between key and value",
type=int,
default=8)
argparser.add_argument("--whois-format", action="store_true")
argparser.add_argument("--tee", "-T", action="store_true")
argparser.add_argument("--inplace", "-i", action="store_true")
argparser.add_argument("files", nargs='*', help="Input files")
args = argparser.parse_args()
options = dict(
min_padding=args.min_padding,
add_padding=args.add_padding)
if args.whois_format:
options["min_padding"] = 16
options["add_padding"] = 0
if not args.files:
obj = Object.from_file(sys.stdin)
print("".join(obj.pretty_print(**options)))
return
for f in args.files:
with open(f, "r") as fh:
obj = Object.from_file(fh)
if args.inplace:
with open(f, "w") as fh:
fh.write("".join(obj.pretty_print(**options)))
if args.tee or not args.inplace:
print("".join(obj.pretty_print(**options)))
if __name__ == "__main__":
main()
| mit | 5,814,712,941,590,227,000 | 32.804545 | 117 | 0.538994 | false |
timbuchwaldt/bundlewrap | bundlewrap/items/users.py | 1 | 11714 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from logging import ERROR, getLogger
from pipes import quote
from string import ascii_lowercase, digits
from passlib.hash import bcrypt, md5_crypt, sha256_crypt, sha512_crypt
from bundlewrap.exceptions import BundleError
from bundlewrap.items import BUILTIN_ITEM_ATTRIBUTES, Item
from bundlewrap.utils.text import force_text, mark_for_translation as _
getLogger('passlib').setLevel(ERROR)
_ATTRIBUTE_NAMES = {
'full_name': _("full name"),
'gid': _("GID"),
'groups': _("groups"),
'home': _("home dir"),
'password_hash': _("password hash"),
'shell': _("shell"),
'uid': _("UID"),
}
_ATTRIBUTE_OPTIONS = {
'full_name': "-c",
'gid': "-g",
'groups': "-G",
'home': "-d",
'password_hash': "-p",
'shell': "-s",
'uid': "-u",
}
# a random static salt if users don't provide one
_DEFAULT_SALT = "uJzJlYdG"
# bcrypt needs special salts. 22 characters long, ending in ".", "O", "e", "u"
# see https://bitbucket.org/ecollins/passlib/issues/25
_DEFAULT_BCRYPT_SALT = "oo2ahgheen9Tei0IeJohTO"
HASH_METHODS = {
'md5': md5_crypt,
'sha256': sha256_crypt,
'sha512': sha512_crypt,
'bcrypt': bcrypt
}
_USERNAME_VALID_CHARACTERS = ascii_lowercase + digits + "-_"
def _group_name_for_gid(node, gid):
"""
Returns the group name that matches the gid.
"""
group_output = node.run("grep -e ':{}:[^:]*$' /etc/group".format(gid), may_fail=True)
if group_output.return_code != 0:
return None
else:
return group_output.stdout_text.split(":")[0]
def _groups_for_user(node, username):
"""
Returns the list of group names for the given username on the given
node.
"""
groups = node.run("id -Gn {}".format(username)).stdout_text.strip().split(" ")
primary_group = node.run("id -gn {}".format(username)).stdout_text.strip()
groups.remove(primary_group)
return groups
def _parse_passwd_line(line, entries):
"""
Parses a line from /etc/passwd and returns the information as a
dictionary.
"""
result = dict(zip(
entries,
line.strip().split(":"),
))
result['full_name'] = result['gecos'].split(",")[0]
return result
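# Illustrative example (made-up passwd line): the helper zips the field names with
# the colon-separated columns and derives 'full_name' from the GECOS field, e.g.
#     _parse_passwd_line(
#         "alice:x:1000:1000:Alice Example,,,:/home/alice:/bin/bash",
#         ('username', 'passwd_hash', 'uid', 'gid', 'gecos', 'home', 'shell'),
#     )['full_name']   # -> "Alice Example"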
class User(Item):
"""
A user account.
"""
BUNDLE_ATTRIBUTE_NAME = "users"
ITEM_ATTRIBUTES = {
'delete': False,
'full_name': None,
'gid': None,
'groups': None,
'hash_method': 'sha512',
'home': None,
'password': None,
'password_hash': None,
'salt': None,
'shell': None,
'uid': None,
'use_shadow': None,
}
ITEM_TYPE_NAME = "user"
@classmethod
def block_concurrent(cls, node_os, node_os_version):
# https://github.com/bundlewrap/bundlewrap/issues/367
if node_os == 'openbsd':
return [cls.ITEM_TYPE_NAME]
else:
return []
def __repr__(self):
return "<User name:{}>".format(self.name)
def cdict(self):
if self.attributes['delete']:
return None
cdict = self.attributes.copy()
del cdict['delete']
del cdict['hash_method']
del cdict['password']
del cdict['salt']
del cdict['use_shadow']
for key in list(cdict.keys()):
if cdict[key] is None:
del cdict[key]
if 'groups' in cdict:
cdict['groups'] = set(cdict['groups'])
return cdict
def fix(self, status):
if status.must_be_deleted:
self.node.run("userdel {}".format(self.name), may_fail=True)
else:
command = "useradd " if status.must_be_created else "usermod "
for attr, option in sorted(_ATTRIBUTE_OPTIONS.items()):
if (attr in status.keys_to_fix or status.must_be_created) and \
self.attributes[attr] is not None:
if attr == 'groups':
value = ",".join(self.attributes[attr])
else:
value = str(self.attributes[attr])
command += "{} {} ".format(option, quote(value))
command += self.name
self.node.run(command, may_fail=True)
def display_dicts(self, cdict, sdict, keys):
for attr_name, attr_display_name in _ATTRIBUTE_NAMES.items():
if attr_name == attr_display_name:
# Don't change anything; the `del`s below would
# always remove the key entirely!
continue
try:
keys.remove(attr_name)
except ValueError:
pass
else:
keys.append(attr_display_name)
cdict[attr_display_name] = cdict[attr_name]
sdict[attr_display_name] = sdict[attr_name]
del cdict[attr_name]
del sdict[attr_name]
return (cdict, sdict, keys)
def get_auto_deps(self, items):
deps = []
groups = self.attributes['groups'] or []
for item in items:
if item.ITEM_TYPE_NAME == "group":
if not (item.name in groups or (
self.attributes['gid'] in [item.attributes['gid'], item.name] and
self.attributes['gid'] is not None
)):
# we don't need to depend on this group
continue
elif item.attributes['delete']:
raise BundleError(_(
"{item1} (from bundle '{bundle1}') depends on item "
"{item2} (from bundle '{bundle2}') which is set to be deleted"
).format(
item1=self.id,
bundle1=self.bundle.name,
item2=item.id,
bundle2=item.bundle.name,
))
else:
deps.append(item.id)
return deps
def sdict(self):
# verify content of /etc/passwd
if self.node.os in self.node.OS_FAMILY_BSD:
password_command = "grep -ae '^{}:' /etc/master.passwd"
else:
password_command = "grep -ae '^{}:' /etc/passwd"
passwd_grep_result = self.node.run(
password_command.format(self.name),
may_fail=True,
)
if passwd_grep_result.return_code != 0:
return None
if self.node.os in self.node.OS_FAMILY_BSD:
entries = (
'username',
'passwd_hash',
'uid',
'gid',
'class',
'change',
'expire',
'gecos',
'home',
'shell',
)
else:
entries = ('username', 'passwd_hash', 'uid', 'gid', 'gecos', 'home', 'shell')
sdict = _parse_passwd_line(passwd_grep_result.stdout_text, entries)
if self.attributes['gid'] is not None and not self.attributes['gid'].isdigit():
sdict['gid'] = _group_name_for_gid(self.node, sdict['gid'])
if self.attributes['password_hash'] is not None:
if self.attributes['use_shadow'] and self.node.os not in self.node.OS_FAMILY_BSD:
# verify content of /etc/shadow unless we are on OpenBSD
shadow_grep_result = self.node.run(
"grep -e '^{}:' /etc/shadow".format(self.name),
may_fail=True,
)
if shadow_grep_result.return_code != 0:
sdict['password_hash'] = None
else:
sdict['password_hash'] = shadow_grep_result.stdout_text.split(":")[1]
else:
sdict['password_hash'] = sdict['passwd_hash']
del sdict['passwd_hash']
# verify content of /etc/group
sdict['groups'] = set(_groups_for_user(self.node, self.name))
return sdict
def patch_attributes(self, attributes):
if attributes.get('password', None) is not None:
# defaults aren't set yet
hash_method = HASH_METHODS[attributes.get(
'hash_method',
self.ITEM_ATTRIBUTES['hash_method'],
)]
salt = attributes.get('salt', None)
if self.node.os in self.node.OS_FAMILY_BSD:
attributes['password_hash'] = bcrypt.encrypt(
force_text(attributes['password']),
rounds=8, # default rounds for OpenBSD accounts
salt=_DEFAULT_BCRYPT_SALT if salt is None else salt,
)
elif attributes.get('hash_method') == 'md5':
attributes['password_hash'] = hash_method.encrypt(
force_text(attributes['password']),
salt=_DEFAULT_SALT if salt is None else salt,
)
else:
attributes['password_hash'] = hash_method.encrypt(
force_text(attributes['password']),
rounds=5000, # default from glibc
salt=_DEFAULT_SALT if salt is None else salt,
)
if 'use_shadow' not in attributes:
attributes['use_shadow'] = self.node.use_shadow_passwords
for attr in ('gid', 'uid'):
if isinstance(attributes.get(attr), int):
attributes[attr] = str(attributes[attr])
return attributes
@classmethod
def validate_attributes(cls, bundle, item_id, attributes):
if attributes.get('delete', False):
for attr in attributes.keys():
if attr not in ['delete'] + list(BUILTIN_ITEM_ATTRIBUTES.keys()):
raise BundleError(_(
"{item} from bundle '{bundle}' cannot have other "
"attributes besides 'delete'"
).format(item=item_id, bundle=bundle.name))
if 'hash_method' in attributes and \
attributes['hash_method'] not in HASH_METHODS:
raise BundleError(
_("Invalid hash method for {item} in bundle '{bundle}': '{method}'").format(
bundle=bundle.name,
item=item_id,
method=attributes['hash_method'],
)
)
if 'password_hash' in attributes and (
'password' in attributes or
'salt' in attributes
):
raise BundleError(_(
"{item} in bundle '{bundle}': 'password_hash' "
"cannot be used with 'password' or 'salt'"
).format(bundle=bundle.name, item=item_id))
if 'salt' in attributes and 'password' not in attributes:
raise BundleError(
_("{}: salt given without a password").format(item_id)
)
@classmethod
def validate_name(cls, bundle, name):
for char in name:
if char not in _USERNAME_VALID_CHARACTERS:
raise BundleError(_(
"Invalid character in username '{user}': {char} (bundle '{bundle}')"
).format(bundle=bundle.name, char=char, user=name))
if name.endswith("_") or name.endswith("-"):
raise BundleError(_(
"Username '{user}' must not end in dash or underscore (bundle '{bundle}')"
).format(bundle=bundle.name, user=name))
if len(name) > 30:
raise BundleError(_(
"Username '{user}' is longer than 30 characters (bundle '{bundle}')"
).format(bundle=bundle.name, user=name))
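# Illustrative bundle sketch (all values are examples, not part of this module):
# in a bundle's items.py, user items are declared with the attributes validated
# above, e.g.
#     users = {
#         "deploy": {
#             'full_name': "Deployment account",
#             'groups': ["www-data"],
#             'home': "/home/deploy",
#             'password_hash': "$6$uJzJlYdG$...",  # hypothetical sha512 crypt hash
#             'shell': "/bin/bash",
#             'uid': 2001,
#         },
#     }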
| gpl-3.0 | -5,239,586,966,625,551,000 | 33.863095 | 93 | 0.522452 | false |
windelbouwman/ppci-mirror | test/test_hexutil.py | 1 | 2515 | import unittest
import tempfile
import io
import os
from unittest.mock import patch
from helper_util import relpath, do_long_tests
from ppci.cli.hexutil import hexutil
def new_temp_file(suffix):
""" Generate a new temporary filename """
handle, filename = tempfile.mkstemp(suffix=suffix)
os.close(handle)
return filename
@unittest.skipUnless(do_long_tests('any'), 'skipping slow tests')
class HexutilTestCase(unittest.TestCase):
@patch('sys.stdout', new_callable=io.StringIO)
def test_hexutil_help(self, mock_stdout):
""" Check hexutil help message """
with self.assertRaises(SystemExit) as cm:
hexutil(['-h'])
self.assertEqual(0, cm.exception.code)
self.assertIn('info,new,merge', mock_stdout.getvalue())
@patch('sys.stderr', new_callable=io.StringIO)
def test_hexutil_address_format(self, mock_stderr):
file1 = new_temp_file('.hex')
datafile = relpath('..', 'examples', 'build.xml')
with self.assertRaises(SystemExit) as cm:
hexutil(['new', file1, '10000000', datafile])
self.assertEqual(2, cm.exception.code)
self.assertIn('argument address', mock_stderr.getvalue())
@patch('sys.stdout', new_callable=io.StringIO)
def test_hexutil_no_command(self, mock_stdout):
""" No command given """
with self.assertRaises(SystemExit) as cm:
hexutil([])
self.assertNotEqual(0, cm.exception.code)
@patch('sys.stdout', new_callable=io.StringIO)
def test_hexutil_merge(self, mock_stdout):
""" Create three hexfiles and manipulate those """
file1 = new_temp_file('file1.hex')
file2 = new_temp_file('file2.hex')
file3 = new_temp_file('file3.hex')
datafile = relpath('..', 'docs', 'logo', 'logo.png')
hexutil(['new', file1, '0x10000000', datafile])
hexutil(['new', file2, '0x20000000', datafile])
hexutil(['merge', file1, file2, file3])
hexutil(['info', file3])
self.assertIn("Hexfile containing 2832 bytes", mock_stdout.getvalue())
@patch('sys.stdout', new_callable=io.StringIO)
def test_hexutil_info(self, mock_stdout):
file1 = new_temp_file('file1.hex')
datafile = relpath('..', 'docs', 'logo', 'logo.png')
hexutil(['new', file1, '0x10000000', datafile])
hexutil(['info', file1])
self.assertIn("Hexfile containing 1416 bytes", mock_stdout.getvalue())
if __name__ == '__main__':
unittest.main(verbosity=2)
| bsd-2-clause | 3,596,360,861,681,178,600 | 36.537313 | 78 | 0.638171 | false |
bertothunder/coursera-algorithms | quickfind.py | 1 | 1155 | #!/usr/bin/env python3
class QuickFindUF(object):
def __init__(self, N):
self.__id__ = [i for i in range(N)]
self.__count__ = N
def union(self, p, q):
# Check indices
if (p > self.__count__ or q > self.__count__):
print('Indices do not exist')
elif (self.__id__[q] != self.__id__[p]): # Not connected yet
pidp = self.__id__[p]
pidq = self.__id__[q]
self.__id__[p] = pidq
for n in range(self.__count__):
if self.__id__[n] == pidp: # or self.__id__[n] == pidq:
print("Writing {} to {}".format(q, n))
self.__id__[n] = pidq
print("New values: {}".format(self.__id__))
else:
print("Something went wrong!")
def connected(self, p, q):
		if (p >= self.__count__ or q >= self.__count__):
			print("Indices out of range")
			return False
return (self.__id__[p] == self.__id__[q])
if __name__ == '__main__':
uf = QuickFindUF(50)
uf.union(1,49)
uf.union(0,1)
uf.union(45,4)
uf.union(46,45)
print("0:49 => {}".format(uf.connected(0,49))) #true
print("45:46 => {}".format(uf.connected(45,46))) #true
print("1:2 => {}".format(uf.connected(1,2))) #false
print("49:48 => {}".format(uf.connected(49, 48))) #false | gpl-3.0 | 641,932,581,429,565,000 | 27.9 | 62 | 0.550649 | false |
fuziontech/pgshovel | src/main/python/pgshovel/relay/handlers/kafka.py | 1 | 2002 | from __future__ import absolute_import
import functools
import threading
import click
from pgshovel.interfaces.streams_pb2 import Message
from pgshovel.relay.entrypoint import entrypoint
from pgshovel.utilities import import_extras
from pgshovel.utilities.protobuf import BinaryCodec
with import_extras('kafka'):
from kafka.client import KafkaClient
from kafka.producer.simple import SimpleProducer
class KafkaWriter(object):
def __init__(self, producer, topic, codec):
self.producer = producer
self.topic = topic
self.codec = codec
# TODO: Might not need to be thread safe any more?
self.__lock = threading.Lock()
self.producer.client.ensure_topic_exists(topic)
def __str__(self):
return 'Kafka writer (topic: %s, codec: %s)' % (self.topic, type(self.codec).__name__)
def __repr__(self):
return '<%s: %s on %r>' % (
type(self).__name__,
self.topic,
[':'.join(map(str, h)) for h in self.producer.client.hosts]
)
def push(self, messages):
with self.__lock: # TODO: ensure this is required, better safe than sorry
self.producer.send_messages(self.topic, *map(self.codec.encode, messages))
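# Illustrative wiring sketch (broker address and topic name are assumptions); it
# mirrors what the click entrypoint below does:
#     client = KafkaClient("127.0.0.1:9092")
#     writer = KafkaWriter(SimpleProducer(client), "cluster.set.mutations",
#                          BinaryCodec(Message))
#     writer.push(batch_of_messages)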
@click.command(
help="Publishes mutation batches to the specified Kafka topic.",
)
@click.option(
'--kafka-hosts',
default='127.0.0.1:9092',
help="Kafka broker connection string (as a comma separated list of hosts.)",
)
@click.option(
'--kafka-topic',
default='{cluster}.{set}.mutations',
help="Destination Topic for mutation batch publishing.",
)
@entrypoint
def main(cluster, set, kafka_hosts, kafka_topic):
client = KafkaClient(kafka_hosts)
producer = SimpleProducer(client)
topic = kafka_topic.format(cluster=cluster.name, set=set)
return KafkaWriter(producer, topic, BinaryCodec(Message))
__main__ = functools.partial(main, auto_envvar_prefix='PGSHOVEL')
if __name__ == '__main__':
__main__()
| apache-2.0 | -3,662,419,686,862,684,000 | 28.441176 | 94 | 0.663836 | false |
imiolek-ireneusz/pysiogame | game_boards/game044.py | 1 | 8476 | # -*- coding: utf-8 -*-
import os
import pygame
import random
import classes.board
import classes.extras as ex
import classes.game_driver as gd
import classes.level_controller as lc
class Board(gd.BoardGame):
def __init__(self, mainloop, speaker, config, screen_w, screen_h):
self.level = lc.Level(self, mainloop, 1, 20)
gd.BoardGame.__init__(self, mainloop, speaker, config, screen_w, screen_h, 13, 9)
def create_game_objects(self, level=1):
self.allow_unit_animations = False
self.allow_teleport = False
self.board.decolorable = False
self.vis_buttons = [0, 1, 1, 1, 1, 1, 1, 1, 0]
self.mainloop.info.hide_buttonsa(self.vis_buttons)
self.board.draw_grid = False
outline_color = (150, 150, 150)
white = (255, 255, 255)
if self.mainloop.scheme is not None and self.mainloop.scheme.dark:
            white = (0, 0, 0)  # dark colour scheme inverts the "white" background
# setting level variable
# data = [x_count, y_count, number_count, top_limit, ordered]
data = [7, 6, 8, 3, 3]
self.chapters = [1, 5, 10, 15, 20]
# rescale the number of squares horizontally to better match the screen width
data[0] = self.get_x_count(data[1], even=False)
self.data = data
self.points = 9
self.layout.update_layout(data[0], data[1])
self.board.level_start(data[0], data[1], self.layout.scale)
if self.mainloop.m.game_variant == 0:
if self.mainloop.scheme is None or not self.mainloop.scheme.dark:
image_src = [os.path.join('memory', "m_img%da.png" % (i)) for i in range(1, 21)]
grey_image_src = [os.path.join('memory', "m_img%db.png" % (i)) for i in range(1, 22)]
else:
image_src = [os.path.join('schemes', "black", "match_animals", "m_img%da.png" % (i)) for i in
range(1, 21)]
grey_image_src = [os.path.join('schemes', "black", "match_animals", "m_img%db.png" % (i)) for i in
range(1, 22)]
elif self.mainloop.m.game_variant == 1:
image_src = [os.path.join('memory', "f_img%da.png" % (i)) for i in range(1, 21)]
grey_image_src = [os.path.join('memory', "m_img22b.png")]
elif self.mainloop.m.game_variant == 2:
image_src = [os.path.join('memory', "n_img%da.png" % (i)) for i in range(2, 22)]
grey_image_src = [os.path.join('memory', "m_img22b.png")]
self.bg_img_src = image_src[self.level.lvl - 1] # os.path.join('memory', "m_img13a.png")
if len(grey_image_src) > 1:
self.bg_img_grey_src = grey_image_src[self.level.lvl - 1] # os.path.join('memory', "m_img13b.png")
else:
self.bg_img_grey_src = "" # grey_image_src[0]
self.bg_img = classes.board.ImgSurf(self.board, 3, 3, white, self.bg_img_src)
self.finished = False
self.choice_list = [x for x in range(1, data[2] + 1)]
self.shuffled = self.choice_list[:]
random.shuffle(self.shuffled)
inversions = ex.inversions(self.shuffled)
if inversions % 2 != 0: # if number of inversions is odd it is unsolvable
# in unsolvable combinations swapping 2 squares will make it solvable
temp = self.shuffled[0]
self.shuffled[0] = self.shuffled[1]
self.shuffled[1] = temp
h1 = (data[1] - data[4]) // 2 # height of the top margin
h2 = data[1] - h1 - data[4] - 1 # height of the bottom margin minus 1 (game label)
w2 = (data[0] - data[3]) // 2 # side margin width
self.check = [h1, h2, w2]
self.board.add_door(w2, h1, data[3], data[4], classes.board.Door, "", white, self.bg_img_grey_src)
self.board.units[0].image.set_colorkey((1, 2, 3))
# create table to store 'binary' solution
# find position of first door square
x = w2
y = h1
self.mini_grid = []
# add objects to the board
line = []
h_start = random.randrange(0, 155, 5)
h_step = 100 // (data[2])
for i in range(data[2]):
h = (h_start + (self.shuffled[i] - 1) * h_step)
caption = str(self.shuffled[i])
self.board.add_unit(x, y, 1, 1, classes.board.ImgShip, caption, white, self.bg_img_src)
self.board.ships[-1].img = self.bg_img.img.copy()
self.board.ships[-1].readable = False
offset_x = 0
offset_y = 0
if self.shuffled[i] in [2, 5, 8]:
offset_x = self.board.scale - 0
elif self.shuffled[i] in [3, 6]:
offset_x = (self.board.scale - 0) * 2
if self.shuffled[i] in [4, 5, 6]:
offset_y = self.board.scale - 0
elif self.shuffled[i] in [7, 8]:
offset_y = (self.board.scale - 0) * 2
self.board.ships[-1].img_pos = (-offset_x, -offset_y)
line.append(i)
x += 1
if x >= w2 + data[3] or i == data[2] - 1:
x = w2
y += 1
self.mini_grid.append(line)
line = []
# mini img below game
self.board.add_unit(w2 + data[3] - 2, data[1] - 1, 1, 1, classes.board.ImgShip, "", white, self.bg_img_src)
self.preview = self.board.ships[-1]
self.preview.immobilize()
self.preview.outline = False
# draw 4 lines on the mini preview
step = self.board.scale // 3
pygame.draw.line(self.preview.img, outline_color, [step, 0], [step, step * 3], 1)
pygame.draw.line(self.preview.img, outline_color, [step * 2, 0], [step * 2, step * 3], 1)
pygame.draw.line(self.preview.img, outline_color, [0, step], [step * 3, step], 1)
pygame.draw.line(self.preview.img, outline_color, [0, step * 2], [step * 3, step * 2], 1)
self.preview.update_me = True
self.outline_all(outline_color, 1)
# horizontal
self.board.add_unit(0, 0, data[0], 1, classes.board.Obstacle, "", white, "", 7) # top
self.board.add_unit(0, h1 + data[4], data[0], 1, classes.board.Obstacle, "", white, "", 7) # bottom 1
# side obstacles
self.board.add_unit(0, h1, w2, data[4], classes.board.Obstacle, "", white, "", 7) # left
self.board.add_unit(w2 + data[3], h1, w2, data[4], classes.board.Obstacle, "", white, "", 7) # right
# self.board.all_sprites_list.move_to_front(self.board.units[0])
self.board.all_sprites_list.move_to_back(self.board.units[0])
self.board.all_sprites_list.move_to_back(self.board.board_bg)
def handle(self, event):
gd.BoardGame.handle(self, event) # send event handling up
if event.type == pygame.MOUSEBUTTONUP:
self.check_result()
def update(self, game):
game.fill((255, 255, 255))
gd.BoardGame.update(self, game) # rest of painting done by parent
def check_result(self):
if self.changed_since_check and self.finished == False:
ships = []
current = [x for x in range(self.data[2] + 1)] # self.choice_list[:]
# collect value and x position on the grid from ships list
for i in range(len(self.board.ships) - 1):
x = self.board.ships[i].grid_x - self.check[2]
y = self.board.ships[i].grid_y - self.check[0]
w = self.data[3]
h = self.data[4]
pos = x + (y * w)
current[pos] = int(self.board.ships[i].value)
del (current[-1])
if self.choice_list == current:
# self.update_score(self.points)
self.mainloop.db.update_completion(self.mainloop.userid, self.active_game.dbgameid, self.level.lvl)
self.level.update_level_dictx()
self.mainloop.redraw_needed[1] = True
self.finished = True
self.board.units[0].img = self.bg_img.img.copy()
self.board.all_sprites_list.move_to_front(self.board.units[0])
self.board.units[0].update_me = True
# copied from level controller:
index = random.randrange(0, len(self.dp["Great job!"]))
praise = self.dp["Great job!"][index]
self.say(praise, 6)
self.board.units[2].value = praise
self.board.units[2].update_me = True
| gpl-3.0 | 6,823,931,356,465,523,000 | 43.846561 | 115 | 0.549788 | false |
badp/ganeti | test/py/ganeti.rpc_unittest.py | 1 | 34586 | #!/usr/bin/python
#
# Copyright (C) 2010, 2011, 2012, 2013 Google Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA.
"""Script for testing ganeti.rpc"""
import os
import sys
import unittest
import random
import tempfile
from ganeti import constants
from ganeti import compat
from ganeti.rpc import node as rpc
from ganeti import rpc_defs
from ganeti import http
from ganeti import errors
from ganeti import serializer
from ganeti import objects
from ganeti import backend
import testutils
import mocks
class _FakeRequestProcessor:
def __init__(self, response_fn):
self._response_fn = response_fn
self.reqcount = 0
def __call__(self, reqs, lock_monitor_cb=None):
assert lock_monitor_cb is None or callable(lock_monitor_cb)
for req in reqs:
self.reqcount += 1
self._response_fn(req)
def GetFakeSimpleStoreClass(fn):
class FakeSimpleStore:
GetNodePrimaryIPList = fn
GetPrimaryIPFamily = lambda _: None
return FakeSimpleStore
def _RaiseNotImplemented():
"""Simple wrapper to raise NotImplementedError.
"""
raise NotImplementedError
class TestRpcProcessor(unittest.TestCase):
def _FakeAddressLookup(self, map):
return lambda node_list: [map.get(node) for node in node_list]
def _GetVersionResponse(self, req):
self.assertEqual(req.host, "127.0.0.1")
self.assertEqual(req.port, 24094)
self.assertEqual(req.path, "/version")
self.assertEqual(req.read_timeout, constants.RPC_TMO_URGENT)
req.success = True
req.resp_status_code = http.HTTP_OK
req.resp_body = serializer.DumpJson((True, 123))
def testVersionSuccess(self):
resolver = rpc._StaticResolver(["127.0.0.1"])
http_proc = _FakeRequestProcessor(self._GetVersionResponse)
proc = rpc._RpcProcessor(resolver, 24094)
result = proc(["localhost"], "version", {"localhost": ""}, 60,
NotImplemented, _req_process_fn=http_proc)
self.assertEqual(result.keys(), ["localhost"])
lhresp = result["localhost"]
self.assertFalse(lhresp.offline)
self.assertEqual(lhresp.node, "localhost")
self.assertFalse(lhresp.fail_msg)
self.assertEqual(lhresp.payload, 123)
self.assertEqual(lhresp.call, "version")
lhresp.Raise("should not raise")
self.assertEqual(http_proc.reqcount, 1)
def _ReadTimeoutResponse(self, req):
self.assertEqual(req.host, "192.0.2.13")
self.assertEqual(req.port, 19176)
self.assertEqual(req.path, "/version")
self.assertEqual(req.read_timeout, 12356)
req.success = True
req.resp_status_code = http.HTTP_OK
req.resp_body = serializer.DumpJson((True, -1))
def testReadTimeout(self):
resolver = rpc._StaticResolver(["192.0.2.13"])
http_proc = _FakeRequestProcessor(self._ReadTimeoutResponse)
proc = rpc._RpcProcessor(resolver, 19176)
host = "node31856"
body = {host: ""}
result = proc([host], "version", body, 12356, NotImplemented,
_req_process_fn=http_proc)
self.assertEqual(result.keys(), [host])
lhresp = result[host]
self.assertFalse(lhresp.offline)
self.assertEqual(lhresp.node, host)
self.assertFalse(lhresp.fail_msg)
self.assertEqual(lhresp.payload, -1)
self.assertEqual(lhresp.call, "version")
lhresp.Raise("should not raise")
self.assertEqual(http_proc.reqcount, 1)
def testOfflineNode(self):
resolver = rpc._StaticResolver([rpc._OFFLINE])
http_proc = _FakeRequestProcessor(NotImplemented)
proc = rpc._RpcProcessor(resolver, 30668)
host = "n17296"
body = {host: ""}
result = proc([host], "version", body, 60, NotImplemented,
_req_process_fn=http_proc)
self.assertEqual(result.keys(), [host])
lhresp = result[host]
self.assertTrue(lhresp.offline)
self.assertEqual(lhresp.node, host)
self.assertTrue(lhresp.fail_msg)
self.assertFalse(lhresp.payload)
self.assertEqual(lhresp.call, "version")
# With a message
self.assertRaises(errors.OpExecError, lhresp.Raise, "should raise")
# No message
self.assertRaises(errors.OpExecError, lhresp.Raise, None)
self.assertEqual(http_proc.reqcount, 0)
def _GetMultiVersionResponse(self, req):
self.assert_(req.host.startswith("node"))
self.assertEqual(req.port, 23245)
self.assertEqual(req.path, "/version")
req.success = True
req.resp_status_code = http.HTTP_OK
req.resp_body = serializer.DumpJson((True, 987))
def testMultiVersionSuccess(self):
nodes = ["node%s" % i for i in range(50)]
body = dict((n, "") for n in nodes)
resolver = rpc._StaticResolver(nodes)
http_proc = _FakeRequestProcessor(self._GetMultiVersionResponse)
proc = rpc._RpcProcessor(resolver, 23245)
result = proc(nodes, "version", body, 60, NotImplemented,
_req_process_fn=http_proc)
self.assertEqual(sorted(result.keys()), sorted(nodes))
for name in nodes:
lhresp = result[name]
self.assertFalse(lhresp.offline)
self.assertEqual(lhresp.node, name)
self.assertFalse(lhresp.fail_msg)
self.assertEqual(lhresp.payload, 987)
self.assertEqual(lhresp.call, "version")
lhresp.Raise("should not raise")
self.assertEqual(http_proc.reqcount, len(nodes))
def _GetVersionResponseFail(self, errinfo, req):
self.assertEqual(req.path, "/version")
req.success = True
req.resp_status_code = http.HTTP_OK
req.resp_body = serializer.DumpJson((False, errinfo))
def testVersionFailure(self):
resolver = rpc._StaticResolver(["aef9ur4i.example.com"])
proc = rpc._RpcProcessor(resolver, 5903)
for errinfo in [None, "Unknown error"]:
http_proc = \
_FakeRequestProcessor(compat.partial(self._GetVersionResponseFail,
errinfo))
host = "aef9ur4i.example.com"
body = {host: ""}
result = proc(body.keys(), "version", body, 60, NotImplemented,
_req_process_fn=http_proc)
self.assertEqual(result.keys(), [host])
lhresp = result[host]
self.assertFalse(lhresp.offline)
self.assertEqual(lhresp.node, host)
self.assert_(lhresp.fail_msg)
self.assertFalse(lhresp.payload)
self.assertEqual(lhresp.call, "version")
self.assertRaises(errors.OpExecError, lhresp.Raise, "failed")
self.assertEqual(http_proc.reqcount, 1)
def _GetHttpErrorResponse(self, httperrnodes, failnodes, req):
self.assertEqual(req.path, "/vg_list")
self.assertEqual(req.port, 15165)
if req.host in httperrnodes:
req.success = False
req.error = "Node set up for HTTP errors"
elif req.host in failnodes:
req.success = True
req.resp_status_code = 404
req.resp_body = serializer.DumpJson({
"code": 404,
"message": "Method not found",
"explain": "Explanation goes here",
})
else:
req.success = True
req.resp_status_code = http.HTTP_OK
req.resp_body = serializer.DumpJson((True, hash(req.host)))
def testHttpError(self):
nodes = ["uaf6pbbv%s" % i for i in range(50)]
body = dict((n, "") for n in nodes)
resolver = rpc._StaticResolver(nodes)
httperrnodes = set(nodes[1::7])
self.assertEqual(len(httperrnodes), 7)
failnodes = set(nodes[2::3]) - httperrnodes
self.assertEqual(len(failnodes), 14)
self.assertEqual(len(set(nodes) - failnodes - httperrnodes), 29)
proc = rpc._RpcProcessor(resolver, 15165)
http_proc = \
_FakeRequestProcessor(compat.partial(self._GetHttpErrorResponse,
httperrnodes, failnodes))
result = proc(nodes, "vg_list", body,
constants.RPC_TMO_URGENT, NotImplemented,
_req_process_fn=http_proc)
self.assertEqual(sorted(result.keys()), sorted(nodes))
for name in nodes:
lhresp = result[name]
self.assertFalse(lhresp.offline)
self.assertEqual(lhresp.node, name)
self.assertEqual(lhresp.call, "vg_list")
if name in httperrnodes:
self.assert_(lhresp.fail_msg)
self.assertRaises(errors.OpExecError, lhresp.Raise, "failed")
elif name in failnodes:
self.assert_(lhresp.fail_msg)
self.assertRaises(errors.OpPrereqError, lhresp.Raise, "failed",
prereq=True, ecode=errors.ECODE_INVAL)
else:
self.assertFalse(lhresp.fail_msg)
self.assertEqual(lhresp.payload, hash(name))
lhresp.Raise("should not raise")
self.assertEqual(http_proc.reqcount, len(nodes))
def _GetInvalidResponseA(self, req):
self.assertEqual(req.path, "/version")
req.success = True
req.resp_status_code = http.HTTP_OK
req.resp_body = serializer.DumpJson(("This", "is", "an", "invalid",
"response", "!", 1, 2, 3))
def _GetInvalidResponseB(self, req):
self.assertEqual(req.path, "/version")
req.success = True
req.resp_status_code = http.HTTP_OK
req.resp_body = serializer.DumpJson("invalid response")
def testInvalidResponse(self):
resolver = rpc._StaticResolver(["oqo7lanhly.example.com"])
proc = rpc._RpcProcessor(resolver, 19978)
for fn in [self._GetInvalidResponseA, self._GetInvalidResponseB]:
http_proc = _FakeRequestProcessor(fn)
host = "oqo7lanhly.example.com"
body = {host: ""}
result = proc([host], "version", body, 60, NotImplemented,
_req_process_fn=http_proc)
self.assertEqual(result.keys(), [host])
lhresp = result[host]
self.assertFalse(lhresp.offline)
self.assertEqual(lhresp.node, host)
self.assert_(lhresp.fail_msg)
self.assertFalse(lhresp.payload)
self.assertEqual(lhresp.call, "version")
self.assertRaises(errors.OpExecError, lhresp.Raise, "failed")
self.assertEqual(http_proc.reqcount, 1)
def _GetBodyTestResponse(self, test_data, req):
self.assertEqual(req.host, "192.0.2.84")
self.assertEqual(req.port, 18700)
self.assertEqual(req.path, "/upload_file")
self.assertEqual(serializer.LoadJson(req.post_data), test_data)
req.success = True
req.resp_status_code = http.HTTP_OK
req.resp_body = serializer.DumpJson((True, None))
def testResponseBody(self):
test_data = {
"Hello": "World",
"xyz": range(10),
}
resolver = rpc._StaticResolver(["192.0.2.84"])
http_proc = _FakeRequestProcessor(compat.partial(self._GetBodyTestResponse,
test_data))
proc = rpc._RpcProcessor(resolver, 18700)
host = "node19759"
body = {host: serializer.DumpJson(test_data)}
result = proc([host], "upload_file", body, 30, NotImplemented,
_req_process_fn=http_proc)
self.assertEqual(result.keys(), [host])
lhresp = result[host]
self.assertFalse(lhresp.offline)
self.assertEqual(lhresp.node, host)
self.assertFalse(lhresp.fail_msg)
self.assertEqual(lhresp.payload, None)
self.assertEqual(lhresp.call, "upload_file")
lhresp.Raise("should not raise")
self.assertEqual(http_proc.reqcount, 1)
class TestSsconfResolver(unittest.TestCase):
def testSsconfLookup(self):
addr_list = ["192.0.2.%d" % n for n in range(0, 255, 13)]
node_list = ["node%d.example.com" % n for n in range(0, 255, 13)]
node_addr_list = [" ".join(t) for t in zip(node_list, addr_list)]
ssc = GetFakeSimpleStoreClass(lambda _: node_addr_list)
result = rpc._SsconfResolver(True, node_list, NotImplemented,
ssc=ssc, nslookup_fn=NotImplemented)
self.assertEqual(result, zip(node_list, addr_list, node_list))
def testNsLookup(self):
addr_list = ["192.0.2.%d" % n for n in range(0, 255, 13)]
node_list = ["node%d.example.com" % n for n in range(0, 255, 13)]
ssc = GetFakeSimpleStoreClass(lambda _: [])
node_addr_map = dict(zip(node_list, addr_list))
nslookup_fn = lambda name, family=None: node_addr_map.get(name)
result = rpc._SsconfResolver(True, node_list, NotImplemented,
ssc=ssc, nslookup_fn=nslookup_fn)
self.assertEqual(result, zip(node_list, addr_list, node_list))
def testDisabledSsconfIp(self):
addr_list = ["192.0.2.%d" % n for n in range(0, 255, 13)]
node_list = ["node%d.example.com" % n for n in range(0, 255, 13)]
ssc = GetFakeSimpleStoreClass(_RaiseNotImplemented)
node_addr_map = dict(zip(node_list, addr_list))
nslookup_fn = lambda name, family=None: node_addr_map.get(name)
result = rpc._SsconfResolver(False, node_list, NotImplemented,
ssc=ssc, nslookup_fn=nslookup_fn)
self.assertEqual(result, zip(node_list, addr_list, node_list))
def testBothLookups(self):
addr_list = ["192.0.2.%d" % n for n in range(0, 255, 13)]
node_list = ["node%d.example.com" % n for n in range(0, 255, 13)]
n = len(addr_list) / 2
node_addr_list = [" ".join(t) for t in zip(node_list[n:], addr_list[n:])]
ssc = GetFakeSimpleStoreClass(lambda _: node_addr_list)
node_addr_map = dict(zip(node_list[:n], addr_list[:n]))
nslookup_fn = lambda name, family=None: node_addr_map.get(name)
result = rpc._SsconfResolver(True, node_list, NotImplemented,
ssc=ssc, nslookup_fn=nslookup_fn)
self.assertEqual(result, zip(node_list, addr_list, node_list))
def testAddressLookupIPv6(self):
addr_list = ["2001:db8::%d" % n for n in range(0, 255, 11)]
node_list = ["node%d.example.com" % n for n in range(0, 255, 11)]
node_addr_list = [" ".join(t) for t in zip(node_list, addr_list)]
ssc = GetFakeSimpleStoreClass(lambda _: node_addr_list)
result = rpc._SsconfResolver(True, node_list, NotImplemented,
ssc=ssc, nslookup_fn=NotImplemented)
self.assertEqual(result, zip(node_list, addr_list, node_list))
class TestStaticResolver(unittest.TestCase):
def test(self):
addresses = ["192.0.2.%d" % n for n in range(0, 123, 7)]
nodes = ["node%s.example.com" % n for n in range(0, 123, 7)]
res = rpc._StaticResolver(addresses)
self.assertEqual(res(nodes, NotImplemented), zip(nodes, addresses, nodes))
def testWrongLength(self):
res = rpc._StaticResolver([])
self.assertRaises(AssertionError, res, ["abc"], NotImplemented)
class TestNodeConfigResolver(unittest.TestCase):
@staticmethod
def _GetSingleOnlineNode(uuid):
assert uuid == "node90-uuid"
return objects.Node(name="node90.example.com",
uuid=uuid,
offline=False,
primary_ip="192.0.2.90")
@staticmethod
def _GetSingleOfflineNode(uuid):
assert uuid == "node100-uuid"
return objects.Node(name="node100.example.com",
uuid=uuid,
offline=True,
primary_ip="192.0.2.100")
def testSingleOnline(self):
self.assertEqual(rpc._NodeConfigResolver(self._GetSingleOnlineNode,
NotImplemented,
["node90-uuid"], None),
[("node90.example.com", "192.0.2.90", "node90-uuid")])
def testSingleOffline(self):
self.assertEqual(rpc._NodeConfigResolver(self._GetSingleOfflineNode,
NotImplemented,
["node100-uuid"], None),
[("node100.example.com", rpc._OFFLINE, "node100-uuid")])
def testSingleOfflineWithAcceptOffline(self):
fn = self._GetSingleOfflineNode
assert fn("node100-uuid").offline
self.assertEqual(rpc._NodeConfigResolver(fn, NotImplemented,
["node100-uuid"],
rpc_defs.ACCEPT_OFFLINE_NODE),
[("node100.example.com", "192.0.2.100", "node100-uuid")])
for i in [False, True, "", "Hello", 0, 1]:
self.assertRaises(AssertionError, rpc._NodeConfigResolver,
fn, NotImplemented, ["node100.example.com"], i)
def testUnknownSingleNode(self):
self.assertEqual(rpc._NodeConfigResolver(lambda _: None, NotImplemented,
["node110.example.com"], None),
[("node110.example.com", "node110.example.com",
"node110.example.com")])
def testMultiEmpty(self):
self.assertEqual(rpc._NodeConfigResolver(NotImplemented,
lambda: {},
[], None),
[])
def testMultiSomeOffline(self):
nodes = dict(("node%s-uuid" % i,
objects.Node(name="node%s.example.com" % i,
offline=((i % 3) == 0),
primary_ip="192.0.2.%s" % i,
uuid="node%s-uuid" % i))
for i in range(1, 255))
# Resolve no names
self.assertEqual(rpc._NodeConfigResolver(NotImplemented,
lambda: nodes,
[], None),
[])
# Offline, online and unknown hosts
self.assertEqual(rpc._NodeConfigResolver(NotImplemented,
lambda: nodes,
["node3-uuid",
"node92-uuid",
"node54-uuid",
"unknown.example.com",],
None), [
("node3.example.com", rpc._OFFLINE, "node3-uuid"),
("node92.example.com", "192.0.2.92", "node92-uuid"),
("node54.example.com", rpc._OFFLINE, "node54-uuid"),
("unknown.example.com", "unknown.example.com", "unknown.example.com"),
])
class TestCompress(unittest.TestCase):
def test(self):
for data in ["", "Hello", "Hello World!\nnew\nlines"]:
self.assertEqual(rpc._Compress(NotImplemented, data),
(constants.RPC_ENCODING_NONE, data))
for data in [512 * " ", 5242 * "Hello World!\n"]:
compressed = rpc._Compress(NotImplemented, data)
self.assertEqual(len(compressed), 2)
self.assertEqual(backend._Decompress(compressed), data)
def testDecompression(self):
self.assertRaises(AssertionError, backend._Decompress, "")
self.assertRaises(AssertionError, backend._Decompress, [""])
self.assertRaises(AssertionError, backend._Decompress,
("unknown compression", "data"))
self.assertRaises(Exception, backend._Decompress,
(constants.RPC_ENCODING_ZLIB_BASE64, "invalid zlib data"))
class TestRpcClientBase(unittest.TestCase):
def testNoHosts(self):
cdef = ("test_call", NotImplemented, None, constants.RPC_TMO_SLOW, [],
None, None, NotImplemented)
http_proc = _FakeRequestProcessor(NotImplemented)
client = rpc._RpcClientBase(rpc._StaticResolver([]), NotImplemented,
_req_process_fn=http_proc)
self.assertEqual(client._Call(cdef, [], []), {})
# Test wrong number of arguments
self.assertRaises(errors.ProgrammerError, client._Call,
cdef, [], [0, 1, 2])
def testTimeout(self):
def _CalcTimeout((arg1, arg2)):
return arg1 + arg2
def _VerifyRequest(exp_timeout, req):
self.assertEqual(req.read_timeout, exp_timeout)
req.success = True
req.resp_status_code = http.HTTP_OK
req.resp_body = serializer.DumpJson((True, hex(req.read_timeout)))
resolver = rpc._StaticResolver([
"192.0.2.1",
"192.0.2.2",
])
nodes = [
"node1.example.com",
"node2.example.com",
]
tests = [(100, None, 100), (30, None, 30)]
tests.extend((_CalcTimeout, i, i + 300)
for i in [0, 5, 16485, 30516])
for timeout, arg1, exp_timeout in tests:
cdef = ("test_call", NotImplemented, None, timeout, [
("arg1", None, NotImplemented),
("arg2", None, NotImplemented),
], None, None, NotImplemented)
http_proc = _FakeRequestProcessor(compat.partial(_VerifyRequest,
exp_timeout))
client = rpc._RpcClientBase(resolver, NotImplemented,
_req_process_fn=http_proc)
result = client._Call(cdef, nodes, [arg1, 300])
self.assertEqual(len(result), len(nodes))
self.assertTrue(compat.all(not res.fail_msg and
res.payload == hex(exp_timeout)
for res in result.values()))
def testArgumentEncoder(self):
(AT1, AT2) = range(1, 3)
resolver = rpc._StaticResolver([
"192.0.2.5",
"192.0.2.6",
])
nodes = [
"node5.example.com",
"node6.example.com",
]
encoders = {
AT1: lambda _, value: hex(value),
AT2: lambda _, value: hash(value),
}
cdef = ("test_call", NotImplemented, None, constants.RPC_TMO_NORMAL, [
("arg0", None, NotImplemented),
("arg1", AT1, NotImplemented),
("arg1", AT2, NotImplemented),
], None, None, NotImplemented)
def _VerifyRequest(req):
req.success = True
req.resp_status_code = http.HTTP_OK
req.resp_body = serializer.DumpJson((True, req.post_data))
http_proc = _FakeRequestProcessor(_VerifyRequest)
for num in [0, 3796, 9032119]:
client = rpc._RpcClientBase(resolver, encoders.get,
_req_process_fn=http_proc)
result = client._Call(cdef, nodes, ["foo", num, "Hello%s" % num])
self.assertEqual(len(result), len(nodes))
for res in result.values():
self.assertFalse(res.fail_msg)
self.assertEqual(serializer.LoadJson(res.payload),
["foo", hex(num), hash("Hello%s" % num)])
def testPostProc(self):
def _VerifyRequest(nums, req):
req.success = True
req.resp_status_code = http.HTTP_OK
req.resp_body = serializer.DumpJson((True, nums))
resolver = rpc._StaticResolver([
"192.0.2.90",
"192.0.2.95",
])
nodes = [
"node90.example.com",
"node95.example.com",
]
def _PostProc(res):
self.assertFalse(res.fail_msg)
res.payload = sum(res.payload)
return res
cdef = ("test_call", NotImplemented, None, constants.RPC_TMO_NORMAL, [],
None, _PostProc, NotImplemented)
# Seeded random generator
rnd = random.Random(20299)
for i in [0, 4, 74, 1391]:
nums = [rnd.randint(0, 1000) for _ in range(i)]
http_proc = _FakeRequestProcessor(compat.partial(_VerifyRequest, nums))
client = rpc._RpcClientBase(resolver, NotImplemented,
_req_process_fn=http_proc)
result = client._Call(cdef, nodes, [])
self.assertEqual(len(result), len(nodes))
for res in result.values():
self.assertFalse(res.fail_msg)
self.assertEqual(res.payload, sum(nums))
def testPreProc(self):
def _VerifyRequest(req):
req.success = True
req.resp_status_code = http.HTTP_OK
req.resp_body = serializer.DumpJson((True, req.post_data))
resolver = rpc._StaticResolver([
"192.0.2.30",
"192.0.2.35",
])
nodes = [
"node30.example.com",
"node35.example.com",
]
def _PreProc(node, data):
self.assertEqual(len(data), 1)
return data[0] + node
cdef = ("test_call", NotImplemented, None, constants.RPC_TMO_NORMAL, [
("arg0", None, NotImplemented),
], _PreProc, None, NotImplemented)
http_proc = _FakeRequestProcessor(_VerifyRequest)
client = rpc._RpcClientBase(resolver, NotImplemented,
_req_process_fn=http_proc)
for prefix in ["foo", "bar", "baz"]:
result = client._Call(cdef, nodes, [prefix])
self.assertEqual(len(result), len(nodes))
for (idx, (node, res)) in enumerate(result.items()):
self.assertFalse(res.fail_msg)
self.assertEqual(serializer.LoadJson(res.payload), prefix + node)
def testResolverOptions(self):
def _VerifyRequest(req):
req.success = True
req.resp_status_code = http.HTTP_OK
req.resp_body = serializer.DumpJson((True, req.post_data))
nodes = [
"node30.example.com",
"node35.example.com",
]
def _Resolver(expected, hosts, options):
self.assertEqual(hosts, nodes)
self.assertEqual(options, expected)
return zip(hosts, nodes, hosts)
def _DynamicResolverOptions((arg0, )):
return sum(arg0)
tests = [
(None, None, None),
(rpc_defs.ACCEPT_OFFLINE_NODE, None, rpc_defs.ACCEPT_OFFLINE_NODE),
(False, None, False),
(True, None, True),
(0, None, 0),
(_DynamicResolverOptions, [1, 2, 3], 6),
(_DynamicResolverOptions, range(4, 19), 165),
]
for (resolver_opts, arg0, expected) in tests:
cdef = ("test_call", NotImplemented, resolver_opts,
constants.RPC_TMO_NORMAL, [
("arg0", None, NotImplemented),
], None, None, NotImplemented)
http_proc = _FakeRequestProcessor(_VerifyRequest)
client = rpc._RpcClientBase(compat.partial(_Resolver, expected),
NotImplemented, _req_process_fn=http_proc)
result = client._Call(cdef, nodes, [arg0])
self.assertEqual(len(result), len(nodes))
for (idx, (node, res)) in enumerate(result.items()):
self.assertFalse(res.fail_msg)
class _FakeConfigForRpcRunner:
GetAllNodesInfo = NotImplemented
def __init__(self, cluster=NotImplemented):
self._cluster = cluster
def GetNodeInfo(self, name):
return objects.Node(name=name)
def GetMultiNodeInfo(self, names):
return [(name, self.GetNodeInfo(name)) for name in names]
def GetClusterInfo(self):
return self._cluster
def GetInstanceDiskParams(self, _):
return constants.DISK_DT_DEFAULTS
class TestRpcRunner(unittest.TestCase):
def testUploadFile(self):
data = 1779 * "Hello World\n"
tmpfile = tempfile.NamedTemporaryFile()
tmpfile.write(data)
tmpfile.flush()
st = os.stat(tmpfile.name)
nodes = [
"node1.example.com",
]
def _VerifyRequest(req):
(uldata, ) = serializer.LoadJson(req.post_data)
self.assertEqual(len(uldata), 7)
self.assertEqual(uldata[0], tmpfile.name)
self.assertEqual(list(uldata[1]), list(rpc._Compress(nodes[0], data)))
self.assertEqual(uldata[2], st.st_mode)
self.assertEqual(uldata[3], "user%s" % os.getuid())
self.assertEqual(uldata[4], "group%s" % os.getgid())
self.assertTrue(uldata[5] is not None)
self.assertEqual(uldata[6], st.st_mtime)
req.success = True
req.resp_status_code = http.HTTP_OK
req.resp_body = serializer.DumpJson((True, None))
http_proc = _FakeRequestProcessor(_VerifyRequest)
std_runner = rpc.RpcRunner(_FakeConfigForRpcRunner(), None,
_req_process_fn=http_proc,
_getents=mocks.FakeGetentResolver)
cfg_runner = rpc.ConfigRunner(None, ["192.0.2.13"],
_req_process_fn=http_proc,
_getents=mocks.FakeGetentResolver)
for runner in [std_runner, cfg_runner]:
result = runner.call_upload_file(nodes, tmpfile.name)
self.assertEqual(len(result), len(nodes))
for (idx, (node, res)) in enumerate(result.items()):
self.assertFalse(res.fail_msg)
def testEncodeInstance(self):
cluster = objects.Cluster(hvparams={
constants.HT_KVM: {
constants.HV_CDROM_IMAGE_PATH: "foo",
},
},
beparams={
constants.PP_DEFAULT: {
constants.BE_MAXMEM: 8192,
},
},
os_hvp={},
osparams={
"linux": {
"role": "unknown",
},
})
cluster.UpgradeConfig()
inst = objects.Instance(name="inst1.example.com",
hypervisor=constants.HT_KVM,
os="linux",
hvparams={
constants.HV_CDROM_IMAGE_PATH: "bar",
constants.HV_ROOT_PATH: "/tmp",
},
beparams={
constants.BE_MINMEM: 128,
constants.BE_MAXMEM: 256,
},
nics=[
objects.NIC(nicparams={
constants.NIC_MODE: "mymode",
}),
],
disk_template=constants.DT_PLAIN,
disks=[
objects.Disk(dev_type=constants.DT_PLAIN, size=4096,
logical_id=("vg", "disk6120")),
objects.Disk(dev_type=constants.DT_PLAIN, size=1024,
logical_id=("vg", "disk8508")),
])
inst.UpgradeConfig()
cfg = _FakeConfigForRpcRunner(cluster=cluster)
runner = rpc.RpcRunner(cfg, None,
_req_process_fn=NotImplemented,
_getents=mocks.FakeGetentResolver)
def _CheckBasics(result):
self.assertEqual(result["name"], "inst1.example.com")
self.assertEqual(result["os"], "linux")
self.assertEqual(result["beparams"][constants.BE_MINMEM], 128)
self.assertEqual(len(result["nics"]), 1)
self.assertEqual(result["nics"][0]["nicparams"][constants.NIC_MODE],
"mymode")
# Generic object serialization
result = runner._encoder(NotImplemented, (rpc_defs.ED_OBJECT_DICT, inst))
_CheckBasics(result)
self.assertEqual(len(result["hvparams"]), 2)
result = runner._encoder(NotImplemented,
(rpc_defs.ED_OBJECT_DICT_LIST, 5 * [inst]))
map(_CheckBasics, result)
map(lambda r: self.assertEqual(len(r["hvparams"]), 2), result)
# Just an instance
result = runner._encoder(NotImplemented, (rpc_defs.ED_INST_DICT, inst))
_CheckBasics(result)
self.assertEqual(result["beparams"][constants.BE_MAXMEM], 256)
self.assertEqual(result["hvparams"][constants.HV_CDROM_IMAGE_PATH], "bar")
self.assertEqual(result["hvparams"][constants.HV_ROOT_PATH], "/tmp")
self.assertEqual(result["osparams"], {
"role": "unknown",
})
self.assertEqual(len(result["hvparams"]),
len(constants.HVC_DEFAULTS[constants.HT_KVM]))
# Instance with OS parameters
result = runner._encoder(NotImplemented,
(rpc_defs.ED_INST_DICT_OSP_DP, (inst, {
"role": "webserver",
"other": "field",
})))
_CheckBasics(result)
self.assertEqual(result["beparams"][constants.BE_MAXMEM], 256)
self.assertEqual(result["hvparams"][constants.HV_CDROM_IMAGE_PATH], "bar")
self.assertEqual(result["hvparams"][constants.HV_ROOT_PATH], "/tmp")
self.assertEqual(result["osparams"], {
"role": "webserver",
"other": "field",
})
# Instance with hypervisor and backend parameters
result = runner._encoder(NotImplemented,
(rpc_defs.ED_INST_DICT_HVP_BEP_DP, (inst, {
constants.HT_KVM: {
constants.HV_BOOT_ORDER: "xyz",
},
}, {
constants.BE_VCPUS: 100,
constants.BE_MAXMEM: 4096,
})))
_CheckBasics(result)
self.assertEqual(result["beparams"][constants.BE_MAXMEM], 4096)
self.assertEqual(result["beparams"][constants.BE_VCPUS], 100)
self.assertEqual(result["hvparams"][constants.HT_KVM], {
constants.HV_BOOT_ORDER: "xyz",
})
self.assertEqual(result["disks"], [{
"dev_type": constants.DT_PLAIN,
"dynamic_params": {},
"size": 4096,
"logical_id": ("vg", "disk6120"),
"params": constants.DISK_DT_DEFAULTS[inst.disk_template],
}, {
"dev_type": constants.DT_PLAIN,
"dynamic_params": {},
"size": 1024,
"logical_id": ("vg", "disk8508"),
"params": constants.DISK_DT_DEFAULTS[inst.disk_template],
}])
self.assertTrue(compat.all(disk.params == {} for disk in inst.disks),
msg="Configuration objects were modified")
class TestLegacyNodeInfo(unittest.TestCase):
KEY_BOOT = "bootid"
KEY_NAME = "name"
KEY_STORAGE_FREE = "storage_free"
KEY_STORAGE_TOTAL = "storage_size"
KEY_CPU_COUNT = "cpu_count"
KEY_SPINDLES_FREE = "spindles_free"
KEY_SPINDLES_TOTAL = "spindles_total"
KEY_STORAGE_TYPE = "type" # key for storage type
VAL_BOOT = 0
VAL_VG_NAME = "xy"
VAL_VG_FREE = 11
VAL_VG_TOTAL = 12
VAL_VG_TYPE = "lvm-vg"
VAL_CPU_COUNT = 2
VAL_PV_NAME = "ab"
VAL_PV_FREE = 31
VAL_PV_TOTAL = 32
VAL_PV_TYPE = "lvm-pv"
DICT_VG = {
KEY_NAME: VAL_VG_NAME,
KEY_STORAGE_FREE: VAL_VG_FREE,
KEY_STORAGE_TOTAL: VAL_VG_TOTAL,
KEY_STORAGE_TYPE: VAL_VG_TYPE,
}
DICT_HV = {KEY_CPU_COUNT: VAL_CPU_COUNT}
DICT_SP = {
KEY_STORAGE_TYPE: VAL_PV_TYPE,
KEY_NAME: VAL_PV_NAME,
KEY_STORAGE_FREE: VAL_PV_FREE,
KEY_STORAGE_TOTAL: VAL_PV_TOTAL,
}
STD_LST = [VAL_BOOT, [DICT_VG, DICT_SP], [DICT_HV]]
STD_DICT = {
KEY_BOOT: VAL_BOOT,
KEY_NAME: VAL_VG_NAME,
KEY_STORAGE_FREE: VAL_VG_FREE,
KEY_STORAGE_TOTAL: VAL_VG_TOTAL,
KEY_SPINDLES_FREE: VAL_PV_FREE,
KEY_SPINDLES_TOTAL: VAL_PV_TOTAL,
KEY_CPU_COUNT: VAL_CPU_COUNT,
}
def testWithSpindles(self):
result = rpc.MakeLegacyNodeInfo(self.STD_LST, constants.DT_PLAIN)
self.assertEqual(result, self.STD_DICT)
def testNoSpindles(self):
my_lst = [self.VAL_BOOT, [self.DICT_VG], [self.DICT_HV]]
result = rpc.MakeLegacyNodeInfo(my_lst, constants.DT_PLAIN)
expected_dict = dict((k,v) for k, v in self.STD_DICT.iteritems())
expected_dict[self.KEY_SPINDLES_FREE] = 0
expected_dict[self.KEY_SPINDLES_TOTAL] = 0
self.assertEqual(result, expected_dict)
if __name__ == "__main__":
testutils.GanetiTestProgram()
| gpl-2.0 | -2,720,023,759,345,180,700 | 34.952183 | 80 | 0.611028 | false |
sandlbn/django-theatre | theatre_performance/views.py | 1 | 6668 | # -*- coding: utf-8 -*-
__author__ = 'sandlbn'
from .models import Performance
from .models import PerformanceFrontPage
from .models import PerformanceGenre
from .models import PerformanceTime
from .models import PerformanceDonor
from theatre_news.models import News
from .forms import PerformanceForm
from .forms import PerformanceTimeForm
from .forms import PerformanceDonorForm
from .forms import PerformanceGenreForm
from django.views.generic import ListView
from django.views.generic import DetailView
from django.views.generic import CreateView
from django.views.generic import UpdateView
from django.views.generic import DeleteView
from django.views.generic import TemplateView
from django.utils.datetime_safe import datetime
from braces.views import StaffuserRequiredMixin
from django.core.urlresolvers import reverse_lazy
from theatre_core.utils.path import template_path
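# Note: template_path(Model, area, name), imported above, is assumed to build the
# template path from the model's app and the given segments (roughly
# '<app>/<area>/<name>.html'); the helper itself is not shown here.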
class FrontPageView(ListView):
''' Start Page with promoted Performances '''
queryset = PerformanceFrontPage.objects.filter(
)
def get_context_data(self, *args, **kwargs):
context = super(FrontPageView, self).get_context_data(**kwargs)
context["performances"] = self.queryset
context['news'] = News.objects.filter(
published=True).order_by('-id')[:3]
return context
template_name = template_path(PerformanceTime, 'frontend', 'index')
class PerformanceView(DetailView):
''' Single Performance '''
model = Performance
slug_field = 'slug'
template_name = 'performance.html'
class PerformanceTimeDetailView(DetailView):
''' Single Performance Time '''
model = PerformanceTime
context_object_name = 'performance'
template_name = template_path(PerformanceTime, 'frontend', 'detail')
class PerformanceList(TemplateView):
''' Start Page with listed Performances '''
template_name = template_path(Performance, 'frontend', 'time_calendar')
class PerformanceBackendListView(StaffuserRequiredMixin, ListView):
''' Backend page with listed Performances '''
model = Performance
template_name = template_path(Performance, 'backend', 'list')
class PerformanceCreateView(StaffuserRequiredMixin, CreateView):
model = Performance
form_class = PerformanceForm
success_url = reverse_lazy('backend-performance-list')
template_name = template_path(Performance, 'backend', 'create_form')
class PerformanceUpdateView(StaffuserRequiredMixin, UpdateView):
model = Performance
form_class = PerformanceForm
success_url = reverse_lazy('backend-performance-list')
template_name = template_path(Performance, 'backend', 'update_form')
class PerformanceDeleteView(StaffuserRequiredMixin, DeleteView):
model = Performance
success_url = reverse_lazy('backend-performance-list')
template_name = template_path(Performance, 'backend', 'confirm_delete')
class PerformanceGenreBackendListView(StaffuserRequiredMixin, ListView):
''' Backend page with listed Performance Genres '''
model = PerformanceGenre
template_name = template_path(PerformanceGenre, 'backend', 'list')
class PerformanceGenreCreateView(StaffuserRequiredMixin, CreateView):
model = PerformanceGenre
form_class = PerformanceGenreForm
success_url = reverse_lazy('backend-performance-genre-list')
template_name = template_path(PerformanceGenre, 'backend', 'create_form')
class PerformanceGenreUpdateView(StaffuserRequiredMixin, UpdateView):
model = PerformanceGenre
form_class = PerformanceGenreForm
success_url = reverse_lazy('backend-performance-genre-list')
template_name = template_path(PerformanceGenre, 'backend', 'update_form')
class PerformanceGenreDeleteView(StaffuserRequiredMixin, DeleteView):
model = PerformanceGenre
success_url = reverse_lazy('backend-performance-genre-list')
template_name = template_path(PerformanceGenre, 'backend',
'confirm_delete')
class PerformanceTimeBackendListView(StaffuserRequiredMixin, ListView):
''' Backend page with listed Performance Times '''
model = PerformanceTime
template_name = template_path(PerformanceTime, 'backend', 'list')
class PerformanceTimeCreateView(StaffuserRequiredMixin, CreateView):
model = PerformanceTime
form_class = PerformanceTimeForm
success_url = reverse_lazy('backend-performance-time-list')
template_name = template_path(PerformanceTime, 'backend', 'create_form')
class PerformanceTimeUpdateView(StaffuserRequiredMixin, UpdateView):
model = PerformanceTime
form_class = PerformanceTimeForm
success_url = reverse_lazy('backend-performance-time-list')
template_name = template_path(PerformanceTime, 'backend', 'update_form')
class PerformanceTimeDeleteView(StaffuserRequiredMixin, DeleteView):
model = PerformanceTime
success_url = reverse_lazy('backend-performance-time-list')
template_name = template_path(PerformanceTime, 'backend', 'confirm_delete')
class PerformanceDonorBackendListView(StaffuserRequiredMixin, ListView):
''' Backend page with listed Performance Donors '''
template_name = template_path(PerformanceDonor, 'backend', 'list')
def get_queryset(self):
performance_pk = self.kwargs['performance_pk']
return PerformanceDonor.objects.filter(performance=performance_pk)
def get_context_data(self, **kwargs):
context = super(PerformanceDonorBackendListView,
self).get_context_data(**kwargs)
context['performance_pk'] = self.kwargs['performance_pk']
return context
class PerformanceDonorCreateView(StaffuserRequiredMixin, CreateView):
model = PerformanceDonor
form_class = PerformanceDonorForm
success_url = reverse_lazy('backend-performance-donor-list')
template_name = template_path(PerformanceDonor, 'backend', 'create_form')
def get_initial(self, **kwargs):
initial = super(PerformanceDonorCreateView, self).get_initial(**kwargs)
performance_pk = self.kwargs['performance_pk']
initial['performance'] = performance_pk
return initial
class PerformanceDonorUpdateView(StaffuserRequiredMixin, UpdateView):
model = PerformanceDonor
form_class = PerformanceDonorForm
success_url = reverse_lazy('backend-performance-donor-list')
template_name = template_path(PerformanceDonor, 'backend', 'update_form')
class PerformanceDonorDeleteView(StaffuserRequiredMixin, DeleteView):
model = PerformanceTime
success_url = reverse_lazy('backend-performance-donor-list')
template_name = template_path(PerformanceDonor, 'backend',
'confirm_delete')
| lgpl-3.0 | -5,993,623,482,546,329,000 | 32.847716 | 79 | 0.741602 | false |
mugurrus/ally-py-common | hr-user/hr/user/impl/user.py | 1 | 7083 | '''
Created on Mar 6, 2012
@package: hr user
@copyright: 2011 Sourcefabric o.p.s.
@license http://www.gnu.org/licenses/gpl-3.0.txt
@author: Mihai Balaceanu
Implementation for user services.
'''
from functools import reduce
import hashlib
from ally.api.criteria import AsLike, AsBoolean
from ally.api.validate import validate
from ally.container import wire
from ally.container.ioc import injected
from ally.container.support import setup
from ally.internationalization import _
from sql_alchemy.impl.entity import EntityServiceAlchemy
from sql_alchemy.support.util_service import insertModel
from sqlalchemy.orm.exc import NoResultFound
from sqlalchemy.sql.expression import or_
from ally.api.error_p1 import ConflictError, InvalidError
from hr.user.api.user import IUserService, QUser, User, Password, Avatar
from hr.user.meta.user import UserMapped
from ally.api.model import Content
from ally.cdm.spec import ICDM, PathNotFound
from urllib.request import urlopen
# --------------------------------------------------------------------
@injected
@validate(UserMapped)
@setup(IUserService, name='userService')
class UserServiceAlchemy(EntityServiceAlchemy, IUserService):
'''
Implementation for @see: IUserService
'''
cdmAvatar = ICDM; wire.entity('cdmAvatar')
# the content delivery manager where to publish avatar
avatar_url = 'http://www.gravatar.com/avatar/%(hash_email)s?s=%(size)s'; wire.config('avatar_url', doc='''
The url from where the avatar is loaded.''')
default_avatar_size = 200; wire.config('default_avatar_size', doc='''
Default user avatar image size.''')
allNames = {UserMapped.UserName, UserMapped.FullName, UserMapped.EMail, UserMapped.PhoneNumber}
def __init__(self):
'''
Construct the service
'''
assert isinstance(self.default_avatar_size, int), 'Invalid default user avatar image size %s' % self.default_avatar_size
assert isinstance(self.allNames, set), 'Invalid all name %s' % self.allNames
assert isinstance(self.cdmAvatar, ICDM), 'Invalid CDM %s' % self.cdmAvatar
EntityServiceAlchemy.__init__(self, UserMapped, QUser, all=self.queryAll, inactive=self.queryInactive)
def getById(self, identifier, scheme='http'):
user = super().getById(identifier)
assert isinstance(user, UserMapped)
if user.avatarPath:
try:
self.cdmAvatar.getMetadata(user.avatarPath)
user.Avatar = self.cdmAvatar.getURI(user.avatarPath, scheme)
except PathNotFound:
user.Avatar = user.avatarPath
elif user.EMail:
user.Avatar = self.avatar_url % {'hash_email': hashlib.md5(user.EMail.lower().encode()).hexdigest(),
'size': self.default_avatar_size}
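# Gravatar fallback: the URL is keyed by the MD5 hex digest of the lower-cased
# e-mail address (the convention Gravatar expects), sized by default_avatar_size.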
return user
def getAll(self, q=None, **options):
'''
@see: IUserService.getAll
'''
if q is None: q = QUser(inactive=False)
elif QUser.inactive not in q: q.inactive = False
# Making sure that the default query is for active.
return super().getAll(q, **options)
def update(self, user):
'''
@see: IUserService.update
'''
assert isinstance(user, User), 'Invalid user %s' % user
if user.UserName is not None: user.UserName = user.UserName.lower()
self.checkUser(user, user.Id)
return super().update(user)
def insert(self, user):
'''
@see: IUserService.insert
'''
assert isinstance(user, User), 'Invalid user %s' % user
user.UserName = user.UserName.lower()
self.checkUser(user)
userDb = insertModel(UserMapped, user, password=user.Password)
assert isinstance(userDb, UserMapped), 'Invalid user %s' % userDb
return userDb.Id
def changePassword(self, id, password):
'''
@see: IUserService.changePassword
'''
assert isinstance(password, Password), 'Invalid password change %s' % password
try:
sql = self.session().query(UserMapped)
userDb = sql.filter(UserMapped.Id == id).filter(UserMapped.password == password.OldPassword).one()
except NoResultFound: raise InvalidError(_('Invalid old password'), Password.OldPassword)
assert isinstance(userDb, UserMapped), 'Invalid user %s' % userDb
userDb.password = password.NewPassword
def setAvatar(self, id, avatar, scheme='http', content=None):
'''
@see: IUserService.setAvatar
'''
assert isinstance(avatar, Avatar), 'Invalid avatar %s' % avatar
assert content is None or isinstance(content, Content), 'Invalid content %s' % content
user = super().getById(id)
assert isinstance(user, UserMapped), 'Invalid user identifer %s' % id
if avatar.URL:
try:
urlopen(avatar.URL)
user.avatarPath = avatar.URL
except ValueError:
raise InvalidError(_('Invalid avatar URL'))
elif content is not None:
user.avatarPath = '%s/%s' % (id, content.name)
self.cdmAvatar.publishContent(user.avatarPath, content, {})
avatar.URL = self.cdmAvatar.getURI(user.avatarPath, scheme)
else:
raise InvalidError(_('Avatar must be supplied'))
return avatar.URL
# ----------------------------------------------------------------
def checkUser(self, user, userId=None):
''' Checks that the user name does not conflict with any other active user's name.'''
if User.Active not in user or user.Active:
if user.UserName is None:
assert userId is not None, 'Invalid user id %s' % userId
userName = self.session().query(UserMapped.UserName).filter(UserMapped.Id == userId)
else: userName = user.UserName
sql = self.session().query(UserMapped.Id).filter(UserMapped.UserName == userName)
sql = sql.filter(UserMapped.Active == True)
if userId is not None: sql = sql.filter(UserMapped.Id != userId)
if sql.count() > 0: raise ConflictError(_('There is already an active user with this name'), User.UserName)
def queryAll(self, sql, crit):
'''
Processes the all query.
'''
assert isinstance(crit, AsLike), 'Invalid criteria %s' % crit
filters = []
if AsLike.like in crit:
for col in self.allNames: filters.append(col.like(crit.like))
elif AsLike.ilike in crit:
for col in self.allNames: filters.append(col.ilike(crit.ilike))
sql = sql.filter(reduce(or_, filters))
return sql
def queryInactive(self, sql, crit):
'''
Processes the inactive query.
'''
assert isinstance(crit, AsBoolean), 'Invalid criteria %s' % crit
return sql.filter(UserMapped.Active == (crit.value is False))
| gpl-3.0 | -8,506,056,176,071,447,000 | 38.792135 | 128 | 0.620076 | false |
shobhitmishra/CodingProblems | LeetCode/Session3/SencondMinInABTree.py | 1 | 1590 | import sys
class TreeNode:
def __init__(self, x):
self.val = x
self.left = None
self.right = None
class Solution:
def findSecondMinimumValue(self, root: TreeNode) -> int:
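# The recursion below assumes the "second minimum node" tree shape in which every
# non-leaf node has exactly two children, so root.left and root.right can both be
# dereferenced once the leaf case has been handled.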
if not root or (not root.left and not root.right):
return -1
if root.left.val < root.right.val:
secondMinInLeft = self.findSecondMinimumValue(root.left)
return root.right.val if secondMinInLeft == -1 else min(root.right.val, secondMinInLeft)
elif root.left.val > root.right.val:
secondMinInRight = self.findSecondMinimumValue(root.right)
return root.left.val if secondMinInRight == -1 else min(root.left.val, secondMinInRight)
else:
secondMinInLeft = self.findSecondMinimumValue(root.left)
secondMinInRight = self.findSecondMinimumValue(root.right)
if secondMinInLeft == -1 and secondMinInRight == -1:
return -1
elif secondMinInLeft == -1:
return secondMinInRight
elif secondMinInRight == -1:
return secondMinInLeft
return min(secondMinInLeft, secondMinInRight)
root = TreeNode(10)
root.left = TreeNode(3)
root.left.left = TreeNode(2)
root.left.right = TreeNode(8)
root.left.right.left = TreeNode(7)
root.left.right.right = TreeNode(9)
root.right = TreeNode(15)
root.right.left = TreeNode(13)
root.right.right = TreeNode(17)
root.right.right.right = TreeNode(19)
ob = Solution()
print(ob.findSecondMinimumValue(root)) | mit | -278,850,236,724,756,200 | 35.159091 | 100 | 0.627673 | false |
qedsoftware/commcare-hq | corehq/apps/couch_sql_migration/tests/test_migration.py | 1 | 22231 | import os
import uuid
from datetime import datetime
from couchdbkit.exceptions import ResourceNotFound
from django.conf import settings
from django.core.files.uploadedfile import UploadedFile
from django.core.management import call_command
from django.test import TestCase
from django.test import override_settings
from casexml.apps.case.mock import CaseBlock
from corehq.apps.commtrack.helpers import make_product
from corehq.apps.couch_sql_migration.couchsqlmigration import get_diff_db
from corehq.apps.domain.dbaccessors import get_doc_ids_in_domain_by_type
from corehq.apps.domain.models import Domain
from corehq.apps.domain.shortcuts import create_domain
from corehq.apps.hqcase.utils import submit_case_blocks
from corehq.apps.receiverwrapper.util import submit_form_locally
from corehq.apps.receiverwrapper.exceptions import LocalSubmissionError
from corehq.apps.tzmigration.models import TimezoneMigrationProgress
from corehq.apps.tzmigration.timezonemigration import FormJsonDiff
from corehq.blobs import get_blob_db
from corehq.blobs.tests.util import TemporaryS3BlobDB
from corehq.form_processor.backends.sql.dbaccessors import FormAccessorSQL, CaseAccessorSQL, LedgerAccessorSQL
from corehq.form_processor.interfaces.dbaccessors import FormAccessors, CaseAccessors, LedgerAccessors
from corehq.form_processor.tests.utils import FormProcessorTestUtils
from corehq.form_processor.utils import should_use_sql_backend
from corehq.form_processor.utils.general import clear_local_domain_sql_backend_override
from corehq.util.test_utils import (
create_and_save_a_form, create_and_save_a_case, set_parent_case,
trap_extra_setup, TestFileMixin
)
from couchforms.models import XFormInstance
class BaseMigrationTestCase(TestCase, TestFileMixin):
file_path = 'data',
root = os.path.dirname(__file__)
def setUp(self):
super(BaseMigrationTestCase, self).setUp()
with trap_extra_setup(AttributeError, msg="S3_BLOB_DB_SETTINGS not configured"):
config = settings.S3_BLOB_DB_SETTINGS
self.s3db = TemporaryS3BlobDB(config)
assert get_blob_db() is self.s3db, (get_blob_db(), self.s3db)
FormProcessorTestUtils.delete_all_cases_forms_ledgers()
self.domain_name = uuid.uuid4().hex
self.domain = create_domain(self.domain_name)
# all new domains are set complete when they are created
TimezoneMigrationProgress.objects.filter(domain=self.domain_name).delete()
self.assertFalse(should_use_sql_backend(self.domain_name))
def tearDown(self):
FormProcessorTestUtils.delete_all_cases_forms_ledgers()
self.domain.delete()
def _do_migration_and_assert_flags(self, domain):
self.assertFalse(should_use_sql_backend(domain))
call_command('migrate_domain_from_couch_to_sql', domain, MIGRATE=True, no_input=True)
self.assertTrue(should_use_sql_backend(domain))
def _compare_diffs(self, expected):
diffs = get_diff_db(self.domain_name).get_diffs()
json_diffs = [(diff.kind, diff.json_diff) for diff in diffs]
self.assertEqual(expected, json_diffs)
def _get_form_ids(self, doc_type='XFormInstance'):
return FormAccessors(domain=self.domain_name).get_all_form_ids_in_domain(doc_type=doc_type)
def _get_case_ids(self):
return CaseAccessors(domain=self.domain_name).get_case_ids_in_domain()
class MigrationTestCase(BaseMigrationTestCase):
def test_basic_form_migration(self):
create_and_save_a_form(self.domain_name)
self.assertEqual(1, len(self._get_form_ids()))
self._do_migration_and_assert_flags(self.domain_name)
self.assertEqual(1, len(self._get_form_ids()))
self._compare_diffs([])
def test_basic_form_migration_with_timezones(self):
form_xml = self.get_xml('tz_form')
with override_settings(PHONE_TIMEZONES_HAVE_BEEN_PROCESSED=False,
PHONE_TIMEZONES_SHOULD_BE_PROCESSED=False):
submit_form_locally(form_xml, self.domain_name)
self.assertEqual(1, len(self._get_form_ids()))
self.assertEqual(1, len(self._get_case_ids()))
self._do_migration_and_assert_flags(self.domain_name)
self.assertEqual(1, len(self._get_case_ids()))
self.assertEqual(1, len(self._get_form_ids()))
self._compare_diffs([])
def test_form_with_not_meta_migration(self):
xml = """<?xml version="1.0" ?>
<n0:registration xmlns:n0="http://openrosa.org/user/registration">
<username>W4</username>
<password>2</password>
<uuid>P8DU7OLHVLZXU21JR10H3W8J2</uuid>
<date>2013-11-19</date>
<registering_phone_id>8H1N48EFPF6PA4UOO8YGZ2KFZ</registering_phone_id>
<user_data>
<data key="user_type">standard</data>
</user_data>
</n0:registration>
"""
submit_form_locally(xml, self.domain_name)
couch_form_ids = self._get_form_ids()
self.assertEqual(1, len(couch_form_ids))
self._do_migration_and_assert_flags(self.domain_name)
sql_form_ids = self._get_form_ids()
self.assertEqual(couch_form_ids, sql_form_ids)
self._compare_diffs([])
def test_form_with_missing_xmlns(self):
form_id = uuid.uuid4().hex
form_template = """<?xml version='1.0' ?>
<data uiVersion="1" version="1" name=""{xmlns}>
<name>fgg</name>
<n1:meta xmlns:n1="http://openrosa.org/jr/xforms">
<n1:deviceID>354957031935664</n1:deviceID>
<n1:timeStart>2016-03-01T12:04:16Z</n1:timeStart>
<n1:timeEnd>2016-03-01T12:04:16Z</n1:timeEnd>
<n1:username>bcdemo</n1:username>
<n1:userID>user-abc</n1:userID>
<n1:instanceID>{form_id}</n1:instanceID>
</n1:meta>
</data>"""
xml = form_template.format(
form_id=form_id,
xmlns=' xmlns="http://openrosa.org/formdesigner/456"'
)
submit_form_locally(xml, self.domain_name)
# hack the form to remove XMLNS since it's now validated during form submission
form = FormAccessors(self.domain_name).get_form(form_id)
form.xmlns = None
del form.form_data['@xmlns']
xml_no_xmlns = form_template.format(form_id=form_id, xmlns="")
form.delete_attachment('form.xml')
form.put_attachment(xml_no_xmlns, 'form.xml')
self._do_migration_and_assert_flags(self.domain_name)
self.assertEqual(1, len(self._get_form_ids()))
self._compare_diffs([])
def test_archived_form_migration(self):
form = create_and_save_a_form(self.domain_name)
form.archive('user1')
self.assertEqual(1, len(self._get_form_ids('XFormArchived')))
self._do_migration_and_assert_flags(self.domain_name)
self.assertEqual(1, len(self._get_form_ids('XFormArchived')))
self._compare_diffs([])
def test_error_form_migration(self):
submit_form_locally(
"""<data xmlns="example.com/foo">
<meta>
<instanceID>abc-easy-as-123</instanceID>
</meta>
<case case_id="" xmlns="http://commcarehq.org/case/transaction/v2">
<update><foo>bar</foo></update>
</case>
</data>""",
self.domain_name,
)
self.assertEqual(1, len(self._get_form_ids('XFormError')))
self._do_migration_and_assert_flags(self.domain_name)
self.assertEqual(1, len(self._get_form_ids('XFormError')))
self._compare_diffs([])
def test_error_with_normal_doc_type_migration(self):
submit_form_locally(
"""<data xmlns="example.com/foo">
<meta>
<instanceID>im-a-bad-form</instanceID>
</meta>
<case case_id="" xmlns="http://commcarehq.org/case/transaction/v2">
<update><foo>bar</foo></update>
</case>
</data>""",
self.domain_name,
)
form = FormAccessors(self.domain_name).get_form('im-a-bad-form')
form_json = form.to_json()
form_json['doc_type'] = 'XFormInstance'
XFormInstance.wrap(form_json).save()
self._do_migration_and_assert_flags(self.domain_name)
self.assertEqual(1, len(self._get_form_ids('XFormError')))
self._compare_diffs([])
def test_duplicate_form_migration(self):
with open('corehq/ex-submodules/couchforms/tests/data/posts/duplicate.xml') as f:
duplicate_form_xml = f.read()
submit_form_locally(duplicate_form_xml, self.domain_name)
submit_form_locally(duplicate_form_xml, self.domain_name)
self.assertEqual(1, len(self._get_form_ids()))
self.assertEqual(1, len(self._get_form_ids('XFormDuplicate')))
self._do_migration_and_assert_flags(self.domain_name)
self.assertEqual(1, len(self._get_form_ids()))
self.assertEqual(1, len(self._get_form_ids('XFormDuplicate')))
self._compare_diffs([])
def test_deprecated_form_migration(self):
form_id = uuid.uuid4().hex
case_id = uuid.uuid4().hex
owner_id = uuid.uuid4().hex
case_block = CaseBlock(
create=True,
case_id=case_id,
case_type='person',
owner_id=owner_id,
update={
'property': 'original value'
}
).as_string()
submit_case_blocks(case_block, domain=self.domain_name, form_id=form_id)
# submit a new form with a different case update
case_block = CaseBlock(
create=True,
case_id=case_id,
case_type='newtype',
owner_id=owner_id,
update={
'property': 'edited value'
}
).as_string()
submit_case_blocks(case_block, domain=self.domain_name, form_id=form_id)
self.assertEqual(1, len(self._get_form_ids()))
self.assertEqual(1, len(self._get_form_ids('XFormDeprecated')))
self.assertEqual(1, len(self._get_case_ids()))
self._do_migration_and_assert_flags(self.domain_name)
self.assertEqual(1, len(self._get_form_ids()))
self.assertEqual(1, len(self._get_form_ids('XFormDeprecated')))
self.assertEqual(1, len(self._get_case_ids()))
self._compare_diffs([])
def test_old_form_metadata_migration(self):
form_with_old_meta = """<?xml version="1.0" ?>
<system uiVersion="1" version="1" xmlns="http://commcarehq.org/case">
<meta xmlns="http://openrosa.org/jr/xforms">
<deviceID/>
<timeStart>2013-09-18T11:41:17Z</timeStart>
<timeEnd>2013-09-18T11:41:17Z</timeEnd>
<username>[email protected]</username>
<userID>06d75f978d3370f5b277b2685626b653</userID>
<uid>efe8d4306a7b426681daf33df41da46c</uid>
</meta>
<data>
<p1>123</p1>
</data>
</system>
"""
submit_form_locally(form_with_old_meta, self.domain_name)
self.assertEqual(1, len(self._get_form_ids()))
self._do_migration_and_assert_flags(self.domain_name)
self.assertEqual(1, len(self._get_form_ids()))
self._compare_diffs([])
def test_deleted_form_migration(self):
form = create_and_save_a_form(self.domain_name)
FormAccessors(self.domain.name).soft_delete_forms(
[form.form_id], datetime.utcnow(), 'test-deletion'
)
self.assertEqual(1, len(get_doc_ids_in_domain_by_type(
self.domain_name, "XFormInstance-Deleted", XFormInstance.get_db())
))
self._do_migration_and_assert_flags(self.domain_name)
self.assertEqual(1, len(FormAccessorSQL.get_deleted_form_ids_in_domain(self.domain_name)))
self._compare_diffs([])
def test_submission_error_log_migration(self):
try:
submit_form_locally("To be an XForm or NOT to be an xform/>", self.domain_name)
except LocalSubmissionError:
pass
self.assertEqual(1, len(self._get_form_ids(doc_type='SubmissionErrorLog')))
self._do_migration_and_assert_flags(self.domain_name)
self.assertEqual(1, len(self._get_form_ids(doc_type='SubmissionErrorLog')))
self._compare_diffs([])
def test_hqsubmission_migration(self):
form = create_and_save_a_form(self.domain_name)
form.doc_type = 'HQSubmission'
form.save()
self.assertEqual(1, len(get_doc_ids_in_domain_by_type(
self.domain_name, "HQSubmission", XFormInstance.get_db())
))
self._do_migration_and_assert_flags(self.domain_name)
self.assertEqual(1, len(self._get_form_ids()))
self._compare_diffs([])
def test_migrate_attachments(self):
attachment_source = './corehq/ex-submodules/casexml/apps/case/tests/data/attachments/fruity.jpg'
attachment_file = open(attachment_source, 'rb')
attachments = {
'fruity_file': UploadedFile(attachment_file, 'fruity_file', content_type='image/jpeg')
}
xml = """<?xml version='1.0' ?>
<data uiVersion="1" version="1" name="" xmlns="http://openrosa.org/formdesigner/123">
<name>fgg</name>
<date>2011-06-07</date>
<n0:case case_id="case-123" user_id="user-abc" date_modified="{date}" xmlns:n0="http://commcarehq.org/case/transaction/v2">
<n0:create>
<n0:case_type_id>cc_bc_demo</n0:case_type_id>
<n0:case_name>fgg</n0:case_name>
</n0:create>
<n0:attachment>
<n0:fruity_file src="fruity_file" from="local"/>
</n0:attachment>
</n0:case>
<n1:meta xmlns:n1="http://openrosa.org/jr/xforms">
<n1:deviceID>354957031935664</n1:deviceID>
<n1:timeStart>{date}</n1:timeStart>
<n1:timeEnd>{date}</n1:timeEnd>
<n1:username>bcdemo</n1:username>
<n1:userID>user-abc</n1:userID>
<n1:instanceID>{form_id}</n1:instanceID>
</n1:meta>
</data>""".format(
date='2016-03-01T12:04:16Z',
attachment_source=attachment_source,
form_id=uuid.uuid4().hex
)
submit_form_locally(
xml,
self.domain_name,
attachments=attachments,
)
self.assertEqual(1, len(self._get_form_ids()))
self.assertEqual(1, len(self._get_case_ids()))
self._do_migration_and_assert_flags(self.domain_name)
self.assertEqual(1, len(self._get_form_ids()))
self.assertEqual(1, len(self._get_case_ids()))
self._compare_diffs([])
def test_basic_case_migration(self):
create_and_save_a_case(self.domain_name, case_id=uuid.uuid4().hex, case_name='test case')
self.assertEqual(1, len(self._get_case_ids()))
self._do_migration_and_assert_flags(self.domain_name)
self.assertEqual(1, len(self._get_case_ids()))
self._compare_diffs([])
def test_basic_case_migration_case_name(self):
case_id = uuid.uuid4().hex
submit_case_blocks(
CaseBlock(
case_id,
case_type='migrate',
create=True,
update={'p1': 1},
).as_string(),
self.domain_name
)
submit_case_blocks(
CaseBlock(
case_id,
update={'name': 'test21'},
).as_string(),
self.domain_name
)
self.assertEqual(1, len(self._get_case_ids()))
self._do_migration_and_assert_flags(self.domain_name)
self.assertEqual(1, len(self._get_case_ids()))
self._compare_diffs([])
def test_case_with_indices_migration(self):
parent_case_id = uuid.uuid4().hex
child_case_id = uuid.uuid4().hex
parent_case = create_and_save_a_case(self.domain_name, case_id=parent_case_id, case_name='test parent')
child_case = create_and_save_a_case(self.domain_name, case_id=child_case_id, case_name='test child')
set_parent_case(self.domain_name, child_case, parent_case)
self.assertEqual(2, len(self._get_case_ids()))
self._do_migration_and_assert_flags(self.domain_name)
self.assertEqual(2, len(self._get_case_ids()))
self._compare_diffs([])
indices = CaseAccessorSQL.get_indices(self.domain_name, child_case_id)
self.assertEqual(1, len(indices))
self.assertEqual(parent_case_id, indices[0].referenced_id)
def test_deleted_case_migration(self):
parent_case_id = uuid.uuid4().hex
child_case_id = uuid.uuid4().hex
parent_case = create_and_save_a_case(self.domain_name, case_id=parent_case_id, case_name='test parent')
child_case = create_and_save_a_case(self.domain_name, case_id=child_case_id, case_name='test child')
set_parent_case(self.domain_name, child_case, parent_case)
form_ids = self._get_form_ids()
self.assertEqual(3, len(form_ids))
FormAccessors(self.domain.name).soft_delete_forms(
form_ids, datetime.utcnow(), 'test-deletion-with-cases'
)
CaseAccessors(self.domain.name).soft_delete_cases(
[parent_case_id, child_case_id], datetime.utcnow(), 'test-deletion-with-cases'
)
self.assertEqual(2, len(get_doc_ids_in_domain_by_type(
self.domain_name, "CommCareCase-Deleted", XFormInstance.get_db())
))
self._do_migration_and_assert_flags(self.domain_name)
self.assertEqual(2, len(CaseAccessorSQL.get_deleted_case_ids_in_domain(self.domain_name)))
self._compare_diffs([])
parent_transactions = CaseAccessorSQL.get_transactions(parent_case_id)
self.assertEqual(2, len(parent_transactions))
self.assertTrue(parent_transactions[0].is_case_create)
self.assertTrue(parent_transactions[1].is_form_transaction)
child_transactions = CaseAccessorSQL.get_transactions(child_case_id)
self.assertEqual(2, len(child_transactions))
self.assertTrue(child_transactions[0].is_case_create)
self.assertTrue(child_transactions[1].is_case_index)
def test_xform_ids_diff(self):
case_id = uuid.uuid4().hex
submit_case_blocks(
CaseBlock(
case_id,
case_type='migrate',
create=True,
update={'p1': 1},
).as_string(),
self.domain_name
)
submit_case_blocks(
CaseBlock(
case_id,
update={'p2': 2},
).as_string(),
self.domain_name
)
case = CaseAccessors(self.domain_name).get_case(case_id)
removed_form_id = case.xform_ids.pop(1)
case.save()
self.assertEqual(1, len(self._get_case_ids()))
self._do_migration_and_assert_flags(self.domain_name)
self.assertEqual(1, len(self._get_case_ids()))
self._compare_diffs([
(u'CommCareCase', FormJsonDiff(
diff_type=u'set_mismatch', path=[u'xform_ids', u'[*]'],
old_value=u'', new_value=removed_form_id
))
])
def test_commit(self):
self._do_migration_and_assert_flags(self.domain_name)
clear_local_domain_sql_backend_override(self.domain_name)
call_command('migrate_domain_from_couch_to_sql', self.domain_name, COMMIT=True, no_input=True)
self.assertTrue(Domain.get_by_name(self.domain_name).use_sql_backend)
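# The migration is driven in two phases: MIGRATE=True (used by
# _do_migration_and_assert_flags) copies the domain to the SQL backend under a
# local override, and COMMIT=True makes the switch permanent by setting
# use_sql_backend on the Domain document, as asserted above.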
class LedgerMigrationTests(BaseMigrationTestCase):
def setUp(self):
super(LedgerMigrationTests, self).setUp()
self.liquorice = make_product(self.domain_name, 'liquorice', 'liquorice')
self.sherbert = make_product(self.domain_name, 'sherbert', 'sherbert')
self.jelly_babies = make_product(self.domain_name, 'jelly babies', 'jbs')
def tearDown(self):
try:
self.liquorice.delete()
self.sherbert.delete()
self.jelly_babies.delete()
except ResourceNotFound:
pass # domain.delete() in parent class got there first
super(LedgerMigrationTests, self).tearDown()
def _submit_ledgers(self, ledger_blocks):
return submit_case_blocks(ledger_blocks, self.domain_name)[0].form_id
def _set_balance(self, balance, case_id, product_id):
from corehq.apps.commtrack.tests.util import get_single_balance_block
return self._submit_ledgers([
get_single_balance_block(case_id, product_id, balance)
])
def test_migrate_ledgers(self):
case_id = uuid.uuid4().hex
create_and_save_a_case(self.domain_name, case_id=case_id, case_name="Simon's sweet shop")
self._set_balance(100, case_id, self.liquorice._id)
self._set_balance(50, case_id, self.sherbert._id)
self._set_balance(175, case_id, self.jelly_babies._id)
expected_stock_state = {'stock': {
self.liquorice._id: 100,
self.sherbert._id: 50,
self.jelly_babies._id: 175
}}
self._validate_ledger_data(self._get_ledger_state(case_id), expected_stock_state)
self._do_migration_and_assert_flags(self.domain_name)
self._validate_ledger_data(self._get_ledger_state(case_id), expected_stock_state)
transactions = LedgerAccessorSQL.get_ledger_transactions_for_case(case_id)
self.assertEqual(3, len(transactions))
self._compare_diffs([])
def _validate_ledger_data(self, state_dict, expected):
for section, products in state_dict.items():
for product, state in products.items():
self.assertEqual(state.stock_on_hand, expected[section][product])
def _get_ledger_state(self, case_id):
return LedgerAccessors(self.domain_name).get_case_ledger_state(case_id)
| bsd-3-clause | 3,296,710,757,705,920,500 | 41.834297 | 135 | 0.615807 | false |
junhaodong/misc | web_crawler/find_email_addresses.py | 1 | 3846 | import dryscrape, re, sys
from bs4 import BeautifulSoup
class EmailCrawler:
"""
Takes a domain name and prints out a list of email addresses found on that web page
or a lower level web page with the same given domain name.
Stores emails, paths, and visited_paths as sets to avoid duplicates.
Uses dryscrape to scrape the rendered text of both JavaScript-generated and static websites.
Uses BeautifulSoup to search for valid hrefs to continue crawling on.
"""
emailRE = re.compile("[\w.+-]+@(?!\dx)[\w-]+\.[\w.-]+[\w-]+")
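# Hypothetical examples, for illustration only: '[email protected]' and
# '[email protected]' are matched by the pattern above, while
# 'user@0x1f.example' is rejected by the (?!\dx) lookahead, which appears to be
# meant to skip hex-like '0x..' artifacts.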
def __init__(self, domain):
if 'http' not in domain:
domain = 'http://' + domain
self.url = domain.lower()
self.session = dryscrape.Session(base_url=self.url)
self.emails = set()
self.paths = set()
self.visited_paths = set()
self.num_pages_limit = 50
self.session.set_attribute('auto_load_images', False)
def is_valid_tag(self, tag):
"""Checks if a tag contains a valid href that hasn't been visited yet."""
if tag.has_attr('href') and len(tag['href']) > 0:
href = tag['href']
complete_href = self.session.complete_url(href)
is_relative = self.url in complete_href
is_visited = complete_href in self.visited_paths
is_style_sheet = tag.name == "link"
is_jumpTo = "#" in href
is_mailTo = "mailto" in href
is_js = "javascript:" in href
return is_relative and \
not (is_visited or is_style_sheet or is_jumpTo or is_mailTo or is_js)
else:
return False
def find_emails_and_paths(self, path=None):
# Load the DOM
try:
self.session.visit(path)
except:
print("Error accessing the given URL")
return
# Pass the DOM as HTML into the lxml parser
print("Crawling on:\t" + path)
response = self.session.body()
soup = BeautifulSoup(response, "lxml")
# Add new emails to `self.emails`
for email in re.findall(self.emailRE, response):
self.emails.add(email)
# Mark the current path as visited
self.visited_paths.add(path)
# Add new paths to `self.paths`
for tag in soup.find_all(self.is_valid_tag):
href = self.session.complete_url(tag['href']).lower()
self.paths.add(href)
def find(self):
"""
Crawls through new paths until the page limit has been reached or
there are no more discoverable paths.
"""
self.paths.add(self.url)
while len(self.visited_paths) < self.num_pages_limit and \
len(self.paths) > 0:
self.find_emails_and_paths(path=self.paths.pop())
def print_emails(self):
# Print the emails found (if any)
if len(self.emails) > 0:
print("\nFound these email addresses:")
for email in self.emails:
print("\t" + email)
else:
print("\nNo email addresses found.")
def main():
"""
Initializes the crawler with the given domain name
and optional maximum number of pages to search.
Finds and prints any emails found.
"""
if len(sys.argv) >= 2:
crawler = EmailCrawler(sys.argv[1])
if len(sys.argv) >= 3 and sys.argv[2].isdigit():
crawler.num_pages_limit = int(sys.argv[2])
print("Beginning crawl with a limit of " + str(crawler.num_pages_limit) + " pages...\n")
crawler.find()
crawler.print_emails()
else:
print("Error: Please enter a domain to search on and an optional page limit (default=50).")
print("Example: `python find_email_addresses.py jana.com 30`")
sys.exit(1)
if __name__ == "__main__":
main()
| mit | -1,839,977,054,518,523,100 | 34.611111 | 99 | 0.583983 | false |
akhilpm/Masters-Project | 2DPCA/2DPCA.py | 1 | 2758 | '''
2DPCA for feature extraction of MNIST digits dataset
Author : Akhil P M
'''
from settings import *
from sklearn.ensemble import RandomForestClassifier
import utils
def compute_covariance_matrix(A):
""" compute the 2D covariance matrix in image space"""
no_of_images = len(A)
cov = np.zeros((A.shape[2], A.shape[2]))
for i in xrange(no_of_images):
cov = cov + np.dot(np.transpose(A[i]), A[i])
cov = cov / no_of_images
return cov
def extract_feature(A, x):
""" compute y[i] = A[i]*x for all images """
no_of_images = len(A)
features = np.zeros((no_of_images, A.shape[1]))
for i in xrange(no_of_images):
features[i] = np.ravel(np.dot(A[i], x))
return features
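# Minimal usage sketch (shapes are illustrative assumptions, e.g. 28x28 images):
#   A = np.random.rand(100, 28, 28)
#   SA = compute_covariance_matrix(A)                    # (28, 28) covariance
#   U, s, _ = np.linalg.svd(SA)
#   feats = extract_feature(A, U[:, 0].reshape(28, 1))   # (100, 28) projections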
def main():
""" the main function"""
#set the timer
start = time.time()
#load the data
trainX = np.load('trainX.npy')
testX = np.load('testX.npy')
trainY = np.load('trainY.npy')
testY = np.load('testY.npy')
print('\n!!! Data Loading Completed !!!\n')
#generate 2D data
data_train = utils.generate_2D(trainX)
data_test = utils.generate_2D(testX)
ncol = data_train.shape[2]
features_train = np.zeros((len(data_train), data_train.shape[1]))
features_test = np.zeros((len(data_test), data_test.shape[1]))
#get the mean image
mean_image = utils.get_mean_image(data_train)
#substract the mean image from all images & center them
normalized_data = utils.substract_mean(data_train, mean_image)
data_train = utils.substract_mean(data_train, mean_image)
data_test = utils.substract_mean(data_test, mean_image)
#compute the covariance matrix in 2D space
SA = compute_covariance_matrix(normalized_data)
#find eigen values & eigen vectors of covariance matrix
U, s, _ = np.linalg.svd(SA)
#extract features using 2DPCA
selected = []
clf = RandomForestClassifier(n_estimators=300, n_jobs=-1)
max_acc = 0.0
for i in xrange(ncol):
proj_dir = U[:, i].reshape(ncol, 1)
tempTrainX = extract_feature(data_train, proj_dir)
tempTestX = extract_feature(data_test, proj_dir)
clf.fit(tempTrainX, trainY)
pred = clf.predict(tempTestX)
acc = accuracy_score(testY, pred)
print('PC vector %d gives accuracy : %f\n' %(i+1, acc))
#if acc >=0.1:
# selected.append(i)
# features_train = features_train + s[i] * tempTrainX
# features_test = features_test + s[i] * tempTestX
if acc > max_acc:
max_acc = acc
features_train = np.copy(tempTrainX)
features_test = np.copy(tempTestX)
print features_train.shape
np.save('trainX_feat', features_train)
np.save('testX_feat', features_test)
clf.fit(features_train, trainY)
pred = clf.predict(features_test)
print('accuracy : %f\n' %accuracy_score(testY, pred))
#print selected
print('Test Time : %f Minutes\n' %((time.time()-start)/60))
if __name__ == '__main__':
main()
| mit | -3,920,353,254,661,672,000 | 24.302752 | 66 | 0.681653 | false |
pypa/setuptools_scm | testing/test_git.py | 1 | 11485 | import sys
import os
from os.path import join as opj
import pytest
from datetime import datetime, date
from unittest.mock import patch, Mock
from setuptools_scm import integration, git, NonNormalizedVersion
from setuptools_scm.utils import do, has_command
from setuptools_scm.file_finder_git import git_find_files
pytestmark = pytest.mark.skipif(
not has_command("git", warn=False), reason="git executable not found"
)
@pytest.fixture
def wd(wd, monkeypatch):
monkeypatch.delenv("HOME", raising=False)
wd("git init")
wd("git config user.email [email protected]")
wd('git config user.name "a test"')
wd.add_command = "git add ."
wd.commit_command = "git commit -m test-{reason}"
return wd
@pytest.mark.parametrize(
"given, tag, number, node, dirty",
[
("3.3.1-rc26-0-g9df187b", "3.3.1-rc26", 0, "g9df187b", False),
("17.33.0-rc-17-g38c3047c0", "17.33.0-rc", 17, "g38c3047c0", False),
],
)
def test_parse_describe_output(given, tag, number, node, dirty):
parsed = git._git_parse_describe(given)
assert parsed == (tag, number, node, dirty)
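# The strings parametrized above follow the `git describe --long` layout
# <tag>-<commits since tag>-g<abbreviated node>, optionally suffixed with -dirty,
# which is what git._git_parse_describe() takes apart.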
def test_root_relative_to(tmpdir, wd, monkeypatch):
monkeypatch.delenv("SETUPTOOLS_SCM_DEBUG")
p = wd.cwd.joinpath("sub/package")
p.mkdir(parents=True)
p.joinpath("setup.py").write_text(
"""from setuptools import setup
setup(use_scm_version={"root": "../..",
"relative_to": __file__})
"""
)
res = do((sys.executable, "setup.py", "--version"), p)
assert res == "0.1.dev0"
def test_git_gone(wd, monkeypatch):
monkeypatch.setenv("PATH", str(wd.cwd / "not-existing"))
with pytest.raises(EnvironmentError, match="'git' was not found"):
git.parse(str(wd.cwd), git.DEFAULT_DESCRIBE)
@pytest.mark.issue("https://github.com/pypa/setuptools_scm/issues/298")
@pytest.mark.issue(403)
def test_file_finder_no_history(wd, caplog):
file_list = git_find_files(str(wd.cwd))
assert file_list == []
assert "listing git files failed - pretending there aren't any" in caplog.text
@pytest.mark.issue("https://github.com/pypa/setuptools_scm/issues/281")
def test_parse_call_order(wd):
git.parse(str(wd.cwd), git.DEFAULT_DESCRIBE)
def test_version_from_git(wd):
assert wd.version == "0.1.dev0"
assert git.parse(str(wd.cwd), git.DEFAULT_DESCRIBE).branch == "master"
wd.commit_testfile()
assert wd.version.startswith("0.1.dev1+g")
assert not wd.version.endswith("1-")
wd("git tag v0.1")
assert wd.version == "0.1"
wd.write("test.txt", "test2")
assert wd.version.startswith("0.2.dev0+g")
wd.commit_testfile()
assert wd.version.startswith("0.2.dev1+g")
wd("git tag version-0.2")
assert wd.version.startswith("0.2")
wd.commit_testfile()
wd("git tag version-0.2.post210+gbe48adfpost3+g0cc25f2")
with pytest.warns(
UserWarning, match="tag '.*' will be stripped of its suffix '.*'"
):
assert wd.version.startswith("0.2")
wd.commit_testfile()
wd("git tag 17.33.0-rc")
assert wd.version == "17.33.0rc0"
# custom normalization
assert wd.get_version(normalize=False) == "17.33.0-rc"
assert wd.get_version(version_cls=NonNormalizedVersion) == "17.33.0-rc"
assert (
wd.get_version(version_cls="setuptools_scm.NonNormalizedVersion")
== "17.33.0-rc"
)
@pytest.mark.parametrize("with_class", [False, type, str])
def test_git_version_unnormalized_setuptools(with_class, tmpdir, wd, monkeypatch):
"""
Test that when integrating with setuptools without normalization,
the version is not normalized in write_to files,
but still normalized by setuptools for the final dist metadata.
"""
monkeypatch.delenv("SETUPTOOLS_SCM_DEBUG")
p = wd.cwd
# create a setup.py
dest_file = str(tmpdir.join("VERSION.txt")).replace("\\", "/")
if with_class is False:
# try normalize = False
setup_py = """
from setuptools import setup
setup(use_scm_version={'normalize': False, 'write_to': '%s'})
"""
elif with_class is type:
# custom non-normalizing class
setup_py = """
from setuptools import setup
class MyVersion:
def __init__(self, tag_str: str):
self.version = tag_str
def __repr__(self):
return self.version
setup(use_scm_version={'version_cls': MyVersion, 'write_to': '%s'})
"""
elif with_class is str:
# non-normalizing class referenced by name
setup_py = """from setuptools import setup
setup(use_scm_version={
'version_cls': 'setuptools_scm.NonNormalizedVersion',
'write_to': '%s'
})
"""
# finally write the setup.py file
p.joinpath("setup.py").write_text(setup_py % dest_file)
# do git operations and tag
wd.commit_testfile()
wd("git tag 17.33.0-rc1")
# setuptools still normalizes using packaging.Version (removing the dash)
res = do((sys.executable, "setup.py", "--version"), p)
assert res == "17.33.0rc1"
# but the version tag in the file is non-normalized (with the dash)
assert tmpdir.join("VERSION.txt").read() == "17.33.0-rc1"
@pytest.mark.issue(179)
def test_unicode_version_scheme(wd):
scheme = b"guess-next-dev".decode("ascii")
assert wd.get_version(version_scheme=scheme)
@pytest.mark.issue(108)
@pytest.mark.issue(109)
def test_git_worktree(wd):
wd.write("test.txt", "test2")
# untracked files dont change the state
assert wd.version == "0.1.dev0"
wd("git add test.txt")
assert wd.version.startswith("0.1.dev0+d")
@pytest.mark.issue(86)
@pytest.mark.parametrize("today", [False, True])
def test_git_dirty_notag(today, wd, monkeypatch):
if today:
monkeypatch.delenv("SOURCE_DATE_EPOCH", raising=False)
wd.commit_testfile()
wd.write("test.txt", "test2")
wd("git add test.txt")
assert wd.version.startswith("0.1.dev1")
if today:
# the date on the tag is in UTC
tag = datetime.utcnow().date().strftime(".d%Y%m%d")
else:
tag = ".d20090213"
# we are dirty, check for the tag
assert tag in wd.version
@pytest.mark.issue(193)
def test_git_worktree_support(wd, tmpdir):
wd.commit_testfile()
worktree = tmpdir.join("work_tree")
wd("git worktree add -b work-tree %s" % worktree)
res = do([sys.executable, "-m", "setuptools_scm", "ls"], cwd=worktree)
assert str(worktree) in res
@pytest.fixture
def shallow_wd(wd, tmpdir):
wd.commit_testfile()
wd.commit_testfile()
wd.commit_testfile()
target = tmpdir.join("wd_shallow")
do(["git", "clone", "file://%s" % wd.cwd, str(target), "--depth=1"])
return target
def test_git_parse_shallow_warns(shallow_wd, recwarn):
git.parse(str(shallow_wd))
msg = recwarn.pop()
assert "is shallow and may cause errors" in str(msg.message)
def test_git_parse_shallow_fail(shallow_wd):
with pytest.raises(ValueError) as einfo:
git.parse(str(shallow_wd), pre_parse=git.fail_on_shallow)
assert "git fetch" in str(einfo.value)
def test_git_shallow_autocorrect(shallow_wd, recwarn):
git.parse(str(shallow_wd), pre_parse=git.fetch_on_shallow)
msg = recwarn.pop()
assert "git fetch was used to rectify" in str(msg.message)
git.parse(str(shallow_wd), pre_parse=git.fail_on_shallow)
def test_find_files_stop_at_root_git(wd):
wd.commit_testfile()
project = wd.cwd / "project"
project.mkdir()
project.joinpath("setup.cfg").touch()
assert integration.find_files(str(project)) == []
@pytest.mark.issue(128)
def test_parse_no_worktree(tmpdir):
ret = git.parse(str(tmpdir))
assert ret is None
def test_alphanumeric_tags_match(wd):
wd.commit_testfile()
wd("git tag newstyle-development-started")
assert wd.version.startswith("0.1.dev1+g")
def test_git_archive_export_ignore(wd, monkeypatch):
wd.write("test1.txt", "test")
wd.write("test2.txt", "test")
wd.write(
".git/info/attributes",
# Explicitly include test1.txt so that the test is not affected by
# a potentially global gitattributes file on the test machine.
"/test1.txt -export-ignore\n/test2.txt export-ignore",
)
wd("git add test1.txt test2.txt")
wd.commit()
monkeypatch.chdir(wd.cwd)
assert integration.find_files(".") == [opj(".", "test1.txt")]
@pytest.mark.issue(228)
def test_git_archive_subdirectory(wd, monkeypatch):
wd("mkdir foobar")
wd.write("foobar/test1.txt", "test")
wd("git add foobar")
wd.commit()
monkeypatch.chdir(wd.cwd)
assert integration.find_files(".") == [opj(".", "foobar", "test1.txt")]
@pytest.mark.issue(251)
def test_git_archive_run_from_subdirectory(wd, monkeypatch):
wd("mkdir foobar")
wd.write("foobar/test1.txt", "test")
wd("git add foobar")
wd.commit()
monkeypatch.chdir(wd.cwd / "foobar")
assert integration.find_files(".") == [opj(".", "test1.txt")]
def test_git_feature_branch_increments_major(wd):
wd.commit_testfile()
wd("git tag 1.0.0")
wd.commit_testfile()
assert wd.get_version(version_scheme="python-simplified-semver").startswith("1.0.1")
wd("git checkout -b feature/fun")
wd.commit_testfile()
assert wd.get_version(version_scheme="python-simplified-semver").startswith("1.1.0")
@pytest.mark.issue("https://github.com/pypa/setuptools_scm/issues/303")
def test_not_matching_tags(wd):
wd.commit_testfile()
wd("git tag apache-arrow-0.11.1")
wd.commit_testfile()
wd("git tag apache-arrow-js-0.9.9")
wd.commit_testfile()
assert wd.get_version(
tag_regex=r"^apache-arrow-([\.0-9]+)$",
git_describe_command="git describe --dirty --tags --long --exclude *js* ",
).startswith("0.11.2")
@pytest.mark.issue("https://github.com/pypa/setuptools_scm/issues/411")
@pytest.mark.xfail(reason="https://github.com/pypa/setuptools_scm/issues/449")
def test_non_dotted_version(wd):
wd.commit_testfile()
wd("git tag apache-arrow-1")
wd.commit_testfile()
assert wd.get_version().startswith("2")
def test_non_dotted_version_with_updated_regex(wd):
wd.commit_testfile()
wd("git tag apache-arrow-1")
wd.commit_testfile()
assert wd.get_version(tag_regex=r"^apache-arrow-([\.0-9]+)$").startswith("2")
def test_non_dotted_tag_no_version_match(wd):
wd.commit_testfile()
wd("git tag apache-arrow-0.11.1")
wd.commit_testfile()
wd("git tag apache-arrow")
wd.commit_testfile()
assert wd.get_version().startswith("0.11.2.dev2")
@pytest.mark.issue("https://github.com/pypa/setuptools_scm/issues/381")
def test_gitdir(monkeypatch, wd):
""" """
wd.commit_testfile()
normal = wd.version
# git hooks set this and break subsequent setuptools_scm unless we clean
monkeypatch.setenv("GIT_DIR", __file__)
assert wd.version == normal
def test_git_getdate(wd):
# TODO: case coverage for git wd parse
today = date.today()
def parse_date():
return git.parse(os.fspath(wd.cwd)).node_date
git_wd = git.GitWorkdir(os.fspath(wd.cwd))
assert git_wd.get_head_date() is None
assert parse_date() == today
wd.commit_testfile()
assert git_wd.get_head_date() == today
meta = git.parse(os.fspath(wd.cwd))
assert meta.node_date == today
def test_git_getdate_badgit(
wd,
):
wd.commit_testfile()
git_wd = git.GitWorkdir(os.fspath(wd.cwd))
with patch.object(git_wd, "do_ex", Mock(return_value=("%cI", "", 0))):
assert git_wd.get_head_date() is None
| mit | 2,673,699,600,375,099,000 | 29.065445 | 88 | 0.655464 | false |
ahmetcemturan/SFACT | skeinforge_application/skeinforge_plugins/craft_plugins/export_plugins/gcode_step.py | 1 | 9483 | """
This page is in the table of contents.
Gcode step is an export plugin to convert gcode from float position to number of steps.
An export plugin is a script in the export_plugins folder which has the getOutput function, the globalIsReplaceable variable and, if its output is not replaceable, the writeOutput function. It is meant to be run from the export tool. To ensure that the plugin works on platforms which do not handle file capitalization properly, give the plugin a lower case name.
The getOutput function of this script takes a gcode text and returns it with the positions converted into number of steps. The writeOutput function of this script takes a gcode text and writes that with the positions converted into number of steps.
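A small, hypothetical illustration (zero offsets, the default 0.1 millimeter step length for X and Y and 0.01 millimeter for Z): a line such as
G1 X12.7 Y3.4 Z0.5
would have its coordinates expressed as whole numbers of steps, roughly
G1 X127 Y34 Z50
with the exact output depending on the feed rate, extrusion and spacing settings described below. From Python, assuming this module is importable from the export_plugins package, the conversion can be driven with:
from skeinforge_application.skeinforge_plugins.craft_plugins.export_plugins import gcode_step
stepped = gcode_step.getOutput(gcodeText)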
==Settings==
===Add Feed Rate Even When Unchanging===
Default is on.
When selected, the feed rate will be added even when it did not change from the previous line.
===Add Space Between Words===
Default is on.
When selected, a space will be added between each gcode word.
===Add Z Even When Unchanging===
Default is on.
When selected, the z word will be added even when it did not change.
===Feed Rate Step Length===
Default is 0.1 millimeters/second.
Defines the feed rate step length.
===Offset===
====X Offset====
Default is zero.
Defines the X Offset.
====Y Offset====
Default is zero.
Defines the Y Offset.
====Z Offset====
Default is zero.
Defines the Z Offset.
===Step Length===
====E Step Length====
Default is 0.1 millimeters.
Defines the E extrusion distance step length.
====Radius Step Length====
Default is 0.1 millimeters.
Defines the radius step length.
====X Step Length====
Default is 0.1 millimeters.
Defines the X axis step length.
====Y Step Length====
Default is 0.1 millimeters.
Defines the Y axis step length.
====Z Step Length====
Default is 0.01 millimeters.
Defines the Z axis step length.
"""
from __future__ import absolute_import
import __init__
from fabmetheus_utilities import archive
from fabmetheus_utilities import gcodec
from fabmetheus_utilities import settings
from fabmetheus_utilities.fabmetheus_tools import fabmetheus_interpret
from skeinforge_application.skeinforge_utilities import skeinforge_polyfile
from skeinforge_application.skeinforge_utilities import skeinforge_profile
from struct import Struct
import cStringIO
import os
import sys
__author__ = 'Enrique Perez ([email protected])'
__date__ = '$Date: 2008/21/04 $'
__license__ = 'GNU Affero General Public License http://www.gnu.org/licenses/agpl.html'
# This is true if the output is text and false if it is binary.
globalIsReplaceable = True
def getCharacterIntegerString(character, offset, splitLine, stepLength):
'Get a character and integer string.'
floatValue = getFloatFromCharacterSplitLine(character, splitLine)
if floatValue is None:
return ''
floatValue += offset
integerValue = int(round(float(floatValue / stepLength)))
return character + str(integerValue)
def getFloatFromCharacterSplitLine(character, splitLine):
	'Get the float after the first occurrence of the character in the split line.'
lineFromCharacter = gcodec.getStringFromCharacterSplitLine(character, splitLine)
if lineFromCharacter is None:
return None
return float(lineFromCharacter)
def getNewRepository():
'Get new repository.'
return GcodeStepRepository()
def getOutput(gcodeText, repository=None):
'Get the exported version of a gcode file.'
if gcodeText == '':
return ''
if repository is None:
repository = GcodeStepRepository()
settings.getReadRepository(repository)
return GcodeStepSkein().getCraftedGcode(repository, gcodeText)
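# Illustrative sketch: a hypothetical helper showing how getOutput converts float
# positions into step counts, assuming the repository still holds the default step
# lengths (0.1 mm for X/Y and feed rate, 0.01 mm for Z) and zero offsets.
def exampleGetOutputUsage():
	'Hypothetical usage example; the sample line and expected words are assumptions.'
	sampleText = 'G1 X10.0 Y5.0 Z0.35 F1200.0\n'
	# With the defaults above this yields a line like 'G1 X100 Y50 Z35 F12000'.
	return getOutput(sampleText)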
def writeOutput( fileName, gcodeText = ''):
'Write the exported version of a gcode file.'
gcodeText = gcodec.getGcodeFileText(fileName, gcodeText)
repository = GcodeStepRepository()
settings.getReadRepository(repository)
output = getOutput(gcodeText, repository)
suffixFileName = fileName[: fileName.rfind('.')] + '_gcode_step.gcode'
archive.writeFileText(suffixFileName, output)
print('The converted file is saved as ' + archive.getSummarizedFileName(suffixFileName))
class GcodeStepRepository:
'A class to handle the export settings.'
def __init__(self):
'Set the default settings, execute title & settings fileName.'
skeinforge_profile.addListsToCraftTypeRepository('skeinforge_application.skeinforge_plugins.craft_plugins.export_plugins.gcode_step.html', self)
self.addFeedRateEvenWhenUnchanging = settings.BooleanSetting().getFromValue('Add Feed Rate Even When Unchanging', self, True)
self.addSpaceBetweenWords = settings.BooleanSetting().getFromValue('Add Space Between Words', self, True)
self.addZEvenWhenUnchanging = settings.BooleanSetting().getFromValue('Add Z Even When Unchanging', self, True)
self.fileNameInput = settings.FileNameInput().getFromFileName([('Gcode text files', '*.gcode')], 'Open File to be Converted to Gcode Step', self, '')
self.feedRateStepLength = settings.FloatSpin().getFromValue(0.0, 'Feed Rate Step Length (millimeters/second)', self, 1.0, 0.1)
settings.LabelSeparator().getFromRepository(self)
settings.LabelDisplay().getFromName('- Offset -', self )
self.xOffset = settings.FloatSpin().getFromValue(-100.0, 'X Offset (millimeters)', self, 100.0, 0.0)
self.yOffset = settings.FloatSpin().getFromValue(-100.0, 'Y Offset (millimeters)', self, 100.0, 0.0)
self.zOffset = settings.FloatSpin().getFromValue(-10.0, 'Z Offset (millimeters)', self, 10.0, 0.0)
settings.LabelSeparator().getFromRepository(self)
settings.LabelDisplay().getFromName('- Step Length -', self )
self.eStepLength = settings.FloatSpin().getFromValue(0.0, 'E Step Length (float)', self, 1.0, 0.1)
self.radiusStepLength = settings.FloatSpin().getFromValue(0.0, 'Radius Step Length (millimeters)', self, 1.0, 0.1)
self.xStepLength = settings.FloatSpin().getFromValue(0.0, 'X Step Length (millimeters)', self, 1.0, 0.1)
self.yStepLength = settings.FloatSpin().getFromValue(0.0, 'Y Step Length (millimeters)', self, 1.0, 0.1)
self.zStepLength = settings.FloatSpin().getFromValue(0.0, 'Z Step Length (millimeters)', self, 0.2, 0.01)
self.executeTitle = 'Convert to Gcode Step'
def execute(self):
'Convert to gcode step button has been clicked.'
fileNames = skeinforge_polyfile.getFileOrDirectoryTypesUnmodifiedGcode(self.fileNameInput.value, ['.gcode'], self.fileNameInput.wasCancelled)
for fileName in fileNames:
writeOutput(fileName)
class GcodeStepSkein:
	'A class to convert gcode float positions into integer step counts.'
def __init__(self):
self.oldFeedRateString = None
self.oldZString = None
self.output = cStringIO.StringIO()
def addCharacterInteger(self, character, lineStringIO, offset, splitLine, stepLength):
'Add a character and integer to line string.'
characterIntegerString = getCharacterIntegerString(character, offset, splitLine, stepLength)
self.addStringToLine(lineStringIO, characterIntegerString)
def addLine(self, line):
'Add a line of text and a newline to the output.'
self.output.write(line + '\n')
def addStringToLine(self, lineStringIO, wordString):
'Add a character and integer to line string.'
if wordString == '':
return
if self.repository.addSpaceBetweenWords.value:
lineStringIO.write(' ')
lineStringIO.write(wordString)
def getCraftedGcode(self, repository, gcodeText):
'Parse gcode text and store the gcode.'
self.repository = repository
lines = archive.getTextLines(gcodeText)
for line in lines:
self.parseLine(line)
return self.output.getvalue()
def parseLine(self, line):
'Parse a gcode line.'
splitLine = gcodec.getSplitLineBeforeBracketSemicolon(line)
firstWord = gcodec.getFirstWord(splitLine)
if len(firstWord) < 1:
return
firstLetter = firstWord[0]
if firstLetter == '(':
return
if firstWord != 'G1' and firstWord != 'G2' and firstWord != 'G3':
self.addLine(line)
return
lineStringIO = cStringIO.StringIO()
lineStringIO.write(firstWord)
self.addCharacterInteger('I', lineStringIO, 0.0, splitLine, self.repository.xStepLength.value)
self.addCharacterInteger('J', lineStringIO, 0.0, splitLine, self.repository.yStepLength.value)
self.addCharacterInteger('R', lineStringIO, 0.0, splitLine, self.repository.radiusStepLength.value)
self.addCharacterInteger('X', lineStringIO, self.repository.xOffset.value, splitLine, self.repository.xStepLength.value)
self.addCharacterInteger('Y', lineStringIO, self.repository.yOffset.value, splitLine, self.repository.yStepLength.value)
zString = getCharacterIntegerString('Z', self.repository.zOffset.value, splitLine, self.repository.zStepLength.value)
feedRateString = getCharacterIntegerString('F', 0.0, splitLine, self.repository.feedRateStepLength.value)
if zString != '':
if zString != self.oldZString or self.repository.addZEvenWhenUnchanging.value:
self.addStringToLine(lineStringIO, zString)
if feedRateString != '':
if feedRateString != self.oldFeedRateString or self.repository.addFeedRateEvenWhenUnchanging.value:
self.addStringToLine(lineStringIO, feedRateString)
self.addCharacterInteger('E', lineStringIO, 0.0, splitLine, self.repository.eStepLength.value)
self.addLine(lineStringIO.getvalue())
self.oldFeedRateString = feedRateString
self.oldZString = zString
def main():
'Display the export dialog.'
if len(sys.argv) > 1:
writeOutput(' '.join(sys.argv[1 :]))
else:
settings.startMainLoopFromConstructor(getNewRepository())
if __name__ == '__main__':
main()
| agpl-3.0 | -5,384,852,783,440,880,000 | 38.18595 | 365 | 0.761362 | false |
strus38/WPaaS | wpars/tasks.py | 1 | 8400 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
tasks
~~~~~
This file contains all the tasks used by the REST API
So, all the wpar commands used.
:copyright: (c) 2013 by @MIS
"""
import os
import subprocess
import json
import config
import tarfile
from celery import Celery
from celery.result import AsyncResult
celery = Celery('tasks', backend=config.BACKEND_URI, broker=config.BROCKER_URI)
def build_cmd_line(data):
cmd_opts=[]
if 'password' in data and data['password'] != "":
cmd_opts.append('-P')
cmd_opts.append(data['password'])
if 'start' in data and data['start'] == "yes":
cmd_opts.append('-s')
if 'network' in data:
if 'address' in data['network'] and data['network']['address'] != "":
if not '-N' in cmd_opts:
cmd_opts.append('-N')
cmd_opts.append('address='+data['network']['address'])
if 'netmask' in data['network'] and data['network']['netmask'] != "":
if not '-N' in cmd_opts:
cmd_opts.append('-N')
cmd_opts.append('netmask='+data['network']['netmask'])
if 'interface' in data['network'] and data['network']['interface'] != "":
if not '-N' in cmd_opts:
cmd_opts.append('-N')
cmd_opts.append('interface='+data['network']['interface'])
if 'ipv4' in data['network'] and data['network']['ipv4'] != "":
if not '-N' in cmd_opts:
cmd_opts.append('-N')
cmd_opts.append('address='+data['network']['ipv4'])
if 'broadcast' in data['network'] and data['network']['broadcast'] != "":
if not '-N' in cmd_opts:
cmd_opts.append('-N')
cmd_opts.append('broadcast='+data['network']['broadcast'])
if 'ipv6' in data['network'] and data['network']['ipv6'] != "":
if not '-N' in cmd_opts:
cmd_opts.append('-N')
cmd_opts.append('address6='+data['network']['ipv6'])
if 'prefixlen' in data['network'] and data['network']['prefixlen'] != "":
if not '-N' in cmd_opts:
cmd_opts.append('-N')
cmd_opts.append('prefixlen='+data['network']['prefixlen'])
if 'hostname' in data:
if data['hostname'] != "":
cmd_opts.append('-h')
cmd_opts.append(data['hostname'])
if 'autostart' in data and data['autostart'] == "yes":
cmd_opts.append('-A')
if 'backupdevice' in data:
if data['backupdevice'] != "":
cmd_opts.append('-B')
cmd_opts.append(data['backupdevice'])
if 'checkpointable' in data and data['checkpointable'] == "yes":
cmd_opts.append('-c')
if 'versioned' in data and data['versioned'] == "yes":
cmd_opts.append('-C')
if 'basedir' in data:
if data['basedir'] != "":
cmd_opts.append('-d')
cmd_opts.append(data['basedir'])
if 'filesets' in data:
if data['filesets'] != "":
cmd_opts.append('-e')
cmd_opts.append(data['filesets'])
if 'force' in data and data['force'] == "yes":
cmd_opts.append('-F')
if 'vg' in data:
if data['vg'] != "":
cmd_opts.append('-g')
cmd_opts.append(data['vg'])
if 'postscript' in data:
if data['postscript'] != "":
cmd_opts.append('-k')
cmd_opts.append(data['postscript'])
if 'privateRWfs' in data and data['privateRWfs'] == "yes":
cmd_opts.append('-l')
if 'mountdir' in data:
if 'dir' in data['mountdir'] and data['mountdir']['dir'] != "":
cmd_opts.append('-M')
cmd_opts.append('directory='+data['mountdir']['dir'])
if 'vfs' in data['mountdir'] and data['mountdir']['vfs'] != "":
cmd_opts.append('vfs='+data['mountdir']['vfs'])
if 'dev' in data['mountdir'] and data['mountdir']['dev'] != "":
cmd_opts.append('dev='+data['mountdir']['dev'])
if 'dupnameresolution' in data and data['dupnameresolution'] == "yes":
cmd_opts.append('-r')
if 'devname' in data and data['devname'] != "":
if '-D' not in cmd_opts:
cmd_opts.append('-D')
cmd_opts.append('devname='+data['devname'])
if 'rootvg' in data and data['rootvg'] != "no":
if '-D' not in cmd_opts:
cmd_opts.append('-D')
cmd_opts.append('rootvg='+data['rootvg'])
return cmd_opts
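# Illustrative note: build_cmd_line maps a REST payload onto mkwpar flags; for a
# hypothetical payload
#   {'hostname': 'wpar01', 'autostart': 'yes',
#    'network': {'address': '10.0.0.5', 'interface': 'en0'}}
# it would return ['-N', 'address=10.0.0.5', 'interface=en0', '-h', 'wpar01', '-A'].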
def _run_cmd(cmd, wait=True):
process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out, err = process.communicate()
    # Default the return code based on stderr, then prefer the real exit status when waiting.
    ret = 0 if not err else 1
    if wait:
        ret = process.wait()
    return ret, out, err
@celery.task
def wpar_mkwpar(name, options):
wpar_cmd = ['/usr/sbin/mkwpar', '-n', name]
# Let's add more options if needed
wpar_cmd += options
# Launch the command
ret,out,err = _run_cmd(wpar_cmd)
return ret,out,err
@celery.task
def wpar_check_task(task_id):
async_res = AsyncResult(task_id)
return async_res
@celery.task
def wpar_startwpar(name):
wpar_cmd = ['/usr/sbin/startwpar', name]
ret,out,err = _run_cmd(wpar_cmd)
return ret,out,err
@celery.task
def wpar_stopwpar(name):
wpar_cmd = ['/usr/sbin/stopwpar', name]
ret,out,err = _run_cmd(wpar_cmd)
return ret,out,err
@celery.task
def wpar_rebootwpar(name):
wpar_cmd = ['/usr/sbin/rebootwpar', name]
ret,out,err = _run_cmd(wpar_cmd)
return ret,out,err
@celery.task
def wpar_rmwpar(name):
# Add the -F flag to stop it whatever its state
wpar_cmd = ['/usr/sbin/rmwpar', '-F', name]
ret,out,err = _run_cmd(wpar_cmd)
return ret,out,err
@celery.task
def wpar_restorewpar(name, file):
wpar_cmd = ['/usr/sbin/restwpar', '-f', file, name]
ret,out,err = _run_cmd(wpar_cmd)
return ret,out,err
@celery.task
def wpar_savewpar(name, file):
wpar_cmd = ['/usr/bin/savewpar', '-f', file, name]
ret,out,err = _run_cmd(wpar_cmd)
return ret,out,err
@celery.task
def wpar_migwpar(name, file):
wpar_cmd = ['/usr/sbin/migwpar', '-d', file, '-C', name]
ret,out,err = _run_cmd(wpar_cmd)
return ret,out,err
@celery.task
def wpar_syncwpar(name):
wpar_cmd = ['/usr/sbin/syncwpar', name]
ret,out,err = _run_cmd(wpar_cmd)
return ret,out,err
@celery.task
def wpar_listwpar():
wpar_list_cmd = ['/usr/sbin/lswpar','-c']
ret,out,err = _run_cmd(wpar_list_cmd)
return out
@celery.task
def wpar_listdetailswpar(wpar_name):
wpar_list_cmd = ['/usr/sbin/lswpar','-L', wpar_name]
ret,out,err = _run_cmd(wpar_list_cmd)
return out
@celery.task
def host_stats():
stat_cmd = ['/usr/bin/lparstat','-i']
ret,out,err = _run_cmd(stat_cmd)
return out
@celery.task
def host_cpustats():
proc_cmd = ['/usr/bin/pmlist','-s']
ret,out,err = _run_cmd(proc_cmd)
return out
@celery.task
def host_status():
status_cmd = ['/home/misoard/wparrip.sh']
ret,out,err = _run_cmd(status_cmd)
return out
@celery.task
def host_shutdown():
shutdown_cmd = ['/etc/shutdown']
ret,out,err = _run_cmd(shutdown_cmd)
return out
@celery.task
def host_reboot():
reboot_cmd = ['/etc/reboot','now']
ret,out,err = _run_cmd(reboot_cmd)
return out
@celery.task
def host_os_stats():
os_cmd = ['/usr/bin/oslevel']
ret,out,err = _run_cmd(os_cmd)
return out
@celery.task
def host_network_devices():
net_cmd = ['/etc/lsdev','-Cc','if']
ret,out,err = _run_cmd(net_cmd)
return out
@celery.task
def image_inspect(image_fullpath):
ls_cmd = ['/usr/bin/lsmksysb','-lf',image_fullpath]
ret,out,err = _run_cmd(ls_cmd)
return ret,out,err
@celery.task
def image_create(path, data):
files = []
# First create the <image_local>/<image_name>.info file. It acts as the image repository locally
# to know which images are used by the WPARs (do not want it to be in a DB since it could be used
# without this program.)
info_file = path+'/'+data['name']+'.info'
with open(info_file, 'w') as outfile:
json.dump(data, outfile)
# Now, depending on the image, we build a .tgz file containing either:
# - The .info file only
# - The .info file and the mksysb
# - The .info file and whatever NFS tree or program
files.append(data['name']+'.info')
if data['type'] == 'mksysb':
files.append(data['name'])
    _targzip_content(path, data['name'], files)
return 0,data['id'],""
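# Illustrative note: image_create expects a metadata dict such as the hypothetical
# {'id': 42, 'name': 'aix71_base', 'type': 'mksysb'}; it writes <path>/aix71_base.info
# and, for mksysb images, bundles the .info file and the image into a .tgz archive.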
def _targzip_content(path, name, files):
    full = path + '/' + name + '.tgz'
tar = tarfile.open(full, "w:gz")
for name in files:
tar.add(path+'/'+name)
tar.close()
return full
| apache-2.0 | -7,396,022,001,565,113,000 | 28.166667 | 98 | 0.647143 | false |
mercycorps/TolaActivity | tola/settings/base.py | 1 | 12158 | """Common settings and globals."""
from django.utils.translation import ugettext_lazy as _
from os.path import abspath, basename, dirname, join, normpath
from sys import path
########## PATH CONFIGURATION
#BASE DIR
#BASE_DIR = os.path.dirname(os.path.abspath(__file__))
# Absolute filesystem path to the Django project directory:
DJANGO_ROOT = dirname(dirname(abspath(__file__)))
# Absolute filesystem path to the top-level project folder:
SITE_ROOT = dirname(DJANGO_ROOT)
# Site name:
SITE_NAME = basename(DJANGO_ROOT)
# Add our project to our pythonpath, this way we don't need to type our project
# name in our dotted import paths:
path.append(DJANGO_ROOT)
# Add LOCALE_PATHS, a list of directories where Django looks for translation
# files
LOCALE_PATHS = [
join(DJANGO_ROOT, 'locale'),
]
FORMAT_MODULE_PATH = [
'formats'
]
########## END PATH CONFIGURATION
########## DEBUG CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#debug
DEBUG = False
########## END DEBUG CONFIGURATION
########## MANAGER CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#admins
ADMINS = (
('Your Name', '[email protected]'),
)
# See: https://docs.djangoproject.com/en/dev/ref/settings/#managers
MANAGERS = ADMINS
########## END MANAGER CONFIGURATION
########## DATABASE CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.',
'NAME': '',
'USER': '',
'PASSWORD': '',
'HOST': '',
'PORT': '',
}
}
########## END DATABASE CONFIGURATION
########## GENERAL CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#time-zone
TIME_ZONE = 'America/Los_Angeles'
# See: https://docs.djangoproject.com/en/dev/ref/settings/#language-code
LANGUAGE_CODE = 'en-us'
# See: https://docs.djangoproject.com/en/dev/ref/settings/#site-id
SITE_ID = 1
# See: https://docs.djangoproject.com/en/dev/ref/settings/#use-i18n
USE_I18N = True
# See: https://docs.djangoproject.com/en/dev/ref/settings/#use-l10n
USE_L10N = True
# See: https://docs.djangoproject.com/en/dev/ref/settings/#use-tz
USE_TZ = True
DATE_FORMAT = 'Y-m-d' # most likely overridden in a formats.py file
# Add list of languages available for selection.
LANGUAGES = [
('en', _('English')),
('fr', _('French')),
('es', _('Spanish')),
# ('ar', _('Arabic')),
]
########## END GENERAL CONFIGURATION
########## MEDIA CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#media-root
MEDIA_ROOT = normpath(join(SITE_ROOT, 'media'))
# See: https://docs.djangoproject.com/en/dev/ref/settings/#media-url
MEDIA_URL = '/media/'
########## END MEDIA CONFIGURATION
########## STATIC FILE CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#static-root
STATIC_ROOT = normpath(join(SITE_ROOT, 'assets'))
# See: https://docs.djangoproject.com/en/dev/ref/settings/#static-url
STATIC_URL = '/static/'
# See: https://docs.djangoproject.com/en/dev/ref/contrib/staticfiles/#std:setting-STATICFILES_DIRS
STATICFILES_DIRS = (
join(SITE_ROOT, 'build'),
)
# See: https://docs.djangoproject.com/en/dev/ref/contrib/staticfiles/#staticfiles-finders
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
)
########## END STATIC FILE CONFIGURATION
########## SECRET CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#secret-key
# Note: This key should only be used for development and testing.
SECRET_KEY = r"!0^+)=t*ly6ycprf9@kfw$6fsjd0xoh#pa*2erx1m*lp5k9ko7"
########## END SECRET CONFIGURATION
########## SITE CONFIGURATION
# Hosts/domain names that are valid for this site
# See https://docs.djangoproject.com/en/1.5/ref/settings/#allowed-hosts
ALLOWED_HOSTS = ['127.0.0.1', 'mercycorps.org']
########## END SITE CONFIGURATION
########## FIXTURE CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#std:setting-FIXTURE_DIRS
FIXTURE_DIRS = (
normpath(join(SITE_ROOT, 'fixtures')),
)
########## END FIXTURE CONFIGURATION
########## TEMPLATE CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#template-context-processors
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
#'APP_DIRS': True,
'DIRS': [
normpath(join(SITE_ROOT, 'templates')),
],
'OPTIONS': {
'debug': DEBUG,
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'social_django.context_processors.backends',
'social_django.context_processors.login_redirect',
'django.contrib.messages.context_processors.messages',
'django.template.context_processors.tz',
'django.template.context_processors.media',
'django.template.context_processors.static',
'tola.processor.google_analytics',
],
'loaders': [
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader'
]
},
},
]
########## END TEMPLATE CONFIGURATION
########## MIDDLEWARE CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#middleware-classes
MIDDLEWARE = (
# Default Django middleware.
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.locale.LocaleMiddleware', # Activate locale middleware
'tola.middleware.UserLanguageMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.RemoteUserMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'simple_history.middleware.HistoryRequestMiddleware',
'social_django.middleware.SocialAuthExceptionMiddleware',
'tola.middleware.TimingMiddleware',
#'debug_toolbar.middleware.DebugToolbarMiddleware',
)
########## END MIDDLEWARE CONFIGURATION
########## REST CONFIGURATION
# Add Pagination to Rest Framework lists
REST_FRAMEWORK = {
'PAGINATE_BY': 10,
'DEFAULT_AUTHENTICATION_CLASSES': (
'rest_framework.authentication.SessionAuthentication',
'rest_framework.authentication.TokenAuthentication',
),
'DEFAULT_PERMISSION_CLASSES': (
'rest_framework.permissions.IsAuthenticated',
)
}
########## END REST CONFIGURATION
########## URL CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#root-urlconf
ROOT_URLCONF = '%s.urls' % SITE_NAME
########## END URL CONFIGURATION
########## APP CONFIGURATION
DJANGO_APPS = (
# Default Django apps:
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.humanize',
# Uncomment the next line to enable the admin:
'django.contrib.admin',
# Uncomment the next line to enable admin documentation:
'django.contrib.admindocs',
'social_django',
#'social.apps.django_app.default'
)
THIRD_PARTY_APPS = (
'rest_framework',
'rest_framework.authtoken',
'crispy_forms',
'django_extensions',
'mathfilters',
'import_export',
'django_wysiwyg',
'ckeditor',
'ckeditor_uploader',
'simplejson',
'simple_history',
#'user_language_middleware',
'widget_tweaks',
'webpack_loader',
'safedelete',
'django_mysql',
'adminsortable2',
)
# Apps specific for this project go here.
LOCAL_APPS = (
'workflow',
'tola',
'feed',
'indicators',
'tola_management'
)
# See: https://docs.djangoproject.com/en/dev/ref/settings/#installed-apps
INSTALLED_APPS = DJANGO_APPS + THIRD_PARTY_APPS + LOCAL_APPS
########## END APP CONFIGURATION
####### AUTHENTICATION BAKEND CONFIG ##################
# https://github.com/django/django/blob/master/django/contrib/auth/backends.py
AUTHENTICATION_BACKENDS = (
#'social_core.backends.open_id.OpenIdAuth',
#'social_core.backends.google.GoogleOpenId',
'social_core.backends.google.GoogleOAuth2',
#'social_core.backends.google.GoogleOAuth',
#'social_core.backends.twitter.TwitterOAuth',
#'social_core.backends.yahoo.YahooOpenId',
'django.contrib.auth.backends.ModelBackend',
)
SOCIAL_AUTH_PIPELINE = (
'social_core.pipeline.social_auth.social_details',
'social_core.pipeline.social_auth.social_uid',
'tola.pipeline.domains_allowed',
'social_core.pipeline.social_auth.auth_allowed',
'tola.pipeline.create_user_okta',
#'social_core.pipeline.social_auth.social_user',
'tola.pipeline.social_user_tola',
'tola.pipeline.associate_email_or_redirect',
#'social_core.pipeline.social_auth.associate_by_email',
'social_core.pipeline.user.get_username',
#'social_core.pipeline.social_auth.associate_user',
'tola.pipeline.associate_user_tola',
'social_core.pipeline.social_auth.load_extra_data',
'social_core.pipeline.user.user_details',
)
############ END OF AUTHENTICATION BACKEND ##############
### PASSWORD VALIDATION ###
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
'OPTIONS': {
'min_length': 9,
}
},
]
########## Login redirect ###########
LOGIN_REDIRECT_URL = '/'
SOCIAL_AUTH_LOGIN_REDIRECT_URL = '/'
SOCIAL_AUTH_URL_NAMESPACE = 'social'
PROJECT_PATH = dirname(dirname(dirname(abspath(__file__))))
path.append(PROJECT_PATH)
########## WSGI CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#wsgi-application
WSGI_APPLICATION = '%s.wsgi.application' % SITE_NAME
########## END WSGI CONFIGURATION
CRISPY_TEMPLATE_PACK = 'bootstrap3'
#wysiwyg settings
DJANGO_WYSIWYG_FLAVOR = "ckeditor"
CKEDITOR_UPLOAD_PATH = "media/uploads/"
CKEDITOR_CONFIGS = {
'default': {
'toolbar': 'full',
'height': 300,
'width': 300,
},
}
SESSION_EXPIRE_AT_BROWSER_CLOSE = True
GOOGLE_ANALYTICS_PROPERTY_ID = None # replaced in private settings file
GOOGLE_ANALYTICS_DOMAIN = 'example.org' # replaced in private settings file
# Webpack
WEBPACK_LOADER = {
'DEFAULT': {
'BUNDLE_DIR_NAME': 'dist/',
'STATS_FILE': join(SITE_ROOT, 'webpack-stats.json'),
}
}
########## LOGGING CONFIGURATION
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'formatters': {
'standard': {
'format': '%(asctime)s %(levelname)-8s %(name)-12s %(message)s',
}
},
'handlers': {
'file': {
'level': 'ERROR',
'class': 'logging.FileHandler',
'filename': 'error.log',
'formatter': 'standard'
},
'login_file': {
'level': 'DEBUG',
'class': 'logging.handlers.TimedRotatingFileHandler',
'filename': 'login.log',
'when': 'midnight',
'formatter': 'standard',
}
},
'loggers': {
'django': {
'handlers': ['file'],
'level': 'ERROR',
'propagate': True,
},
'workflow': {
'handlers': ['file'],
'level': 'ERROR',
'propagate': True,
},
'indicators': {
'handlers': ['file'],
'level': 'ERROR',
'propagate': True,
},
'tola': {
'handlers': ['file'],
'level': 'ERROR',
'propagate': True,
},
'login': {
'handlers': ['login_file'],
'level': 'DEBUG',
'propagate': True
}
},
}
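# Illustrative sketch: deployment-specific settings normally extend this base module.
# A hypothetical tola/settings/local.py might contain:
#   from .base import *
#   DEBUG = True
#   DATABASES['default'].update({'ENGINE': 'django.db.backends.mysql', 'NAME': 'tola'})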
| apache-2.0 | 3,049,343,211,461,103,600 | 27.674528 | 98 | 0.637276 | false |
thisisshi/cloud-custodian | c7n/resources/ssm.py | 1 | 27799 | # Copyright The Cloud Custodian Authors.
# SPDX-License-Identifier: Apache-2.0
import json
import hashlib
import operator
from concurrent.futures import as_completed
from c7n.actions import Action
from c7n.exceptions import PolicyValidationError
from c7n.filters import Filter, CrossAccountAccessFilter
from c7n.query import QueryResourceManager, TypeInfo
from c7n.manager import resources
from c7n.tags import universal_augment
from c7n.utils import chunks, get_retry, local_session, type_schema, filter_empty
from c7n.version import version
from .aws import shape_validate
from .ec2 import EC2
@resources.register('ssm-parameter')
class SSMParameter(QueryResourceManager):
class resource_type(TypeInfo):
service = 'ssm'
enum_spec = ('describe_parameters', 'Parameters', None)
name = "Name"
id = "Name"
universal_taggable = True
arn_type = "parameter"
cfn_type = 'AWS::SSM::Parameter'
retry = staticmethod(get_retry(('Throttled',)))
permissions = ('ssm:GetParameters',
'ssm:DescribeParameters')
augment = universal_augment
@SSMParameter.action_registry.register('delete')
class DeleteParameter(Action):
schema = type_schema('delete')
permissions = ("ssm:DeleteParameter",)
def process(self, resources):
client = local_session(self.manager.session_factory).client('ssm')
for r in resources:
self.manager.retry(
client.delete_parameter, Name=r['Name'],
ignore_err_codes=('ParameterNotFound',))
@resources.register('ssm-managed-instance')
class ManagedInstance(QueryResourceManager):
class resource_type(TypeInfo):
service = 'ssm'
enum_spec = ('describe_instance_information', 'InstanceInformationList', None)
id = 'InstanceId'
name = 'Name'
date = 'RegistrationDate'
arn_type = "managed-instance"
permissions = ('ssm:DescribeInstanceInformation',)
@EC2.action_registry.register('send-command')
@ManagedInstance.action_registry.register('send-command')
class SendCommand(Action):
"""Run an SSM Automation Document on an instance.
:Example:
    Find Ubuntu 18.04 instances that are active with SSM.
.. code-block:: yaml
policies:
- name: ec2-osquery-install
resource: ec2
filters:
- type: ssm
key: PingStatus
value: Online
- type: ssm
key: PlatformName
value: Ubuntu
- type: ssm
key: PlatformVersion
value: 18.04
actions:
- type: send-command
command:
DocumentName: AWS-RunShellScript
Parameters:
commands:
- wget https://pkg.osquery.io/deb/osquery_3.3.0_1.linux.amd64.deb
- dpkg -i osquery_3.3.0_1.linux.amd64.deb
"""
schema = type_schema(
'send-command',
command={'type': 'object'},
required=('command',))
permissions = ('ssm:SendCommand',)
shape = "SendCommandRequest"
annotation = 'c7n:SendCommand'
def validate(self):
shape_validate(self.data['command'], self.shape, 'ssm')
# If used against an ec2 resource, require an ssm status filter
# to ensure that we're not trying to send commands to instances
# that aren't in ssm.
if self.manager.type != 'ec2':
return
found = False
for f in self.manager.iter_filters():
if f.type == 'ssm':
found = True
break
if not found:
raise PolicyValidationError(
"send-command requires use of ssm filter on ec2 resources")
def process(self, resources):
client = local_session(self.manager.session_factory).client('ssm')
for resource_set in chunks(resources, 50):
self.process_resource_set(client, resource_set)
def process_resource_set(self, client, resources):
command = dict(self.data['command'])
command['InstanceIds'] = [
r['InstanceId'] for r in resources]
result = client.send_command(**command).get('Command')
for r in resources:
r.setdefault('c7n:SendCommand', []).append(result['CommandId'])
@resources.register('ssm-activation')
class SSMActivation(QueryResourceManager):
class resource_type(TypeInfo):
service = 'ssm'
enum_spec = ('describe_activations', 'ActivationList', None)
id = 'ActivationId'
name = 'Description'
date = 'CreatedDate'
arn = False
permissions = ('ssm:DescribeActivations',)
@SSMActivation.action_registry.register('delete')
class DeleteSSMActivation(Action):
schema = type_schema('delete')
permissions = ('ssm:DeleteActivation',)
def process(self, resources):
client = local_session(self.manager.session_factory).client('ssm')
for a in resources:
client.delete_activation(ActivationId=a["ActivationId"])
@resources.register('ops-item')
class OpsItem(QueryResourceManager):
"""Resource for OpsItems in SSM OpsCenter
https://docs.aws.amazon.com/systems-manager/latest/userguide/OpsCenter.html
"""
class resource_type(TypeInfo):
enum_spec = ('describe_ops_items', 'OpsItemSummaries', None)
service = 'ssm'
arn_type = 'opsitem'
id = 'OpsItemId'
name = 'Title'
default_report_fields = (
'Status', 'Title', 'LastModifiedTime',
'CreatedBy', 'CreatedTime')
QueryKeys = {
'Status',
'CreatedBy',
'Source',
'Priority',
'Title',
'OpsItemId',
'CreatedTime',
'LastModifiedTime',
'OperationalData',
'OperationalDataKey',
'OperationalDataValue',
'ResourceId',
'AutomationId'}
QueryOperators = {'Equal', 'LessThan', 'GreaterThan', 'Contains'}
def validate(self):
self.query = self.resource_query()
return super(OpsItem, self).validate()
def get_resources(self, ids, cache=True, augment=True):
if isinstance(ids, str):
ids = [ids]
return self.resources({
'OpsItemFilters': [{
'Key': 'OpsItemId',
'Values': [i],
'Operator': 'Equal'} for i in ids]})
def resources(self, query=None):
q = self.resource_query()
if q and query and 'OpsItemFilters' in query:
q['OpsItemFilters'].extend(query['OpsItemFilters'])
return super(OpsItem, self).resources(query=q)
def resource_query(self):
filters = []
for q in self.data.get('query', ()):
if (not isinstance(q, dict) or
not set(q.keys()) == {'Key', 'Values', 'Operator'} or
q['Key'] not in self.QueryKeys or
q['Operator'] not in self.QueryOperators):
raise PolicyValidationError(
"invalid ops-item query %s" % self.data['query'])
filters.append(q)
return {'OpsItemFilters': filters}
@OpsItem.action_registry.register('update')
class UpdateOpsItem(Action):
"""Update an ops item.
    :example:
Close out open ops items older than 30 days for a given issue.
.. code-block:: yaml
policies:
- name: issue-items
resource: aws.ops-item
filters:
- Status: Open
- Title: checking-lambdas
- type: value
key: CreatedTime
value_type: age
op: greater-than
value: 30
actions:
- type: update
status: Resolved
"""
schema = type_schema(
'update',
description={'type': 'string'},
priority={'enum': list(range(1, 6))},
title={'type': 'string'},
topics={'type': 'array', 'items': {'type': 'string'}},
status={'enum': ['Open', 'In Progress', 'Resolved']},
)
permissions = ('ssm:UpdateOpsItem',)
def process(self, resources):
attrs = dict(self.data)
attrs = filter_empty({
'Description': attrs.get('description'),
'Title': attrs.get('title'),
'Priority': attrs.get('priority'),
'Status': attrs.get('status'),
'Notifications': [{'Arn': a} for a in attrs.get('topics', ())]})
modified = []
for r in resources:
for k, v in attrs.items():
if k not in r or r[k] != v:
modified.append(r)
self.log.debug("Updating %d of %d ops items", len(modified), len(resources))
client = local_session(self.manager.session_factory).client('ssm')
for m in modified:
client.update_ops_item(OpsItemId=m['OpsItemId'], **attrs)
class OpsItemFilter(Filter):
"""Filter resources associated to extant OpsCenter operational items.
:example:
Find ec2 instances with open ops items.
.. code-block:: yaml
policies:
- name: ec2-instances-ops-items
resource: ec2
filters:
- type: ops-item
# we can filter on source, title, priority
priority: [1, 2]
"""
schema = type_schema(
'ops-item',
status={'type': 'array',
'default': ['Open'],
'items': {'enum': ['Open', 'In progress', 'Resolved']}},
priority={'type': 'array', 'items': {'enum': list(range(1, 6))}},
title={'type': 'string'},
source={'type': 'string'})
schema_alias = True
permissions = ('ssm:DescribeOpsItems',)
def process(self, resources, event=None):
client = local_session(self.manager.session_factory).client('ssm')
results = []
for resource_set in chunks(resources, 10):
qf = self.get_query_filter(resource_set)
items = client.describe_ops_items(**qf).get('OpsItemSummaries')
arn_item_map = {}
for i in items:
for arn in json.loads(
i['OperationalData']['/aws/resources']['Value']):
arn_item_map.setdefault(arn['arn'], []).append(i['OpsItemId'])
for arn, r in zip(self.manager.get_arns(resource_set), resource_set):
if arn in arn_item_map:
r['c7n:opsitems'] = arn_item_map[arn]
results.append(r)
return results
def get_query_filter(self, resources):
q = []
q.append({'Key': 'Status', 'Operator': 'Equal',
'Values': self.data.get('status', ('Open',))})
if self.data.get('priority'):
q.append({'Key': 'Priority', 'Operator': 'Equal',
'Values': list(map(str, self.data['priority']))})
if self.data.get('title'):
q.append({'Key': 'Title', 'Operator': 'Contains',
'Values': [self.data['title']]})
if self.data.get('source'):
q.append({'Key': 'Source', 'Operator': 'Equal',
'Values': [self.data['source']]})
q.append({'Key': 'ResourceId', 'Operator': 'Contains',
'Values': [r[self.manager.resource_type.id] for r in resources]})
return {'OpsItemFilters': q}
@classmethod
def register_resource(cls, registry, resource_class):
if 'ops-item' not in resource_class.filter_registry:
resource_class.filter_registry.register('ops-item', cls)
resources.subscribe(OpsItemFilter.register_resource)
class PostItem(Action):
"""Post an OpsItem to AWS Systems Manager OpsCenter Dashboard.
https://docs.aws.amazon.com/systems-manager/latest/userguide/OpsCenter.html
Each ops item supports up to a 100 associated resources. This
action supports the builtin OpsCenter dedup logic with additional
support for associating new resources to existing Open ops items.
    :example:
Create an ops item for ec2 instances with Create User permissions
.. code-block:: yaml
policies:
- name: over-privileged-ec2
resource: aws.ec2
filters:
- type: check-permissions
match: allowed
actions:
- iam:CreateUser
actions:
- type: post-item
priority: 3
The builtin OpsCenter dedup logic will kick in if the same
resource set (ec2 instances in this case) is posted for the same
policy.
    :example:
Create an ops item for sqs queues with cross account access as ops items.
.. code-block:: yaml
policies:
- name: sqs-cross-account-access
resource: aws.sqs
filters:
- type: cross-account
actions:
- type: mark-for-op
days: 5
op: delete
- type: post-item
title: SQS Cross Account Access
description: |
Cross Account Access detected in SQS resource IAM Policy.
tags:
Topic: Security
"""
schema = type_schema(
'post-item',
description={'type': 'string'},
tags={'type': 'object'},
priority={'enum': list(range(1, 6))},
title={'type': 'string'},
topics={'type': 'string'},
)
schema_alias = True
permissions = ('ssm:CreateOpsItem',)
def process(self, resources, event=None):
client = local_session(self.manager.session_factory).client('ssm')
item_template = self.get_item_template()
resources = list(sorted(resources, key=operator.itemgetter(
self.manager.resource_type.id)))
items = self.get_items(client, item_template)
if items:
# - Use a copy of the template as we'll be passing in status changes on updates.
# - The return resources will be those that we couldn't fit into updates
# to existing resources.
resources = self.update_items(client, items, dict(item_template), resources)
item_ids = [i['OpsItemId'] for i in items[:5]]
for resource_set in chunks(resources, 100):
resource_arns = json.dumps(
[{'arn': arn} for arn in sorted(self.manager.get_arns(resource_set))])
item_template['OperationalData']['/aws/resources'] = {
'Type': 'SearchableString', 'Value': resource_arns}
if items:
item_template['RelatedOpsItems'] = [
{'OpsItemId': item_ids[:5]}]
try:
oid = client.create_ops_item(**item_template).get('OpsItemId')
item_ids.insert(0, oid)
except client.exceptions.OpsItemAlreadyExistsException:
pass
for r in resource_set:
r['c7n:opsitem'] = oid
def get_items(self, client, item_template):
qf = [
{'Key': 'OperationalDataValue',
'Operator': 'Contains',
'Values': [item_template['OperationalData'][
'/custodian/dedup']['Value']]},
{'Key': 'OperationalDataKey',
'Operator': 'Equal',
'Values': ['/custodian/dedup']},
{'Key': 'Status',
'Operator': 'Equal',
# In progress could imply activity/executions underway, we don't want to update
# the resource set out from underneath that so only look at Open state.
'Values': ['Open']},
{'Key': 'Source',
'Operator': 'Equal',
'Values': ['Cloud Custodian']}]
items = client.describe_ops_items(OpsItemFilters=qf)['OpsItemSummaries']
return list(sorted(items, key=operator.itemgetter('CreatedTime'), reverse=True))
def update_items(self, client, items, item_template, resources):
"""Update existing Open OpsItems with new resources.
Originally this tried to support attribute updates as well, but
the reasoning around that is a bit complex due to partial state
evaluation around any given execution, so its restricted atm
to just updating associated resources.
For management of ops items, use a policy on the
ops-item resource.
Rationale: Typically a custodian policy will be evaluating
some partial set of resources at any given execution (ie think
a lambda looking at newly created resources), where as a
collection of ops center items will represent the total
set. Custodian can multiplex the partial set of resource over
a set of ops items (100 resources per item) which minimizes
the item count. When updating the state of an ops item though,
we have to contend with the possibility that we're doing so
with only a partial state. Which could be confusing if we
tried to set the Status to Resolved even if we're only evaluating
a handful of resources associated to an ops item.
"""
arn_item_map = {}
item_arn_map = {}
for i in items:
item_arn_map[i['OpsItemId']] = arns = json.loads(
i['OperationalData']['/aws/resources']['Value'])
for arn in arns:
arn_item_map[arn['arn']] = i['OpsItemId']
arn_resource_map = dict(zip(self.manager.get_arns(resources), resources))
added = set(arn_resource_map).difference(arn_item_map)
updated = set()
remainder = []
# Check for resource additions
for a in added:
handled = False
for i in items:
if len(item_arn_map[i['OpsItemId']]) >= 100:
continue
item_arn_map[i['OpsItemId']].append({'arn': a})
updated.add(i['OpsItemId'])
arn_resource_map[a]['c7n:opsitem'] = i['OpsItemId']
handled = True
break
if not handled:
remainder.append(a)
for i in items:
if not i['OpsItemId'] in updated:
continue
i = dict(i)
for k in ('CreatedBy', 'CreatedTime', 'Source', 'LastModifiedBy',
'LastModifiedTime'):
i.pop(k, None)
i['OperationalData']['/aws/resources']['Value'] = json.dumps(
item_arn_map[i['OpsItemId']])
i['OperationalData'].pop('/aws/dedup', None)
client.update_ops_item(**i)
return remainder
def get_item_template(self):
title = self.data.get('title', self.manager.data['name']).strip()
dedup = ("%s %s %s %s" % (
title,
self.manager.type,
self.manager.config.region,
self.manager.config.account_id)).encode('utf8')
# size restrictions on this value is 4-20, digest is 32
dedup = hashlib.md5(dedup).hexdigest()[:20] # nosec nosemgrep
i = dict(
Title=title,
Description=self.data.get(
'description',
self.manager.data.get(
'description',
self.manager.data.get('name'))),
Priority=self.data.get('priority'),
Source="Cloud Custodian",
Tags=[{'Key': k, 'Value': v} for k, v in self.data.get(
'tags', self.manager.data.get('tags', {})).items()],
Notifications=[{'Arn': a} for a in self.data.get('topics', ())],
OperationalData={
'/aws/dedup': {
'Type': 'SearchableString',
'Value': json.dumps({'dedupString': dedup})},
'/custodian/execution-id': {
'Type': 'String',
'Value': self.manager.ctx.execution_id},
# We need our own dedup string to be able to filter
# search on it.
'/custodian/dedup': {
'Type': 'SearchableString',
'Value': dedup},
'/custodian/policy': {
'Type': 'String',
'Value': json.dumps(self.manager.data)},
'/custodian/version': {
'Type': 'String',
'Value': version},
'/custodian/policy-name': {
'Type': 'SearchableString',
'Value': self.manager.data['name']},
'/custodian/resource': {
'Type': 'SearchableString',
'Value': self.manager.type},
}
)
return filter_empty(i)
@classmethod
def register_resource(cls, registry, resource_class):
if 'post-item' not in resource_class.action_registry:
resource_class.action_registry.register('post-item', cls)
resources.subscribe(PostItem.register_resource)
@resources.register('ssm-document')
class SSMDocument(QueryResourceManager):
class resource_type(TypeInfo):
service = 'ssm'
enum_spec = ('list_documents', 'DocumentIdentifiers', {'Filters': [
{
'Key': 'Owner',
'Values': ['Self']}]})
name = 'Name'
date = 'RegistrationDate'
arn_type = 'Document'
permissions = ('ssm:ListDocuments',)
@SSMDocument.filter_registry.register('cross-account')
class SSMDocumentCrossAccount(CrossAccountAccessFilter):
"""Filter SSM documents which have cross account permissions
:example:
.. code-block:: yaml
policies:
- name: ssm-cross-account
resource: ssm-document
filters:
- type: cross-account
whitelist: [xxxxxxxxxxxx]
"""
permissions = ('ssm:DescribeDocumentPermission',)
def process(self, resources, event=None):
self.accounts = self.get_accounts()
results = []
client = local_session(self.manager.session_factory).client('ssm')
with self.executor_factory(max_workers=3) as w:
futures = []
for resource_set in chunks(resources, 10):
futures.append(w.submit(
self.process_resource_set, client, resource_set))
for f in as_completed(futures):
if f.exception():
self.log.error(
"Exception checking cross account access \n %s" % (
f.exception()))
continue
results.extend(f.result())
return results
def process_resource_set(self, client, resource_set):
results = []
for r in resource_set:
attrs = self.manager.retry(
client.describe_document_permission,
Name=r['Name'],
PermissionType='Share',
ignore_err_codes=('InvalidDocument',))['AccountSharingInfoList']
shared_accounts = {
g.get('AccountId') for g in attrs}
delta_accounts = shared_accounts.difference(self.accounts)
if delta_accounts:
r['c7n:CrossAccountViolations'] = list(delta_accounts)
results.append(r)
return results
@SSMDocument.action_registry.register('set-sharing')
class RemoveSharingSSMDocument(Action):
"""Edit list of accounts that share permissions on an SSM document. Pass in a list of account
IDs to the 'add' or 'remove' fields to edit document sharing permissions.
Set 'remove' to 'matched' to automatically remove any external accounts on a
document (use in conjunction with the cross-account filter).
:example:
.. code-block:: yaml
policies:
- name: ssm-set-sharing
resource: ssm-document
filters:
- type: cross-account
whitelist: [xxxxxxxxxxxx]
actions:
- type: set-sharing
add: [yyyyyyyyyy]
remove: matched
"""
schema = type_schema('set-sharing',
remove={
'oneOf': [
{'enum': ['matched']},
{'type': 'array', 'items': {
'type': 'string'}},
]},
add={
'type': 'array', 'items': {
'type': 'string'}})
permissions = ('ssm:ModifyDocumentPermission',)
def process(self, resources):
client = local_session(self.manager.session_factory).client('ssm')
add_accounts = self.data.get('add', [])
remove_accounts = self.data.get('remove', [])
if self.data.get('remove') == 'matched':
for r in resources:
try:
client.modify_document_permission(
Name=r['Name'],
PermissionType='Share',
AccountIdsToAdd=add_accounts,
AccountIdsToRemove=r['c7n:CrossAccountViolations']
)
except client.exceptions.InvalidDocumentOperation as e:
raise(e)
else:
for r in resources:
try:
client.modify_document_permission(
Name=r['Name'],
PermissionType='Share',
AccountIdsToAdd=add_accounts,
AccountIdsToRemove=remove_accounts
)
except client.exceptions.InvalidDocumentOperation as e:
raise(e)
@SSMDocument.action_registry.register('delete')
class DeleteSSMDocument(Action):
"""Delete SSM documents. Set force flag to True to force delete on documents that are
shared across accounts. This will remove those shared accounts, and then delete the document.
Otherwise, delete will fail and raise InvalidDocumentOperation exception
if a document is shared with other accounts. Default value for force is False.
:example:
.. code-block:: yaml
policies:
- name: ssm-delete-documents
resource: ssm-document
filters:
- type: cross-account
whitelist: [xxxxxxxxxxxx]
actions:
- type: delete
force: True
"""
schema = type_schema(
'delete',
force={'type': 'boolean'}
)
permissions = ('ssm:DeleteDocument', 'ssm:ModifyDocumentPermission',)
def process(self, resources):
client = local_session(self.manager.session_factory).client('ssm')
for r in resources:
try:
client.delete_document(Name=r['Name'], Force=True)
except client.exceptions.InvalidDocumentOperation as e:
if self.data.get('force', False):
response = client.describe_document_permission(
Name=r['Name'],
PermissionType='Share'
)
client.modify_document_permission(
Name=r['Name'],
PermissionType='Share',
AccountIdsToRemove=response.get('AccountIds', [])
)
client.delete_document(
Name=r['Name'],
Force=True
)
else:
raise(e)
| apache-2.0 | -7,850,574,087,396,876,000 | 34.457908 | 97 | 0.548869 | false |
ashutoshvt/psi4 | tests/pytests/test_jkmemory.py | 1 | 1305 | """
Tests for the memory estimators on JK objects
"""
import psi4
import pytest
from .utils import *
def _build_system(basis):
mol = psi4.geometry("""
Ar 0 0 0
Ar 0 0 5
Ar 0 0 15
Ar 0 0 25
Ar 0 0 35
""")
#psi4.set_options({"INTS_TOLERANCE": 0.0})
basis = psi4.core.BasisSet.build(mol, target=basis)
aux = psi4.core.BasisSet.build(basis.molecule(), "DF_BASIS_SCF",
psi4.core.get_option("SCF", "DF_BASIS_SCF"), "JKFIT",
basis.name(), basis.has_puream())
return basis, aux
@pytest.mark.parametrize("basis,jk_type,estimate",[
# Zero temps
["cc-pvdz", "DIRECT", 0],
["cc-pvdz", "OUT_OF_CORE", 0],
# pvdz tests
["cc-pvdz", "MEM_DF", 1542360],
["cc-pvdz", "DISK_DF", 1286244],
["cc-pvdz", "CD", 2916000],
["cc-pvdz", "PK", 65610000],
# 5z tests
["cc-pv5z", "MEM_DF", 55172760],
["cc-pv5z", "DISK_DF", 26984120],
]) # yapf: disable
def test_jk_memory_estimate(basis, jk_type, estimate):
basis, aux = _build_system(basis)
jk = psi4.core.JK.build(basis, aux=aux, jk_type=jk_type, do_wK=False, memory=1e9)
assert compare_integers(estimate, jk.memory_estimate(), "{} memory estimate".format(jk_type))
| lgpl-3.0 | -1,547,428,685,653,749,200 | 24.588235 | 97 | 0.56092 | false |
openturns/otdistfunc | python/test/t_distributed_python_wrapper_std.py | 1 | 1727 | #! /usr/bin/env python
# -*- coding: utf8 -*-
import sys
import os
# space separated list of hosts
#remote_hosts = "localhost node-1:2 node-3:4"
remote_hosts = "localhost"
test_dir = os.path.dirname(os.path.realpath(__file__))
start_script = sys.executable + " " + test_dir + os.sep + \
"t_distributed_python_wrapper.py"
test_type = "local"
if len(sys.argv) >= 3:
test_type = sys.argv[2]
if test_type == "local":
default_param = " --test local "
elif test_type == "remote":
default_param = " --test remote --hosts " + remote_hosts + " "
else:
print ('Wrong arguments!')
exit(1)
os.system(start_script + default_param + " --point ")
os.system(start_script + default_param + " --analytical ")
os.system(start_script + default_param + " --point --analytical ")
os.system(start_script + default_param +
"--sample-size 50 --work-time 0.1 ")
os.system(start_script + default_param +
"--sample-size 10 --nb-output 1000 ")
os.system(start_script + default_param +
"--sample-size 1 ")
os.system(start_script + default_param +
"--sample-size 5 --work-time 0.1 --cleanup no ")
os.system(start_script + default_param +
"--sample-size 1 --cleanup no ")
os.system(start_script + default_param +
"--sample-size 5 --work-time 0.1 --cleanup all ")
os.system(start_script + default_param +
"--sample-size 1 --cleanup all ")
os.system(start_script + default_param +
" --sample-size 10 --work-time 0.1 --error ")
os.system(start_script + default_param +
" --sample-size 4 --work-time 0.1 --error --cleanup no")
os.system(start_script + default_param +
" --sample-size 4 --work-time 0.1 --error --cleanup all")
| lgpl-3.0 | -7,919,461,645,429,032,000 | 29.839286 | 67 | 0.62073 | false |
scalingdata/Impala | tests/common/skip.py | 1 | 2938 | #!/usr/bin/env python
# Copyright (c) 2015 Cloudera, Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Impala py.test skipif markers. When a test can't be run against S3,
# choose the appropriate reason (or add a new one if needed) and
# annotate the class or test routine with the marker.
#
import os
import pytest
from functools import partial
from tests.util.filesystem_utils import IS_DEFAULT_FS, IS_S3, IS_ISILON
class SkipIfS3:
# These ones are skipped due to product limitations.
insert = pytest.mark.skipif(IS_S3, reason="INSERT not implemented for S3")
load_data = pytest.mark.skipif(IS_S3, reason="LOAD DATA not implemented for S3")
caching = pytest.mark.skipif(IS_S3, reason="SET CACHED not implemented for S3")
hive = pytest.mark.skipif(IS_S3, reason="Hive doesn't work with S3")
jira = partial(pytest.mark.skipif, IS_S3)
# These ones need test infra work to re-enable.
udfs = pytest.mark.skipif(IS_S3, reason="udas/udfs not copied to S3")
datasrc = pytest.mark.skipif(IS_S3, reason="data sources not copied to S3")
hdfs_client = pytest.mark.skipif(IS_S3, reason="hdfs_client doesn't work with S3")
hbase = pytest.mark.skipif(IS_S3, reason="HBase not started with S3")
qualified_path = pytest.mark.skipif(IS_S3,
reason="Tests rely on HDFS qualified paths, IMPALA-1872")
class SkipIf:
# Some tests require a non-default filesystem to be present.
default_fs = pytest.mark.skipif(IS_DEFAULT_FS, reason="Non-default filesystem needed")
skip_hbase = pytest.mark.skipif(pytest.config.option.skip_hbase,
reason="--skip_hbase argument specified")
not_default_fs = pytest.mark.skipif(not IS_DEFAULT_FS,
reason="Default filesystem needed")
class SkipIfIsilon:
caching = pytest.mark.skipif(IS_ISILON, reason="SET CACHED not implemented for Isilon")
hbase = pytest.mark.skipif(IS_ISILON, reason="HBase not tested with Isilon")
hive = pytest.mark.skipif(IS_ISILON, reason="Hive not tested with Isilon")
hdfs_acls = pytest.mark.skipif(IS_ISILON, reason="HDFS acls are not supported on Isilon")
hdfs_block_size = pytest.mark.skipif(IS_ISILON,
reason="Isilon uses its own block size")
hdfs_encryption = pytest.mark.skipif(IS_ISILON,
reason="HDFS encryption is not supported with Isilon")
untriaged = pytest.mark.skipif(IS_ISILON,
reason="This Isilon issue has yet to be triaged.")
jira = partial(pytest.mark.skipif, IS_ISILON)
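# Illustrative sketch: tests opt out by decorating a test class or routine with one of
# the markers above, e.g. (hypothetical test):
#   @SkipIfS3.insert
#   def test_insert_overwrite(self, vector):
#       ...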
| apache-2.0 | -2,121,602,511,979,583,200 | 44.90625 | 91 | 0.741661 | false |
unreal666/outwiker | src/outwiker/gui/controls/togglebutton.py | 3 | 9278 | # -*- coding: utf-8 -*-
import wx
from wx.lib.buttons import ThemedGenBitmapTextToggleButton
class ToggleButton(ThemedGenBitmapTextToggleButton):
def __init__(self,
parent,
id=-1,
bitmap=None,
label='',
pos=wx.DefaultPosition,
size=wx.DefaultSize,
style=0,
validator=wx.DefaultValidator,
name="togglebutton",
align=wx.ALIGN_LEFT):
super(ToggleButton, self).__init__(parent, id, bitmap,
label, pos, size,
style, validator, name)
self.colorNormal = wx.Colour(255, 255, 255)
self.colorToggled = wx.Colour(144, 195, 212)
self.colorShadow = wx.Colour(200, 200, 200)
self.colorBorder = wx.Colour(0, 0, 0)
self.colorBorderToggled = wx.Colour(0, 0, 255)
self.colorTextNormal = wx.Colour(0, 0, 0)
self.colorTextDisabled = wx.SystemSettings.GetColour(wx.SYS_COLOUR_GRAYTEXT)
self.colorTextToggled = wx.Colour(0, 0, 0)
self.toggleShiftX = 2
self.toggleShiftY = 2
self.roundRadius = 2
self.align = align
self.padding = 8
self.marginImage = 4
self._updateMinSize()
def _updateMinSize(self):
contentWidth = self.GetContentWidth()
self.SetMinSize((contentWidth + self.padding * 2 + self.toggleShiftX,
-1))
def GetColorNormal(self):
return self.colorNormal
def SetColorNormal(self, color):
self.colorNormal = color
self.Refresh()
def GetColorToggled(self):
return self.colorToggled
def SetColorToggled(self, color):
self.colorToggled = color
self.Refresh()
def GetColorShadow(self):
return self.colorShadow
def SetColorShadow(self, color):
self.colorShadow = color
self.Refresh()
def GetColorBorder(self):
return self.colorBorder
def SetColorBorder(self, color):
self.colorBorder = color
self.Refresh()
def GetColorBorderToggled(self):
return self.colorBorderToggled
def SetColorBorderToggled(self, color):
self.colorBorderToggled = color
self.Refresh()
def GetColorTextNormal(self):
return self.colorTextNormal
def SetColorTextNormal(self, color):
self.colorTextNormal = color
self.Refresh()
def GetColorTextDisabled(self):
return self.colorTextDisabled
def SetColorTextDisabled(self, color):
self.colorTextDisabled = color
self.Refresh()
def GetColorTextToggled(self):
return self.colorTextToggled
def SetColorTextToggled(self, color):
self.colorTextToggled = color
self.Refresh()
def GetAlign(self):
return self.align
def SetAlign(self, align):
self.align = align
self.Refresh()
def GetToggleShift(self):
return (self.toggleShiftX, self.toggleShiftY)
def SetToggleShift(self, shiftX, shiftY):
self.toggleShiftX = shiftX
self.toggleShiftY = shiftY
self.Refresh()
def GetRoundRadius(self):
return self.roundRadius
def SetRoundRadius(self, radius):
self.roundRadius = radius
self.Refresh()
def GetPadding(self):
return self.padding
def SetPadding(self, padding):
self.padding = padding
self._updateMinSize()
self.Refresh()
def GetMarginImage(self):
return self.marginImage
def SetMarginImage(self, margin):
self.marginImage = margin
self._updateMinSize()
self.Refresh()
def DrawFocusIndicator(self, dc, w, h):
bw = self.bezelWidth
textClr = self.GetForegroundColour()
focusIndPen = wx.Pen(textClr, 1, wx.USER_DASH)
focusIndPen.SetDashes([1, 1])
focusIndPen.SetCap(wx.CAP_BUTT)
if wx.Platform == "__WXMAC__":
dc.SetLogicalFunction(wx.XOR)
else:
focusIndPen.SetColour(self.focusClr)
dc.SetLogicalFunction(wx.INVERT)
dc.SetPen(focusIndPen)
dc.SetBrush(wx.TRANSPARENT_BRUSH)
if self.GetToggle():
shiftX = self.toggleShiftX
shiftY = self.toggleShiftY
else:
shiftX = 0
shiftY = 0
dc.DrawRoundedRectangle(bw+2 + shiftX,
bw+2 + shiftY,
w-bw*2-5 - self.toggleShiftX,
h-bw*2-5 - self.toggleShiftY,
self.roundRadius)
dc.SetLogicalFunction(wx.COPY)
def DrawBezel(self, dc, x1, y1, x2, y2):
brushBackground = wx.Brush(self.colorNormal)
penBackground = wx.Pen(self.colorNormal)
dc.SetBrush(brushBackground)
dc.SetPen(penBackground)
dc.DrawRectangle((0, 0), self.GetSize())
width_full = x2 - x1
height_full = y2 - y1
rect_width = width_full - self.toggleShiftX
rect_height = height_full - self.toggleShiftY
rect_x0 = 0 if not self.GetToggle() else self.toggleShiftX
rect_y0 = 0 if not self.GetToggle() else self.toggleShiftY
# Draw shadow
brushShadow = wx.Brush(self.colorShadow)
penShadow = wx.Pen(self.colorShadow)
dc.SetBrush(brushShadow)
dc.SetPen(penShadow)
dc.DrawRoundedRectangle(self.toggleShiftX, self.toggleShiftY,
rect_width, rect_height,
self.roundRadius)
# Draw button
color = self.colorToggled if self.GetToggle() else self.colorNormal
colorBorder = self.colorBorderToggled if self.GetToggle() else self.colorBorder
brush = wx.Brush(color)
pen = wx.Pen(colorBorder)
dc.SetBrush(brush)
dc.SetPen(pen)
dc.DrawRoundedRectangle(rect_x0, rect_y0,
rect_width, rect_height,
self.roundRadius)
dc.SetBrush(wx.NullBrush)
def _getBitmap(self):
bmp = self.bmpLabel
if bmp is not None:
# if the bitmap is used
if self.bmpDisabled and not self.IsEnabled():
bmp = self.bmpDisabled
if self.bmpFocus and self.hasFocus:
bmp = self.bmpFocus
if self.bmpSelected and not self.up:
bmp = self.bmpSelected
return bmp
def GetContentWidth(self):
bmp = self._getBitmap()
if bmp is not None:
bw = bmp.GetWidth()
else:
# no bitmap -> size is zero
bw = 0
label = self.GetLabel()
dc = wx.WindowDC(self)
dc.SetFont(self.GetFont())
# size of text
tw, th = dc.GetTextExtent(label)
contentWidth = bw + tw + self.marginImage
return contentWidth
def DrawLabel(self, dc, width, height, dx=0, dy=0):
if self.IsEnabled() and self.GetToggle():
dc.SetTextForeground(self.colorTextToggled)
elif self.IsEnabled():
dc.SetTextForeground(self.colorTextNormal)
else:
dc.SetTextForeground(self.colorTextDisabled)
bmp = self._getBitmap()
if bmp is not None:
bw, bh = bmp.GetWidth(), bmp.GetHeight()
hasMask = bmp.GetMask() is not None
else:
# no bitmap -> size is zero
bw = bh = 0
label = self.GetLabel()
dc.SetFont(self.GetFont())
# size of text
tw, th = dc.GetTextExtent(label)
if self.GetToggle():
dx = self.toggleShiftX
dy = self.toggleShiftY
contentWidth = bw + tw + self.marginImage
if self.align == wx.ALIGN_LEFT:
pos_x = self.padding + dx
elif self.align == wx.ALIGN_CENTER:
pos_x = (width - contentWidth - self.toggleShiftX) / 2 + dx
else:
assert False
if pos_x < self.padding + dx:
pos_x = self.padding + dx
if bmp is not None:
# draw bitmap if available
dc.DrawBitmap(bmp, pos_x, (height - bh) / 2 + dy, hasMask)
dc.DrawText(label, pos_x + bw + self.marginImage, (height-th)/2+dy)
class MyTestFrame(wx.Frame):
def __init__(self, parent, title):
wx.Frame.__init__(self, parent, wx.ID_ANY, title, size=(400, 300))
panel = wx.Panel(self)
# Build a bitmap button and a normal one
bmp = wx.ArtProvider.GetBitmap(wx.ART_INFORMATION, wx.ART_OTHER, (16, 16))
btn = ToggleButton(panel, -1, label=u'adsfasdf', pos=(10, 10))
btn.SetSize((150, 75))
btn2 = ToggleButton(panel, -1, bmp, label=u'adsfasdf', pos=(10, 110), align=wx.ALIGN_CENTER)
btn2.SetSize((150, 75))
btn3 = ToggleButton(panel, -1, bmp, label=u'adsfasdfadsf', pos=(10, 210), align=wx.ALIGN_CENTER)
btn3.SetSize(btn3.GetMinSize())
btn3.SetRoundRadius(0)
if __name__ == '__main__':
app = wx.App()
frame = MyTestFrame(None, 'ToggleButton Test')
frame.Show()
frame.SetSize((500, 600))
app.MainLoop()
| gpl-3.0 | 532,456,159,052,573,950 | 29.221498 | 104 | 0.573184 | false |
RobinQuetin/CAIRIS-web | cairis/cairis/PropertyDialog.py | 1 | 3948 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import wx
import armid
import WidgetFactory
class PropertyDialog(wx.Dialog):
def __init__(self,parent,setProperties,values):
wx.Dialog.__init__(self,parent,armid.PROPERTY_ID,'Add Security Property',style=wx.DEFAULT_DIALOG_STYLE|wx.MAXIMIZE_BOX|wx.THICK_FRAME|wx.RESIZE_BORDER,size=(400,300))
weights = {"Confidentiality":0,"Integrity":1,"Availability":2,"Accountability":3,"Anonymity":4,"Pseudonymity":5,"Unlinkability":6,"Unobservability":7}
self.thePropertyName = ''
self.thePropertyValue = ''
self.thePropertyRationale = 'None'
self.commitLabel = 'Add'
mainSizer = wx.BoxSizer(wx.VERTICAL)
# defaultProperties = set(['Confidentiality','Integrity','Availability','Accountability','Anonymity','Pseudonymity','Unlinkability','Unobservability'])
defaultProperties = set(weights.keys())
propertyList = sorted(list(defaultProperties.difference(setProperties)), key=lambda x:weights[x])
mainSizer.Add(WidgetFactory.buildComboSizerList(self,'Property',(87,30),armid.PROPERTY_COMBOPROPERTY_ID,propertyList),0,wx.EXPAND)
mainSizer.Add(WidgetFactory.buildComboSizerList(self,'Value',(87,30),armid.PROPERTY_COMBOVALUE_ID,values),0,wx.EXPAND)
mainSizer.Add(WidgetFactory.buildMLTextSizer(self,'Rationale',(87,60),armid.PROPERTY_TEXTRATIONALE_ID),1,wx.EXPAND)
mainSizer.Add(WidgetFactory.buildAddCancelButtonSizer(self,armid.PROPERTY_BUTTONADD_ID),0,wx.ALIGN_CENTER)
self.SetSizer(mainSizer)
wx.EVT_BUTTON(self,armid.PROPERTY_BUTTONADD_ID,self.onCommit)
def load(self,pName,pValue,pRationale):
propertyCtrl = self.FindWindowById(armid.PROPERTY_COMBOPROPERTY_ID)
valueCtrl = self.FindWindowById(armid.PROPERTY_COMBOVALUE_ID)
ratCtrl = self.FindWindowById(armid.PROPERTY_TEXTRATIONALE_ID)
commitCtrl = self.FindWindowById(armid.PROPERTY_BUTTONADD_ID)
commitCtrl.SetLabel('Edit')
propertyCtrl.SetValue(pName)
valueCtrl.SetValue(pValue)
ratCtrl.SetValue(pRationale)
self.commitLabel = 'Edit'
def onCommit(self,evt):
propertyCtrl = self.FindWindowById(armid.PROPERTY_COMBOPROPERTY_ID)
valueCtrl = self.FindWindowById(armid.PROPERTY_COMBOVALUE_ID)
ratCtrl = self.FindWindowById(armid.PROPERTY_TEXTRATIONALE_ID)
self.thePropertyName = propertyCtrl.GetValue()
self.thePropertyValue = valueCtrl.GetValue()
self.thePropertyRationale = ratCtrl.GetValue()
commitTxt = self.commitLabel + ' Security Property'
if len(self.thePropertyName) == 0:
dlg = wx.MessageDialog(self,'No property selected',commitTxt,wx.OK)
dlg.ShowModal()
dlg.Destroy()
return
elif (len(self.thePropertyValue) == 0):
dlg = wx.MessageDialog(self,'No value selected',commitTxt,wx.OK)
dlg.ShowModal()
dlg.Destroy()
return
elif (len(self.thePropertyRationale) == 0):
dlg = wx.MessageDialog(self,'No rationale',commitTxt,wx.OK)
dlg.ShowModal()
dlg.Destroy()
return
else:
self.EndModal(armid.PROPERTY_BUTTONADD_ID)
def property(self): return self.thePropertyName
def value(self): return self.thePropertyValue
def rationale(self): return self.thePropertyRationale
| apache-2.0 | 1,893,080,471,082,788,000 | 46.566265 | 170 | 0.744681 | false |
openslack/openslack-web | openslack/userena/contrib/umessages/templatetags/umessages_tags.py | 1 | 2642 | from django import template
from userena.contrib.umessages.models import MessageRecipient
import re
register = template.Library()
class MessageCount(template.Node):
def __init__(self, um_from_user, var_name, um_to_user=None):
self.user = template.Variable(um_from_user)
self.var_name = var_name
if um_to_user:
self.um_to_user = template.Variable(um_to_user)
else:
self.um_to_user = um_to_user
def render(self, context):
try:
user = self.user.resolve(context)
except template.VariableDoesNotExist:
return ''
if not self.um_to_user:
message_count = MessageRecipient.objects.count_unread_messages_for(user)
else:
try:
um_to_user = self.um_to_user.resolve(context)
except template.VariableDoesNotExist:
return ''
message_count = MessageRecipient.objects.count_unread_messages_between(user,
um_to_user)
context[self.var_name] = message_count
return ''
@register.tag
def get_unread_message_count_for(parser, token):
"""
Returns the unread message count for a user.
Syntax::
{% get_unread_message_count_for [user] as [var_name] %}
Example usage::
{% get_unread_message_count_for pero as message_count %}
"""
try:
tag_name, arg = token.contents.split(None, 1)
except ValueError:
raise template.TemplateSyntaxError("%s tag requires arguments" % token.contents.split()[0])
m = re.search(r'(.*?) as (\w+)', arg)
if not m:
raise template.TemplateSyntaxError("%s tag had invalid arguments" % tag_name)
user, var_name = m.groups()
return MessageCount(user, var_name)
@register.tag
def get_unread_message_count_between(parser, token):
"""
Returns the unread message count between two users.
Syntax::
{% get_unread_message_count_between [user] and [user] as [var_name] %}
Example usage::
{% get_unread_message_count_between funky and wunki as message_count %}
"""
try:
tag_name, arg = token.contents.split(None, 1)
except ValueError:
raise template.TemplateSyntaxError("%s tag requires arguments" % token.contents.split()[0])
m = re.search(r'(.*?) and (.*?) as (\w+)', arg)
if not m:
raise template.TemplateSyntaxError("%s tag had invalid arguments" % tag_name)
um_from_user, um_to_user, var_name = m.groups()
return MessageCount(um_from_user, var_name, um_to_user)
| apache-2.0 | 3,915,772,045,790,340,000 | 28.685393 | 99 | 0.604845 | false |
isaacmg/fb_scraper | get_posts.py | 1 | 1266 | from fb_scrapper import scrape_groups_pages
# To use our application you can use scrape_groups_pages or use one of our predefined functions
# Declare your group or page id here
#group_id = "canoeandkayak"
# Choose which scraping function you want to call
# This function is currently not properly working and will scrape all comments
def scrape_posts_last_es(group_id):
scrape_groups_pages(group_id, 0, False, True)
#def scrape_comments_from_last_scrape(group_id):
#scrape_groups_pages(group_id, 1, True, False)
# Scrape since the last time stamp for the id in the shelve file
def scrape_posts_from_last_scrape(group_id):
scrape_groups_pages(group_id, 1, False, False)
# You must have Kafka running on localhost:9092 or change the port in fb_posts_realtime.py
# Also be advised this may be buggy (we are still in the process of writing tests for it).
def scrape_posts_from_last_scrape_kafka(group_id):
scrape_groups_pages(group_id, 1, True, False)
def scrape_all_posts(group_id):
scrape_groups_pages(group_id, 0, False, False)
#def scrape_all_comments(group_id):
#scrape_groups_pages(group_id, 0, True, False)
#scrape_all_posts("paddlesoft")
#scrape_all_comments("paddlesoft")
#scrape_comments_from_last_scrape("176485839144245")
| apache-2.0 | 6,550,896,712,374,341,000 | 35.171429 | 95 | 0.748815 | false |
google-research/long-range-arena | lra_benchmarks/listops/configs/performer_base.py | 1 | 1100 | # Copyright 2021 Google LLC
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# https://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Configuration and hyperparameter sweeps."""
from lra_benchmarks.listops.configs import base_listops_config
from ml_collections import config_dict
def get_config():
"""Get the default hyperparameter configuration."""
config = base_listops_config.get_config()
config.model_type = "performer"
config.model_kwargs = config_dict.create(
attention_fn_cls="softmax",
attention_fn_kwargs=config_dict.create(ortho_scaling=0.0, nb_features=2))
return config
def get_hyper(hyper):
return hyper.product([])
| apache-2.0 | 4,178,187,234,934,423,600 | 33.375 | 79 | 0.752727 | false |
leyyin/university-PC | elearning/course/forms.py | 1 | 3054 | from django import forms
from django.contrib.admin import widgets
from django.db.models.query_utils import Q
from django.forms.extras.widgets import SelectDateWidget
from django.forms.widgets import HiddenInput, Textarea, DateInput
from django.forms import ModelForm, CheckboxSelectMultiple
from django.contrib.auth.models import Group
from elearning.models import Course, Subject,UserELearning, Assignment, AssignmentGroup
# https://docs.djangoproject.com/en/1.8/topics/forms/modelforms/
class SubjectModelChoiceField(forms.ModelChoiceField):
def label_from_instance(self, obj):
return obj.name
class TeacherModelChoiceField(forms.ModelChoiceField):
def label_from_instance(self, obj):
        return str(obj.id) + obj.name
class SimpleCourseForm(ModelForm):
class Meta:
model = Course
fields = ['name']
subject = SubjectModelChoiceField(queryset=Subject.objects.all(), empty_label=None)
class TeacherEditCourseForm(SimpleCourseForm):
students = forms.ModelMultipleChoiceField(required=False, widget=CheckboxSelectMultiple, queryset=UserELearning.objects.filter(user__groups__name='student'))
assistants = forms.ModelMultipleChoiceField(required=False, widget=CheckboxSelectMultiple, queryset=UserELearning.objects.filter(user__groups__name='assistant'))
def clean_students(self):
data = self.cleaned_data['students']
return data
def clean_assistants(self):
data = self.cleaned_data['assistants']
return data
class AssignmentForm(ModelForm):
class Meta:
model = Assignment
fields = ['name', 'description', 'deadline', 'type', 'group']
widgets = {
'description': Textarea(attrs={'cols': 80, 'rows': 7}),
'deadline': SelectDateWidget(
empty_label=("Choose Year", "Choose Month", "Choose Day"),
),
}
id = forms.HiddenInput()
group = forms.ModelChoiceField(required=True, queryset=AssignmentGroup.objects.all(), empty_label=None)
class ReadOnlyAssignmentForm(AssignmentForm):
def __init__(self, *args, **kwargs):
super(AssignmentForm, self).__init__(*args, **kwargs)
for key in self.fields.keys():
self.fields[key].widget.attrs['readonly'] = True
self.fields[key].widget.attrs['disabled'] = 'disabled'
widgets = {
'deadline': forms.CharField(),
}
class AssignStudentsForm(forms.Form):
def __init__(self, assignment, *args, **kwargs):
super(forms.Form, self).__init__(*args, **kwargs)
self.fields["students"] = forms.ModelMultipleChoiceField(required=False, widget=CheckboxSelectMultiple,
queryset=assignment.course.students.all())
def clean_students(self):
data = self.cleaned_data['students']
return data
class AdminEditCourseForm(TeacherEditCourseForm):
teacher = forms.ModelChoiceField(required=True, queryset=UserELearning.objects.filter(user__groups__name='teacher'), empty_label=None)
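
# Hedged usage sketch, not part of the original app: shows how AssignStudentsForm
# is meant to be constructed in a view. The assignment goes first so the form can
# scope its student choices to that course; 'request' and 'assignment' are assumed
# to come from the surrounding Django view code.
def _assign_students_view_snippet(request, assignment):
    form = AssignStudentsForm(assignment, request.POST or None)
    if form.is_valid():
        return form.cleaned_data['students']
    return form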
| mit | -862,088,765,838,127,400 | 35.795181 | 165 | 0.69057 | false |
harishvc/githubanalytics | MyMoment.py | 1 | 1651 | import datetime
from time import gmtime, strftime
import pytz
#Humanize time in milliseconds
#Reference: http://stackoverflow.com/questions/26276906/python-convert-seconds-from-epoch-time-into-human-readable-time
def HTM(a, context):
#print "Processing ....", a
b = int(datetime.datetime.now().strftime("%s"))
#print "Time NOW ...", b
c = b - a
#print "Time elapsed ...", c
days = c // 86400
hours = c // 3600 % 24
minutes = c // 60 % 60
seconds = c % 60
if (days > 0): return ( str(days) + " days " + context)
elif (hours > 0): return (str(hours) + " hours " + context)
elif (minutes > 0): return ( str(minutes) + " minutes " + context)
elif (seconds > 1): return (str(seconds) + " seconds " + context)
elif (seconds == 1): return (str(seconds) + " second " + context)
elif (seconds == 0): return ("< 1 second")
else: return ("Now ") #Error
#Humanize time in hours
def HTH(a):
b = int(datetime.datetime.now().strftime("%s"))
c = b - a
days = c // 86400
hours = c // 3600 % 24
return hours
#My Timestamp used in logfile
def MT():
fmt = '%Y-%m-%d %H:%M:%S'
return (datetime.datetime.now(pytz.timezone("America/Los_Angeles")).strftime(fmt))
#My Timestamp for filename
def FT():
fmt = '%d%b%Y-%H%M%S'
return ( datetime.datetime.now(pytz.timezone("America/Los_Angeles")).strftime(fmt) )
#Time now in epoch milliseconds
def TNEM():
return (int(datetime.datetime.now().strftime("%s")) * 1000)
#Time then (back in minutes) in epoch milliseconds
def TTEM(Back):
return (int(datetime.datetime.now().strftime("%s")) * 1000 - (Back * 60 * 1000))
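
# Hedged usage sketch, not part of the original module: exercises the helpers above.
# It assumes a platform where strftime("%s") works (e.g. Linux) and pytz installed,
# exactly as the functions themselves already require.
if __name__ == '__main__':
    an_hour_ago = int(datetime.datetime.now().strftime("%s")) - 3600
    print(HTM(an_hour_ago, "ago"))   # "1 hours ago"
    print(HTH(an_hour_ago))          # 1
    print(MT())                      # log-style timestamp
    print(FT())                      # filename-safe timestamp
    print(TNEM())                    # now, in epoch milliseconds
    print(TTEM(15))                  # 15 minutes ago, in epoch milliseconds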
| mit | 2,189,820,734,005,846,300 | 32.693878 | 119 | 0.621442 | false |
dpetzold/dakku | dakku/backup.py | 1 | 4832 | import datetime
import logging
import re
import socket
import subprocess
import os
from django.conf import settings
from . import exceptions as dakku_exception
logger = logging.getLogger(__name__)
class BackupBase(object):
def deletefile(self, date_str):
"""Given a date in YYYYMMDD check if the file should be deleted or keep"""
today = datetime.date.today()
date = datetime.date(int(date_str[:4]), int(date_str[4:6]), int(date_str[6:8]))
age = today - date
if age < datetime.timedelta(weeks=2):
if self.verbose:
print('keeping < 2 weeks')
return False
if age < datetime.timedelta(weeks=8) and date.weekday() == 0:
if self.verbose:
print('keeping monday')
return False
if date.day == 2:
if self.verbose:
print('keeping first of the month')
return False
return True
class BackupUtil(BackupBase):
def __init__(self, router, container_name, dry_run=False, verbose=False):
from .mysql import MysqlUtil
from .rackspace import RackspaceUtil
self.mysql = MysqlUtil(router, verbose)
self.rackspace = RackspaceUtil(container_name, verbose, dry_run=dry_run)
        self.verbose = verbose
        self.dry_run = dry_run
self.start_time = datetime.datetime.utcnow()
if not os.path.exists(settings.BACKUP_DIR):
os.mkdir(settings.BACKUP_DIR)
def _run_cmd(self, cmd, filepath, ext=None):
if self.verbose:
print(cmd)
subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
if ext is not None:
filepath += ext
filesize = os.stat(filepath).st_size
if filesize == 0:
raise dakku_exception.BadFileSize('Bad filesize for "%s"' % (filepath))
return filepath, filesize
def tar_directory(self, directory, prefix=None):
root, name = os.path.split(directory)
name = '%s.%s-%s.tar.bz2' % \
(name, self.start_time.strftime('%Y%m%d_%H%M%S'), socket.gethostname())
if prefix is not None:
backup_dir = '%s/%s' % (settings.BACKUP_DIR, prefix)
else:
backup_dir = settings.BACKUP_DIR
if not os.path.exists(backup_dir):
os.mkdir(backup_dir)
filepath = '%s/%s' % (backup_dir, name)
cmd = '/bin/tar cfj %s %s -C %s' % (filepath, directory, root)
return self._run_cmd(cmd, filepath)
def backup_database(self):
dbfile = self.mysql.dump()
uploaded = self.rackspace.store(dbfile, 'db')
logger.info('Uploaded %s to %s %s' % (dbfile, uploaded.name, uploaded.size))
if self.verbose:
print(uploaded.name)
os.unlink(dbfile)
return uploaded
def backup_site(self):
filepath, filesize = self.tar_directory(settings.SITE_ROOT, 'site')
if self.verbose:
print('%s %s' % (filepath, filesize))
uploaded = self.rackspace.store(filepath, 'site')
logger.info('Uploaded %s to %s %s' % (filepath, uploaded.name, uploaded.size))
if self.verbose:
print(uploaded.name)
return uploaded
def backup_all(self):
self.backup_database()
self.backup_site()
deletes = self.rackspace.cull()
for deleted in deletes:
logger.info('Deleted %s' % (deleted.name))
if self.verbose:
print('Deleted: %s' % (deleted.name))
        deletes = self.cull_local()
        for deleted in deletes:
            logger.info('Deleted %s' % (deleted,))
            if self.verbose:
                print('Deleted: %s' % (deleted,))
def restore(self, filename=None, remote=None):
self.mysql.dump()
self.mysql.drop()
self.mysql.create()
if remote is not None:
filename = self.rackspace.get(remote, settings.BACKUP_DIR)
return self.mysql.source(filename)
def list(self):
for obj in self.rackspace.list():
print('%s %s' % (obj.name, obj.size))
def cull_local(self):
culled = []
files = os.listdir(settings.BACKUP_DIR)
for filename in files:
filepath = '%s/%s/' % (settings.BACKUP_DIR, filename)
search = re.search(r'(\d{8})', filename)
if search is None:
continue
if self.deletefile(search.group(0)):
if self.verbose:
print('Deleting %s' % (filename))
if not self.dry_run:
os.unlink(filepath)
culled.append(filename)
elif self.verbose:
print('Keeping %s' % (filename))
return culled
def cull(self):
self.rackspace.cull()
        return self.cull_local()
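
# Hedged usage sketch, not part of the original module. It assumes Django settings
# providing BACKUP_DIR and SITE_ROOT, a database router name understood by
# MysqlUtil, and an existing Rackspace container; the two names below are
# placeholders, not real resources.
if __name__ == '__main__':
    util = BackupUtil('default', 'my-backups', dry_run=True, verbose=True)
    util.backup_all()   # dump the DB, tar the site, upload both, cull old copies
    util.list()         # show what is currently stored remotely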
| bsd-3-clause | -8,806,372,401,183,013,000 | 32.79021 | 87 | 0.569536 | false |
chripell/mytools | gimp/plugins/resizer.py | 1 | 1661 | #!/usr/bin/env python
import ConfigParser
import errno
from gimpfu import register, PF_INT16, pdb, main
from gimpenums import INTERPOLATION_CUBIC
import gimp
import os
def mkdir_p(path):
try:
os.makedirs(path)
except OSError as exc:
if exc.errno == errno.EEXIST and os.path.isdir(path):
pass
else:
raise
CONFIG_DIR = os.path.join(
os.path.expanduser("~"),
".config",
"GIMP_plugins")
mkdir_p(CONFIG_DIR)
CONFIG_FILE = os.path.join(
CONFIG_DIR,
"resizer_max")
config = ConfigParser.RawConfigParser()
config.read(CONFIG_FILE)
try:
msize = config.getint("sizes", "max")
except (ConfigParser.NoOptionError, ConfigParser.NoSectionError):
config.add_section("sizes")
config.set("sizes", "max", "1600")
msize = config.getint("sizes", "max")
def resizer(img, drawable, size_max):
gimp.context_push()
img.undo_group_start()
w = img.width
h = img.height
if w > h:
factor = float(size_max) / w
else:
factor = float(size_max) / h
pdb.gimp_image_scale_full(
img, w * factor, h * factor,
INTERPOLATION_CUBIC)
config.set("sizes", "max", size_max)
with open(CONFIG_FILE, 'wb') as configfile:
config.write(configfile)
img.undo_group_end()
gimp.context_pop()
register(
"python_resizer_max",
"Resize to a given maximum dimension",
"Resize to a given maximum dimension",
"[email protected]",
"Public Domain",
"2018",
"<Image>/Script-Fu/Resizer Max",
"RGB*, GRAY*",
[
(PF_INT16, "max_size", "Maximum size", msize),
],
[],
resizer)
main()
| apache-2.0 | -2,538,402,579,378,744,300 | 21.146667 | 65 | 0.615292 | false |
kurtraschke/camelot | camelot/view/controls/editors/wideeditor.py | 1 | 1420 | # ============================================================================
#
# Copyright (C) 2007-2010 Conceptive Engineering bvba. All rights reserved.
# www.conceptive.be / [email protected]
#
# This file is part of the Camelot Library.
#
# This file may be used under the terms of the GNU General Public
# License version 2.0 as published by the Free Software Foundation
# and appearing in the file license.txt included in the packaging of
# this file. Please review this information to ensure GNU
# General Public Licensing requirements will be met.
#
# If you are unsure which license is appropriate for your use, please
# visit www.python-camelot.com or contact [email protected]
#
# This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
# WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
#
# For use of this library in commercial applications, please contact
# [email protected]
#
# ============================================================================
class WideEditor(object):
"""Class signaling that an editor, is a wide editor, so it's label should be displayed
on top of the editor and the editor itself should take two columns::
class WideTextLineEditor(TextLineEditor, WideEditor):
pass
will generate a test line editor where the text line takes the whole with of the
form"""
| gpl-2.0 | 3,623,529,977,134,472,700 | 42.030303 | 90 | 0.675352 | false |
wuchaofan/collectsource | mobileinterest/settings.py | 1 | 2223 | """
Django settings for mobileinterest project.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.6/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.6/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'wdw3xjg7g9lc1nk&5867@=1th!3)^7+2#i$f++gzt*=8jo9+kq'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
# 'django.contrib.admin',
# 'django.contrib.auth',
# 'django.contrib.contenttypes',
'views',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'mobileinterest.urls'
WSGI_APPLICATION = 'mobileinterest.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.6/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
TEMPLATE_DIRS = (
BASE_DIR+'/templates',
)
STATICFILES_DIRS = (
os.path.join(BASE_DIR, "static"),
)
CACHE_BACKEND = os.path.join(BASE_DIR, 'django_cache') #'file:///var/tmp/django_cache'
# Internationalization
# https://docs.djangoproject.com/en/1.6/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'Asia/Shanghai'
USE_I18N = True
USE_L10N = True
USE_TZ = False
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.6/howto/static-files/
STATIC_URL = '/static/'
| gpl-2.0 | 5,870,867,587,422,026,000 | 23.163043 | 86 | 0.7148 | false |
jacol12345/TP-ankiety-web-app | mobilepolls/polls/models.py | 1 | 5421 | # -*- coding: utf-8 -*-
import json
from datetime import datetime
from django.core.urlresolvers import reverse
from django.db import models
from django.contrib.auth.models import User
from model_utils.managers import InheritanceManager
class PollManager(models.Manager):
def active(self):
        return self.filter(published=True).filter(expires__gte=datetime.now())
class Poll(models.Model):
MALE = 'M'
FEMALE = 'F'
NONE = 'N'
REQUIRED_GENDER_CHOICES = (
(MALE, 'Male'),
(FEMALE, 'Female'),
(NONE, 'None'),
)
title = models.CharField(max_length=200)
created = models.DateField()
expires = models.DateField()
required_gender = models.CharField(max_length=1, default=NONE,
choices=REQUIRED_GENDER_CHOICES)
required_student = models.BooleanField(default=False)
required_smoker = models.BooleanField(default=False)
required_employed = models.BooleanField(default=False)
published = models.BooleanField(default=True)
owner = models.ForeignKey(User)
objects = PollManager()
def get_absolute_url(self):
return reverse('show_poll', kwargs={'poll_id': self.id})
def get_cost(self):
return sum([
x.points for x in self.questions.all().select_subclasses()])
def get_completion_time(self):
return sum([x.time_to_complete for x
in self.questions.all().select_subclasses()])
def wrap_to_json(self):
return json.dumps(self.wrap_to_dict(), sort_keys=True, indent=4,
separators=(',', ': '))
def wrap_to_dict(self):
return {'poll_id': self.id,
'questions': [x.wrap_to_dict() for x in self.questions.all()],
'time_to_complete': self.get_completion_time(),
'points': self.get_cost(),
'title': self.title,
'author': self.owner.get_username(),}
def __unicode__(self):
return u'%s' % (self.title,)
class MobileUser(models.Model):
phone_id = models.TextField(max_length=100)
student = models.BooleanField(default=False)
employed = models.BooleanField(default=False)
male = models.BooleanField(default=False)
female = models.BooleanField(default=False)
smoking = models.BooleanField(default=False)
available_polls = models.ManyToManyField(Poll, null=True, blank=True, related_name='available_for_users')
answered_polls = models.ManyToManyField(Poll, null=True, blank=True, related_name='answered_by_users')
def __unicode__(self):
return u'%s' % (self.phone_id,)
def get_points(self):
return sum([x.get_cost() for x in self.answered_polls.all()])
class QuestionBase(models.Model):
'''
Answers are stored as a list of tuples:
[(1, "Yes"), (2, "No")]
'''
SINGLE = 'SC'
MULTI = 'MC'
STARS = 'ST'
RANK = 'RK'
QUESTION_TYPE_CHOICES = (
(SINGLE, 'Singe Choice Question'),
(MULTI, 'Mutliple Choice Question'),
(STARS, '1-5 Star Question'),
(RANK, 'Rank question'),
)
poll = models.ForeignKey(Poll, related_name='questions')
title = models.CharField(max_length=200)
available_answers = models.TextField(blank=True)
question_type = models.CharField(max_length=2, default=SINGLE,
choices=QUESTION_TYPE_CHOICES)
points = 0.0
time_to_complete = 0.0
objects = InheritanceManager()
def available_answers_to_list(self):
try:
return json.loads(self.available_answers)
except ValueError:
return ''
def list_to_available_answers(self, answers_list):
answers = [(i, answers_list[i]) for i in range(len(answers_list))]
self.available_answers = json.dumps(answers)
def wrap_to_dict(self):
return {'id': self.id,
'title': self.title,
'available_answers': self.available_answers_to_list(),
'question_type': self.question_type}
def __unicode__(self):
return u'%s' % (self.title,)
class AnswerBase(models.Model):
'''Answer single answer of respondent. Saves number of points for
each available_answer.
[(1, 0.0), (2, 0.5), (3, 0.5)]
'''
question = models.ForeignKey(QuestionBase)
respondent = models.ForeignKey(MobileUser)
answers = models.TextField()
objects = InheritanceManager()
def answers_to_list(self):
return json.loads(self.answers)
def list_to_answers(self, list_of_answers):
answers = list(list_of_answers)
self.answers = json.dumps(answers)
def __unicode__(self):
return u'answer to %s' % (self.question.title)
class SingleChoiceQuestion(QuestionBase):
points = 1.0
time_to_complete = 0.25
class MultipleChoiceQuestion(QuestionBase):
points = 1.5
time_to_complete = 0.5
class StarQuestion(QuestionBase):
points = 1.0
time_to_complete = 0.25
def available_answers_to_list(self):
return None
def list_to_available_answers(self, answers_list):
return None
def wrap_to_dict(self):
return {'id': self.id,
'title': self.title,
'available_answers': "",
'question_type': self.question_type}
class RankedQuestion(QuestionBase):
points = 2.0
time_to_complete = 0.75
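
# Hedged sketch, not part of the original app: demonstrates the JSON round-trip
# documented in QuestionBase above. It assumes a configured Django environment for
# the mobilepolls project; nothing is saved to the database.
def _available_answers_roundtrip_demo():
    question = SingleChoiceQuestion(title='Favourite colour?')
    question.list_to_available_answers(['Red', 'Green', 'Blue'])
    # available_answers now holds '[[0, "Red"], [1, "Green"], [2, "Blue"]]';
    # the tuples come back as lists after the JSON round-trip.
    return question.available_answers_to_list()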
| mit | 3,513,476,015,671,460,000 | 29.116667 | 109 | 0.617229 | false |
bretttegart/treadmill | tests/apptrace/zk_test.py | 1 | 14073 | """Unit test for Treadmill ZK apptrace module.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import unittest
import time
import sqlite3
import mock
import kazoo
import kazoo.client
from treadmill.apptrace import zk
from tests.testutils import mockzk
class AppTraceZKTest(mockzk.MockZookeeperTestCase):
"""Mock test for treadmill.apptrace.
"""
@mock.patch('kazoo.client.KazooClient.delete', mock.Mock())
@mock.patch('kazoo.client.KazooClient.create', mock.Mock())
@mock.patch('kazoo.client.KazooClient.exists', mock.Mock())
@mock.patch('kazoo.client.KazooClient.get', mock.Mock())
@mock.patch('kazoo.client.KazooClient.get_children', mock.Mock())
def test_trace_pruning(self):
"""Tests trace pruning.
"""
zk_content = {
'trace': {
'0001': {
'app1#001,1000.0,s1,service_running,uniq1.service1': {},
'app1#001,1001.0,s1,service_exited,uniq1.service1.0.0': {},
'app1#001,1002.0,s1,service_running,uniq1.service1': {},
'app1#001,1003.0,s1,service_exited,uniq1.service1.0.0': {},
'app1#001,1004.0,s1,service_running,uniq1.service1': {},
'app1#001,1005.0,s1,service_exited,uniq1.service1.0.0': {},
'app1#001,1006.0,s1,service_running,uniq1.service1': {},
},
'0002': {
'app1#002,1000.0,s1,service_running,uniq1.service1': {},
'app1#002,1001.0,s1,service_exited,uniq1.service1.0.0': {},
'app1#002,1002.0,s1,service_running,uniq1.service1': {},
'app1#002,1003.0,s1,service_exited,uniq1.service1.0.0': {},
},
'0003': {
'app1#003,1000.0,s1,service_running,uniq1.service1': {},
'app1#003,1001.0,s1,service_running,uniq1.service2': {},
'app1#003,1002.0,s1,service_running,uniq1.service3': {},
'app1#003,1003.0,s1,service_running,uniq1.service4': {},
'app1#003,1004.0,s1,service_running,uniq1.service5': {},
},
'0004': {
'app1#004,1000.0,s1,service_running,uniq1.service1': {},
'app1#004,1001.0,s2,service_running,uniq2.service1': {},
'app1#004,1002.0,s3,service_running,uniq3.service1': {},
'app1#004,1003.0,s4,service_running,uniq4.service1': {},
'app1#004,1004.0,s5,service_running,uniq5.service1': {},
},
},
}
self.make_mock_zk(zk_content)
zkclient = kazoo.client.KazooClient()
zk.prune_trace(zkclient, 4)
self.assertEqual(kazoo.client.KazooClient.delete.call_args_list, [
mock.call('/trace/0001/'
'app1#001,1002.0,s1,service_running,uniq1.service1'),
mock.call('/trace/0001/'
'app1#001,1001.0,s1,service_exited,uniq1.service1.0.0'),
mock.call('/trace/0001/'
'app1#001,1000.0,s1,service_running,uniq1.service1'),
])
@mock.patch('kazoo.client.KazooClient.delete', mock.Mock())
@mock.patch('kazoo.client.KazooClient.create', mock.Mock())
@mock.patch('kazoo.client.KazooClient.exists', mock.Mock())
@mock.patch('kazoo.client.KazooClient.get', mock.Mock())
@mock.patch('kazoo.client.KazooClient.get_children', mock.Mock())
@mock.patch('time.time', mock.Mock(return_value=1000))
@mock.patch('sqlite3.connect', mock.Mock())
def test_trace_cleanup(self):
"""Tests tasks cleanup.
"""
zk_content = {
'scheduled': {
'app1#0003': {},
'app1#0004': {},
},
'trace': {
'0001': {
'app1#0001,1000.00,s1,configured,2DqcoXnaIXEgy': {},
'app1#0001,1001.00,configured,2DqcoXnaIXEgy': {},
'app1#0001,1003.00,configured,2DqcoXnaIXEgy': {},
'app1#0001,1004.00,configured,2DqcoXnaIXEgy': {},
'app1#0001,1005.00,configured,2DqcoXnaIXEgy': {},
'app1#0001,1006.00,configured,2DqcoXnaIXEgy': {},
},
'0002': {
'app1#0002,1000.00,s1,configured,2DqcoXnaIXEgy': {},
'app1#0002,1001.00,configured,2DqcoXnaIXEgy': {},
'app1#0002,1003.00,configured,2DqcoXnaIXEgy': {},
'app1#0002,1004.00,configured,2DqcoXnaIXEgy': {},
'app1#0002,1005.00,configured,2DqcoXnaIXEgy': {},
'app1#0002,1006.00,configured,2DqcoXnaIXEgy': {},
},
'0003': {
'app1#0003,1000.00,s1,configured,2DqcoXnaIXEgy': {},
'app1#0003,1001.00,configured,2DqcoXnaIXEgy': {},
'app1#0003,1003.00,configured,2DqcoXnaIXEgy': {},
'app1#0003,1004.00,configured,2DqcoXnaIXEgy': {},
'app1#0003,1005.00,configured,2DqcoXnaIXEgy': {},
'app1#0003,1006.00,configured,2DqcoXnaIXEgy': {},
},
'0004': {
'app1#0004,1000.00,s1,configured,2DqcoXnaIXEgy': {},
'app1#0004,1001.00,configured,2DqcoXnaIXEgy': {},
'app1#0004,1003.00,configured,2DqcoXnaIXEgy': {},
'app1#0004,1004.00,configured,2DqcoXnaIXEgy': {},
'app1#0004,1005.00,configured,2DqcoXnaIXEgy': {},
'app1#0004,1006.00,configured,2DqcoXnaIXEgy': {},
},
},
'finished': {
},
'trace.history': {
},
'finished.history': {
},
}
self.make_mock_zk(zk_content)
zkclient = kazoo.client.KazooClient()
conn_mock = mock.MagicMock()
sqlite3.connect.return_value = conn_mock
# Current time - 1000, expiration - 3 seconds, there are < 10 events
# that are expired, nothing is uploaded.
zk.cleanup_trace(zkclient, 10, 3)
self.assertEqual(0, len(zk_content['trace.history']))
self.assertFalse(kazoo.client.KazooClient.create.called)
# There are twelve expired events, expect batch to be uploaded.
# Instances app1#0003 and 0004 are running and will not be included.
time.time.return_value = 1100
zk.cleanup_trace(zkclient, 10, 3)
conn_mock.executemany.assert_called_once_with(
"""
INSERT INTO trace (
path, timestamp, data, directory, name
) VALUES(?, ?, ?, ?, ?)
""",
[
('/trace/0001/app1#0001,1000.00,s1,configured,2DqcoXnaIXEgy',
1000.0,
None,
'/trace/0001',
'app1#0001,1000.00,s1,configured,2DqcoXnaIXEgy'),
('/trace/0002/app1#0002,1000.00,s1,configured,2DqcoXnaIXEgy',
1000.0,
None,
'/trace/0002',
'app1#0002,1000.00,s1,configured,2DqcoXnaIXEgy'),
('/trace/0001/app1#0001,1001.00,configured,2DqcoXnaIXEgy',
1001.0,
None,
'/trace/0001',
'app1#0001,1001.00,configured,2DqcoXnaIXEgy'),
('/trace/0002/app1#0002,1001.00,configured,2DqcoXnaIXEgy',
1001.0,
None,
'/trace/0002',
'app1#0002,1001.00,configured,2DqcoXnaIXEgy'),
('/trace/0001/app1#0001,1003.00,configured,2DqcoXnaIXEgy',
1003.0,
None,
'/trace/0001',
'app1#0001,1003.00,configured,2DqcoXnaIXEgy'),
('/trace/0002/app1#0002,1003.00,configured,2DqcoXnaIXEgy',
1003.0,
None,
'/trace/0002',
'app1#0002,1003.00,configured,2DqcoXnaIXEgy'),
('/trace/0001/app1#0001,1004.00,configured,2DqcoXnaIXEgy',
1004.0,
None,
'/trace/0001',
'app1#0001,1004.00,configured,2DqcoXnaIXEgy'),
('/trace/0002/app1#0002,1004.00,configured,2DqcoXnaIXEgy',
1004.0,
None,
'/trace/0002',
'app1#0002,1004.00,configured,2DqcoXnaIXEgy'),
('/trace/0001/app1#0001,1005.00,configured,2DqcoXnaIXEgy',
1005.0,
None,
'/trace/0001',
'app1#0001,1005.00,configured,2DqcoXnaIXEgy'),
('/trace/0002/app1#0002,1005.00,configured,2DqcoXnaIXEgy',
1005.0,
None,
'/trace/0002',
'app1#0002,1005.00,configured,2DqcoXnaIXEgy')
]
)
kazoo.client.KazooClient.create.assert_called_with(
'/trace.history/trace.db.gzip-',
mock.ANY,
acl=mock.ANY,
makepath=True, ephemeral=False, sequence=True,
)
self.assertEqual(10, kazoo.client.KazooClient.delete.call_count)
self.assertEqual(kazoo.client.KazooClient.delete.call_args_list, [
(('/trace/0001/app1#0001,1000.00,s1,configured,2DqcoXnaIXEgy',),),
(('/trace/0002/app1#0002,1000.00,s1,configured,2DqcoXnaIXEgy',),),
(('/trace/0001/app1#0001,1001.00,configured,2DqcoXnaIXEgy',),),
(('/trace/0002/app1#0002,1001.00,configured,2DqcoXnaIXEgy',),),
(('/trace/0001/app1#0001,1003.00,configured,2DqcoXnaIXEgy',),),
(('/trace/0002/app1#0002,1003.00,configured,2DqcoXnaIXEgy',),),
(('/trace/0001/app1#0001,1004.00,configured,2DqcoXnaIXEgy',),),
(('/trace/0002/app1#0002,1004.00,configured,2DqcoXnaIXEgy',),),
(('/trace/0001/app1#0001,1005.00,configured,2DqcoXnaIXEgy',),),
(('/trace/0002/app1#0002,1005.00,configured,2DqcoXnaIXEgy',),),
])
@mock.patch('kazoo.client.KazooClient.delete', mock.Mock())
@mock.patch('kazoo.client.KazooClient.create', mock.Mock())
@mock.patch('kazoo.client.KazooClient.exists', mock.Mock())
@mock.patch('kazoo.client.KazooClient.get', mock.Mock())
@mock.patch('kazoo.client.KazooClient.get_children', mock.Mock())
@mock.patch('time.time', mock.Mock(return_value=1000))
@mock.patch('sqlite3.connect', mock.Mock())
def test_finished_cleanup(self):
"""Tests tasks cleanup.
"""
data = b"{data: '1.0', host: foo, state: finished, when: '123.45'}\n"
zk_content = {
'trace': {
'0001': {
},
'0002': {
},
},
'finished': {
'app1#0001': {'.metadata': {'last_modified': 1000},
'.data': data},
'app1#0002': {'.metadata': {'last_modified': 1000},
'.data': data},
'app1#0003': {'.metadata': {'last_modified': 1000},
'.data': data},
'app1#0004': {'.metadata': {'last_modified': 1000},
'.data': data},
'app1#0005': {'.metadata': {'last_modified': 1000},
'.data': data},
'app1#0006': {'.metadata': {'last_modified': 1000},
'.data': data},
'app1#0007': {'.metadata': {'last_modified': 1000},
'.data': data},
},
'trace.history': {
},
'finished.history': {
},
}
self.make_mock_zk(zk_content)
zkclient = kazoo.client.KazooClient()
conn_mock = mock.MagicMock()
sqlite3.connect.return_value = conn_mock
zk.cleanup_finished(zkclient, 10, 3)
self.assertFalse(kazoo.client.KazooClient.create.called)
# Current time - 1000, expiration - 3 seconds, there are < 10 events
# that are expired, nothing is uploaded.
self.assertEqual(0, len(zk_content['finished.history']))
time.time.return_value = 1100
# There are twelve expired events, expect batch to be uploaded
zk.cleanup_finished(zkclient, 5, 3)
conn_mock.executemany.assert_called_once_with(
"""
INSERT INTO finished (
path, timestamp, data, directory, name
) VALUES(?, ?, ?, ?, ?)
""",
[
('/finished/app1#0001', 1000.0,
"{data: '1.0', host: foo, state: finished, when: '123.45'}\n",
'/finished', 'app1#0001'),
('/finished/app1#0002', 1000.0,
"{data: '1.0', host: foo, state: finished, when: '123.45'}\n",
'/finished', 'app1#0002'),
('/finished/app1#0003', 1000.0,
"{data: '1.0', host: foo, state: finished, when: '123.45'}\n",
'/finished', 'app1#0003'),
('/finished/app1#0004', 1000.0,
"{data: '1.0', host: foo, state: finished, when: '123.45'}\n",
'/finished', 'app1#0004'),
('/finished/app1#0005', 1000.0,
"{data: '1.0', host: foo, state: finished, when: '123.45'}\n",
'/finished', 'app1#0005')
]
)
kazoo.client.KazooClient.create.assert_called_with(
'/finished.history/finished.db.gzip-',
mock.ANY,
acl=mock.ANY,
makepath=True, ephemeral=False, sequence=True,
)
self.assertEqual(5, kazoo.client.KazooClient.delete.call_count)
if __name__ == '__main__':
unittest.main()
| apache-2.0 | -1,182,870,445,241,275,600 | 41.516616 | 79 | 0.51105 | false |
YcheLanguageStudio/PythonStudy | bioinformatics/dynamic_programming/global_alignment.py | 1 | 2764 | import numpy as np
def global_alignment(seq0, seq1):
def get_dp_table():
dp_score_table = np.ndarray(shape=(len(seq0) + 1, len(seq1) + 1), dtype=int)
dp_score_table.fill(0)
for col_idx in range(dp_score_table.shape[1]):
dp_score_table[0][col_idx] = (-1) * col_idx
for row_idx in range(dp_score_table.shape[0]):
dp_score_table[row_idx][0] = (-1) * row_idx
min_size = min(len(seq0), len(seq1))
def match_score(i, j):
return 1 if seq0[i - 1] == seq1[j - 1] else -1
def transition_computation(i, j):
gap_penalty = -1
diagonal_val = dp_score_table[i - 1][j - 1] + match_score(i, j)
right_val = dp_score_table[i][j - 1] + gap_penalty
down_val = dp_score_table[i - 1][j] + gap_penalty
dp_score_table[i][j] = max(diagonal_val, right_val, down_val)
for iter_num in xrange(1, min_size + 1):
transition_computation(iter_num, iter_num)
# move right
for col_idx in xrange(iter_num + 1, dp_score_table.shape[1]):
transition_computation(iter_num, col_idx)
# move down
for row_idx in xrange(iter_num + 1, dp_score_table.shape[0]):
transition_computation(row_idx, iter_num)
return dp_score_table
def traceback(table):
"""
:type table: np.ndarray
"""
gap_penalty = -1
def match_score(i, j):
return 1 if seq0[i - 1] == seq1[j - 1] else -1
def dfs_detail(row_idx, col_idx, level):
blank_str = ''.join([" "] * level)[:-1] + '|_' if level > 0 else 'root'
print '%-50s' % (blank_str + str((row_idx, col_idx))), 'score at', str((row_idx, col_idx)), ':', \
table[row_idx][col_idx]
if row_idx != 0 and col_idx != 0:
# diagonal
if table[row_idx - 1][col_idx - 1] + match_score(row_idx, col_idx) == table[row_idx][col_idx]:
dfs_detail(row_idx - 1, col_idx - 1, level + 1)
# down
if table[row_idx - 1][col_idx] + gap_penalty == table[row_idx][col_idx]:
dfs_detail(row_idx - 1, col_idx, level + 1)
# right
if table[row_idx][col_idx - 1] + gap_penalty == table[row_idx][col_idx]:
dfs_detail(row_idx, col_idx - 1, level + 1)
dfs_detail(table.shape[0] - 1, table.shape[1] - 1, 0)
dp_score_table = get_dp_table()
print 'global alignment table:\n', dp_score_table, '\n'
traceback(dp_score_table)
if __name__ == '__main__':
seq_str0 = 'GGTTGACTA'
seq_str1 = 'TGTTACGG'
global_alignment(seq0=seq_str1, seq1=seq_str0)
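    # Hedged extra check with the same +1/-1/-1 scoring: aligning 'GAT' with 'GT'
    # should finish with 1 in the bottom-right cell (two matches plus one gap).
    global_alignment(seq0='GAT', seq1='GT')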
| mit | 6,954,767,011,223,040,000 | 37.929577 | 110 | 0.517366 | false |
wizzard/sdk | tests/sync_test_megacli.py | 1 | 5819 | """
Application for testing syncing algorithm
(c) 2013-2014 by Mega Limited, Wellsford, New Zealand
This file is part of the MEGA SDK - Client Access Engine.
Applications using the MEGA API must present a valid application key
and comply with the rules set forth in the Terms of Service.
The MEGA SDK is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
@copyright Simplified (2-clause) BSD License.
You should have received a copy of the license along with this
program.
"""
import sys
import os
import time
import shutil
import unittest
import xmlrunner
import subprocess
import re
from sync_test_app import SyncTestApp
from sync_test import SyncTest
import logging
import argparse
class SyncTestMegaCliApp(SyncTestApp):
"""
operates with megacli application
"""
def __init__(self, local_mount_in, local_mount_out, delete_tmp_files=True, use_large_files=True, check_if_alive=True):
"""
local_mount_in: local upsync folder
local_mount_out: local downsync folder
"""
self.work_dir = os.path.join(".", "work_dir")
SyncTestApp.__init__(self, local_mount_in, local_mount_out, self.work_dir, delete_tmp_files, use_large_files)
self.check_if_alive = check_if_alive
def sync(self):
time.sleep(5)
def start(self):
# try to create work dir
return True
def finish(self):
try:
shutil.rmtree(self.work_dir)
except OSError, e:
logging.error("Failed to remove dir: %s (%s)" % (self.work_dir, e))
def is_alive(self):
"""
return True if application instance is running
"""
if not self.check_if_alive:
return True
s = subprocess.Popen(["ps", "axw"], stdout=subprocess.PIPE)
for x in s.stdout:
if re.search("megacli", x):
return True
return False
def pause(self):
"""
pause application
"""
# TODO: implement this !
raise NotImplementedError("Not Implemented !")
def unpause(self):
"""
unpause application
"""
# TODO: implement this !
raise NotImplementedError("Not Implemented !")
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument("--test1", help="test_create_delete_files", action="store_true")
parser.add_argument("--test2", help="test_create_rename_delete_files", action="store_true")
parser.add_argument("--test3", help="test_create_delete_dirs", action="store_true")
parser.add_argument("--test4", help="test_create_rename_delete_dirs", action="store_true")
parser.add_argument("--test5", help="test_sync_files_write", action="store_true")
parser.add_argument("--test6", help="test_local_operations", action="store_true")
parser.add_argument("--test7", help="test_update_mtime", action="store_true")
parser.add_argument("--test8", help="test_create_rename_delete_unicode_files_dirs", action="store_true")
parser.add_argument("-a", "--all", help="run all tests", action="store_true")
parser.add_argument("-b", "--basic", help="run basic, stable tests", action="store_true")
parser.add_argument("-d", "--debug", help="use debug output", action="store_true")
parser.add_argument("-l", "--large", help="use large files for testing", action="store_true")
parser.add_argument("-n", "--nodelete", help="Do not delete work files", action="store_false")
parser.add_argument("-c", "--check", help="Do not check if megacli is running (useful, if other application is used for testing)", action="store_false")
parser.add_argument("upsync_dir", help="local upsync directory")
parser.add_argument("downsync_dir", help="local downsync directory")
args = parser.parse_args()
if args.debug:
lvl = logging.DEBUG
else:
lvl = logging.INFO
if args.all:
args.test1 = args.test2 = args.test3 = args.test4 = args.test5 = args.test6 = args.test7 = args.test8 = True
if args.basic:
args.test1 = args.test2 = args.test3 = args.test4 = True
logging.StreamHandler(sys.stdout)
logging.basicConfig(format='[%(asctime)s] %(message)s', datefmt='%Y-%m-%d %H:%M:%S', level=lvl)
logging.info("")
logging.info("1) Start the first [megacli] and run the following command: sync " + args.upsync_dir + " [remote folder]")
logging.info("2) Start the second [megacli] and run the following command: sync " + args.downsync_dir + " [remote folder]")
logging.info("3) Wait for both folders get fully synced")
logging.info("4) Run: python %s", sys.argv[0])
logging.info("")
time.sleep(5)
with SyncTestMegaCliApp(args.upsync_dir, args.downsync_dir, args.nodelete, args.large, args.check) as app:
suite = unittest.TestSuite()
if args.test1:
suite.addTest(SyncTest("test_create_delete_files", app))
if args.test2:
suite.addTest(SyncTest("test_create_rename_delete_files", app))
if args.test3:
suite.addTest(SyncTest("test_create_delete_dirs", app, ))
if args.test4:
suite.addTest(SyncTest("test_create_rename_delete_dirs", app))
if args.test5:
suite.addTest(SyncTest("test_sync_files_write", app))
if args.test6:
suite.addTest(SyncTest("test_local_operations", app))
if args.test7:
suite.addTest(SyncTest("test_update_mtime", app))
if args.test8:
suite.addTest(SyncTest("test_create_rename_delete_unicode_files_dirs", app))
testRunner = xmlrunner.XMLTestRunner(output='test-reports')
testRunner.run(suite)
| bsd-2-clause | 4,365,006,259,359,789,000 | 35.829114 | 156 | 0.647534 | false |
elitegreg/mudpy | mudpy/gameproperty.py | 1 | 1060 | # Game properties should be registered at import time
class GameProperty:
__slots__ = ('__propname', '__default', '__readonly')
def __init__(self, propname, default=None, readonly=False, tmp=False):
if not tmp:
self.__propname = '_GameProperty_' + propname
else:
self.__propname = '_TempGameProperty_' + propname
self.__default = default
self.__readonly = readonly
def __get__(self, obj, type=None):
return getattr(obj, self.__propname, self.__default)
def __set__(self, obj, value):
if self.__readonly:
raise AttributeError("{} is readonly".format(self.__propname))
obj._Object__propdict[self.__propname] = value
def __delete__(self, obj):
if self.__readonly:
raise AttributeError("{} is readonly".format(self.__propname))
delattr(obj, self.__propname)
def add_gameproperty(klass, propname, default=None, readonly=False, tmp=False):
setattr(klass, propname, GameProperty(propname, default, readonly, tmp))
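
# Hedged usage sketch, not part of the module. GameProperty.__set__ stores values
# in the owner's name-mangled '_Object__propdict' dict while __get__ reads ordinary
# attributes, so the host class below is a minimal stand-in named Object that
# bridges the two; the real mudpy Object class is assumed to do something similar.
class Object:
    def __init__(self):
        self.__propdict = {}  # name-mangled to _Object__propdict

    def __getattr__(self, name):
        # let GameProperty.__get__ see values stored by GameProperty.__set__
        try:
            return self.__dict__['_Object__propdict'][name]
        except KeyError:
            raise AttributeError(name)


add_gameproperty(Object, 'hp', default=10)

if __name__ == '__main__':
    orc = Object()
    print(orc.hp)   # 10: default, nothing stored yet
    orc.hp = 25     # GameProperty.__set__ writes into orc._Object__propdict
    print(orc.hp)   # 25: resolved through Object.__getattr__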
| gpl-3.0 | -9,018,803,080,407,469,000 | 34.333333 | 79 | 0.614151 | false |
GlobalFishingWatch/vessel-classification | classification/metadata_test.py | 1 | 6965 | # Copyright 2017 Google Inc. and Skytruth Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import csv
import numpy as np
from . import metadata
import tensorflow as tf
from datetime import datetime
import six
class VesselMetadataFileReaderTest(tf.test.TestCase):
raw_lines = [
'id,label,length,split,idhash\n',
'100001,drifting_longlines,10.0,Test,2\n',
'100002,drifting_longlines,24.0,Training,3\n',
'100003,drifting_longlines,7.0,Training,4\n',
'100004,drifting_longlines,8.0,Test,5\n',
'100005,trawlers,10.0,Test,6\n',
'100006,trawlers,24.0,Test,7\n',
'100007,passenger,24.0,Training,8\n',
'100008,trawlers,24.0,Training,9\n',
'100009,trawlers,10.0,Test,10\n',
'100010,trawlers,24.0,Training,11\n',
'100011,tug,60.0,Test,12\n',
'100012,tug,5.0,Training,13\n',
'100014,tug,24.0,Test,14\n',
'100013,tug|trawlers,5.0,Training,15\n',
]
fishing_range_dict = {
b'100001': [metadata.FishingRange(
datetime(2015, 3, 1), datetime(2015, 3, 2), 1.0)],
b'100002': [metadata.FishingRange(
datetime(2015, 3, 1), datetime(2015, 3, 2), 1.0)],
b'100003': [metadata.FishingRange(
datetime(2015, 3, 1), datetime(2015, 3, 2), 1.0)],
b'100004': [metadata.FishingRange(
datetime(2015, 3, 1), datetime(2015, 3, 2), 1.0)],
b'100005': [metadata.FishingRange(
datetime(2015, 3, 1), datetime(2015, 3, 2), 1.0)],
b'100006': [metadata.FishingRange(
datetime(2015, 3, 1), datetime(2015, 3, 2), 1.0)],
b'100007': [metadata.FishingRange(
datetime(2015, 3, 1), datetime(2015, 3, 2), 1.0)],
b'100008': [metadata.FishingRange(
datetime(2015, 3, 1), datetime(2015, 3, 2), 1.0)],
b'100009':
[metadata.FishingRange(datetime(2015, 3, 1), datetime(2015, 3, 4), 1.0)
], # Thrice as much fishing
b'100010': [],
b'100011': [],
b'100012': [],
b'100013': [],
}
def test_metadata_file_reader(self):
parsed_lines = csv.DictReader(self.raw_lines)
available_vessels = set(six.ensure_binary(str(x)) for x in range(100001, 100014))
result = metadata.read_vessel_multiclass_metadata_lines(
available_vessels, parsed_lines, {})
# First one is test so weighted as 1 for now
self.assertEqual(1.0, result.vessel_weight(b'100001'))
self.assertEqual(1.118033988749895, result.vessel_weight(b'100002'))
self.assertEqual(1.0, result.vessel_weight(b'100008'))
self.assertEqual(1.2909944487358056, result.vessel_weight(b'100012'))
self.assertEqual(1.5811388300841898, result.vessel_weight(b'100007'))
self.assertEqual(1.1454972243679027, result.vessel_weight(b'100013'))
self._check_splits(result)
def test_fixed_time_reader(self):
parsed_lines = csv.DictReader(self.raw_lines)
available_vessels = set(six.ensure_binary(str(x)) for x in range(100001, 100014))
result = metadata.read_vessel_time_weighted_metadata_lines(
available_vessels, parsed_lines, self.fishing_range_dict,
'Test')
self.assertEqual(1.0, result.vessel_weight(b'100001'))
self.assertEqual(1.0, result.vessel_weight(b'100002'))
self.assertEqual(3.0, result.vessel_weight(b'100009'))
self.assertEqual(0.0, result.vessel_weight(b'100012'))
self._check_splits(result)
def _check_splits(self, result):
self.assertTrue('Training' in result.metadata_by_split)
self.assertTrue('Test' in result.metadata_by_split)
self.assertTrue('passenger', result.vessel_label('label', b'100007'))
print(result.metadata_by_split['Test'][b'100001'][0])
self.assertEqual(result.metadata_by_split['Test'][b'100001'][0],
{'label': 'drifting_longlines',
'length': '10.0',
'id': '100001',
'split': 'Test',
'idhash' : '2'})
self.assertEqual(result.metadata_by_split['Test'][b'100005'][0],
{'label': 'trawlers',
'length': '10.0',
'id': '100005',
'split': 'Test',
'idhash' : '6'})
self.assertEqual(result.metadata_by_split['Training'][b'100002'][0],
{'label': 'drifting_longlines',
'length': '24.0',
'id': '100002',
'split': 'Training',
'idhash' : '3'})
self.assertEqual(result.metadata_by_split['Training'][b'100003'][0],
{'label': 'drifting_longlines',
'length': '7.0',
'id': '100003',
'split': 'Training',
'idhash' : '4'})
def _get_metadata_files():
from pkg_resources import resource_filename
for name in ["training_classes.csv"]:
# TODO: rework to test encounters as well.
yield os.path.abspath(resource_filename('classification.data', name))
class MetadataConsistencyTest(tf.test.TestCase):
def test_metadata_consistency(self):
for metadata_file in _get_metadata_files():
self.assertTrue(os.path.exists(metadata_file))
# By putting '' in these sets we can safely remove it later
labels = set([''])
for row in metadata.metadata_file_reader(metadata_file):
label_str = row['label']
for lbl in label_str.split('|'):
labels.add(lbl.strip())
labels.remove('')
expected = set([lbl for (lbl, _) in metadata.VESSEL_CATEGORIES])
assert expected >= labels, (expected - labels, labels - expected)
class MultihotLabelConsistencyTest(tf.test.TestCase):
def test_fine_label_consistency(self):
names = []
for coarse, fine_list in metadata.VESSEL_CATEGORIES:
for fine in fine_list:
if fine not in names:
names.append(fine)
self.assertEqual(
sorted(names), sorted(metadata.VESSEL_CLASS_DETAILED_NAMES))
if __name__ == '__main__':
tf.test.main()
| apache-2.0 | 7,179,996,861,872,246,000 | 40.706587 | 89 | 0.576741 | false |
RIFTIO/rift.ware-descriptor-packages | 4.3/src/vnfd/ping_vnf/scripts/ping_set_rate.py | 1 | 3804 | #!/usr/bin/env python3
############################################################################
# Copyright 2017 RIFT.IO Inc #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); #
# you may not use this file except in compliance with the License. #
# You may obtain a copy of the License at #
# #
# http://www.apache.org/licenses/LICENSE-2.0 #
# #
# Unless required by applicable law or agreed to in writing, software #
# distributed under the License is distributed on an "AS IS" BASIS, #
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
# See the License for the specific language governing permissions and #
# limitations under the License. #
############################################################################
import argparse
import logging
import os
import subprocess
import sys
import time
import yaml
def ping_set_rate(yaml_cfg, logger):
'''Use curl and set traffic rate on ping vnf'''
def set_rate(mgmt_ip, port, rate):
curl_cmd = '''curl -D /dev/null \
-H "Accept: application/vnd.yang.data+xml" \
-H "Content-Type: application/vnd.yang.data+json" \
-X POST \
-d "{{ \\"rate\\":{ping_rate} }}" \
http://{ping_mgmt_ip}:{ping_mgmt_port}/api/v1/ping/rate
'''.format(ping_mgmt_ip=mgmt_ip,
ping_mgmt_port=port,
ping_rate=rate)
logger.debug("Executing cmd: %s", curl_cmd)
subprocess.check_call(curl_cmd, shell=True)
# Get the ping rate
rate = yaml_cfg['parameter']['rate']
# Set ping rate
for index, vnfr in yaml_cfg['vnfr'].items():
logger.debug("VNFR {}: {}".format(index, vnfr))
        # Check if it is the ping vnf
if 'ping_vnfd' in vnfr['name']:
vnf_type = 'ping'
port = 18888
set_rate(vnfr['mgmt_ip_address'], port, rate)
break
def main(argv=sys.argv[1:]):
try:
parser = argparse.ArgumentParser()
parser.add_argument("yaml_cfg_file", type=argparse.FileType('r'))
parser.add_argument("-q", "--quiet", dest="verbose", action="store_false")
args = parser.parse_args()
run_dir = os.path.join(os.environ['RIFT_INSTALL'], "var/run/rift")
if not os.path.exists(run_dir):
os.makedirs(run_dir)
log_file = "{}/ping_set_rate-{}.log".format(run_dir, time.strftime("%Y%m%d%H%M%S"))
logging.basicConfig(filename=log_file, level=logging.DEBUG)
logger = logging.getLogger()
except Exception as e:
print("Exception in {}: {}".format(__file__, e))
sys.exit(1)
try:
ch = logging.StreamHandler()
if args.verbose:
ch.setLevel(logging.DEBUG)
else:
ch.setLevel(logging.INFO)
# create formatter and add it to the handlers
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
ch.setFormatter(formatter)
logger.addHandler(ch)
except Exception as e:
logger.exception(e)
raise e
try:
yaml_str = args.yaml_cfg_file.read()
# logger.debug("Input YAML file:\n{}".format(yaml_str))
yaml_cfg = yaml.load(yaml_str)
logger.debug("Input YAML: {}".format(yaml_cfg))
ping_set_rate(yaml_cfg, logger)
except Exception as e:
logger.exception(e)
raise e
if __name__ == "__main__":
main()
| apache-2.0 | -337,360,539,746,339,460 | 33.899083 | 93 | 0.518139 | false |
jmartinz/pyCrawler | 10.contratacionE/pce_extrae_contratos.py | 1 | 6399 | # coding=utf-8
# -*- coding: utf-8 -*-
from selenium import webdriver
from selenium.common.exceptions import NoSuchElementException, TimeoutException
from bs4 import BeautifulSoup
import sys
#phantonPath = "/home/jmartinz/00.py/phantomjs/phantomjs"
phantonPath = "../phantomjs/phantomjs"
contratacionPage = "https://contrataciondelestado.es/wps/portal/!ut/p/b1/lZDLDoIwEEU_aaYParssrwLxAVZQujEsjMH42Bi_30rcGCPq7CZz7pzkgoOWKC6kYBPYgDt3t37fXfvLuTs-die2PFlEUZpRlJbFSKdxXYvMrybwQOsB_DAah3xopdQh0YislqhFVUXK_0HFnvmARbwpmlLY3CDmWRpPaxKgoeI3_4jgxW_sjPhzwkRAkRhLn_mPAvqn_13wJb8GNyBjDQzAWMXjEgrz7HLaQeuxyVY3SaVzxXARLj1WlLNVaShB5LCCNoGTO6Z-VH7g3R2UoLEz/dl4/d5/L2dBISEvZ0FBIS9nQSEh/pw/Z7_AVEQAI930OBRD02JPMTPG21004/act/id=0/p=javax.servlet.include.path_info=QCPjspQCPbusquedaQCPBusquedaVIS_UOE.jsp/299420689304/-/"
#contratacionPage="https://contrataciondelestado.es"
""" Móudlo para extraer datos de la página de contatación
del estado
"""
class Contratos():
""" Clase que devuelve los contratos de un ministerio entre unas fechas usando el dirver que se indique
driverType=1 (Firefox, online) / 2(phantomjs)
ministry:
6: MAGRAMA
7: MAExCoop
8. MDEfensa
9: MINECO
10:MEDCD
11:MESS
12:MFOM
13:MINHAP
14:MINET
15:MINJUS
16:MINPRES
17:MSSSI
18:MinTraInm
19:MinInt
20: Presidencia Gobierno
fini: dd-mm-aaaa
ffin: dd-mm-aaaa
"""
driver = "" #webdriver.PhantomJS(phantonPath, service_args=['--ignore-ssl-errors=true'])
driverType=1
expedientes =[]
ministerio = 'tafelTree_maceoArbol_id_'
ministry=0
fIni= '01-01-2015'
fFin='10-01-2015'
nContratos = 0
nPagTotal = 0
def __init__(self, driverType=1, ministry='17', fini='01-01-2015',ffin='10-01-2015'):
self.driverType=driverType
self.ministry = ministry
if driverType==1:
self.driver = webdriver.Firefox()
elif driverType==2:
self.driver = webdriver.PhantomJS(phantonPath, service_args=['--ignore-ssl-errors=true'])
self.driver.set_window_size(1120, 550)
self.ministerio = self.ministerio + ministry
self.fIni = fini
self.fFin = ffin
# self.debugPhanton()
self.extraecontratos()
def cargaPagina(self):
        # Load the page
if self.driverType==2:
self.driver.implicitly_wait(10)
self.driver.set_page_load_timeout(10)
try:
self.driver.get(contratacionPage)
        except TimeoutException as e:
            # Handle the page-load timeout here
print(e)
def debugPhanton(self):
self.cargaPagina()
# check phantomjs
print(self.driver.page_source)
def extraecontratos(self):
self.cargaPagina()
        # Select the ministry
        self.driver.find_element_by_id('viewns_Z7_AVEQAI930OBRD02JPMTPG21004_:form1:idSeleccionarOCLink').click() # Contracting organisation -> select
        self.driver.find_elements_by_class_name('tafelTreeopenable')[1].click()   # Select AGE (central government)
        self.driver.find_element_by_id(self.ministerio).click()  # Select the ministry passed as a parameter
self.driver.find_element_by_id('viewns_Z7_AVEQAI930OBRD02JPMTPG21004_:form1:botonAnadirMostrarPopUpArbolEO').click()
        # the site added an "add" button, so it has to be clicked as well
self.driver.find_element_by_id('viewns_Z7_AVEQAI930OBRD02JPMTPG21004_:form1:botonAnadirMostrarPopUpArbolEO').click()
        # Publication date between fIni
fDesde = self.driver.find_element_by_id('viewns_Z7_AVEQAI930OBRD02JPMTPG21004_:form1:textMinFecAnuncioMAQ2')
fDesde.send_keys(self.fIni)
        # and fFin
fHasta = self.driver.find_element_by_id('viewns_Z7_AVEQAI930OBRD02JPMTPG21004_:form1:textMaxFecAnuncioMAQ')
fHasta.send_keys(self.fFin)
        # Click the search button
self.driver.find_element_by_id('viewns_Z7_AVEQAI930OBRD02JPMTPG21004_:form1:button1').click()
        # Get the total number of results
self.nContratos=self.driver.find_element_by_id('viewns_Z7_AVEQAI930OBRD02JPMTPG21004_:form1:textfooterTotalTotalMAQ').text
        # and the total number of pages
self.nPagTotal = self.driver.find_element_by_id('viewns_Z7_AVEQAI930OBRD02JPMTPG21004_:form1:textfooterInfoTotalPaginaMAQ').text
        # Iterate over every page of results
        while True: # runs until the "siguiente" (next) link no longer exists
nPag = self.driver.find_element_by_id('viewns_Z7_AVEQAI930OBRD02JPMTPG21004_:form1:textfooterInfoNumPagMAQ').text
            # Extract the records (expedientes) from the current page
html_page = self.driver.page_source
soup = BeautifulSoup(html_page, "html5lib")
# tableExp = soup.find("table", { "id" : "myTablaBusquedaCustom" })
#
# expedientes_pag = [c.text for c in soup.findAll('td', {'class':'tdExpediente'})]
expedientes_pag = []
            # Keep only the rows that correspond to contracts
for row in soup.findAll("tr", {'class': ['rowClass1', 'rowClass2']}):
expedientes_pag.append(row)
            # Append them to the overall list of records
self.expedientes.extend(expedientes_pag)
            # Click the "next" link; if it is not found, leave the loop
try:
enlaceSiguiente= self.driver.find_element_by_id('viewns_Z7_AVEQAI930OBRD02JPMTPG21004_:form1:footerSiguiente')
enlaceSiguiente.click()
except NoSuchElementException:
break
        # Close the driver
self.driver.quit()
# Only meant to check that it works
def main():
contratosMSSSI=Contratos(driverType=2)
print(contratosMSSSI.nContratos)
print(contratosMSSSI.nPagTotal)
    # Open the output file
f = open('workfile', 'w')
for exp in contratosMSSSI.expedientes:
f.write(exp.encode("UTF-8")+ "\n")
f.close()
if __name__ == "__main__":
sys.exit(main())
| apache-2.0 | -4,426,364,085,573,958,000 | 36.757396 | 514 | 0.63501 | false |
project-asap/IReS-Platform | asap-tools/monitoring/reporter_cli.py | 1 | 5614 | #!/usr/bin/env python
from cement.core import foundation, controller
from lib import get_backend, set_backend
from monitor import *
from pprint import PrettyPrinter
pprint = PrettyPrinter(indent=2).pprint
backend = get_backend()
def my_split(p):
"""
splits args based on '=' delim
:return:
"""
if p is None: return {}
delim = '='
def mini_split(t):
splitted = t.split(delim)
if len(splitted)<2:
raise Exception("could not split '{0}' based on '{1}'".format(t, delim))
return splitted
return dict(map(mini_split, p))
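# Illustrative example: my_split(['backend=sqlitebackend', 'file=my_database.db'])
# returns {'backend': 'sqlitebackend', 'file': 'my_database.db'}; my_split(None)
# returns {}, and a token without '=' raises the exception above.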
# define an application base controller
class MyAppBaseController(controller.CementBaseController):
class Meta:
label = 'base'
description = "My Application does amazing things!"
# the arguments recieved from command line
arguments = [
            (['-r', '--retrieve-monitoring'], dict(action='store_true', help='retrieve the monitoring metrics from their temp file')),
(['-m', '--metrics'], dict(action='store', help='the metrics to report', nargs='*')),
(['-b', '--backend'], dict(action='store', help='the backend configuration parameters', nargs='*')),
(['-e', '--experiment-name'], dict(action='store', help='the name of the reported experiment')),
(['-q', '--query'], dict(action='store', help='the query to execute in the backend storage system')),
(['-pp', '--plot-params'], dict(action='store', help='parameters of the plot', nargs='*')),
(['-dict',], dict(action='store_true', help='get the query result in a dict')),
(['-cm', '--collect-metrics'], dict(action='store_true', help='collect the metrics of an active monitoring process')),
            (['-cs', '--collect-streaming-metrics'], dict(action='store_true', help='collect the metrics of a finished streaming experiment'))
]
@controller.expose(hide=True, aliases=['run'])
def default(self):
self.app.log.error('You need to choose one of the options, or -h for help.')
@controller.expose(help='show examples of execution')
def show_examples(self):
print \
"""
# set a sqlite reporting backend with a specific sqlite file
./reporter_cli.py set-backend -b backend=sqlitebackend file=my_database.db
# Report, for experiment 'my_experiment', some metrics and their values
./reporter_cli.py report -e my_experiment -m metric1=test metric2=2
# plot a timeline of metric 'my_metric'
./reporter_cli.py plot-query -q "select cast(strftime('%s',date) as long) , my_metric from my_table;" -pp xlabel=bull title='my title'
"""
@controller.expose(aliases=['set-backend'])
def set_reporting_backend(self):
self.app.log.info("Setting reporting back-end")
if self.app.pargs.backend:
conf = my_split(self.app.pargs.backend)
set_backend(conf)
else:
self.app.log.error('No backend conf specified')
@controller.expose()
def show_backend(self):
self.app.log.info("Showing reporting back-end")
print backend
@controller.expose(help="store the required params", aliases=['r'])
def report(self):
experiment = self.app.pargs.experiment_name
if not experiment:
self.app.log.error("No experiment name provided. Please use the -e/--experiment-name parameter ")
exit()
metrics ={}
cli_metrics = my_split(self.app.pargs.metrics) # metrics from cmd args
# metrics stored into a file in the past
file_metrics = collect_future_metrics()
streaming_metrics = ganglia_metrics = {}
if self.app.pargs.collect_streaming_metrics:
# wait for and collect the streaming metrics if required
streaming_metrics = collect_streaming_metrics()
if self.app.pargs.collect_metrics:
# collect ganglia monitoring metrics if required
ganglia_metrics = collect_ganglia_metrics()
        # update the metrics variable so that common entries (if any) follow the
        # priority: 1) cli 2) future file 3) streaming 4) ganglia
metrics.update(ganglia_metrics)
metrics.update(streaming_metrics)
metrics.update(file_metrics)
metrics.update(cli_metrics)
# report the metrics to the backend
backend.report_dict(experiment, metrics)
@controller.expose(help="execute a query to the backend and prints the results")
def query(self):
if self.app.pargs.dict:
res = backend.dict_query(self.app.pargs.experiment_name, self.app.pargs.query)
pprint(res)
else:
res = backend.query(self.app.pargs.experiment_name, self.app.pargs.query)
for r in res:
print r
@controller.expose(help="store some metrics in a local file so that they can be reported later")
def future_report(self):
metrics = my_split(self.app.pargs.metrics)
store_future_metrics(metrics)
@controller.expose(help="execute a query to the backend and plot the results")
def plot_query(self):
pparams = self.app.pargs.plot_params
if pparams is not None: pparams = my_split(self.app.pargs.plot_params)
else: pparams = {}
backend.plot_query(self.app.pargs.experiment_name, self.app.pargs.query, **pparams)
class MyApp(foundation.CementApp):
class Meta:
label = 'reporter'
base_controller = MyAppBaseController
with MyApp() as app:
app.run() | apache-2.0 | -2,881,919,297,781,870,000 | 37.724138 | 144 | 0.633238 | false |
istb-mia/miapy | miapy/data/conversion.py | 1 | 6690 | """This module holds classes related to image conversion.
The main purpose of this module is the conversion between SimpleITK images and numpy arrays.
"""
import typing
import SimpleITK as sitk
import numpy as np
class ImageProperties:
"""Represents ITK image properties.
Holds common ITK image meta-data such as the size, origin, spacing, and direction.
See Also:
SimpleITK provides `itk::simple::Image::CopyInformation`_ to copy image information.
.. _itk::simple::Image::CopyInformation:
https://itk.org/SimpleITKDoxygen/html/classitk_1_1simple_1_1Image.html#afa8a4757400c414e809d1767ee616bd0
"""
def __init__(self, image: sitk.Image):
"""Initializes a new instance of the ImageProperties class.
Args:
image (sitk.Image): The image whose properties to hold.
"""
self.size = image.GetSize()
self.origin = image.GetOrigin()
self.spacing = image.GetSpacing()
self.direction = image.GetDirection()
self.dimensions = image.GetDimension()
self.number_of_components_per_pixel = image.GetNumberOfComponentsPerPixel()
self.pixel_id = image.GetPixelID()
def is_two_dimensional(self) -> bool:
"""Determines whether the image is two-dimensional.
Returns:
bool: True if the image is two-dimensional; otherwise, False.
"""
return self.dimensions == 2
def is_three_dimensional(self) -> bool:
"""Determines whether the image is three-dimensional.
Returns:
bool: True if the image is three-dimensional; otherwise, False.
"""
return self.dimensions == 3
def is_vector_image(self) -> bool:
"""Determines whether the image is a vector image.
Returns:
bool: True for vector images; False for scalar images.
"""
return self.number_of_components_per_pixel > 1
def __str__(self):
"""Gets a printable string representation.
Returns:
str: String representation.
"""
return 'ImageProperties:\n' \
' size: {self.size}\n' \
' origin: {self.origin}\n' \
' spacing: {self.spacing}\n' \
' direction: {self.direction}\n' \
' dimensions: {self.dimensions}\n' \
' number_of_components_per_pixel: {self.number_of_components_per_pixel}\n' \
' pixel_id: {self.pixel_id}\n' \
.format(self=self)
def __eq__(self, other):
"""Determines the equality of two ImageProperties classes.
        Notes:
The equality does not include the number_of_components_per_pixel and pixel_id.
Args:
other (object): An ImageProperties instance or any other object.
Returns:
bool: True if the ImageProperties are equal; otherwise, False.
"""
if isinstance(other, self.__class__):
return self.size == other.size and \
self.origin == other.origin and \
self.spacing == other.spacing and \
self.direction == other.direction and \
self.dimensions == other.dimensions
return NotImplemented
def __ne__(self, other):
"""Determines the non-equality of two ImageProperties classes.
        Notes:
The non-equality does not include the number_of_components_per_pixel and pixel_id.
Args:
other (object): An ImageProperties instance or any other object.
Returns:
bool: True if the ImageProperties are non-equal; otherwise, False.
"""
if isinstance(other, self.__class__):
return not self.__eq__(other)
return NotImplemented
def __hash__(self):
"""Gets the hash.
Returns:
int: The hash of the object.
"""
return hash(tuple(sorted(self.__dict__.items())))
class NumpySimpleITKImageBridge:
"""A numpy to SimpleITK bridge, which provides static methods to convert between numpy array and SimpleITK image."""
@staticmethod
def convert(array: np.ndarray, properties: ImageProperties) -> sitk.Image:
"""Converts a numpy array to a SimpleITK image.
Args:
array (np.ndarray): The image as numpy array. The shape can be either:
- shape=(n,), where n = total number of voxels
- shape=(n,v), where n = total number of voxels and v = number of components per pixel (vector image)
- shape=(<reversed image size>), what you get from sitk.GetArrayFromImage()
- shape=(<reversed image size>,v), what you get from sitk.GetArrayFromImage()
and v = number of components per pixel (vector image)
properties (ImageProperties): The image properties.
Returns:
sitk.Image: The SimpleITK image.
"""
is_vector = False
if not array.shape == properties.size[::-1]:
# we need to reshape the array
if array.ndim == 1:
array = array.reshape(properties.size[::-1])
elif array.ndim == 2:
is_vector = True
array = array.reshape((properties.size[::-1] + (array.shape[1],)))
elif array.ndim == len(properties.size) + 1:
is_vector = True
# no need to reshape
else:
raise ValueError('array shape {} not supported'.format(array.shape))
image = sitk.GetImageFromArray(array, is_vector)
image.SetOrigin(properties.origin)
image.SetSpacing(properties.spacing)
image.SetDirection(properties.direction)
return image
class SimpleITKNumpyImageBridge:
"""A SimpleITK to numpy bridge.
Converts SimpleITK images to numpy arrays. Use the ``NumpySimpleITKImageBridge`` to convert back.
"""
@staticmethod
def convert(image: sitk.Image) -> typing.Tuple[np.ndarray, ImageProperties]:
"""Converts an image to a numpy array and an ImageProperties class.
Args:
image (SimpleITK.Image): The image.
Returns:
A Tuple[np.ndarray, ImageProperties]: The image as numpy array and the image properties.
Raises:
ValueError: If `image` is `None`.
"""
if image is None:
raise ValueError('Parameter image can not be None')
return sitk.GetArrayFromImage(image), ImageProperties(image)
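# Illustrative round trip between the two bridges (the 16x16x8 image is an
# arbitrary example, not a requirement of the API):
#
#   image = sitk.Image(16, 16, 8, sitk.sitkFloat32)
#   array, props = SimpleITKNumpyImageBridge.convert(image)
#   flat = array.reshape(-1)          # e.g. after voxel-wise processing
#   restored = NumpySimpleITKImageBridge.convert(flat, props)
#   assert restored.GetSize() == props.size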
| apache-2.0 | -5,434,569,577,237,771,000 | 34.210526 | 120 | 0.588789 | false |
libvirt/libvirt-test-API | libvirttestapi/repos/storage/define_iscsi_pool.py | 1 | 1805 | # Copyright (C) 2010-2012 Red Hat, Inc.
# This work is licensed under the GNU GPLv2 or later.
# Define a storage pool of 'iscsi' type
from libvirt import libvirtError
from libvirttestapi.src import sharedmod
from libvirttestapi.repos.storage import storage_common
required_params = ('poolname', 'sourcehost', 'sourcepath',)
optional_params = {'targetpath': '/dev/disk/by-path',
'xml': 'xmls/iscsi_pool.xml',
}
def define_iscsi_pool(params):
"""
Defines a iscsi based storage pool from xml.
"""
logger = params['logger']
poolname = params['poolname']
xmlstr = params['xml']
conn = sharedmod.libvirtobj['conn']
if not storage_common.check_pool(conn, poolname, logger):
logger.error("%s storage pool is ALREADY defined" % poolname)
return 1
logger.debug("storage pool xml:\n%s" % xmlstr)
pool_num1 = conn.numOfDefinedStoragePools()
logger.info("original storage pool define number: %s" % pool_num1)
storage_common.display_pool_info(conn, logger)
try:
logger.info("define %s storage pool" % poolname)
conn.storagePoolDefineXML(xmlstr, 0)
pool_num2 = conn.numOfDefinedStoragePools()
logger.info("current storage pool define number: %s" % pool_num2)
storage_common.display_pool_info(conn, logger)
if storage_common.check_pool_define(poolname, logger) and pool_num2 > pool_num1:
logger.info("define %s storage pool is successful" % poolname)
else:
logger.error("%s storage pool is undefined" % poolname)
return 1
except libvirtError as e:
logger.error("API error message: %s, error code is %s"
% (e.get_error_message(), e.get_error_code()))
return 1
return 0
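# For reference, the rendered pool XML passed in params['xml'] is expected to
# look roughly like the sketch below (host, IQN and pool name are placeholders):
#
#   <pool type='iscsi'>
#     <name>iscsipool</name>
#     <source>
#       <host name='iscsi.example.com'/>
#       <device path='iqn.2017-01.com.example:storage'/>
#     </source>
#     <target>
#       <path>/dev/disk/by-path</path>
#     </target>
#   </pool>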
| gpl-2.0 | 7,912,506,781,527,067,000 | 34.392157 | 88 | 0.646537 | false |
rpm-software-management/rpmlint | test/test_polkit.py | 1 | 2216 | import os
import pytest
from rpmlint.checks.PolkitCheck import PolkitCheck
from rpmlint.filter import Filter
import Testing
from Testing import get_tested_package
def get_polkit_check(config_path):
from rpmlint.config import Config
if not os.path.isabs(config_path):
config_path = Testing.testpath() / 'configs' / config_path
config = Config([config_path])
config.info = True
output = Filter(config)
test = PolkitCheck(config, output)
return output, test
@pytest.fixture(scope='function', autouse=True)
def polkit_check():
return get_polkit_check(Testing.TEST_CONFIG[0])
@pytest.mark.parametrize('package', ['binary/testpolkitcheck'])
def test_check_actions_malformatted(tmpdir, package, polkit_check):
output, test = polkit_check
test.check(get_tested_package(package, tmpdir))
out = output.print_results(output.results)
assert 'testpolkitcheck.x86_64: E: polkit-xml-exception /usr/share/polkit-1/actions/malformatted.xml.policy raised an exception: mismatched tag: line 23, column 51' in out
@pytest.mark.parametrize('package', ['binary/testpolkitcheck'])
def test_check_actions_ghost_file(tmpdir, package, polkit_check):
output, test = polkit_check
test.check(get_tested_package(package, tmpdir))
out = output.print_results(output.results)
assert 'testpolkitcheck.x86_64: E: polkit-ghost-file /usr/share/polkit-1/actions/ghost.policy' in out
@pytest.mark.parametrize('package', ['binary/testpolkitcheck'])
def test_check_actions_missing_allow_type(tmpdir, package, polkit_check):
output, test = polkit_check
test.check(get_tested_package(package, tmpdir))
out = output.print_results(output.results)
assert 'testpolkitcheck.x86_64: E: polkit-untracked-privilege missing.allow.type (no:auth_admin_keep:auth_admin_keep)' in out
@pytest.mark.parametrize('package', ['binary/testpolkitcheck'])
def test_check_actions_auth_admin(tmpdir, package, polkit_check):
output, test = polkit_check
test.check(get_tested_package(package, tmpdir))
out = output.print_results(output.results)
assert 'testpolkitcheck.x86_64: E: polkit-untracked-privilege auth.admin.policy (auth_admin:no:auth_admin_keep)' in out
| gpl-2.0 | -8,447,820,223,201,931,000 | 37.877193 | 175 | 0.743682 | false |
spacecowboy/pysurvival-ann | setup.py | 1 | 2269 | #!/usr/bin/env python
"""
General instructions:
python setup.py build
python setup.py install
To include parts that depend on R's survival module, do:
python setup.py build --with-R
Info: This package depends on numpy, and optionally R, RInside
"""
from distutils.core import setup, Extension
import subprocess
import numpy
import sys
sources = ['src/PythonModule.cpp',
'src/ErrorFunctions.cpp',
'src/ErrorFunctionsGeneral.cpp',
'src/ErrorFunctionsSurvival.cpp',
'src/Statistics.cpp',
'src/RPropNetworkWrapper.cpp',
'src/RPropNetwork.cpp',
'src/drand.cpp',
'src/activationfunctions.cpp',
'src/c_index.cpp', 'src/CIndexWrapper.cpp',
'src/MatrixNetwork.cpp',
'src/MatrixNetworkWrapper.cpp',
'src/GeneticNetwork.cpp',
'src/GeneticFitness.cpp',
'src/GeneticSelection.cpp',
'src/GeneticMutation.cpp',
'src/GeneticCrossover.cpp',
'src/GeneticNetworkWrapper.cpp',
'src/ErrorFunctionsWrapper.cpp',
'src/WrapperHelpers.cpp',
'src/Random.cpp']
# Numpy stuff
numpy_include = numpy.get_include()
compileargs = []
libs = []
libdirs = []
linkargs = []
#if ("--help" in sys.argv or
if ("-h" in sys.argv or
len(sys.argv) == 1):
sys.exit(__doc__)
# Python setup
_ann = Extension('ann._ann',
sources = sources,
include_dirs = [numpy_include],
extra_compile_args = ['-std=c++0x',
'-Wall',
'-O3',
'-fopenmp'] + compileargs,
extra_link_args = ['-fopenmp'] + linkargs,
libraries=libs, library_dirs=libdirs)
setup(name = 'pysurvival-ann',
version = '0.9',
description = 'A C++ neural network package for survival data',
author = 'Jonas Kalderstam',
author_email = '[email protected]',
url = 'https://github.com/spacecowboy/pysurvival-ann',
packages = ['ann'],
package_dir = {'ann': 'ann'},
ext_modules = [_ann],
setup_requires = ['numpy'],
install_requires = ['numpy>=1.7.1']
)
| gpl-2.0 | 3,438,694,046,496,193,500 | 28.467532 | 69 | 0.557074 | false |
horazont/aioxmpp | tests/test_stanza.py | 1 | 36069 | ########################################################################
# File name: test_stanza.py
# This file is part of: aioxmpp
#
# LICENSE
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program. If not, see
# <http://www.gnu.org/licenses/>.
#
########################################################################
import contextlib
import enum
import io
import itertools
import unittest
import unittest.mock
import aioxmpp.xso as xso
import aioxmpp.stanza as stanza
import aioxmpp.structs as structs
import aioxmpp.errors as errors
import aioxmpp.xml
from aioxmpp.utils import namespaces
TEST_FROM = structs.JID.fromstr("[email protected]")
TEST_TO = structs.JID.fromstr("[email protected]")
@stanza.IQ.as_payload_class
class TestPayload(xso.XSO):
TAG = "foo", "bar"
def __repr__(self):
return "foobar"
class TestStanzaBase(unittest.TestCase):
class FakeStanza(stanza.StanzaBase, protect=False):
pass
def test_declare_ns(self):
self.assertDictEqual(
stanza.StanzaBase.DECLARE_NS,
{}
)
def test_from_attr(self):
self.assertIsInstance(
stanza.StanzaBase.from_,
xso.Attr)
self.assertEqual(
(None, "from"),
stanza.StanzaBase.from_.tag)
self.assertIsInstance(
stanza.StanzaBase.from_.type_,
xso.JID)
def test_to_attr(self):
self.assertIsInstance(
stanza.StanzaBase.to,
xso.Attr)
self.assertEqual(
(None, "to"),
stanza.StanzaBase.to.tag)
self.assertIsInstance(
stanza.StanzaBase.to.type_,
xso.JID)
def test_lang_attr(self):
self.assertIsInstance(
stanza.StanzaBase.lang,
xso.LangAttr)
def test_error_attr(self):
self.assertIsInstance(
stanza.StanzaBase.error,
xso.Child)
self.assertIs(stanza.StanzaBase.error.default, None)
def test_autoset_id_generates_random_str_on_unset(self):
s = self.FakeStanza()
s.autoset_id()
id1 = s.id_
self.assertTrue(id1.startswith(":"))
self.assertTrue(s.id_)
del s.id_
s.autoset_id()
self.assertTrue(s.id_)
self.assertNotEqual(id1, s.id_)
self.assertIsInstance(s.id_, str)
self.assertTrue(s.id_.startswith(":"))
# ensure that there are not too many A chars (i.e. zero bits)
self.assertLess(sum(1 for c in id1 if c == "A"), 5)
def test_autoset_id_generates_random_str_on_None(self):
s = self.FakeStanza()
s.id_ = None
s.autoset_id()
id1 = s.id_
self.assertTrue(id1.startswith(":"))
self.assertTrue(s.id_)
del s.id_
s.autoset_id()
self.assertTrue(s.id_)
self.assertNotEqual(id1, s.id_)
self.assertIsInstance(s.id_, str)
self.assertTrue(s.id_.startswith(":"))
# ensure that there are not too many A chars (i.e. zero bits)
self.assertLess(sum(1 for c in id1 if c == "A"), 5)
def test_autoset_id_does_not_override(self):
s = self.FakeStanza()
s.id_ = "foo"
s.autoset_id()
self.assertEqual("foo", s.id_)
def test_init(self):
id_ = "someid"
s = self.FakeStanza(
from_=TEST_FROM,
to=TEST_TO,
id_=id_)
self.assertEqual(
TEST_FROM,
s.from_)
self.assertEqual(
TEST_TO,
s.to)
self.assertEqual(
id_,
s.id_)
def test_xso_error_handler_raises_StanzaError(self):
s = stanza.StanzaBase()
with self.assertRaisesRegex(
stanza.StanzaError,
"failed to parse stanza") as ctx:
s.xso_error_handler(
unittest.mock.sentinel.descriptor,
unittest.mock.sentinel.ev_args,
unittest.mock.sentinel.exc_info,
)
self.assertIs(
ctx.exception.ev_args,
unittest.mock.sentinel.ev_args,
)
self.assertIs(
ctx.exception.descriptor,
unittest.mock.sentinel.descriptor,
)
self.assertIs(
ctx.exception.partial_obj,
s
)
class TestBody(unittest.TestCase):
def test_tag(self):
self.assertEqual(
(namespaces.client, "body"),
stanza.Body.TAG)
def test_lang_attr(self):
self.assertIsInstance(
stanza.Body.lang,
xso.LangAttr)
def test_text_attr(self):
self.assertIsInstance(
stanza.Body.text,
xso.Text)
class TestSubject(unittest.TestCase):
def test_tag(self):
self.assertEqual(
(namespaces.client, "subject"),
stanza.Subject.TAG)
def test_lang_attr(self):
self.assertIsInstance(
stanza.Subject.lang,
xso.LangAttr)
def test_text_attr(self):
self.assertIsInstance(
stanza.Subject.text,
xso.Text)
class TestMessage(unittest.TestCase):
def test_inheritance(self):
self.assertTrue(issubclass(
stanza.Message,
stanza.StanzaBase))
def test_unknown_child_policy(self):
self.assertEqual(
stanza.Message.UNKNOWN_CHILD_POLICY,
xso.UnknownChildPolicy.DROP
)
def test_id_attr(self):
self.assertIsInstance(
stanza.Message.id_,
xso.Attr)
self.assertEqual(
(None, "id"),
stanza.Message.id_.tag)
self.assertIs(stanza.Message.id_.default, None)
def test_tag(self):
self.assertEqual(
("jabber:client", "message"),
stanza.Message.TAG)
def test_type_attr(self):
self.assertIsInstance(
stanza.Message.type_,
xso.Attr
)
self.assertEqual(
(None, "type"),
stanza.Message.type_.tag
)
self.assertIsInstance(
stanza.Message.type_.type_,
xso.EnumCDataType,
)
self.assertIs(
stanza.Message.type_.type_.enum_class,
structs.MessageType,
)
self.assertEqual(
stanza.Message.type_.default,
structs.MessageType.NORMAL,
)
def test_body_attr(self):
self.assertIsInstance(
stanza.Message.body,
xso.ChildTextMap)
self.assertSetEqual(
{stanza.Body},
set(stanza.Message.body._classes)
)
def test_subject_attr(self):
self.assertIsInstance(
stanza.Message.subject,
xso.ChildTextMap)
self.assertSetEqual(
{stanza.Subject},
set(stanza.Message.subject._classes)
)
def test_thread_attr(self):
self.assertIsInstance(
stanza.Message.thread,
xso.Child)
self.assertSetEqual(
{stanza.Thread},
set(stanza.Message.thread._classes)
)
def test_init(self):
s = stanza.Message(
from_=TEST_FROM,
to=TEST_TO,
id_="someid",
type_=structs.MessageType.GROUPCHAT,
)
self.assertEqual(
TEST_FROM,
s.from_
)
self.assertEqual(
structs.MessageType.GROUPCHAT,
s.type_
)
def test_reject_init_without_type(self):
with self.assertRaisesRegex(TypeError, "type_"):
stanza.Message()
def test_make_reply(self):
s = stanza.Message(
from_=TEST_FROM,
to=TEST_TO,
id_="someid",
type_=structs.MessageType.GROUPCHAT,
)
r = s.make_reply()
self.assertEqual(
r.type_,
s.type_)
self.assertEqual(
TEST_FROM,
r.to)
self.assertEqual(
TEST_TO,
r.from_)
self.assertIsNone(r.id_)
def test_make_error(self):
e = stanza.Error(
condition=errors.ErrorCondition.FEATURE_NOT_IMPLEMENTED
)
s = stanza.Message(
from_=TEST_FROM,
to=TEST_TO,
id_="someid",
type_=structs.MessageType.GROUPCHAT
)
r = s.make_error(e)
self.assertIsInstance(r, stanza.Message)
self.assertEqual(
r.type_,
structs.MessageType.ERROR)
self.assertEqual(
TEST_FROM,
r.to)
self.assertEqual(
TEST_TO,
r.from_)
self.assertEqual(
s.id_,
r.id_)
def test_repr(self):
s = stanza.Message(from_=TEST_FROM,
to=TEST_TO,
id_="someid",
type_=structs.MessageType.GROUPCHAT)
self.assertEqual(
"<message from='[email protected]' to='[email protected]'"
" id='someid' type=<MessageType.GROUPCHAT: 'groupchat'>>",
repr(s)
)
def test_repr_works_with_mostly_uninitialised_attributes(self):
s = stanza.Message.__new__(stanza.Message)
self.assertEqual(
repr(s),
"<message from=None to=None id=None type=<MessageType.NORMAL: 'normal'>>"
)
def test_repr_works_with_incomplete_attributes(self):
s = stanza.Message.__new__(stanza.Message)
stanza.Message.from_.mark_incomplete(s)
stanza.Message.to.mark_incomplete(s)
stanza.Message.type_.mark_incomplete(s)
stanza.Message.id_.mark_incomplete(s)
self.assertEqual(
repr(s),
"<message from=<incomplete> to=<incomplete> "
"id=<incomplete> type=<incomplete>>"
)
def test_random_type_is_equal_to_normal(self):
buf = io.BytesIO(b"<message xmlns='jabber:client' type='fnord'/>")
s = aioxmpp.xml.read_single_xso(buf, stanza.Message)
self.assertIs(s.type_, structs.MessageType.NORMAL)
def test_absent_type_is_normal(self):
buf = io.BytesIO(b"<message xmlns='jabber:client'/>")
s = aioxmpp.xml.read_single_xso(buf, stanza.Message)
self.assertIs(s.type_, structs.MessageType.NORMAL)
class TestStatus(unittest.TestCase):
def test_tag(self):
self.assertEqual(
(namespaces.client, "status"),
stanza.Status.TAG)
def test_lang_attr(self):
self.assertIsInstance(
stanza.Status.lang,
xso.LangAttr)
def test_text_attr(self):
self.assertIsInstance(
stanza.Status.text,
xso.Text)
class TestPresence(unittest.TestCase):
def test_inheritance(self):
self.assertIsInstance(
stanza.Presence(),
stanza.StanzaBase)
def test_id_attr(self):
self.assertIsInstance(
stanza.Presence.id_,
xso.Attr)
self.assertEqual(
(None, "id"),
stanza.Presence.id_.tag)
self.assertIs(stanza.Presence.id_.default, None)
def test_tag(self):
self.assertEqual(
("jabber:client", "presence"),
stanza.Presence.TAG)
def test_type_attr(self):
self.assertIsInstance(
stanza.Presence.type_,
xso.Attr,
)
self.assertEqual(
(None, "type"),
stanza.Presence.type_.tag,
)
self.assertIsInstance(
stanza.Presence.type_.type_,
xso.EnumCDataType,
)
self.assertIs(
stanza.Presence.type_.type_.enum_class,
structs.PresenceType,
)
self.assertIs(
stanza.Presence.type_.default,
structs.PresenceType.AVAILABLE,
)
def test_show_attr(self):
self.assertIsInstance(
stanza.Presence.show,
xso.ChildText,
)
self.assertEqual(
(namespaces.client, "show"),
stanza.Presence.show.tag,
)
self.assertIsInstance(
stanza.Presence.show.type_,
xso.EnumCDataType,
)
self.assertIs(
stanza.Presence.show.type_.enum_class,
structs.PresenceShow,
)
self.assertIs(
stanza.Presence.show.default,
structs.PresenceShow.NONE,
)
def test_status_attr(self):
self.assertIsInstance(
stanza.Presence.status,
xso.ChildTextMap)
self.assertSetEqual(
{stanza.Status},
set(stanza.Presence.status._classes)
)
def test_priority_attr(self):
self.assertIsInstance(
stanza.Presence.priority,
xso.ChildText)
self.assertEqual(
(namespaces.client, "priority"),
stanza.Presence.priority.tag
)
self.assertIsInstance(
stanza.Presence.priority.type_,
xso.Integer
)
self.assertEqual(
0,
stanza.Presence.priority.default
)
def test_error_attr(self):
self.assertIsInstance(
stanza.Presence.error,
xso.Child)
def test_init(self):
s = stanza.Presence(
from_=TEST_FROM,
type_=structs.PresenceType.PROBE,
show=structs.PresenceShow.AWAY,
)
self.assertEqual(
TEST_FROM,
s.from_
)
self.assertEqual(
structs.PresenceType.PROBE,
s.type_
)
self.assertEqual(
structs.PresenceShow.AWAY,
s.show,
)
def test_init_compat(self):
s = stanza.Presence(
from_=TEST_FROM,
type_=structs.PresenceType.PROBE,
show="xa",
)
self.assertEqual(
TEST_FROM,
s.from_
)
self.assertEqual(
structs.PresenceType.PROBE,
s.type_
)
self.assertEqual(
structs.PresenceShow.XA,
s.show,
)
def test_default(self):
s = stanza.Presence()
self.assertEqual(
s.type_,
structs.PresenceType.AVAILABLE,
)
self.assertEqual(s.show, structs.PresenceShow.NONE)
def test_make_error(self):
e = stanza.Error(
condition=errors.ErrorCondition.GONE
)
s = stanza.Presence(
from_=TEST_FROM,
to=TEST_TO,
id_="someid",
type_=structs.PresenceType.UNAVAILABLE,
)
r = s.make_error(e)
self.assertIsInstance(r, stanza.Presence)
self.assertEqual(
r.type_,
structs.PresenceType.ERROR
)
self.assertEqual(
TEST_FROM,
r.to
)
self.assertEqual(
TEST_TO,
r.from_
)
self.assertEqual(
s.id_,
r.id_
)
def test_repr(self):
s = stanza.Presence(
from_=TEST_FROM,
to=TEST_TO,
id_="someid",
type_=structs.PresenceType.PROBE,
)
self.assertEqual(
"<presence from='[email protected]' to='[email protected]'"
" id='someid' type=<PresenceType.PROBE: 'probe'>>",
repr(s)
)
s = stanza.Presence(
from_=TEST_FROM,
to=TEST_TO,
id_="someid",
type_=structs.PresenceType.AVAILABLE
)
self.assertEqual(
"<presence from='[email protected]' to='[email protected]'"
" id='someid' type=<PresenceType.AVAILABLE: None>>",
repr(s)
)
def test_collector(self):
self.assertIsInstance(
stanza.Presence.unhandled_children,
xso.Collector
)
def test_repr_works_with_mostly_uninitialised_attributes(self):
s = stanza.Presence.__new__(stanza.Presence)
self.assertEqual(
repr(s),
"<presence from=None to=None id=None type=<PresenceType.AVAILABLE: None>>"
)
def test_repr_works_with_incomplete_attributes(self):
s = stanza.Presence.__new__(stanza.Presence)
stanza.Presence.from_.mark_incomplete(s)
stanza.Presence.to.mark_incomplete(s)
stanza.Presence.id_.mark_incomplete(s)
stanza.Presence.type_.mark_incomplete(s)
self.assertEqual(
repr(s),
"<presence from=<incomplete> to=<incomplete> "
"id=<incomplete> type=<incomplete>>"
)
def test_empty_show_is_equivalent_to_no_show(self):
buf = io.BytesIO(b"<presence xmlns='jabber:client'><show/></presence>")
s = aioxmpp.xml.read_single_xso(buf, stanza.Presence)
self.assertIs(s.show, structs.PresenceShow.NONE)
def test_absent_show(self):
buf = io.BytesIO(b"<presence xmlns='jabber:client'/>")
s = aioxmpp.xml.read_single_xso(buf, stanza.Presence)
self.assertIs(s.show, structs.PresenceShow.NONE)
class TestError(unittest.TestCase):
def test_declare_ns(self):
self.assertDictEqual(
stanza.Error.DECLARE_NS,
{}
)
def test_tag(self):
self.assertEqual(
("jabber:client", "error"),
stanza.Error.TAG)
def test_unknown_child_policy(self):
self.assertIs(
stanza.Error.UNKNOWN_CHILD_POLICY,
xso.UnknownChildPolicy.DROP
)
def test_unknown_attr_policy(self):
self.assertIs(
stanza.Error.UNKNOWN_ATTR_POLICY,
xso.UnknownAttrPolicy.DROP
)
def test_type_attr(self):
self.assertIsInstance(
stanza.Error.type_,
xso.Attr,
)
self.assertEqual(
(None, "type"),
stanza.Error.type_.tag,
)
self.assertIsInstance(
stanza.Error.type_.type_,
xso.EnumCDataType,
)
self.assertIs(
stanza.Error.type_.type_.enum_class,
structs.ErrorType,
)
def test_condition_obj_attr(self):
self.assertIsInstance(
stanza.Error.condition_obj,
xso.Child,
)
self.assertCountEqual(
[
member.xso_class
for member in errors.ErrorCondition
],
stanza.Error.condition_obj._classes,
)
self.assertTrue(stanza.Error.condition_obj.required)
def test_initialises_with_undefined_condition(self):
e = stanza.Error()
self.assertIsInstance(
e.condition_obj,
errors.ErrorCondition.UNDEFINED_CONDITION.xso_class,
)
@unittest.skipIf(aioxmpp.version_info >= (1, 0, 0),
"does not apply to this version of aioxmpp")
def test_init_works_with_tuple(self):
with self.assertWarnsRegex(
DeprecationWarning,
r"as of aioxmpp 1\.0, error conditions must be members of the "
r"aioxmpp\.ErrorCondition enumeration") as ctx:
e = stanza.Error(
errors.ErrorCondition.REMOTE_SERVER_NOT_FOUND.value
)
self.assertEqual(
e.condition,
errors.ErrorCondition.REMOTE_SERVER_NOT_FOUND,
)
self.assertTrue(ctx.filename.endswith("test_stanza.py"))
def test_init_works_with_xso(self):
condition_obj = errors.ErrorCondition.GONE.to_xso()
condition_obj.new_address = "foo"
e = stanza.Error(
condition_obj
)
self.assertIs(e.condition_obj, condition_obj)
def test_condition_reflects_enum_member_of_object_after_init(self):
e = stanza.Error()
self.assertEqual(
errors.ErrorCondition.UNDEFINED_CONDITION,
e.condition,
)
def test_condition_reflects_enum_member_of_object_after_change(self):
e = stanza.Error()
e.condition_obj = errors.ErrorCondition.BAD_REQUEST.xso_class()
self.assertEqual(
errors.ErrorCondition.BAD_REQUEST,
e.condition,
)
def test_setting_condition_replaces_object(self):
e = stanza.Error()
e.condition = errors.ErrorCondition.UNDEFINED_CONDITION
self.assertEqual(
e.condition,
errors.ErrorCondition.UNDEFINED_CONDITION
)
self.assertIsInstance(
e.condition_obj,
errors.ErrorCondition.UNDEFINED_CONDITION.xso_class,
)
def test_setting_condition_keeps_object_if_condition_matches(self):
e = stanza.Error()
old = e.condition_obj
e.condition = errors.ErrorCondition.UNDEFINED_CONDITION
self.assertIs(e.condition_obj, old)
@unittest.skipIf(aioxmpp.version_info >= (1, 0, 0),
"does not apply to this version of aioxmpp")
def test_accepts_tuple_instead_of_enum_for_condition_and_warns(self):
e = stanza.Error()
with self.assertWarnsRegex(
DeprecationWarning,
r"as of aioxmpp 1\.0, error conditions must be members of the "
r"aioxmpp\.ErrorCondition enumeration") as ctx:
e.condition = errors.ErrorCondition.BAD_REQUEST.value
self.assertEqual(
errors.ErrorCondition.BAD_REQUEST,
e.condition,
)
self.assertIsInstance(
e.condition_obj,
errors.ErrorCondition.BAD_REQUEST.xso_class,
)
self.assertTrue(ctx.filename.endswith("test_stanza.py"))
def test_rejects_xso_for_condition(self):
e = stanza.Error()
with self.assertRaises(ValueError):
e.condition = errors.ErrorCondition.BAD_REQUEST.to_xso()
def test_application_condition_attr(self):
self.assertIsInstance(
stanza.Error.application_condition,
xso.Child)
self.assertFalse(stanza.Error.application_condition.required)
def test_from_exception(self):
exc = errors.XMPPWaitError(
condition=errors.ErrorCondition.ITEM_NOT_FOUND,
text="foobar"
)
obj = stanza.Error.from_exception(exc)
self.assertEqual(
structs.ErrorType.WAIT,
obj.type_
)
self.assertEqual(
errors.ErrorCondition.ITEM_NOT_FOUND,
obj.condition
)
self.assertEqual(
"foobar",
obj.text
)
def test_to_exception(self):
types = {
structs.ErrorType.MODIFY: errors.XMPPModifyError,
structs.ErrorType.CANCEL: errors.XMPPCancelError,
structs.ErrorType.AUTH: errors.XMPPAuthError,
structs.ErrorType.WAIT: errors.XMPPWaitError,
structs.ErrorType.CONTINUE: errors.XMPPContinueError,
}
conditions = [
errors.ErrorCondition.BAD_REQUEST,
errors.ErrorCondition.UNDEFINED_CONDITION,
]
texts = [
"foo",
"bar",
None,
]
for (type_name, cls), condition, text in itertools.product(
types.items(),
conditions,
texts):
obj = stanza.Error(
type_=type_name,
condition=condition,
text=text
)
exc = obj.to_exception()
self.assertIsInstance(
exc,
cls
)
self.assertEqual(
condition,
exc.condition
)
self.assertIs(
exc.condition_obj,
obj.condition_obj,
)
self.assertEqual(
text,
exc.text
)
def test_to_exception_with_application_condition(self):
cond = unittest.mock.Mock(["to_exception"])
obj = stanza.Error(
type_=structs.ErrorType.CONTINUE,
condition=errors.ErrorCondition.UNDEFINED_CONDITION
)
obj.application_condition = cond
cond.to_exception.return_value = Exception()
result = obj.to_exception()
self.assertSequenceEqual(
cond.mock_calls,
[
unittest.mock.call.to_exception(obj.type_)
]
)
self.assertEqual(result, cond.to_exception())
def test_to_exception_with_application_condition_only_if_cond_supports(self):
cond = unittest.mock.Mock(["TAG"])
cond.TAG = ("foo", "bar")
obj = stanza.Error(
type_=structs.ErrorType.CONTINUE,
condition=errors.ErrorCondition.UNDEFINED_CONDITION
)
obj.application_condition = cond
result = obj.to_exception()
self.assertIsInstance(
result,
errors.XMPPContinueError
)
self.assertEqual(
result.application_defined_condition,
obj.application_condition,
)
self.assertSequenceEqual(
cond.mock_calls,
[
]
)
def test_override_with_default_exception_if_result_of_app_cond_is_no_exception(self):
cond = unittest.mock.Mock(["to_exception", "TAG"])
cond.TAG = ("foo", "bar")
obj = stanza.Error(
type_=structs.ErrorType.CONTINUE,
condition=errors.ErrorCondition.UNDEFINED_CONDITION
)
obj.application_condition = cond
cond.to_exception.return_value = object()
result = obj.to_exception()
self.assertIsInstance(
result,
errors.XMPPContinueError
)
self.assertSequenceEqual(
cond.mock_calls,
[
unittest.mock.call.to_exception(obj.type_)
]
)
def test_from_exception_with_application_condition(self):
@stanza.Error.as_application_condition
class Foo(xso.XSO):
TAG = ("uri:foo", "test_from_exception_with_application_condition")
obj = Foo()
exc = errors.XMPPAuthError(
errors.ErrorCondition.NOT_AUTHORIZED,
application_defined_condition=obj
)
err = stanza.Error.from_exception(exc)
self.assertIs(
err.application_condition,
obj,
)
def test_repr(self):
obj = stanza.Error()
self.assertEqual(
"<undefined-condition type=<ErrorType.CANCEL: 'cancel'>>",
repr(obj)
)
obj = stanza.Error(
type_=structs.ErrorType.MODIFY,
condition=errors.ErrorCondition.BAD_REQUEST,
text="foobar"
)
self.assertEqual(
"<bad-request type=<ErrorType.MODIFY: 'modify'> text='foobar'>",
repr(obj)
)
def test_as_application_condition(self):
@stanza.Error.as_application_condition
class Foo(xso.XSO):
TAG = ("uri:foo", "test_as_payload_class")
self.assertIn(Foo.TAG, stanza.Error.CHILD_MAP)
self.assertIs(
stanza.Error.CHILD_MAP[Foo.TAG],
stanza.Error.application_condition.xq_descriptor
)
class TestIQ(unittest.TestCase):
def test_inheritance(self):
self.assertTrue(issubclass(
stanza.IQ,
stanza.StanzaBase))
def test_unknown_child_policy(self):
self.assertEqual(
stanza.IQ.UNKNOWN_CHILD_POLICY,
xso.UnknownChildPolicy.FAIL
)
def test_id_attr(self):
self.assertIsInstance(
stanza.IQ.id_,
xso.Attr)
self.assertEqual(
(None, "id"),
stanza.IQ.id_.tag)
def test_tag(self):
self.assertEqual(
("jabber:client", "iq"),
stanza.IQ.TAG)
def test_type_attr(self):
self.assertIsInstance(
stanza.IQ.type_,
xso.Attr
)
self.assertEqual(
(None, "type"),
stanza.IQ.type_.tag
)
self.assertIsInstance(
stanza.IQ.type_.type_,
xso.EnumCDataType
)
self.assertIs(
stanza.IQ.type_.type_.enum_class,
structs.IQType,
)
def test_error(self):
self.assertIsInstance(
stanza.IQ.error,
xso.Child)
def test_payload(self):
self.assertIsInstance(
stanza.IQ.payload,
xso.Child)
self.assertIsNone(stanza.IQ.payload.default)
def test_reject_init_without_type(self):
with self.assertRaisesRegex(TypeError, "type_"):
stanza.IQ()
def test_init(self):
payload = TestPayload()
s = stanza.IQ(
from_=TEST_FROM,
type_=structs.IQType.RESULT,
payload=payload)
self.assertEqual(
TEST_FROM,
s.from_)
self.assertEqual(
structs.IQType.RESULT,
s.type_)
self.assertIs(
payload,
s.payload)
def test_init_error(self):
error = object()
s = stanza.IQ(
from_=TEST_FROM,
type_=structs.IQType.ERROR,
error=error)
self.assertEqual(
structs.IQType.ERROR,
s.type_)
self.assertIs(
error,
s.error)
def test_make_reply(self):
s = stanza.IQ(
from_=TEST_FROM,
to=TEST_TO,
id_="someid",
type_=structs.IQType.GET)
r1 = s.make_reply(structs.IQType.ERROR)
self.assertEqual(
s.from_,
r1.to)
self.assertEqual(
s.to,
r1.from_)
self.assertEqual(
s.id_,
r1.id_)
self.assertEqual(
structs.IQType.ERROR,
r1.type_)
def test_make_reply_enforces_request(self):
s = stanza.IQ(
from_=TEST_FROM,
to=TEST_TO,
id_="someid",
type_=structs.IQType.ERROR)
with self.assertRaisesRegex(
ValueError,
r"make_reply requires request IQ"):
s.make_reply(unittest.mock.sentinel.type_)
s.type_ = structs.IQType.RESULT
with self.assertRaisesRegex(
ValueError,
r"make_reply requires request IQ"):
s.make_reply(unittest.mock.sentinel.type_)
def test_make_error(self):
e = stanza.Error(
condition=errors.ErrorCondition.BAD_REQUEST
)
s = stanza.IQ(from_=TEST_FROM,
to=TEST_TO,
id_="someid",
type_=structs.IQType.GET)
r = s.make_error(e)
self.assertIsInstance(r, stanza.IQ)
self.assertEqual(
r.type_,
structs.IQType.ERROR)
self.assertEqual(
TEST_FROM,
r.to)
self.assertEqual(
TEST_TO,
r.from_)
self.assertEqual(
s.id_,
r.id_)
def test_repr(self):
s = stanza.IQ(
from_=TEST_FROM,
to=TEST_TO,
id_="someid",
type_=structs.IQType.ERROR)
s.error = stanza.Error()
self.assertEqual(
"<iq from='[email protected]' to='[email protected]'"
" id='someid' type=<IQType.ERROR: 'error'>"
" error=<undefined-condition type=<ErrorType.CANCEL: 'cancel'>>>",
repr(s)
)
s = stanza.IQ(
from_=TEST_FROM,
to=TEST_TO,
id_="someid",
type_=structs.IQType.RESULT)
s.payload = TestPayload()
self.assertEqual(
"<iq from='[email protected]' to='[email protected]'"
" id='someid' type=<IQType.RESULT: 'result'>"
" data=foobar>",
repr(s)
)
s = stanza.IQ(
from_=TEST_FROM,
to=TEST_TO,
id_="someid",
type_=structs.IQType.RESULT)
self.assertEqual(
"<iq from='[email protected]' to='[email protected]'"
" id='someid' type=<IQType.RESULT: 'result'>>",
repr(s)
)
def test_repr_works_with_mostly_uninitialised_attributes(self):
s = stanza.IQ.__new__(stanza.IQ)
self.assertEqual(
repr(s),
"<iq from=None to=None id=<unset> type=<unset> "
"error=None data=None>"
)
def test_repr_works_with_incomplete_attributes(self):
s = stanza.IQ.__new__(stanza.IQ)
stanza.IQ.from_.mark_incomplete(s)
stanza.IQ.to.mark_incomplete(s)
stanza.IQ.type_.mark_incomplete(s)
stanza.IQ.id_.mark_incomplete(s)
self.assertEqual(
repr(s),
"<iq from=<incomplete> to=<incomplete> id=<incomplete> type=<incomplete> "
"error=None data=None>"
)
def test__validate_requires_id(self):
iq = stanza.IQ(structs.IQType.GET)
with self.assertRaisesRegex(
ValueError,
"IQ requires ID"):
iq._validate()
def test_as_payload_class(self):
@stanza.IQ.as_payload_class
class Foo(xso.XSO):
TAG = ("uri:foo", "test_as_payload_class")
self.assertIn(Foo.TAG, stanza.IQ.CHILD_MAP)
self.assertIs(
stanza.IQ.CHILD_MAP[Foo.TAG],
stanza.IQ.payload.xq_descriptor
)
def test__validate_rejects_error_without_error(self):
iq = stanza.IQ(structs.IQType.ERROR)
iq.autoset_id()
with self.assertRaisesRegex(
ValueError,
r"IQ with type='error' requires error payload"):
iq._validate()
def test_validate_wraps_exceptions_from__validate(self):
class FooException(Exception):
pass
iq = stanza.IQ(structs.IQType.GET)
with self.assertRaisesRegex(
stanza.StanzaError,
r"invalid IQ stanza"):
iq.validate()
class Testmake_application_error(unittest.TestCase):
def setUp(self):
self._stack_ctx = contextlib.ExitStack()
self._stack = self._stack_ctx.__enter__()
self._as_application_condition = self._stack.enter_context(
unittest.mock.patch.object(stanza.Error,
"as_application_condition")
)
def test_creates_xso_class(self):
Cls = stanza.make_application_error(
"TestError",
("uri:foo", "bar"),
)
self.assertTrue(issubclass(Cls, xso.XSO))
self.assertIsInstance(Cls, xso.model.XMLStreamClass)
self.assertEqual(Cls.TAG, ("uri:foo", "bar"))
self.assertEqual(Cls.__name__, "TestError")
def test_registers_class(self):
Cls = stanza.make_application_error(
"TestError",
("uri:foo", "bar"),
)
self.assertSequenceEqual(
self._as_application_condition.mock_calls,
[
unittest.mock.call(Cls)
]
)
def tearDown(self):
self._stack_ctx.__exit__(None, None, None)
del self._stack
del self._stack_ctx
| lgpl-3.0 | -8,619,957,672,837,083,000 | 27.400787 | 89 | 0.539189 | false |
treyhunner/django-relatives | docs/conf.py | 1 | 8386 | # -*- coding: utf-8 -*-
#
# django-relatives documentation build configuration file, created by
# sphinx-quickstart on Thu May 16 15:28:58 2013.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import re
import os
import sys
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('..'))
import runtests
import django
django.setup()
project_directory = os.path.join(os.path.basename(__file__), '..')
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'django-relatives'
copyright = u'2013, Trey Hunner'
parent_dir = os.path.dirname(os.path.dirname(__file__))
def get_version():
with open(os.path.join(parent_dir, 'relatives', '__init__.py')) as f:
return re.search(r'''__version__.*([\d.]+)''', f.read()).group(1)
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = get_version()
# The full version, including alpha/beta/rc tags.
release = version
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'django-relativesdoc'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'django-relatives.tex', u'django-relatives Documentation',
u'Trey Hunner', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'django-relatives', u'django-relatives Documentation',
[u'Trey Hunner'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'django-relatives', u'django-relatives Documentation',
u'Trey Hunner', 'django-relatives', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
| mit | 4,974,527,772,359,610,000 | 31.130268 | 80 | 0.704508 | false |
patcamwol/frontendXInterfaces | libsrc/python/input_ports.py | 1 | 7409 | #
# This file is protected by Copyright. Please refer to the COPYRIGHT file
# distributed with this source distribution.
#
# This file is part of REDHAWK frontendInterfaces.
#
# REDHAWK frontendInterfaces is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# REDHAWK frontendInterfaces is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see http://www.gnu.org/licenses/.
#
import threading
from redhawk.frontendxInterfaces import FRONTENDX__POA
from redhawk.frontendxInterfaces import FRONTENDX
from redhawk.frontendInterfaces import FRONTEND
import copy
'''provides port(s)'''
class audio_delegation(object):
    def getAudioType(self, id):
        raise FRONTEND.NotSupportedException("getAudioType not supported")
    def getAudioDeviceControl(self, id):
        raise FRONTEND.NotSupportedException("getAudioDeviceControl not supported")
    def getFullBandwidthChannels(self, id):
        raise FRONTEND.NotSupportedException("getFullBandwidthChannels not supported")
    def getLowFrequencyEffectChannels(self, id):
        raise FRONTEND.NotSupportedException("getLowFrequencyEffectChannels not supported")
    def setAudioEnable(self, id, enable):
        raise FRONTEND.NotSupportedException("setAudioEnable not supported")
    def getAudioEnable(self, id):
        raise FRONTEND.NotSupportedException("getAudioEnable not supported")
    def setAudioOutputSampleRate(self, id, sr):
        raise FRONTEND.NotSupportedException("setAudioOutputSampleRate not supported")
    def getAudioOutputSampleRate(self, id):
        raise FRONTEND.NotSupportedException("getAudioOutputSampleRate not supported")
    def getAudioStatus(self, id):
        raise FRONTEND.NotSupportedException("getAudioStatus not supported")
class InFrontendAudioPort(FRONTENDX__POA.FrontendAudio):
def __init__(self, name, parent=audio_delegation()):
self.name = name
self.port_lock = threading.Lock()
self.parent = parent
def getAudioType(self, id):
self.port_lock.acquire()
try:
return self.parent.getAudioType(id)
finally:
self.port_lock.release()
def getAudioDeviceControl(self, id):
self.port_lock.acquire()
try:
return self.parent.getAudioDeviceControl(id)
finally:
self.port_lock.release()
def getFullBandwidthChannels(self, id):
self.port_lock.acquire()
try:
return self.parent.getFullBandwidthChannels(id)
finally:
self.port_lock.release()
def getLowFrequencyEffectChannels(self, id):
self.port_lock.acquire()
try:
return self.parent.getLowFrequencyEffectChannels(id)
finally:
self.port_lock.release()
def setAudioEnable(self, id, enable):
self.port_lock.acquire()
try:
return self.parent.setAudioEnable(id,enable)
finally:
self.port_lock.release()
def getAudioEnable(self, id):
self.port_lock.acquire()
try:
return self.parent.getAudioEnable(id)
finally:
self.port_lock.release()
def setAudioOutputSampleRate(self, id, sr):
self.port_lock.acquire()
try:
return self.parent.setAudioOutputSampleRate(id,sr)
finally:
self.port_lock.release()
def getAudioOutputSampleRate(self, id):
self.port_lock.acquire()
try:
return self.parent.getAudioOutputSampleRate(id)
finally:
self.port_lock.release()
def getAudioStatus(self, id):
self.port_lock.acquire()
try:
return self.parent.getAudioStatus(id)
finally:
self.port_lock.release()
class video_delegation(object):
    def getVideoType(self, id):
        raise FRONTEND.NotSupportedException("getVideoType not supported")
    def getVideoDeviceControl(self, id):
        raise FRONTEND.NotSupportedException("getVideoDeviceControl not supported")
    def getChannels(self, id):
        raise FRONTEND.NotSupportedException("getChannels not supported")
    def getFrameHeight(self, id):
        raise FRONTEND.NotSupportedException("getFrameHeight not supported")
    def getFrameWidth(self, id):
        raise FRONTEND.NotSupportedException("getFrameWidth not supported")
    def setVideoEnable(self, id, enable):
        raise FRONTEND.NotSupportedException("setVideoEnable not supported")
    def getVideoEnable(self, id):
        raise FRONTEND.NotSupportedException("getVideoEnable not supported")
    def setVideoOutputFrameRate(self, id, fr):
        raise FRONTEND.NotSupportedException("setVideoOutputFrameRate not supported")
    def getVideoOutputFrameRate(self, id):
        raise FRONTEND.NotSupportedException("getVideoOutputFrameRate not supported")
    def getVideoStatus(self, id):
        raise FRONTEND.NotSupportedException("getVideoStatus not supported")
class InFrontendVideoPort(FRONTENDX__POA.FrontendVideo):
    def __init__(self, name, parent=video_delegation()):
self.name = name
self.port_lock = threading.Lock()
self.parent = parent
def getVideoType(self, id):
self.port_lock.acquire()
try:
return self.parent.getVideoType(id)
finally:
self.port_lock.release()
def getVideoDeviceControl(self, id):
self.port_lock.acquire()
try:
return self.parent.getVideoDeviceControl(id)
finally:
self.port_lock.release()
def getChannels(self, id):
self.port_lock.acquire()
try:
return self.parent.getChannels(id)
finally:
self.port_lock.release()
def getFrameHeight(self, id):
self.port_lock.acquire()
try:
return self.parent.getFrameHeight(id)
finally:
self.port_lock.release()
def getFrameWidth(self, id):
self.port_lock.acquire()
try:
return self.parent.getFrameWidth(id)
finally:
self.port_lock.release()
def setVideoEnable(self, id, enable):
self.port_lock.acquire()
try:
return self.parent.setVideoEnable(id,enable)
finally:
self.port_lock.release()
def getVideoEnable(self, id):
self.port_lock.acquire()
try:
return self.parent.getVideoEnable(id)
finally:
self.port_lock.release()
def setVideoOutputFrameRate(self, id, fr):
self.port_lock.acquire()
try:
return self.parent.setVideoOutputFrameRate(id,fr)
finally:
self.port_lock.release()
def getVideoOutputFrameRate(self, id):
self.port_lock.acquire()
try:
return self.parent.getVideoOutputFrameRate(id)
finally:
self.port_lock.release()
def getVideoStatus(self, id):
self.port_lock.acquire()
try:
return self.parent.getVideoStatus(id)
finally:
self.port_lock.release()
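# A minimal usage sketch (hedged: a component would normally create these ports
# itself; "MyAudioDelegate" and the returned sample rate are illustrative
# assumptions, not part of this module):
#
#     class MyAudioDelegate(audio_delegation):
#         def getAudioOutputSampleRate(self, id):
#             return 48000.0
#
#     port = InFrontendAudioPort("FrontendAudio_in", MyAudioDelegate())
#     # port.getAudioOutputSampleRate("audio_1") -> 48000.0; every call the
#     # delegate does not override still raises FRONTEND.NotSupportedException.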
| lgpl-3.0 | -1,823,136,341,251,483,000 | 33.300926 | 93 | 0.668241 | false |
Azure/azure-sdk-for-python | sdk/containerservice/azure-mgmt-containerservice/azure/mgmt/containerservice/v2019_06_01/aio/operations/_operations.py | 1 | 4664 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class Operations:
"""Operations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.containerservice.v2019_06_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def list(
self,
**kwargs: Any
) -> AsyncIterable["_models.OperationListResult"]:
"""Gets a list of compute operations.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either OperationListResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.containerservice.v2019_06_01.models.OperationListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.OperationListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-06-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('OperationListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/providers/Microsoft.ContainerService/operations'} # type: ignore
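# A minimal usage sketch (assumption: the aio ContainerServiceClient exposes an
# instance of this class as ``client.operations``; credential construction is
# left to the caller):
#
#     client = ContainerServiceClient(credential, subscription_id)
#     async for op in client.operations.list():
#         print(op.name)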
| mit | 1,130,047,885,149,254,000 | 43.846154 | 133 | 0.645583 | false |
pbourgel/mybreakout | bounceables.py | 1 | 7962 | ###############################################################################
# #
# bounceables.py #
# #
# Class definitions for obhects off of which the ball can bounce #
# #
# Important classes #
# Wall #
# Paddle #
# Block #
# #
# Note also three rules for the game physics. #
# #
# #
# #
###############################################################################
#If we were to keep this realistic, the only rule in place would be angle
#of incidence equals angle of reflection, but this is video games! Physics
#is just a suggestion!
#AIAR: Angle of incidence equals angle of reflection, or in this
#case, we multiply one of the dx's or dy's by -1
#WARP; adjust according to where the ball collided. If the ball collides with
#the left half of the paddle, it should make the ball move left, and vice versa.
#SPEED: colliding object imparts some speed to the ball when it collides.
#increase the frame rate or scale up the ball movement speed?
#I'm leaving this one to the player. You'll have to refactor
#Ball.process_collision and Paddle.on_collide, and see what adding it does to
#the game.
#CORNER_ENABLED: If the ball hits in a small space designated the corner,
#it will multiply both dx and dy by -1.
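# A minimal sketch of how these rule flags act on the ball's velocity (names
# and the exact WARP handling are assumptions for illustration; the real
# handling lives in Ball.process_collision, which is not shown here):
#
#     result = paddle.on_collide(ball)        # e.g. {'WARP': 0.4, 'AIAR': 'y'}
#     if 'CORNER_ENABLED' in result:
#         ball.dx *= -1
#         ball.dy *= -1
#     elif 'AIAR' in result:
#         if result['AIAR'] == 'x':
#             ball.dx *= -1
#         else:
#             ball.dy *= -1
#     if 'WARP' in result:
#         ball.dx += result['WARP']           # nudge toward the struck half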
from __future__ import division #needed in Python 2.7 so integer division (e.g. in warp_offset) yields a float instead of truncating
from constants import *
import pygame
#Nothing special here, just a surface to bounce off of.
#All we're concerned with is what axis to reflect on
class Wall():
def __init__(self, x, y, width, height, axis):
self.axis = axis
self.color = BLACK
self.rect = pygame.Rect(x,y,width,height)
def on_collide(self):
return {'AIAR': self.axis}
class Paddle():
def __init__(self, color, x, y):
self.color = color
self.rect = pygame.Rect(y,x, PADDLE_WIDTH, PADDLE_HEIGHT)
self.ul_corner = pygame.Rect(y, x + (BALL_WIDTH - 2), 2, 2)
self.ur_corner = pygame.Rect(y, x + (BALL_WIDTH - 2), 2, 2)
#probably unnecessary abstraction around paddle.rect
def paddle_rect(self):
return self.rect
#handles collision. implements the three main physics rules we want: corner collisions, AIAR, and the warped paddle
def on_collide(self, ball):
if pygame.Rect.colliderect(self.ul_corner,ball.lr_corner) or pygame.Rect.colliderect(self.ur_corner,ball.ll_corner):
return {'CORNER_ENABLED': ''}
#calculate the warp offset by dividing the difference between the middle of the ball and the middle of the paddle by the block width
ball_center = ball.rect.midbottom[0]
warp_offset = (ball_center - self.rect.midtop[0]) / BLOCK_WIDTH
# print 'in on_collide, warp_offset = ' + str(warp_offset)
#the fact that this doesn't handle AIAR collisions the same way might lead to a weird bug where
#you can hold the ball in the paddle as it jiggles around a little. Someone should file a
#ticket in the Github repo about that
return {'WARP': warp_offset,'AIAR': 'y'}
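    # Worked example of the warp offset above (numbers are assumptions, not the
    # real constants): with BLOCK_WIDTH = 60 and the paddle's midtop at x = 300,
    # a ball whose bottom-centre lands at x = 330 gives (330 - 300) / 60 = 0.5,
    # a rightward nudge; a hit at x = 270 gives -0.5, nudging the ball left.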
#Handles paddle movement. Checks to make sure the paddle isn't at the left or
#right-hand sides of the screen first
def move_paddle(self,direction):
if direction == '-' and self.rect.x > PADDLE_SPEED and self.rect.x > 0:
self.rect.move_ip((PADDLE_SPEED*-1),0)
self.ul_corner.move_ip((PADDLE_SPEED*-1),0)
self.ur_corner.move_ip((PADDLE_SPEED*-1),0)
elif direction == '+' and (self.rect.x + PADDLE_WIDTH) < SCREENWIDTH:
self.rect.move_ip(PADDLE_SPEED,0)
self.ul_corner.move_ip((PADDLE_SPEED),0)
self.ur_corner.move_ip((PADDLE_SPEED),0)
class Block():
def __init__(self, ccounter, rct):
self.ccounter = ccounter
if self.ccounter == 6:
self.color = CYAN #Yes, this should be indigo. I didn't make it indigo because video games.
elif self.ccounter == 5:
self.color = BLUE
elif self.ccounter == 4:
self.color = GREEN
elif self.ccounter == 3:
self.color = YELLOW
elif self.ccounter == 2:
self.color = ORANGE
elif self.ccounter == 1:
self.color = RED
elif self.ccounter == 0:
self.color = BLACK
#This is the rectangle that gets drawn in the game loop
self.rect = rct
#This is worked out to handle the corner collision rule
self.ul_corner = pygame.Rect(self.rect.y, self.rect.x + (BALL_WIDTH - CORNER_CONSTANT), CORNER_CONSTANT, CORNER_CONSTANT)
self.ll_corner = pygame.Rect(self.rect.y + (BALL_HEIGHT - CORNER_CONSTANT), self.rect.x, CORNER_CONSTANT, CORNER_CONSTANT)
self.ur_corner = pygame.Rect(self.rect.y, self.rect.x + (BALL_WIDTH - CORNER_CONSTANT), CORNER_CONSTANT, CORNER_CONSTANT)
self.lr_corner = pygame.Rect(self.rect.y + (BALL_HEIGHT - CORNER_CONSTANT), self.rect.x + (BALL_WIDTH - CORNER_CONSTANT), CORNER_CONSTANT, CORNER_CONSTANT)
#return CORNER_ENABLED if it hit the corner squares
#else we just do straight angle of incidence equals angle of reflection
#to figure out which axis to reflect the ball on, calculate the ball's position
#one dx in the opposite direction. If that collides with the block, then it means
#we need to go dy y coordinates back, else we reflect on the x axis.
#This comes with one constraint
#1. We can't let the ball move so far into the block before calling on_collide that it screws with the logic, so we constrain BALL_SPEED to +- the smallest dimension of the block.
#Hey, here's an idea: In some Breakout/Arkanoid games, they play a short
#beep whenever a collision occurs. If a bunch of collisions happen in
#close succession, it's like wind chimes. Maybe you should add that.
def on_collide(self,ball):
self.ccounter-=1
self.change_color()
if pygame.Rect.colliderect(self.ll_corner, ball.ur_corner) or pygame.Rect.colliderect(self.ul_corner,ball.lr_corner) or pygame.Rect.colliderect(self.ur_corner, ball.ll_corner) or pygame.Rect.colliderect(self.lr_corner,ball.ul_corner):
return {'CORNER_ENABLED': ''}
else:
if self.rect.colliderect(ball.rect.move(ball.dx*-1,ball.dy)):
return {'AIAR': 'y'}
else:
return {'AIAR': 'x'}
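    # Worked example of the axis test in on_collide (velocity values are
    # assumptions): with ball.dx = 4, the test shifts the ball 4px back along x.
    # If that shifted rect still overlaps the block, the horizontal step was not
    # what caused the hit, so the ball struck a top/bottom face and we reflect
    # on y; otherwise it came in from the side and we reflect on x.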
#Changes the brick color in response to a collision.
def change_color(self):
if self.ccounter == 5:
self.color = BLUE
elif self.ccounter == 4:
self.color = GREEN
elif self.ccounter == 3:
self.color = YELLOW
elif self.ccounter == 2:
self.color = ORANGE
elif self.ccounter == 1:
self.color = RED
elif self.ccounter == 0:
self.color = BLACK
#Intentionally leaving this blank
#Hey, I gotta leave something for other people to build on, right?
#You might want to create a separate powerup class for this
# def drop_powerup(self):
# pass ballrect.move(0,1)
| gpl-3.0 | -7,460,730,442,292,494,000 | 49.713376 | 242 | 0.568325 | false |
cymplecy/codebug_tether | setup.py | 1 | 1209 | import sys
from distutils.core import setup
VERSION_FILE = 'codebug_tether/version.py'
PY3 = sys.version_info[0] >= 3
def get_version():
if PY3:
version_vars = {}
with open(VERSION_FILE) as f:
code = compile(f.read(), VERSION_FILE, 'exec')
exec(code, None, version_vars)
return version_vars['__version__']
else:
execfile(VERSION_FILE)
return __version__
setup(
name='codebug_tether',
version=get_version(),
description='Control CodeBug over Serial USB.',
author='Thomas Preston',
author_email='[email protected]',
license='GPLv3+',
url='https://github.com/codebugtools/codebug_tether',
packages=['codebug_tether'],
long_description=open('README.md').read() + open('CHANGELOG').read(),
classifiers=[
"License :: OSI Approved :: GNU Affero General Public License v3 or "
"later (AGPLv3+)",
"Programming Language :: Python :: 3",
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"Topic :: Software Development :: Libraries :: Python Modules",
],
keywords='codebug tether raspberrypi openlx',
)
| gpl-3.0 | -6,195,292,297,385,302,000 | 29.225 | 77 | 0.621175 | false |
uclouvain/osis | ddd/logic/learning_unit/domain/model/_titles.py | 1 | 2084 | ##############################################################################
#
# OSIS stands for Open Student Information System. It's an application
# designed to manage the core business of higher education institutions,
# such as universities, faculties, institutes and professional schools.
# The core business involves the administration of students, teachers,
# courses, programs and so on.
#
# Copyright (C) 2015-2021 Université catholique de Louvain (http://www.uclouvain.be)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# A copy of this license - GNU General Public License - is available
# at the root of the source code of this program. If not,
# see http://www.gnu.org/licenses/.
#
##############################################################################
import attr
from osis_common.ddd import interface
@attr.s(frozen=True, slots=True)
class Titles(interface.ValueObject):
common_fr = attr.ib(type=str)
specific_fr = attr.ib(type=str)
common_en = attr.ib(type=str)
specific_en = attr.ib(type=str)
@property
def complete_fr(self) -> str:
if self.common_fr and self.specific_fr:
return self.common_fr + " - " + self.specific_fr
elif self.common_fr:
return self.common_fr
else:
return self.specific_fr
@property
def complete_en(self) -> str:
if self.common_en and self.specific_en:
return self.common_en + " - " + self.specific_en
elif self.common_en:
return self.common_en
else:
return self.specific_en
| agpl-3.0 | 9,158,577,290,105,660,000 | 37.574074 | 87 | 0.6265 | false |
FreeOpcUa/python-opcua | examples/client_to_prosys.py | 1 | 2365 | import sys
sys.path.insert(0, "..")
import time
import logging
from opcua import Client
from opcua import ua
class SubHandler(object):
"""
Client to subscription. It will receive events from server
"""
def datachange_notification(self, node, val, data):
print("Python: New data change event", node, val)
def event_notification(self, event):
print("Python: New event", event)
if __name__ == "__main__":
#from IPython import embed
logging.basicConfig(level=logging.DEBUG)
client = Client("opc.tcp://localhost:53530/OPCUA/SimulationServer/")
#client = Client("opc.tcp://olivier:olivierpass@localhost:53530/OPCUA/SimulationServer/")
#client.set_security_string("Basic256Sha256,SignAndEncrypt,certificate-example.der,private-key-example.pem")
try:
client.connect()
root = client.get_root_node()
print("Root is", root)
print("childs of root are: ", root.get_children())
print("name of root is", root.get_browse_name())
objects = client.get_objects_node()
print("childs og objects are: ", objects.get_children())
myfloat = client.get_node("ns=4;s=Float")
mydouble = client.get_node("ns=4;s=Double")
myint64 = client.get_node("ns=4;s=Int64")
myuint64 = client.get_node("ns=4;s=UInt64")
myint32 = client.get_node("ns=4;s=Int32")
myuint32 = client.get_node("ns=4;s=UInt32")
var = client.get_node(ua.NodeId("Random1", 5))
print("var is: ", var)
print("value of var is: ", var.get_value())
var.set_value(ua.Variant([23], ua.VariantType.Double))
print("setting float value")
myfloat.set_value(ua.Variant(1.234, ua.VariantType.Float))
print("reading float value: ", myfloat.get_value())
handler = SubHandler()
sub = client.create_subscription(500, handler)
handle = sub.subscribe_data_change(var)
device = objects.get_child(["2:MyObjects", "2:MyDevice"])
method = device.get_child("2:MyMethod")
result = device.call_method(method, ua.Variant("sin"), ua.Variant(180, ua.VariantType.Double))
print("Mehtod result is: ", result)
#embed()
time.sleep(3)
sub.unsubscribe(handle)
sub.delete()
#client.close_session()
finally:
client.disconnect()
| lgpl-3.0 | 5,179,125,964,854,195,000 | 34.298507 | 112 | 0.629598 | false |
naoyat/latin | latin/latindic.py | 1 | 1985 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import latin_noun
import latin_pronoun
import latin_adj
import latin_conj
import latin_prep
import latin_verb_reg
import latin_verb_irreg
import util
class LatinDic:
dic = {}
auto_macron_mode = False
def flatten(text):
return text.replace(u'ā',u'a').replace(u'ē',u'e').replace(u'ī',u'i').replace(u'ō',u'o').replace(u'ū',u'u').replace(u'ȳ',u'y').lower()
def register(surface, info):
if not info.has_key('pos'): return
if LatinDic.auto_macron_mode:
surface = flatten(surface)
if LatinDic.dic.has_key(surface):
LatinDic.dic[surface].append(info)
else:
LatinDic.dic[surface] = [info]
def register_items(items):
for item in items:
register(item['surface'], item)
def lookup(word):
return LatinDic.dic.get(word, None)
def dump():
for k, v in LatinDic.dic.items():
print util.render2(k, v)
def load_def(file, tags={}):
items = []
with open(file, 'r') as fp:
for line in fp:
if len(line) == 0: continue
if line[0] == '#': continue
fs = line.rstrip().split('\t')
if len(fs) < 3: continue
surface = fs[0].decode('utf-8')
pos = fs[1]
ja = fs[2]
items.append(util.aggregate_dicts({'surface':surface, 'pos':pos, 'ja':ja}, tags))
return items
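# Illustrative layout of a .def file consumed by load_def (tab-separated;
# the rows shown are hypothetical — the real entries live under words/):
# lines starting with '#' and rows with fewer than three fields are skipped.
#
#     # surface<TAB>pos<TAB>ja
#     semper	adv	always (ja gloss)
#
# load_def('words/adv.def', {'pos': 'adv'}) then returns dicts such as
# {'surface': u'semper', 'pos': 'adv', 'ja': '...'} merged with the extra tags.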
def load(auto_macron_mode=False):
LatinDic.auto_macron_mode = auto_macron_mode
items = []
items += latin_noun.load()
items += latin_pronoun.load()
items += latin_adj.load()
items += latin_conj.load()
items += latin_prep.load()
items += latin_verb_reg.load()
items += latin_verb_irreg.load()
items += load_def('words/adv.def', {'pos':'adv'})
items += load_def('words/other.def')
register_items(items)
# return ld
if __name__ == '__main__':
# for k, v in dic.items():
# print util.render(k), util.render(v)
pass
| mit | 3,132,495,049,686,143,000 | 20.053191 | 137 | 0.583123 | false |
hfaran/ubc-timetabler | timetabler/util.py | 1 | 3085 | from __future__ import division
from math import sqrt
#############
# Constants #
#############
DAY_LIST = ["Mon", "Tue", "Wed", "Thu", "Fri"]
###########
# Helpers #
###########
# General
def chunks(l, n):
"""Yields successive ``n``-sized chunks from ``l``
http://stackoverflow.com/a/312464/1798683
"""
for i in xrange(0, len(l), n):
yield l[i:i + n]
def check_equal(iterable):
"""Check equivalency or all items in ``iterable``
>>> check_equal(xrange(5))
False
>>> check_equal([1, 1, 1])
True
>>> check_equal([1, 2, 1])
False
"""
iterable = iter(iterable)
first = next(iterable)
return all(first == i for i in iterable)
def check_diff(iterable):
"""Returns true if any items in ``iterable`` differ
>>> check_diff([1, 1])
False
>>> check_diff([1, 2])
True
>>> check_diff(xrange(5))
True
"""
iterable = iter(iterable)
first = next(iterable)
return any(first != i for i in iterable)
def all_unique(x):
"""Check if all items in ``x`` are unique
http://stackoverflow.com/a/5281641/1798683
"""
seen = set()
return not any(i in seen or seen.add(i) for i in x)
def stddev(lst):
"""Calculate **population** (not sample) standard deviation of ``lst``
:type lst: list
:param lst: List of numbers
:returns: standard deviation of ``lst``
:rtype: float
>>> act = stddev([13,25,46,255,55])
>>> exp = 89.34517334
>>> abs(act - exp) < 1E-6
True
"""
points = len(lst)
mean = sum(lst)/points
variance = sum((i - mean)**2 for i in lst)/points
return sqrt(variance)
def setup_root_logger(log_level='INFO'):
import logging
import sys
root = logging.getLogger()
root.setLevel(getattr(logging, log_level))
ch = logging.StreamHandler(sys.stdout)
ch.setLevel(logging.INFO)
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
ch.setFormatter(formatter)
root.addHandler(ch)
# timetabler-specific helpers
def strtime2num(s):
"""Turns ``s`` like "09:00" to 9.5"""
t = s.split(":")
t = map(int, t)
if t[1] == 30:
return t[0] + 0.5
else:
return t[0]
def iter_time(start, end):
"""Returns an iterator that gives a range of half-hourly time
from ``start`` (inclusive) to ``end`` (exclusive)
>>> list(iter_time("09:00", "12:30"))
['09:00', '09:30', '10:00', '10:30', '11:00', '11:30', '12:00']
"""
def time2tuple(t):
return tuple(map(int, t.split(":")))
def tuple2time(t):
return ":".join([str(i).zfill(2) for i in t])
current = start
while current < end:
# Put yield at the time because we do inclusive start, exclusive stop
yield current
_current = time2tuple(current)
if _current[1] == 30:
_current = (_current[0] + 1, 0)
else:
_current = (_current[0], 30)
current = tuple2time(_current)
if __name__ == '__main__':
import doctest
doctest.testmod()
| mit | -6,416,785,595,618,662,000 | 21.035714 | 89 | 0.561426 | false |
SUNET/eduid-webapp | src/eduid_webapp/email/settings/common.py | 1 | 2190 | # -*- coding: utf-8 -*-
#
# Copyright (c) 2013-2016 NORDUnet A/S
# Copyright (c) 2019 SUNET
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or
# without modification, are permitted provided that the following
# conditions are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# 3. Neither the name of the NORDUnet nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
"""
Configuration (file) handling for the eduID email app.
"""
from eduid_common.config.base import EduIDBaseAppConfig, MagicCookieMixin, AmConfigMixin, MailConfigMixin
class EmailConfig(EduIDBaseAppConfig, MagicCookieMixin, AmConfigMixin, MailConfigMixin):
"""
Configuration for the email app
"""
eduid_site_name: str
eduid_site_url: str
app_name: str = 'email'
email_verification_timeout: int = 86400 # seconds
throttle_resend_seconds: int = 300
email_verify_redirect_url: str = '/profile/emails'
| bsd-3-clause | -3,903,735,646,256,367,600 | 39.555556 | 105 | 0.743836 | false |
cpenner461/tellmewhen | tmw/server.py | 1 | 4361 | '''
Built-in web server using Flask. Should mirror functionality offered by the
cli.
'''
from flask import Flask
from flask import render_template, request, session, abort
import tmw.config as config
import tmw.core as core
import json
from uuid import uuid4
from multiprocessing import Pool
app = Flask(__name__)
pool = Pool(processes=2)
jobs = []
@app.route('/', methods = ["POST", "GET"])
def index():
'''The main landing page and UI for tmw'''
if request.method == "GET":
return render_template('index.html', jobs=jobs)
else:
url = request.form.get('url')
freq = int(request.form.get('frequency'))
num_checks = int(request.form.get('num_checks'))
check_type = request.form.get('check_type')
value = None
if check_type == 'status_code':
value = request.form.get('status_code')
elif check_type == 'string_match' or check_type == 'regex_match':
value = request.form.get('string_match')
check_results = None
total_checks = None
index = None
def _handle_results(results):
(check_results, total_checks, index) = results
jobs[index]['status'] = "success" if check_results else "failure"
job = pool.apply_async(
core.check_until,
(url, check_type, value, freq, num_checks, len(jobs)),
callback=_handle_results
)
jobs.append({ 'url': url, 'value': value, 'status': 'pending' })
return render_template('index.html', jobs=jobs, success=True)
@app.route('/_job_status')
def _job_status():
return json.dumps(jobs)
@app.route('/hello')
def hello():
'''Simple page useful for testing/validating your tmw setup'''
return render_template('hello.html')
@app.route('/settings', methods = ["POST", "GET"])
def settings():
'''Settings page'''
status = None
if request.method == "POST":
f = request.form
conf = config.load_config()
_set_config_param(conf, 'smtp', 'username', f)
_set_config_param(conf, 'smtp', 'sender', f)
_set_config_param(conf, 'smtp', 'recipients', f)
_set_config_param(conf, 'smtp', 'server', f)
_set_config_param(conf, 'smtp', 'port', f, number = True)
_set_config_param(conf, 'slack', 'username', f)
_set_config_param(conf, 'slack', 'channel', f, prefix = "#")
config.write_config(conf)
settings = config
status = "success"
else:
conf = config.load_config()
settings = {}
settings['smtp-username'] = _get_config_param(conf, 'smtp', 'username')
settings['smtp-sender'] = _get_config_param(conf, 'smtp', 'sender')
settings['smtp-recipients'] = _get_config_param(conf, 'smtp', 'recipients')
settings['smtp-server'] = _get_config_param(conf, 'smtp', 'server')
settings['smtp-port'] = _get_config_param(conf, 'smtp', 'port')
settings['slack-username'] = _get_config_param(conf, 'slack', 'username')
settings['slack-channel'] = _get_config_param(conf, 'slack', 'channel')
return render_template('settings.html', status=status, settings=settings)
def _set_config_param(conf, service, param, form, number = False, prefix = ""):
if not conf.get(service):
conf[service] = {}
if not conf[service].get(param):
conf[service][param] = None
value = form.get('%s-%s' % (service, param))
if value:
value = prefix + value
if number and value:
value = int(value)
conf[service][param] = value if value else conf[service][param]
def _get_config_param(conf, service, param):
if not conf.get(service):
conf[service] = {}
if not conf[service].get(param):
conf[service][param] = None
return conf[service][param]
@app.before_request
def csrf_protect():
if request.method == "POST":
token = session.pop('_csrf_token', None)
if not token or str(token) != request.form.get('_csrf_token'):
abort(403)
def _generate_csrf_token():
if '_csrf_token' not in session:
session['_csrf_token'] = uuid4()
return session['_csrf_token']
app.jinja_env.globals['csrf_token'] = _generate_csrf_token
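# Template side of the CSRF check above (a sketch; any POSTing form in the
# templates is assumed to embed the session token like this):
#
#     <form method="post">
#       <input type="hidden" name="_csrf_token" value="{{ csrf_token() }}">
#       ...
#     </form>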
#Remove this later
@app.route('/email-notification')
def the_path():
return render_template('email-notification.html')
| mit | -1,459,745,616,760,489,200 | 30.601449 | 83 | 0.608347 | false |
googleapis/googleapis-gen | google/cloud/aiplatform/v1beta1/aiplatform-v1beta1-py/google/cloud/aiplatform_v1beta1/services/index_service/async_client.py | 1 | 26691 | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from collections import OrderedDict
import functools
import re
from typing import Dict, Sequence, Tuple, Type, Union
import pkg_resources
import google.api_core.client_options as ClientOptions # type: ignore
from google.api_core import exceptions as core_exceptions # type: ignore
from google.api_core import gapic_v1 # type: ignore
from google.api_core import retry as retries # type: ignore
from google.auth import credentials as ga_credentials # type: ignore
from google.oauth2 import service_account # type: ignore
from google.api_core import operation as gac_operation # type: ignore
from google.api_core import operation_async # type: ignore
from google.cloud.aiplatform_v1beta1.services.index_service import pagers
from google.cloud.aiplatform_v1beta1.types import deployed_index_ref
from google.cloud.aiplatform_v1beta1.types import index
from google.cloud.aiplatform_v1beta1.types import index as gca_index
from google.cloud.aiplatform_v1beta1.types import index_service
from google.cloud.aiplatform_v1beta1.types import operation as gca_operation
from google.protobuf import empty_pb2 # type: ignore
from google.protobuf import field_mask_pb2 # type: ignore
from google.protobuf import struct_pb2 # type: ignore
from google.protobuf import timestamp_pb2 # type: ignore
from .transports.base import IndexServiceTransport, DEFAULT_CLIENT_INFO
from .transports.grpc_asyncio import IndexServiceGrpcAsyncIOTransport
from .client import IndexServiceClient
class IndexServiceAsyncClient:
"""A service for creating and managing Vertex AI's Index
resources.
"""
_client: IndexServiceClient
DEFAULT_ENDPOINT = IndexServiceClient.DEFAULT_ENDPOINT
DEFAULT_MTLS_ENDPOINT = IndexServiceClient.DEFAULT_MTLS_ENDPOINT
index_path = staticmethod(IndexServiceClient.index_path)
parse_index_path = staticmethod(IndexServiceClient.parse_index_path)
index_endpoint_path = staticmethod(IndexServiceClient.index_endpoint_path)
parse_index_endpoint_path = staticmethod(IndexServiceClient.parse_index_endpoint_path)
common_billing_account_path = staticmethod(IndexServiceClient.common_billing_account_path)
parse_common_billing_account_path = staticmethod(IndexServiceClient.parse_common_billing_account_path)
common_folder_path = staticmethod(IndexServiceClient.common_folder_path)
parse_common_folder_path = staticmethod(IndexServiceClient.parse_common_folder_path)
common_organization_path = staticmethod(IndexServiceClient.common_organization_path)
parse_common_organization_path = staticmethod(IndexServiceClient.parse_common_organization_path)
common_project_path = staticmethod(IndexServiceClient.common_project_path)
parse_common_project_path = staticmethod(IndexServiceClient.parse_common_project_path)
common_location_path = staticmethod(IndexServiceClient.common_location_path)
parse_common_location_path = staticmethod(IndexServiceClient.parse_common_location_path)
@classmethod
def from_service_account_info(cls, info: dict, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
info.
Args:
info (dict): The service account private key info.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
IndexServiceAsyncClient: The constructed client.
"""
return IndexServiceClient.from_service_account_info.__func__(IndexServiceAsyncClient, info, *args, **kwargs) # type: ignore
@classmethod
def from_service_account_file(cls, filename: str, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
file.
Args:
filename (str): The path to the service account private key json
file.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
IndexServiceAsyncClient: The constructed client.
"""
return IndexServiceClient.from_service_account_file.__func__(IndexServiceAsyncClient, filename, *args, **kwargs) # type: ignore
from_service_account_json = from_service_account_file
@property
def transport(self) -> IndexServiceTransport:
"""Returns the transport used by the client instance.
Returns:
IndexServiceTransport: The transport used by the client instance.
"""
return self._client.transport
get_transport_class = functools.partial(type(IndexServiceClient).get_transport_class, type(IndexServiceClient))
def __init__(self, *,
credentials: ga_credentials.Credentials = None,
transport: Union[str, IndexServiceTransport] = "grpc_asyncio",
client_options: ClientOptions = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
"""Instantiates the index service client.
Args:
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
transport (Union[str, ~.IndexServiceTransport]): The
transport to use. If set to None, a transport is chosen
automatically.
client_options (ClientOptions): Custom options for the client. It
won't take effect if a ``transport`` instance is provided.
(1) The ``api_endpoint`` property can be used to override the
default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
environment variable can also be used to override the endpoint:
"always" (always use the default mTLS endpoint), "never" (always
use the default regular endpoint) and "auto" (auto switch to the
default mTLS endpoint if client certificate is present, this is
the default value). However, the ``api_endpoint`` property takes
precedence if provided.
(2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
is "true", then the ``client_cert_source`` property can be used
to provide client certificate for mutual TLS transport. If
not provided, the default SSL client certificate will be used if
present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
set, no client certificate will be used.
Raises:
google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
creation failed for any reason.
"""
self._client = IndexServiceClient(
credentials=credentials,
transport=transport,
client_options=client_options,
client_info=client_info,
)
async def create_index(self,
request: index_service.CreateIndexRequest = None,
*,
parent: str = None,
index: gca_index.Index = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> operation_async.AsyncOperation:
r"""Creates an Index.
Args:
request (:class:`google.cloud.aiplatform_v1beta1.types.CreateIndexRequest`):
The request object. Request message for
[IndexService.CreateIndex][google.cloud.aiplatform.v1beta1.IndexService.CreateIndex].
parent (:class:`str`):
Required. The resource name of the Location to create
the Index in. Format:
``projects/{project}/locations/{location}``
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
index (:class:`google.cloud.aiplatform_v1beta1.types.Index`):
Required. The Index to create.
This corresponds to the ``index`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.api_core.operation_async.AsyncOperation:
An object representing a long-running operation.
The result type for the operation will be :class:`google.cloud.aiplatform_v1beta1.types.Index` A representation of a collection of database items organized in a way that
allows for approximate nearest neighbor (a.k.a ANN)
algorithms search.
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([parent, index])
if request is not None and has_flattened_params:
raise ValueError("If the `request` argument is set, then none of "
"the individual field arguments should be set.")
request = index_service.CreateIndexRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if parent is not None:
request.parent = parent
if index is not None:
request.index = index
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.create_index,
default_timeout=5.0,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((
("parent", request.parent),
)),
)
# Send the request.
response = await rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# Wrap the response in an operation future.
response = operation_async.from_gapic(
response,
self._client._transport.operations_client,
gca_index.Index,
metadata_type=index_service.CreateIndexOperationMetadata,
)
# Done; return the response.
return response
async def get_index(self,
request: index_service.GetIndexRequest = None,
*,
name: str = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> index.Index:
r"""Gets an Index.
Args:
request (:class:`google.cloud.aiplatform_v1beta1.types.GetIndexRequest`):
The request object. Request message for
[IndexService.GetIndex][google.cloud.aiplatform.v1beta1.IndexService.GetIndex]
name (:class:`str`):
Required. The name of the Index resource. Format:
``projects/{project}/locations/{location}/indexes/{index}``
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.aiplatform_v1beta1.types.Index:
A representation of a collection of
database items organized in a way that
allows for approximate nearest neighbor
(a.k.a ANN) algorithms search.
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([name])
if request is not None and has_flattened_params:
raise ValueError("If the `request` argument is set, then none of "
"the individual field arguments should be set.")
request = index_service.GetIndexRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if name is not None:
request.name = name
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.get_index,
default_timeout=5.0,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((
("name", request.name),
)),
)
# Send the request.
response = await rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# Done; return the response.
return response
async def list_indexes(self,
request: index_service.ListIndexesRequest = None,
*,
parent: str = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> pagers.ListIndexesAsyncPager:
r"""Lists Indexes in a Location.
Args:
request (:class:`google.cloud.aiplatform_v1beta1.types.ListIndexesRequest`):
The request object. Request message for
[IndexService.ListIndexes][google.cloud.aiplatform.v1beta1.IndexService.ListIndexes].
parent (:class:`str`):
Required. The resource name of the Location from which
to list the Indexes. Format:
``projects/{project}/locations/{location}``
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.aiplatform_v1beta1.services.index_service.pagers.ListIndexesAsyncPager:
Response message for
[IndexService.ListIndexes][google.cloud.aiplatform.v1beta1.IndexService.ListIndexes].
Iterating over this object will yield results and
resolve additional pages automatically.
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([parent])
if request is not None and has_flattened_params:
raise ValueError("If the `request` argument is set, then none of "
"the individual field arguments should be set.")
request = index_service.ListIndexesRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if parent is not None:
request.parent = parent
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.list_indexes,
default_timeout=5.0,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((
("parent", request.parent),
)),
)
# Send the request.
response = await rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# This method is paged; wrap the response in a pager, which provides
# an `__aiter__` convenience method.
response = pagers.ListIndexesAsyncPager(
method=rpc,
request=request,
response=response,
metadata=metadata,
)
# Done; return the response.
return response
async def update_index(self,
request: index_service.UpdateIndexRequest = None,
*,
index: gca_index.Index = None,
update_mask: field_mask_pb2.FieldMask = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> operation_async.AsyncOperation:
r"""Updates an Index.
Args:
request (:class:`google.cloud.aiplatform_v1beta1.types.UpdateIndexRequest`):
The request object. Request message for
[IndexService.UpdateIndex][google.cloud.aiplatform.v1beta1.IndexService.UpdateIndex].
index (:class:`google.cloud.aiplatform_v1beta1.types.Index`):
Required. The Index which updates the
resource on the server.
This corresponds to the ``index`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`):
The update mask applies to the resource. For the
``FieldMask`` definition, see
[google.protobuf.FieldMask][google.protobuf.FieldMask].
This corresponds to the ``update_mask`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.api_core.operation_async.AsyncOperation:
An object representing a long-running operation.
The result type for the operation will be :class:`google.cloud.aiplatform_v1beta1.types.Index` A representation of a collection of database items organized in a way that
allows for approximate nearest neighbor (a.k.a ANN)
algorithms search.
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([index, update_mask])
if request is not None and has_flattened_params:
raise ValueError("If the `request` argument is set, then none of "
"the individual field arguments should be set.")
request = index_service.UpdateIndexRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if index is not None:
request.index = index
if update_mask is not None:
request.update_mask = update_mask
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.update_index,
default_timeout=5.0,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((
("index.name", request.index.name),
)),
)
# Send the request.
response = await rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# Wrap the response in an operation future.
response = operation_async.from_gapic(
response,
self._client._transport.operations_client,
gca_index.Index,
metadata_type=index_service.UpdateIndexOperationMetadata,
)
# Done; return the response.
return response
async def delete_index(self,
request: index_service.DeleteIndexRequest = None,
*,
name: str = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> operation_async.AsyncOperation:
r"""Deletes an Index. An Index can only be deleted when all its
[DeployedIndexes][google.cloud.aiplatform.v1beta1.Index.deployed_indexes]
had been undeployed.
Args:
request (:class:`google.cloud.aiplatform_v1beta1.types.DeleteIndexRequest`):
The request object. Request message for
[IndexService.DeleteIndex][google.cloud.aiplatform.v1beta1.IndexService.DeleteIndex].
name (:class:`str`):
Required. The name of the Index resource to be deleted.
Format:
``projects/{project}/locations/{location}/indexes/{index}``
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.api_core.operation_async.AsyncOperation:
An object representing a long-running operation.
The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated
empty messages in your APIs. A typical example is to
use it as the request or the response type of an API
method. For instance:
service Foo {
rpc Bar(google.protobuf.Empty) returns
(google.protobuf.Empty);
}
The JSON representation for Empty is empty JSON
object {}.
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([name])
if request is not None and has_flattened_params:
raise ValueError("If the `request` argument is set, then none of "
"the individual field arguments should be set.")
request = index_service.DeleteIndexRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if name is not None:
request.name = name
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.delete_index,
default_timeout=5.0,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((
("name", request.name),
)),
)
# Send the request.
response = await rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# Wrap the response in an operation future.
response = operation_async.from_gapic(
response,
self._client._transport.operations_client,
empty_pb2.Empty,
metadata_type=gca_operation.DeleteOperationMetadata,
)
# Done; return the response.
return response
try:
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
gapic_version=pkg_resources.get_distribution(
"google-cloud-aiplatform",
).version,
)
except pkg_resources.DistributionNotFound:
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
__all__ = (
"IndexServiceAsyncClient",
)
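# A minimal usage sketch (hedged: the project and location values are
# placeholders and credentials are assumed to come from the environment via
# Application Default Credentials):
#
#     async def demo():
#         client = IndexServiceAsyncClient()
#         parent = "projects/my-project/locations/us-central1"
#         pager = await client.list_indexes(parent=parent)
#         async for index in pager:
#             print(index.name)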
| apache-2.0 | -7,461,925,922,549,992,000 | 41.165877 | 185 | 0.616762 | false |
Magicked/crits | crits/events/event.py | 4 | 3783 | import uuid
try:
from django_mongoengine import Document
except ImportError:
from mongoengine import Document
from mongoengine import StringField, UUIDField, BooleanField
from mongoengine import EmbeddedDocument
from django.conf import settings
from crits.core.crits_mongoengine import CritsBaseAttributes
from crits.core.crits_mongoengine import CritsSourceDocument
from crits.core.crits_mongoengine import CommonAccess, CritsDocumentFormatter
from crits.core.crits_mongoengine import CritsActionsDocument
from crits.events.migrate import migrate_event
from crits.vocabulary.events import EventTypes
class UnreleasableEventError(Exception):
"""
Exception for attempting to release an event relationship that is
unreleasable.
"""
def __init__(self, value, **kwargs):
self.message = "Relationship %s cannot be released to the event's \
releasability list." % value
super(UnreleasableEventError, self).__init__(**kwargs)
def __str__(self):
return repr(self.message)
class Event(CritsBaseAttributes, CritsSourceDocument, CritsActionsDocument,
Document):
"""
Event class.
"""
meta = {
"collection": settings.COL_EVENTS,
"auto_create_index": False,
"crits_type": 'Event',
"latest_schema_version": 3,
"schema_doc": {
'title': 'Title of this event',
'event_id': 'Unique event ID',
'event_type': 'Type of event based on Event Type options',
'description': 'Description of the event',
'source': ('List [] of sources who provided information about this'
' event')
},
"jtable_opts": {
'details_url': 'crits-events-views-view_event',
'details_url_key': 'id',
'default_sort': "created DESC",
'searchurl': 'crits-events-views-events_listing',
'fields': [ "title", "event_type", "created",
"source", "campaign", "status", "id"],
'jtopts_fields': [ "details",
"title",
"event_type",
"created",
"source",
"campaign",
"status",
"favorite",
"id"],
'hidden_fields': [],
'linked_fields': ["source", "campaign", "event_type"],
'details_link': 'details',
'no_sort': ['details']
}
}
title = StringField(required=True)
event_type = StringField(required=True)
# description also exists in CritsBaseAttributes, but this one is required.
description = StringField(required=True)
event_id = UUIDField(binary=True, required=True, default=uuid.uuid4)
def set_event_type(self, event_type):
"""
Set the Event Type.
:param event_type: The event type to set (must exist in DB).
:type event_type: str
"""
if event_type in EventTypes.values():
self.event_type = event_type
def migrate(self):
"""
Migrate to the latest schema version.
"""
migrate_event(self)
class EventAccess(EmbeddedDocument, CritsDocumentFormatter, CommonAccess):
"""
ACL for Events.
"""
add_sample = BooleanField(default=False)
title_edit = BooleanField(default=False)
type_edit = BooleanField(default=False)
| mit | 5,491,923,777,403,173,000 | 33.081081 | 79 | 0.545863 | false |
mitsei/dlkit | dlkit/json_/logging_/record_templates.py | 1 | 2399 | """JSON implementations of logging records."""
# pylint: disable=no-init
# Numerous classes don't require __init__.
# pylint: disable=too-many-public-methods,too-few-public-methods
# Number of methods are defined in specification
# pylint: disable=protected-access
# Access to protected methods allowed in package json package scope
# pylint: disable=too-many-ancestors
# Inheritance defined in specification
from .. import utilities
from ..osid import records as osid_records
from dlkit.abstract_osid.logging_ import records as abc_logging_records
class LogEntryRecord(abc_logging_records.LogEntryRecord, osid_records.OsidRecord):
"""A record for a ``LogEntry``.
The methods specified by the record type are available through the
underlying object.
"""
class LogEntryQueryRecord(abc_logging_records.LogEntryQueryRecord, osid_records.OsidRecord):
"""A record for a ``LoglEntryQuery``.
The methods specified by the record type are available through the
underlying object.
"""
class LogEntryFormRecord(abc_logging_records.LogEntryFormRecord, osid_records.OsidRecord):
"""A record for a ``LogEntryForm``.
The methods specified by the record type are available through the
underlying object.
"""
class LogEntrySearchRecord(abc_logging_records.LogEntrySearchRecord, osid_records.OsidRecord):
"""A record for a ``LogEntrySearch``.
The methods specified by the record type are available through the
underlying object.
"""
class LogRecord(abc_logging_records.LogRecord, osid_records.OsidRecord):
"""A record for a ``Log``.
The methods specified by the record type are available through the
underlying object.
"""
class LogQueryRecord(abc_logging_records.LogQueryRecord, osid_records.OsidRecord):
"""A record for a ``LogQuery``.
The methods specified by the record type are available through the
underlying object.
"""
class LogFormRecord(abc_logging_records.LogFormRecord, osid_records.OsidRecord):
"""A record for a ``LogForm``.
The methods specified by the record type are available through the
underlying object.
"""
class LogSearchRecord(abc_logging_records.LogSearchRecord, osid_records.OsidRecord):
"""A record for a ``LogSearch``.
The methods specified by the record type are available through the
underlying object.
"""
| mit | 6,620,585,658,661,614,000 | 26.574713 | 94 | 0.734473 | false |
autopower/thermeq3 | obsolete/lib/dummy.py | 1 | 1621 | import thermeq3
import datetime
def add_dummy(status):
"""
:param status: is window open?
:return: nothing
"""
# valves = {valve_adr: [valve_pos, valve_temp, valve_curtemp, valve_name]}
# rooms = {id : [room_name, room_address, is_win_open, curr_temp, average valve position]}
# devices = {addr: [type, serial, name, room, OW, OW_time, status, info, temp offset]}
thermeq3.t3.eq3.rooms.update({"99": ["Dummy room", "DeadBeefValve", False, 22.0, 22]})
thermeq3.t3.eq3.devices.update({"DeadBeefWindow": [4, "IHADBW", "Dummy window", 99, 0,
datetime.datetime(2016, 01, 01, 12, 00, 00), 18, 16, 7]})
thermeq3.t3.eq3.devices.update({"DeadBeefValve": [1, "IHADBV", "Dummy valve", 99, 0,
datetime.datetime(2016, 01, 01, 12, 00, 00), 18, 56, 7]})
thermeq3.t3.eq3.valves.update({"DeadBeefValve": [20, 22.0, 22.0, "Dummy valve"]})
# TBI open/closed window
if status:
thermeq3.t3.eq3.devices["DeadBeefWindow"][4] = 2
thermeq3.t3.eq3.devices["DeadBeefWindow"][5] = \
datetime.datetime.now() - \
datetime.timedelta(seconds=((thermeq3.t3.eq3.ignore_time + 10) * 60))
thermeq3.t3.eq3.rooms["99"][2] = True
else:
thermeq3.t3.eq3.devices["DeadBeefWindow"][4] = 0
thermeq3.t3.eq3.rooms["99"][2] = False
def remove_dummy():
del thermeq3.t3.eq3.rooms["99"]
del thermeq3.t3.eq3.valves["DeadBeefValve"]
del thermeq3.t3.eq3.devices["DeadBeefWindow"]
del thermeq3.t3.eq3.devices["DeadBeefValve"]
| gpl-3.0 | 619,798,513,470,130,000 | 45.314286 | 112 | 0.592844 | false |
colloquium/spacewalk | client/solaris/smartpm/smart/channels/rpm_md.py | 1 | 5464 | #
# Copyright (c) 2004 Conectiva, Inc.
#
# Written by Gustavo Niemeyer <[email protected]>
#
# This file is part of Smart Package Manager.
#
# Smart Package Manager is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as published
# by the Free Software Foundation; either version 2 of the License, or (at
# your option) any later version.
#
# Smart Package Manager is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Smart Package Manager; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
from smart.backends.rpm.metadata import RPMMetaDataLoader
from smart.util.filetools import getFileDigest
from smart.util.elementtree import ElementTree
from smart.const import SUCCEEDED, FAILED, NEVER, ALWAYS
from smart.channel import PackageChannel
from smart import *
import posixpath
NS = "{http://linux.duke.edu/metadata/repo}"
DATA = NS+"data"
LOCATION = NS+"location"
CHECKSUM = NS+"checksum"
OPENCHECKSUM = NS+"open-checksum"
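# For orientation, an illustrative sketch (not taken from this module) of the
# repodata/repomd.xml file parsed below; roughly:
#
#   <repomd xmlns="http://linux.duke.edu/metadata/repo">
#     <data type="primary">
#       <location href="repodata/primary.xml.gz"/>
#       <checksum type="sha">...</checksum>
#       <open-checksum type="sha">...</open-checksum>
#     </data>
#     <data type="filelists">...</data>
#   </repomd>
#
# fetch() walks the <data> nodes, collects the location and checksums per type,
# then downloads the "primary" and "filelists" entries it found.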
class RPMMetaDataChannel(PackageChannel):
def __init__(self, baseurl, *args):
super(RPMMetaDataChannel, self).__init__(*args)
self._baseurl = baseurl
def getCacheCompareURLs(self):
return [posixpath.join(self._baseurl, "repodata/repomd.xml")]
def getFetchSteps(self):
return 3
def fetch(self, fetcher, progress):
fetcher.reset()
repomd = posixpath.join(self._baseurl, "repodata/repomd.xml")
item = fetcher.enqueue(repomd)
fetcher.run(progress=progress)
if item.getStatus() is FAILED:
progress.add(self.getFetchSteps()-1)
if fetcher.getCaching() is NEVER:
lines = [_("Failed acquiring release file for '%s':") % self,
u"%s: %s" % (item.getURL(), item.getFailedReason())]
raise Error, "\n".join(lines)
return False
digest = getFileDigest(item.getTargetPath())
if digest == self._digest:
progress.add(1)
return True
self.removeLoaders()
info = {}
root = ElementTree.parse(item.getTargetPath()).getroot()
for node in root.getchildren():
if node.tag != DATA:
continue
type = node.get("type")
info[type] = {}
for subnode in node.getchildren():
if subnode.tag == LOCATION:
info[type]["url"] = \
posixpath.join(self._baseurl, subnode.get("href"))
if subnode.tag == CHECKSUM:
info[type][subnode.get("type")] = subnode.text
if subnode.tag == OPENCHECKSUM:
info[type]["uncomp_"+subnode.get("type")] = \
subnode.text
if "primary" not in info:
raise Error, _("Primary information not found in repository "
"metadata for '%s'") % self
fetcher.reset()
item = fetcher.enqueue(info["primary"]["url"],
md5=info["primary"].get("md5"),
uncomp_md5=info["primary"].get("uncomp_md5"),
sha=info["primary"].get("sha"),
uncomp_sha=info["primary"].get("uncomp_sha"),
uncomp=True)
flitem = fetcher.enqueue(info["filelists"]["url"],
md5=info["filelists"].get("md5"),
uncomp_md5=info["filelists"].get("uncomp_md5"),
sha=info["filelists"].get("sha"),
uncomp_sha=info["filelists"].get("uncomp_sha"),
uncomp=True)
fetcher.run(progress=progress)
if item.getStatus() == SUCCEEDED and flitem.getStatus() == SUCCEEDED:
localpath = item.getTargetPath()
filelistspath = flitem.getTargetPath()
loader = RPMMetaDataLoader(localpath, filelistspath,
self._baseurl)
loader.setChannel(self)
self._loaders.append(loader)
elif (item.getStatus() == SUCCEEDED and
flitem.getStatus() == FAILED and
fetcher.getCaching() is ALWAYS):
iface.warning(_("You must fetch channel information to "
"acquire needed filelists."))
return False
elif fetcher.getCaching() is NEVER:
lines = [_("Failed acquiring information for '%s':") % self,
u"%s: %s" % (item.getURL(), item.getFailedReason())]
raise Error, "\n".join(lines)
else:
return False
self._digest = digest
return True
def create(alias, data):
return RPMMetaDataChannel(data["baseurl"],
data["type"],
alias,
data["name"],
data["manual"],
data["removable"],
data["priority"])
# vim:ts=4:sw=4:et
| gpl-2.0 | -5,483,863,477,960,673,000 | 38.594203 | 80 | 0.554539 | false |
DTU-ELMA/teaching-games | pay_as_bid/python/init.py | 1 | 8764 | import random, csv
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
from itertools import chain,cycle,islice
def roundrobin(*iterables):
"roundrobin('ABC', 'D', 'EF') --> A D E B F C"
# Recipe credited to George Sakkis
pending = len(iterables)
nexts = cycle(iter(it).next for it in iterables)
while pending:
try:
for next in nexts:
yield next()
except StopIteration:
pending -= 1
nexts = cycle(islice(nexts, pending))
def steppify(x,y):
    """Convert bin edges x and values y into staircase plot coordinates,
    e.g. steppify([1, 2], [5, 7]) -> ([0, 1, 1, 2, 2], [5, 5, 7, 7, 7])."""
    sx = roundrobin(chain([0],x),x)
    sy = roundrobin(y,chain(y,[y[-1]]))
    return list(sx), list(sy)
class Market:
def __init__(self,bidfile = '../php/bids.txt'):
self.players = {}
self._playerlist = set()
self.bidfile = bidfile
def update(self):
self.load_latest_bids()
self.plot()
def load_latest_bids(self):
for ID,name,bid in self.readfile():
if ID in self._playerlist:
self.players[ID].setbid(float(bid))
self.schedule_production()
price = self.get_current_pay_as_bid_price()
for p in self.players.itervalues():
p.push_bid_and_profit(price)
self.papricelist.append(price)
self.write_stats_file()
def load_first_bids(self):
for ID,name,bid in self.readfile():
self.players[ID] = Player(ID,name)
self.players[ID].setbid(float(bid))
self._playerlist.add(ID)
self.nplayers = len(self._playerlist)
# Set demand so there is a 10% chance of using the large power plant
self.demand = 10*self.nplayers - 5*1.28*0.8165*np.sqrt(self.nplayers)
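        # Rough sanity check of the line above (my reading, not the authors'
        # comment): each player offers randint(1,3)*5 MWh, i.e. mean 10 and
        # standard deviation 5*sqrt(2/3) ~= 5*0.8165 per player, so total
        # capacity is ~ Normal(10*n, (5*0.8165)^2 * n). Setting demand at the
        # mean minus 1.28 total standard deviations leaves roughly a 10% chance
        # that capacity falls short and the 100 $/MWh fallback sets the price.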
self.schedule_production()
curprice = self.get_current_pay_as_bid_price()
for p in self.players.itervalues():
p.push_bid_and_profit(curprice)
self.papricelist = [curprice]
self.write_stats_file()
def readfile(self):
return csv.reader(open(self.bidfile))
def schedule_production(self):
x = 0.0
pids = {pid:self.players[pid].curbid for pid in self._playerlist}
pids = sorted(pids.keys(), key=pids.get)
for pid in pids:
x+= self.players[pid].curprod
if x < self.demand:
self.players[pid].schedprod = self.players[pid].curprod
else:
self.players[pid].schedprod = max(0.0,self.demand + self.players[pid].curprod - x)
def get_current_pay_as_bid_price(self):
x = self.demand
pids = {pid:self.players[pid].curbid for pid in self._playerlist}
pids = sorted(pids.keys(), key=pids.get)
for pid in pids:
x -= self.players[pid].curprod
if x < 0:
return self.players[pid].curbid
return 100.00
def get_current_mc_price(self):
x = self.demand
pids = {pid:self.players[pid].curbid for pid in self._playerlist}
pids = sorted(pids.keys(), key=pids.get)
for pid in pids:
x-= self.players[pid].curprod
if x < 0:
return self.players[pid].mc
return 100.00
def plot(self):
plt.ion()
plt.figure(1, figsize=(8,5), dpi=100)
plt.subplot(121)
plt.cla()
self.plot_bid_curve()
plt.subplot(122)
plt.cla()
self.plot_profits()
plt.tight_layout()
plt.savefig('../pic/out.png')
plt.figure(2, figsize=(8,5), dpi=100)
plt.subplot(121)
plt.cla()
self.plot_bid_curve()
plt.subplot(122)
plt.cla()
self.plot_profits()
plt.tight_layout()
def plot_bid_curve(self):
pids = {pid:self.players[pid].curbid for pid in self._playerlist}
pids = sorted(pids.keys(), key=pids.get)
ymc = [self.players[pid].mc for pid in pids]+[100]
ybid = [self.players[pid].curbid for pid in pids]+[100]
x = np.cumsum([self.players[pid].curprod for pid in pids]+[self.demand])
sx,symc = steppify(x,ymc)
sx,sybid = steppify(x,ybid)
tmp = [(xx,yy,zz) for xx,yy,zz in zip(sx,sybid,symc) if xx < self.demand]
tmp.append((self.demand,tmp[-1][1],tmp[-1][2]))
sxless,sybidless,symcless = zip(*tmp)
plt.fill_between(sxless,symcless,sybidless,color = 'g',alpha=0.3)
plt.plot(sx,symc,lw=3,c='k')
plt.plot(sx,sybid,lw=3,c='k')
plt.axvline(self.demand,lw=3,ls='--',c='k')
        plt.axhline(sybidless[-1],lw=3,ls=':',c='k')  # ':' is the dotted style; '..' is not a valid matplotlib linestyle
plt.title('Final price: {:.02f}'.format(sybidless[-1]))
plt.xlabel('Amount [MWh]')
plt.ylabel('Price [$/MWh]')
def plot_mc_curve(self):
pids = {pid:self.players[pid].mc for pid in self._playerlist}
pids = sorted(pids.keys(), key=pids.get)
ymc = [self.players[pid].mc for pid in pids]+[100]
ybid = [self.players[pid].curbid for pid in pids]+[100]
x = np.cumsum([self.players[pid].curprod for pid in pids]+[self.demand])
sx,symc = steppify(x,ymc)
sx,sybid = steppify(x,ybid)
tmp = [(xx,yy,zz) for xx,yy,zz in zip(sx,sybid,symc) if xx < self.demand]
tmp.append((self.demand,tmp[-1][1],tmp[-1][2]))
sxless,sybidless,symcless = zip(*tmp)
plt.fill_between(sxless,symcless,symcless[-1],color = 'g',alpha=0.3)
plt.plot(sx,symc,lw=3,c='k')
plt.plot(sx,sybid,lw=3,c='k')
plt.axvline(self.demand,lw=3,ls='--',c='k')
plt.axhline(sybidless[-1],lw=3,ls=':',c='k')
plt.title('Final price: {:.02f}'.format(symcless[-1]))
def plot_profits(self):
bestprofit = -100.0
for p in self.players.itervalues():
if sum(p.pabprofitlist) > bestprofit:
bestprofit = sum(p.pabprofitlist)
bestname = p.name
plt.plot(np.cumsum(p.pabprofitlist),c='k',marker='.')
# plt.plot(np.cumsum(p.mcprofitlist),c='r',marker='.')
plt.title('Current leader: {0} \n with a profit of {1:.01f}'.format(bestname, bestprofit))
plt.xlabel('Round number')
plt.ylabel('Profit [$]')
def write_stats_file(self):
outArr = []
for pid,p in self.players.iteritems():
outArr.append(map(float,[p.ID,p.curbid,p.curprod,p.schedprod,sum(p.pabprofitlist)]))
np.savetxt('../php/stats.txt',outArr,fmt='%d,%.02f,%.02f,%.02f,%.02f')
def get_pandas_dataframe(self):
df = pd.DataFrame()
for pid, p in self.players.iteritems():
df = df.append(pd.DataFrame({
"player_ID": [pid for _ in p.bidlist],
"round": [i for i,_ in enumerate(p.bidlist)],
"pab_profit": [v for v in p.pabprofitlist],
"up_profit": [v for v in p.mcprofitlist],
"scheduled": [v for v in p.prodlist],
"potential": [v for v in p.potprodlist],
"price": [v for v in p.pricelist]
}), ignore_index=True)
df['cumulative_profit'] = (df.pab_profit - df.up_profit)
df['cumulative_profit'] = df.groupby('player_ID')['cumulative_profit'].cumsum()
self.df = df
return df
def plot_pandas(self):
try:
df = self.df
except AttributeError:
df = self.get_pandas_dataframe()
plt.figure(3, figsize=(8,5), dpi=100)
ax3 = plt.axes()
df.groupby('player_ID').sum().plot(kind='scatter', x='potential', y='pab_profit', ax=ax3)
plt.ylabel('Pay-as-bid profit')
plt.figure(4, figsize=(8,5), dpi=100)
ax4 = plt.axes()
gb = df.groupby('player_ID')
for id, g in gb:
g.plot(x='round', y='cumulative_profit', marker='.', ax=ax4)
plt.xlabel('Round')
plt.ylabel('PAB Profit - UP Profit')
class Player:
def __init__(self, ID = -1,name=''):
self.ID = ID
self.name = name
# self.mc = round((int(ID) * 10.0)/30000 + 5,2)
self.mc = 0
self.bidlist = []
self.pabprofitlist = []
self.mcprofitlist = []
self.prodlist = []
self.potprodlist = []
self.pricelist = []
self.totalprod = 0
def setbid(self, bid):
self.curbid = bid
self.curprod = random.randint(1,3)*5
self.schedprod = 0.0
def push_bid_and_profit(self,price = 0.0):
self.bidlist.append(self.curbid)
self.pabprofitlist.append((self.curbid-self.mc)*self.schedprod)
self.mcprofitlist.append((price-self.mc)*self.schedprod)
self.totalprod += self.schedprod
self.prodlist.append(self.schedprod)
self.potprodlist.append(self.curprod)
self.pricelist.append(price)
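    # Worked example of the two profit lines above (illustrative numbers only):
    # with mc = 0, a bid of 40 $/MWh, 10 MWh scheduled and a clearing price of
    # 50 $/MWh, the pay-as-bid profit recorded is (40 - 0) * 10 = 400 while the
    # uniform-price profit would be (50 - 0) * 10 = 500.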
| mit | -494,952,331,219,167,040 | 36.775862 | 98 | 0.5623 | false |
chiamingyen/PythonCAD_py3 | Interface/Preview/arc.py | 1 | 3347 | #!/usr/bin/env python
#
# Copyright (c) 2010 Matteo Boscolo
#
# This file is part of PythonCAD.
#
# PythonCAD is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# PythonCAD is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with PythonCAD; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# SegmentPreview object
#
import math
from PyQt5 import QtCore
from Interface.Preview.base import PreviewBase
from Interface.Entity.arc import Arc
from Kernel.entity import Point
from Kernel.exception import *
from Kernel.GeoEntity.point import Point as GeoPoint
from Kernel.GeoUtil.geolib import Vector
#TODO+: find a good way to retrieve the geometry stuff from an item in Interface.Entity.arc ..
#extend it to all the preview entities
class PreviewArc(PreviewBase):
def __init__(self,command):
super(PreviewArc, self).__init__(command)
@property
def canDraw(self):
if self.value[0]!=None:
self.xc = self.value[0].x()
self.yc = self.value[0].y()
self.h = self.value[1]*2
self.xc=self.xc-(self.h/2.0)
self.yc=self.yc-(self.h/2.0)
self.startAngle = (self.value[2]*180/math.pi)*16
self.spanAngle = (self.value[3]*180/math.pi)*16
return True
return False
def drawGeometry(self, painter,option,widget):
"""
Overloading of the paint method
"""
if self.canDraw:
Arc.__dict__['drawGeometry'](self, painter,option,widget)
def drawShape(self, painterPath):
"""
overloading of the shape method
"""
if self.canDraw:
Arc.__dict__['drawShape'](self, painterPath)
def updatePreview(self, position, distance, kernelCommand):
"""
update the data at the preview item
"""
self.prepareGeometryChange() #qtCommand for update the scene
for i in range(0, len(kernelCommand.value)):
self.value[i]=self.revertToQTObject(kernelCommand.value[i])
# Assing Command Values
index=kernelCommand.valueIndex
try:
raise kernelCommand.exception[index](None)
except(ExcPoint):
self.value[index]=self.revertToQTObject(position)
except(ExcLenght, ExcInt):
if not distance or distance !=None:
self.value[index]=distance
except(ExcAngle):
p1 = kernelCommand.value[0]
p2 = GeoPoint(position.x, position.y)
ang=Vector(p1, p2).absAng
if index==3:
ang=ang-self.value[2]
self.value[index]=ang
except:
print("updatePreview: Exception not managed")
return
| gpl-2.0 | 1,902,915,272,490,773,000 | 34.606383 | 92 | 0.618166 | false |
trevor/calendarserver | calendarserver/webadmin/eventsource.py | 1 | 7812 | # -*- test-case-name: calendarserver.webadmin.test.test_principals -*-
##
# Copyright (c) 2014 Apple Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##
from __future__ import print_function
"""
Calendar Server principal management web UI.
"""
__all__ = [
"textAsEvent",
"EventSourceResource",
]
from collections import deque
from zope.interface import implementer, Interface
from twistedcaldav.simpleresource import SimpleResource
from twisted.internet.defer import Deferred, succeed
from txweb2.stream import IByteStream, fallbackSplit
from txweb2.http_headers import MimeType
from txweb2.http import Response
def textAsEvent(text, eventID=None, eventClass=None, eventRetry=None):
"""
Format some text as an HTML5 EventSource event. Since the EventSource data
format is text-oriented, this function expects L{unicode}, not L{bytes};
binary data should be encoded as text if it is to be used in an EventSource
stream.
UTF-8 encoded L{bytes} are returned because
http://www.w3.org/TR/eventsource/ states that the only allowed encoding
for C{text/event-stream} is UTF-8.
@param text: The text (ie. the message) to send in the event.
@type text: L{unicode}
    @param eventID: A unique identifier for the event.
@type eventID: L{unicode}
@param eventClass: A class name (ie. a categorization) for the event.
@type eventClass: L{unicode}
@param eventRetry: The retry interval (in milliseconds) for the client to
wait before reconnecting if it gets disconnected.
@type eventRetry: L{int}
@return: An HTML5 EventSource event as text.
@rtype: UTF-8 encoded L{bytes}
"""
if text is None:
raise TypeError("text may not be None")
event = []
if eventID is not None:
event.append(u"id: {0}".format(eventID))
if eventClass is not None:
event.append(u"event: {0}".format(eventClass))
if eventRetry is not None:
event.append(u"retry: {0:d}".format(eventRetry))
event.extend(
u"data: {0}".format(l) for l in text.split("\n")
)
return (u"\n".join(event) + u"\n\n").encode("utf-8")
class IEventDecoder(Interface):
"""
An object that can be used to extract data from an application-specific
event object for encoding into an EventSource data stream.
"""
def idForEvent(event):
"""
        @return: A unique identifier for the given event.
@rtype: L{unicode}
"""
def classForEvent(event):
"""
@return: A class name (ie. a categorization) for the event.
@rtype: L{unicode}
"""
def textForEvent(event):
"""
@return: The text (ie. the message) to send in the event.
@rtype: L{unicode}
"""
def retryForEvent(event):
"""
@return: The retry interval (in milliseconds) for the client to wait
before reconnecting if it gets disconnected.
@rtype: L{int}
"""
class EventSourceResource(SimpleResource):
"""
Resource that vends HTML5 EventSource events.
Events are stored in a ring buffer and streamed to clients.
"""
addSlash = False
def __init__(self, eventDecoder, principalCollections, bufferSize=400):
"""
@param eventDecoder: An object that can be used to extract data from
an event for encoding into an EventSource data stream.
@type eventDecoder: L{IEventDecoder}
@param bufferSize: The maximum number of events to keep in the ring
buffer.
@type bufferSize: L{int}
"""
super(EventSourceResource, self).__init__(principalCollections, isdir=False)
self._eventDecoder = eventDecoder
self._events = deque(maxlen=bufferSize)
self._streams = set()
def addEvents(self, events):
self._events.extend(events)
# Notify outbound streams that there is new data to vend
for stream in self._streams:
stream.didAddEvents()
def render(self, request):
lastID = request.headers.getRawHeaders(u"last-event-id")
response = Response()
response.stream = EventStream(self._eventDecoder, self._events, lastID)
response.headers.setHeader(
b"content-type", MimeType.fromString(b"text/event-stream")
)
# Keep track of the event streams
def cleanupFilter(_request, _response):
self._streams.remove(response.stream)
return _response
request.addResponseFilter(cleanupFilter)
self._streams.add(response.stream)
return response
@implementer(IByteStream)
class EventStream(object):
"""
L{IByteStream} that streams out HTML5 EventSource events.
"""
length = None
def __init__(self, eventDecoder, events, lastID):
"""
@param eventDecoder: An object that can be used to extract data from
an event for encoding into an EventSource data stream.
@type eventDecoder: L{IEventDecoder}
@param events: Application-specific event objects.
@type events: sequence of L{object}
@param lastID: The identifier for the last event that was vended from
C{events}. Vending will resume starting from the following event.
@type lastID: L{int}
"""
super(EventStream, self).__init__()
self._eventDecoder = eventDecoder
self._events = events
self._lastID = lastID
self._closed = False
self._deferredRead = None
def didAddEvents(self):
d = self._deferredRead
if d is not None:
d.addCallback(lambda _: self.read())
d.callback(None)
def read(self):
if self._closed:
return succeed(None)
lastID = self._lastID
eventID = None
idForEvent = self._eventDecoder.idForEvent
classForEvent = self._eventDecoder.classForEvent
textForEvent = self._eventDecoder.textForEvent
retryForEvent = self._eventDecoder.retryForEvent
for event in self._events:
eventID = idForEvent(event)
# If lastID is not None, skip messages up to and including the one
# referenced by lastID.
if lastID is not None:
if eventID == lastID:
eventID = None
lastID = None
continue
eventClass = classForEvent(event)
eventText = textForEvent(event)
eventRetry = retryForEvent(event)
self._lastID = eventID
return succeed(
textAsEvent(eventText, eventID, eventClass, eventRetry)
)
if eventID is not None:
# We just scanned all the messages, and none are the last one the
# client saw.
self._lastID = None
return succeed(b"")
# # This causes the client to poll, which is undesirable, but the
# # deferred below doesn't seem to work in real use...
# return succeed(None)
d = Deferred()
self._deferredRead = d
return d
def split(self, point):
return fallbackSplit(self, point)
def close(self):
self._closed = True
| apache-2.0 | -369,959,270,512,164,000 | 27.407273 | 84 | 0.634281 | false |
Honzin/ccs | tests/testAdapter/testBtcc/testOrder.py | 1 | 1854 | import unittest
import ccs
import time
####################################################################################################################
# BITFINEX #
####################################################################################################################
class Valid(unittest.TestCase):
def setUp(self):
self.stock = ccs.constants.BTCC
self.base = ccs.constants.BTC
self.quote = ccs.constants.CNY
self.orderbook = ccs.orderbook(self.stock, self.base, self.quote)
self.ordersA = self.orderbook.asks()
self.orderA = self.ordersA[0]
self.ordersB = self.orderbook.bids()
self.orderB = self.ordersB[0]
self.m = ccs.btcc.public.response
# time.sleep(3)
def testPrice(self):
self.assertIsInstance(self.orderA.price(), float)
self.assertIsInstance(self.orderB.price(), float)
def testAmount(self):
self.assertIsInstance(self.orderA.amount(), float)
self.assertIsInstance(self.orderB.amount(), float)
def testStock(self):
self.assertEqual(self.orderA.stock(), self.stock)
self.assertEqual(self.orderB.stock(), self.stock)
def testMethod(self):
self.assertEqual(self.orderA.method(), ccs.constants.ORDER)
self.assertEqual(self.orderB.method(), ccs.constants.ORDER)
def testUsymbol(self):
self.assertEqual(self.orderA.usymbol(), self.base + ":" + self.quote)
self.assertEqual(self.orderB.usymbol(), self.base + ":" + self.quote)
def testOsymbol(self):
pass
def testData(self):
pass
def testRaw(self):
pass
def testStr(self):
pass
if __name__ == '__main__':
unittest.main()
| agpl-3.0 | 2,400,904,288,709,812,700 | 27.96875 | 116 | 0.515102 | false |
lagner/academ-weather | bootstrap/utils.py | 1 | 2005 | import os
import subprocess
import logging as log
from shutil import copy2
from contextlib import contextmanager
@contextmanager
def pushd(newDir):
previousDir = os.getcwd()
os.chdir(newDir)
yield
os.chdir(previousDir)
def static_vars(**kwargs):
    """Attach the given keyword arguments to the decorated function as
    attributes, e.g. @static_vars(counter=0) lets it keep state in
    func.counter between calls."""
    def decorate(func):
        for k in kwargs:
            setattr(func, k, kwargs[k])
        return func
    return decorate
def run(cmd, check_code=False):
shell = isinstance(cmd, str)
try:
log.debug('run: ' + (cmd if shell else ' '.join(cmd)))
output = subprocess.check_output(
cmd,
shell=shell,
universal_newlines=True
)
return 0, output
except subprocess.CalledProcessError as ex:
log.debug("called proces exception: " + str(ex))
if check_code:
raise
else:
return ex.returncode, ex.output
def sync_file(source, target):
if os.path.exists(target):
s = os.path.getmtime(source)
t = os.path.getmtime(target)
if t >= s:
return
target_dir = os.path.dirname(target)
if not os.path.exists(target_dir):
os.makedirs(target_dir)
copy2(source, target)
def sync_dir(source, target, remove_extra=False):
join = os.path.join
root, dirs, files = next(os.walk(source))
for d in dirs:
sync_dir(join(source, d), join(target, d), remove_extra=remove_extra)
for f in files:
sync_file(join(source, f), join(target, f))
if remove_extra:
*_, tfiles = next(os.walk(target))
for extra in (set(tfiles) - set(files)):
            os.remove(join(target, extra))  # 'extra' is a bare filename; join with target so the right file is removed
# FIXME: remove extra dirs
def fs_walk(path):
for root, dirs, files in os.walk(path):
for filename in files:
yield os.path.join(root, filename)
def filenames_filter(files, extensions):
for filename in files:
basename, ext = os.path.splitext(filename)
if ext in extensions:
yield filename
| mit | 6,562,177,993,364,735,000 | 23.156627 | 77 | 0.6 | false |
qedsoftware/commcare-hq | corehq/ex-submodules/casexml/apps/case/tests/test_close_extension_chain.py | 1 | 9142 | from django.test import TestCase
from casexml.apps.case.mock import CaseFactory, CaseIndex, CaseStructure
from corehq.form_processor.interfaces.dbaccessors import CaseAccessors
from casexml.apps.case.xform import get_extensions_to_close
from casexml.apps.phone.tests.utils import create_restore_user
from corehq.apps.domain.models import Domain
from corehq.form_processor.tests.utils import FormProcessorTestUtils, run_with_all_backends
from corehq.util.test_utils import flag_enabled
from corehq.apps.users.dbaccessors.all_commcare_users import delete_all_users
class AutoCloseExtensionsTest(TestCase):
def setUp(self):
super(AutoCloseExtensionsTest, self).setUp()
FormProcessorTestUtils.delete_all_cases()
FormProcessorTestUtils.delete_all_xforms()
delete_all_users()
self.domain = "domain"
self.project = Domain(name=self.domain)
self.user = create_restore_user(self.domain, username='name', password="changeme")
self.factory = CaseFactory(domain=self.domain)
self.extension_ids = ['1', '2', '3']
self.host_id = 'host'
def tearDown(self):
FormProcessorTestUtils.delete_all_cases()
FormProcessorTestUtils.delete_all_xforms()
delete_all_users()
super(AutoCloseExtensionsTest, self).tearDown()
def _create_extension_chain(self):
host = CaseStructure(case_id=self.host_id)
extension = CaseStructure(
case_id=self.extension_ids[0],
indices=[CaseIndex(
related_structure=host,
relationship="extension",
)],
)
extension_2 = CaseStructure(
case_id=self.extension_ids[1],
indices=[CaseIndex(
related_structure=extension,
relationship="extension",
)],
)
extension_3 = CaseStructure(
case_id=self.extension_ids[2],
indices=[CaseIndex(
related_structure=extension_2,
relationship="extension",
)],
)
return self.factory.create_or_update_cases([extension_3])
def _create_extension_loop(self):
extension_3 = CaseStructure(case_id=self.extension_ids[2])
host = CaseStructure(
case_id=self.host_id,
indices=[CaseIndex(
related_structure=extension_3,
relationship="extension",
)],
)
return self.factory.create_or_update_cases([host])
def _create_host_is_subcase_chain(self):
parent = CaseStructure(case_id='parent')
host = CaseStructure(
case_id=self.host_id,
indices=[CaseIndex(
related_structure=parent,
relationship="child",
)],
)
extension = CaseStructure(
case_id=self.extension_ids[0],
indices=[CaseIndex(
related_structure=host,
relationship="extension",
)],
)
extension_2 = CaseStructure(
case_id=self.extension_ids[1],
indices=[CaseIndex(
related_structure=extension,
relationship="extension",
)],
)
return self.factory.create_or_update_cases([extension_2])
@run_with_all_backends
def test_get_extension_chain_simple(self):
host = CaseStructure(case_id=self.host_id)
extension = CaseStructure(
case_id=self.extension_ids[0],
indices=[CaseIndex(
related_structure=host,
relationship="extension",
)],
)
self.factory.create_or_update_cases([extension])
self.assertEqual(
set(self.extension_ids[0]),
CaseAccessors(self.domain).get_extension_chain([self.host_id])
)
@run_with_all_backends
def test_get_extension_chain_multiple(self):
created_cases = self._create_extension_chain()
self.assertEqual(
set(self.extension_ids),
CaseAccessors(self.domain).get_extension_chain([created_cases[-1].case_id])
)
@run_with_all_backends
def test_get_extension_chain_circular_ref(self):
"""If there is a circular reference, this should not hang forever
"""
self._create_extension_chain()
self._create_extension_loop()
self.assertEqual(
set([self.host_id] + self.extension_ids),
CaseAccessors(self.domain).get_extension_chain([self.extension_ids[2]])
)
@flag_enabled('EXTENSION_CASES_SYNC_ENABLED')
@run_with_all_backends
def test_get_extension_to_close(self):
"""should return empty if case is not a host, otherwise should return full chain"""
created_cases = self._create_extension_chain()
# host open, should be empty
no_cases = get_extensions_to_close(created_cases[-1], self.domain)
self.assertEqual(set(), no_cases)
created_cases[-1] = self.factory.create_or_update_case(CaseStructure(
case_id=self.host_id,
attrs={'close': True}
))[0]
# host closed, should get full chain
full_chain = get_extensions_to_close(created_cases[-1], self.domain)
self.assertEqual(set(self.extension_ids), full_chain)
# extension (not a host), should be empty
no_cases = get_extensions_to_close(created_cases[2], self.domain)
self.assertEqual(set(), no_cases)
@flag_enabled('EXTENSION_CASES_SYNC_ENABLED')
@run_with_all_backends
def test_get_extension_to_close_child_host(self):
"""should still return extension chain if outgoing index is a child index"""
created_cases = self._create_host_is_subcase_chain()
# host open, should be empty
no_cases = get_extensions_to_close(created_cases[-1], self.domain)
self.assertEqual(set(), no_cases)
# close parent, shouldn't get extensions
created_cases[-1] = self.factory.create_or_update_case(CaseStructure(
case_id='parent',
attrs={'close': True}
))[0]
no_cases = get_extensions_to_close(created_cases[-1], self.domain)
self.assertEqual(set(), no_cases)
# close host that is also a child
created_cases[-2] = self.factory.create_or_update_case(CaseStructure(
case_id=self.host_id,
attrs={'close': True}
))[0]
full_chain = get_extensions_to_close(created_cases[-2], self.domain)
self.assertEqual(set(self.extension_ids[0:2]), full_chain)
@flag_enabled('EXTENSION_CASES_SYNC_ENABLED')
@run_with_all_backends
def test_close_cases_host(self):
"""Closing a host should close all the extensions"""
self._create_extension_chain()
cases = CaseAccessors(self.domain).get_cases(self.extension_ids)
self.assertFalse(cases[0].closed)
self.assertFalse(cases[1].closed)
self.assertFalse(cases[2].closed)
self.factory.create_or_update_case(CaseStructure(
case_id=self.extension_ids[0],
attrs={'close': True}
))
cases = {
case.case_id: case.closed
for case in CaseAccessors(self.domain).get_cases([self.host_id] + self.extension_ids)
}
self.assertFalse(cases[self.host_id])
self.assertTrue(cases[self.extension_ids[0]])
self.assertFalse(cases[self.extension_ids[1]])
self.assertFalse(cases[self.extension_ids[2]])
self.factory.create_or_update_case(CaseStructure(
case_id=self.host_id,
attrs={'close': True}
))
cases = {
case.case_id: case.closed
for case in CaseAccessors(self.domain).get_cases([self.host_id] + self.extension_ids)
}
self.assertTrue(cases[self.host_id])
self.assertTrue(cases[self.extension_ids[0]])
self.assertTrue(cases[self.extension_ids[1]])
self.assertTrue(cases[self.extension_ids[2]])
@flag_enabled('EXTENSION_CASES_SYNC_ENABLED')
@run_with_all_backends
def test_close_cases_child(self):
"""Closing a host that is also a child should close all the extensions"""
self._create_host_is_subcase_chain()
cases = {
case.case_id: case.closed
for case in CaseAccessors(self.domain).get_cases([self.host_id] + self.extension_ids)
}
self.assertFalse(cases[self.host_id])
self.assertFalse(cases[self.extension_ids[0]])
self.assertFalse(cases[self.extension_ids[1]])
self.factory.create_or_update_case(CaseStructure(
case_id=self.host_id,
attrs={'close': True}
))
cases = {
case.case_id: case.closed
for case in CaseAccessors(self.domain).get_cases(['parent', self.host_id] + self.extension_ids)
}
self.assertFalse(cases['parent'])
self.assertTrue(cases[self.host_id])
self.assertTrue(cases[self.extension_ids[0]])
self.assertTrue(cases[self.extension_ids[1]])
| bsd-3-clause | 5,413,235,155,436,957,000 | 37.737288 | 107 | 0.61037 | false |
changhoonhahn/centralMS | centralms/tests/test_downsampling.py | 1 | 22502 | '''
Test to make sure that the downsampling of the catalog
produces (more or less) the same results as the full catalog
'''
import env
import numpy as np
import catalog as Cat
import evolver as Evol
import observables as Obvs
import util as UT
import matplotlib.pyplot as plt
import corner as DFM
from ChangTools.plotting import prettyplot
from ChangTools.plotting import prettycolors
def test_Catalog_Downsample(test_type, nsnap0, downsampled='14'):
''' Test the downsampling of the catalog
========================================
Everything looks good
========================================
'''
subhist = Cat.PureCentralHistory(nsnap_ancestor=nsnap0)
subcat = subhist.Read() # full sample
subcat_down = subhist.Read(downsampled='14') # downsampled
snaps = []
for ii in range(1, nsnap0+1):
if (ii-1)%5 == 0:
snaps.append(ii)
snaps.append(nsnap0)
pretty_colors = prettycolors()
fig = plt.figure(1)
sub = fig.add_subplot(111)
if test_type == 'hmf': # halo mass function
for i in snaps:
if i == 1:
m_tag = 'halo.m'
else:
m_tag = 'snapshot'+str(i)+'_halo.m'
shmf = Obvs.getMF(subcat[m_tag], weights=subcat['weights'], m_arr=np.arange(10., 15.5, 0.1))
sub.plot(shmf[0], shmf[1], c=pretty_colors[i], lw=1, )
shmf = Obvs.getMF(subcat_down[m_tag], weights=subcat_down['weights'], m_arr=np.arange(10., 15.5, 0.1))
sub.plot(shmf[0], shmf[1], c=pretty_colors[i], lw=3, ls='--')
# x-axis
sub.set_xlim([10., 15.])
# y-axis
sub.set_yscale("log")
elif test_type == 'smf': # stellar mass function
for i in snaps:
if i == 1:
m_tag = 'm.star'
else:
m_tag = 'snapshot'+str(i)+'_m.star'
shmf = Obvs.getMF(subcat[m_tag], weights=subcat['weights'], m_arr=np.arange(8., 12.5, 0.1))
sub.plot(shmf[0], shmf[1], c=pretty_colors[i], lw=1)
shmf = Obvs.getMF(subcat_down[m_tag], weights=subcat_down['weights'], m_arr=np.arange(8., 12.5, 0.1))
sub.plot(shmf[0], shmf[1], c=pretty_colors[i], lw=3, ls='--')
# x-axis
sub.set_xlim([9., 12.])
# y-axis
sub.set_yscale("log")
elif test_type == 'smhmr': # stellar mass - halo mass relation
for i in snaps:
if i == 1:
hm_tag = 'm.max'
sm_tag = 'm.star'
else:
hm_tag = 'snapshot'+str(i)+'_m.max'
sm_tag = 'snapshot'+str(i)+'_m.star'
smhmr = Obvs.Smhmr()
m_mid, mu_mstar, sig_mstar, cnts = smhmr.Calculate(subcat[hm_tag], subcat[sm_tag], weights=subcat['weights'])
sub.plot(m_mid, mu_mstar - sig_mstar, c=pretty_colors[i], lw=1)
sub.plot(m_mid, mu_mstar + sig_mstar, c=pretty_colors[i], lw=1)
m_mid, mu_mstar, sig_mstar, cnts = smhmr.Calculate(subcat_down[hm_tag], subcat_down[sm_tag],
weights=subcat_down['weights'])
sub.plot(m_mid, mu_mstar - sig_mstar, c=pretty_colors[i], lw=3, ls='--')
sub.plot(m_mid, mu_mstar + sig_mstar, c=pretty_colors[i], lw=3, ls='--')
sub.set_ylim([8., 12.])
plt.show()
return None
def test_EvolverInitiate_downsample(test, nsnap, nsnap0=20, downsampled=None):
''' Tests for Initiate method in Evolver for specified nsnap snapshot.
========================================
Everything looks good
========================================
'''
if nsnap > nsnap0:
raise ValueError('nsnap has to be less than or equal to nsnap0')
if downsampled is None:
raise ValueError('the whole point of this function is to test downsampling...')
# load in Subhalo Catalog (pure centrals)
subhist = Cat.PureCentralHistory(nsnap_ancestor=nsnap0)
subcat = subhist.Read(downsampled=None) # full sample
subcat_down = subhist.Read(downsampled=downsampled) # downsampled
theta = Evol.defaultTheta('constant_offset') # load in generic theta (parameters)
eev = Evol.Evolver(subcat, theta, nsnap0=nsnap0)
eev.Initiate()
eev_down = Evol.Evolver(subcat_down, theta, nsnap0=nsnap0)
eev_down.Initiate()
if test == 'pssfr': # calculate P(SSFR)
obv_ssfr = Obvs.Ssfr()
# full sample P(ssfr)
started = np.where(subcat['nsnap_start'] == nsnap)
ssfr_mids, pssfrs = obv_ssfr.Calculate(
subcat['m.star0'][started],
subcat['sfr0'][started]-subcat['m.star0'][started],
weights=subcat['weights'][started])
x_ssfrs = obv_ssfr.ssfr_bin_edges
# down-sample P(ssfr)
started = np.where(subcat_down['nsnap_start'] == nsnap)
ssfr_mids, pssfrs_down = obv_ssfr.Calculate(
subcat_down['m.star0'][started],
subcat_down['sfr0'][started] - subcat_down['m.star0'][started],
weights=subcat_down['weights'][started])
x_ssfrs_down = obv_ssfr.ssfr_bin_edges
fig = plt.figure(figsize=(20, 5))
bkgd = fig.add_subplot(111, frameon=False)
panel_mass_bins = [[9.7, 10.1], [10.1, 10.5], [10.5, 10.9], [10.9, 11.3]]
for i_m, mass_bin in enumerate(panel_mass_bins):
sub = fig.add_subplot(1, 4, i_m+1)
# plot P(SSFR) full-sample
x_bar, y_bar = UT.bar_plot(x_ssfrs[i_m], pssfrs[i_m])
sub.plot(x_bar, y_bar, lw=2, ls='-', c='k')
# plot P(SSFR) full-sample
x_bar, y_bar = UT.bar_plot(x_ssfrs_down[i_m], pssfrs_down[i_m])
sub.plot(x_bar, y_bar, lw=3, ls='--', c='k')
# mark the SSFR of SFMS and Quiescent peak
sub.vlines(Obvs.SSFR_SFMS(0.5 * np.sum(mass_bin), UT.z_nsnap(nsnap), theta_SFMS=theta['sfms']), 0., 1.7,
color='b', linewidth=2, linestyle='-')
sub.vlines(Obvs.SSFR_Qpeak(0.5 * np.sum(mass_bin)), 0., 1.7,
color='r', linewidth=2, linestyle='-')
massbin_str = ''.join([
r'$\mathtt{log \; M_{*} = [',
str(mass_bin[0]), ',\;',
str(mass_bin[1]), ']}$'
])
sub.text(-12., 1.4, massbin_str, fontsize=20)
# x-axis
sub.set_xlim([-13., -8.])
# y-axis
sub.set_ylim([0.0, 1.7])
sub.set_yticks([0.0, 0.5, 1.0, 1.5])
if i_m == 0:
sub.set_ylabel(r'$\mathtt{P(log \; SSFR)}$', fontsize=25)
else:
sub.set_yticklabels([])
#ax = plt.gca()
#leg = sub.legend(bbox_to_anchor=(-8.5, 1.55), loc='upper right', prop={'size': 20}, borderpad=2,
# bbox_transform=ax.transData, handletextpad=0.5)
bkgd.tick_params(labelcolor='none', top='off', bottom='off', left='off', right='off')
bkgd.set_xlabel(r'$\mathtt{log \; SSFR \;[yr^{-1}]}$', fontsize=25)
plt.show()
elif test == 'fq': # calculate quiescent fraction
obv_fq = Obvs.Fq()
pretty_colors = prettycolors()
fig = plt.figure(figsize=(6,6))
sub = fig.add_subplot(111)
print 'Full Sample'
started = np.where(subcat['nsnap_start'] == nsnap)
print len(started[0]), ' galaxies'
print np.sum(subcat['weights'][started])
m_mid, fq, counts = obv_fq.Calculate(
mass=subcat['m.star0'][started],
sfr=subcat['sfr0'][started],
z=UT.z_nsnap(nsnap), weights= subcat['weights'][started], theta_SFMS=theta['sfms'], counts=True)
print 'Down Sample'
started = np.where(subcat_down['nsnap_start'] == nsnap)
print len(started[0]), ' galaxies'
        print np.sum(subcat_down['weights'][started])
m_mid_down, fq_down, counts_down = obv_fq.Calculate(
mass=subcat_down['m.star0'][started],
sfr=subcat_down['sfr0'][started],
z=UT.z_nsnap(nsnap), weights= subcat_down['weights'][started], theta_SFMS=theta['sfms'], counts=True)
cc = pretty_colors[nsnap]
#sub.scatter(m_mid, fq, c=cc, s=10)
sub.plot(m_mid, fq, c=cc, lw=2)
sub.plot(m_mid_down, fq_down, c=cc, lw=3, ls='--')
sub.plot(m_mid, obv_fq.model(m_mid, UT.z_nsnap(nsnap), lit='cosmos_tinker'), c=cc, ls=':')
#for i in range(len(m_mid)):
# sub.text(m_mid[i], 0.05+fq[i], str(counts[i]))
plt.show()
elif test == 'smf_evol': # check the SMF evolution of the SF population
fig = plt.figure(figsize=(7,7))
sub = fig.add_subplot(111)
snaps = [] # pick a handful of snapshots
for ii in range(2, nsnap0+1):
if (ii-1)%5 == 0:
snaps.append(ii)
snaps.append(nsnap0)
for n in snaps[::-1]:
# SF population at snapshot (full sample)
pop_sf = np.where(
(subcat['gclass0'] == 'star-forming') &
(subcat['nsnap_quench'] <= n) &
(subcat['weights'] > 0.)
)
smf_sf = Obvs.getMF(
subcat['snapshot'+str(n)+'_m.sham'][pop_sf],
weights=subcat['weights'][pop_sf])
sub.plot(smf_sf[0], smf_sf[1], lw=2, c='k', alpha=0.05 * (21. - n))#, label='Snapshot '+str(n))
pop_sf = np.where(
(subcat_down['gclass0'] == 'star-forming') &
(subcat_down['nsnap_quench'] <= n) &
(subcat_down['weights'] > 0.)
)
smf_sf = Obvs.getMF(
subcat_down['snapshot'+str(n)+'_m.sham'][pop_sf],
weights=subcat_down['weights'][pop_sf])
sub.plot(smf_sf[0], smf_sf[1], lw=3, ls='--', c='k', alpha=0.05 * (21. - n))#, label='Snapshot '+str(n))
# nsnap = 1 full sample
pop_sf = np.where(
(subcat['gclass'] == 'star-forming') &
(subcat['weights'] > 0.)
)
smf_sf = Obvs.getMF(
subcat['m.sham'][pop_sf],
weights=subcat['weights'][pop_sf])
sub.plot(smf_sf[0], smf_sf[1], lw=3, c='k', ls='-', label='Snapshot 1')
# nsnap = 1 down sample
pop_sf = np.where(
(subcat_down['gclass'] == 'star-forming') &
(subcat_down['weights'] > 0.)
)
smf_sf = Obvs.getMF( subcat_down['m.sham'][pop_sf], weights=subcat_down['weights'][pop_sf])
sub.plot(smf_sf[0], smf_sf[1], lw=3, c='k', ls='--')
sub.set_xlim([6., 12.])
sub.set_xlabel('Stellar Masses $(\mathcal{M}_*)$', fontsize=25)
sub.set_ylim([1e-5, 10**-1.5])
sub.set_yscale('log')
sub.set_ylabel('$\Phi$', fontsize=25)
sub.legend(loc='upper right')
plt.show()
elif test == 'sfms': # check the SFMS of the initial SFRs of the full vs down-samples
fig = plt.figure(figsize=(7,7))
sub = fig.add_subplot(111)
# SFMS of the full sample
started = np.where(subcat['nsnap_start'] == nsnap)
DFM.hist2d(subcat['m.star0'][started], subcat['sfr0'][started], weights=subcat['weights'][started],
levels=[0.68, 0.95], range=[[6., 12.], [-4., 2.]], color='#1F77B4',
plot_datapoints=False, fill_contours=False, plot_density=False, ax=sub)
# SFMS of the down sample
started = np.where(subcat_down['nsnap_start'] == nsnap)
DFM.hist2d(subcat_down['m.star0'][started], subcat_down['sfr0'][started], weights=subcat_down['weights'][started],
levels=[0.68, 0.95], range=[[6., 12.], [-4., 2.]], color='#FF7F0E',
plot_datapoints=False, fill_contours=False, plot_density=False, ax=sub)
plt.show()
return None
def test_EvolverEvolve_downsample(test, nsnap0=20, downsampled=None, sfh='constant_offset'):
''' Tests for Evolve method in Evolver
========================================
SMF, P(ssfr), SFMS look good. SMHMR gets
noisy at high masses
========================================
'''
if downsampled is None:
raise ValueError('the whole point of this function is to test downsampling...')
# load in generic theta (parameter values)
theta = Evol.defaultTheta(sfh)
# load in Subhalo Catalog (pure centrals)
subhist = Cat.PureCentralHistory(nsnap_ancestor=nsnap0)
subcat = subhist.Read(downsampled=None) # full sample
eev = Evol.Evolver(subcat, theta, nsnap0=nsnap0)
eev.Initiate()
eev.Evolve()
subcat = eev.SH_catalog
subcat_down = subhist.Read(downsampled=downsampled) # downsampled
eev_down = Evol.Evolver(subcat_down, theta, nsnap0=nsnap0)
eev_down.Initiate()
eev_down.Evolve()
subcat_down = eev_down.SH_catalog
pretty_colors = prettycolors()
if test == 'sf_smf': # stellar mass function of SF population
isSF = np.where((subcat['gclass'] == 'star-forming') & (subcat['weights'] > 0.))
isSF_down = np.where((subcat_down['gclass'] == 'star-forming') & (subcat_down['weights'] > 0.))
fig = plt.figure(figsize=(7,7))
sub = fig.add_subplot(111)
snaps = [] # pick a handful of snapshots
for ii in range(2, nsnap0+1):
if (ii-1)%5 == 0:
snaps.append(ii)
snaps.append(nsnap0)
for n in snaps[::-1]: # SMF of SF population at select snapshots
# full-sample
smf_sf = Obvs.getMF(
subcat['snapshot'+str(n)+'_m.star'][isSF],
weights=subcat['weights'][isSF])
sub.plot(smf_sf[0], smf_sf[1], lw=2, c='b', alpha=0.05 * (21. - n))
# down-sample
smf_sf = Obvs.getMF(
subcat_down['snapshot'+str(n)+'_m.star'][isSF_down],
weights=subcat_down['weights'][isSF_down])
sub.plot(smf_sf[0], smf_sf[1], lw=2, c='k', alpha=0.05 * (21. - n))
smf_sf = Obvs.getMF(subcat['m.star'][isSF], weights=subcat['weights'][isSF])
sub.plot(smf_sf[0], smf_sf[1], lw=3, c='b', ls='-', label='Integrated')
smf_sf = Obvs.getMF(subcat_down['m.star'][isSF_down], weights=subcat_down['weights'][isSF_down])
sub.plot(smf_sf[0], smf_sf[1], lw=3, c='k', ls='-')
sub.set_xlim([6., 12.])
sub.set_xlabel('Stellar Masses $(\mathcal{M}_*)$', fontsize=25)
sub.set_ylim([1e-6, 10**-1.75])
sub.set_yscale('log')
sub.set_ylabel('$\Phi$', fontsize=25)
sub.legend(loc='upper right')
plt.show()
elif test == 'smf': # stellar mass function of all galaxies
isSF = np.where(subcat['gclass'] == 'star-forming')
isnotSF = np.where(subcat['gclass'] != 'star-forming')
isSF_down = np.where(subcat_down['gclass'] == 'star-forming')
isnotSF_down = np.where(subcat_down['gclass'] != 'star-forming')
fig = plt.figure(figsize=(7,7))
sub = fig.add_subplot(111)
snaps = [] # pick a handful of snapshots
for ii in range(2, nsnap0+1):
if (ii-1)%7 == 0:
snaps.append(ii)
snaps.append(nsnap0)
for n in snaps[::-1]:
# SHAM SMF
smf = Obvs.getMF(subcat['snapshot'+str(n)+'_m.sham'])
sub.plot(smf[0], smf[1], lw=2, c='k', ls=':', alpha=0.05 * (21. - n))
# full-sample
m_all = np.concatenate([subcat['snapshot'+str(n)+'_m.star'][isSF], subcat['snapshot'+str(n)+'_m.sham'][isnotSF]])
w_all = np.concatenate([subcat['weights'][isSF], subcat['weights'][isnotSF]])
smf = Obvs.getMF(m_all, weights=w_all)
sub.plot(smf[0], smf[1], lw=2, c='b', alpha=0.05 * (21. - n))
# down-sample
m_all = np.concatenate([
subcat_down['snapshot'+str(n)+'_m.star'][isSF_down],
subcat_down['snapshot'+str(n)+'_m.sham'][isnotSF_down]])
w_all = np.concatenate([
subcat_down['weights'][isSF_down],
subcat_down['weights'][isnotSF_down]])
smf = Obvs.getMF(m_all, weights=w_all)
sub.plot(smf[0], smf[1], lw=2, c='g', alpha=0.05 * (21. - n))
# at snapshot 1
smf = Obvs.getMF(subcat['m.sham'])
sub.plot(smf[0], smf[1], lw=2, c='k', ls=':')
m_all = np.concatenate([subcat['m.star'][isSF], subcat['m.sham'][isnotSF]])
w_all = np.concatenate([subcat['weights'][isSF], subcat['weights'][isnotSF]])
smf = Obvs.getMF(m_all, weights=w_all)
sub.plot(smf[0], smf[1], lw=3, c='b', ls='-', label='Integrated')
m_all = np.concatenate([
subcat_down['m.star'][isSF_down],
subcat_down['m.sham'][isnotSF_down]])
w_all = np.concatenate([
subcat_down['weights'][isSF_down],
subcat_down['weights'][isnotSF_down]])
smf = Obvs.getMF(m_all, weights=w_all)
sub.plot(smf[0], smf[1], lw=3, c='g')
sub.set_xlim([6., 12.])
sub.set_xlabel('Stellar Masses $(\mathcal{M}_*)$', fontsize=25)
sub.set_ylim([1e-6, 10**-1.75])
sub.set_yscale('log')
sub.set_ylabel('$\Phi$', fontsize=25)
sub.legend(loc='upper right')
plt.show()
elif test == 'pssfr': # compare the full and down - sampled P(SSFR)s
obv_ssfr = Obvs.Ssfr()
# P(ssfr) at nsnap0
ssfr_bin_mids, ssfr_dists0 = obv_ssfr.Calculate(subcat['m.star0'],
subcat['sfr0'] - subcat['m.star0'],
subcat['weights'])
# full sample
ssfr_bin_mids, pssfrs = obv_ssfr.Calculate(subcat['m.star'],
subcat['sfr'] - subcat['m.star'],
subcat['weights'])
x_ssfrs = obv_ssfr.ssfr_bin_edges
# down sample
ssfr_bin_mids, pssfrs_down = obv_ssfr.Calculate(subcat_down['m.star'],
subcat_down['sfr'] - subcat_down['m.star'],
subcat_down['weights'])
x_ssfrs_down = obv_ssfr.ssfr_bin_edges
fig = plt.figure(figsize=(20, 5))
bkgd = fig.add_subplot(111, frameon=False)
panel_mass_bins = [[9.7, 10.1], [10.1, 10.5], [10.5, 10.9], [10.9, 11.3]]
for i_m, mass_bin in enumerate(panel_mass_bins):
sub = fig.add_subplot(1, 4, i_m+1)
sub.plot(ssfr_bin_mids[i_m], ssfr_dists0[i_m],
lw=3, ls='--', c='b', alpha=0.25)
xx, yy = UT.bar_plot(x_ssfrs[i_m], pssfrs[i_m])
sub.plot(xx, yy, lw=2, ls='-', c='k')
xx, yy = UT.bar_plot(x_ssfrs_down[i_m], pssfrs_down[i_m])
sub.plot(xx, yy, lw=3, ls='--', c='k')
massbin_str = ''.join([
r'$\mathtt{log \; M_{*} = [',
str(mass_bin[0]), ',\;',
str(mass_bin[1]), ']}$'
])
sub.text(-12., 1.4, massbin_str, fontsize=20)
# x-axis
sub.set_xlim([-13., -8.])
# y-axis
sub.set_ylim([0.0, 1.7])
sub.set_yticks([0.0, 0.5, 1.0, 1.5])
if i_m == 0:
sub.set_ylabel(r'$\mathtt{P(log \; SSFR)}$', fontsize=25)
else:
sub.set_yticklabels([])
ax = plt.gca()
leg = sub.legend(bbox_to_anchor=(-8.5, 1.55), loc='upper right', prop={'size': 20}, borderpad=2,
bbox_transform=ax.transData, handletextpad=0.5)
bkgd.tick_params(labelcolor='none', top='off', bottom='off', left='off', right='off')
bkgd.set_xlabel(r'$\mathtt{log \; SSFR \;[yr^{-1}]}$', fontsize=25)
plt.show()
elif test == 'smhmr': # stellar mass to halo mass relation
isSF = np.where(subcat['gclass'] == 'star-forming')
isSF_down = np.where(subcat_down['gclass'] == 'star-forming')
smhmr = Obvs.Smhmr()
fig = plt.figure()
sub = fig.add_subplot(111)
m_mid, mu_mhalo, sig_mhalo, cnts = smhmr.Calculate(subcat['m.star'][isSF], subcat['halo.m'][isSF])
sub.fill_between(m_mid, mu_mhalo - sig_mhalo, mu_mhalo + sig_mhalo, color='k', alpha=0.25, linewidth=0, edgecolor=None)
print cnts[-10:]
m_mid, mu_mhalo, sig_mhalo, cnts = smhmr.Calculate(subcat_down['m.star'][isSF_down], subcat_down['halo.m'][isSF_down],
weights=subcat_down['weights'][isSF_down])
sub.fill_between(m_mid, mu_mhalo - sig_mhalo, mu_mhalo + sig_mhalo, color='b', alpha=0.25, linewidth=0, edgecolor=None)
print cnts[-10:]
sub.set_xlim([8., 12.])
sub.set_xlabel('Stellar Mass $(\mathcal{M}_*)$', fontsize=25)
sub.set_ylim([10., 15.])
sub.set_ylabel('Halo Mass $(\mathcal{M}_{halo})$', fontsize=25)
plt.show()
elif test == 'sfms':
fig = plt.figure(figsize=(7,7))
sub = fig.add_subplot(111)
DFM.hist2d(subcat['m.star'], subcat['sfr'], weights=subcat['weights'],
levels=[0.68, 0.95], range=[[6., 12.], [-4., 2.]], color='#1F77B4',
plot_datapoints=True, fill_contours=False, plot_density=True, ax=sub)
DFM.hist2d(subcat_down['m.star'], subcat_down['sfr'], weights=subcat_down['weights'],
levels=[0.68, 0.95], range=[[6., 12.], [-4., 2.]], color='#FF7F0E',
plot_datapoints=True, fill_contours=False, plot_density=True, ax=sub)
sub.set_xlim([6., 12.])
sub.set_xlabel('$\mathtt{log\;M_*}$', fontsize=25)
sub.set_ylim([-4., 2.])
sub.set_ylabel('$\mathtt{log\;SFR}$', fontsize=25)
plt.show()
return None
if __name__=="__main__":
#test_EvolverEvolve_downsample('smhmr', nsnap0=15, downsampled='14')
#test_EvolverEvolve_downsample('smf', nsnap0=15, downsampled='14', sfh='random_step_abias2')
#for i in [15, 10, 5, 1]:
# test_EvolverInitiate_downsample('sfms', i, nsnap0=15, downsampled='14')
| mit | -8,845,726,252,663,401,000 | 38.826549 | 127 | 0.515821 | false |
dimtion/jml | outputFiles/statistics/archives/ourIA/closest.py/0.7/3/player1.py | 1 | 11241 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
####################################################################################################################################################################################################################################
######################################################################################################## PRE-DEFINED IMPORTS #######################################################################################################
####################################################################################################################################################################################################################################
# Imports that are necessary for the program architecture to work properly
# Do not edit this code
import ast
import sys
import os
####################################################################################################################################################################################################################################
####################################################################################################### PRE-DEFINED CONSTANTS ######################################################################################################
####################################################################################################################################################################################################################################
# Possible characters to send to the maze application
# Any other will be ignored
# Do not edit this code
UP = 'U'
DOWN = 'D'
LEFT = 'L'
RIGHT = 'R'
####################################################################################################################################################################################################################################
# Name of your team
# It will be displayed in the maze
# You have to edit this code
TEAM_NAME = "closest"
####################################################################################################################################################################################################################################
########################################################################################################## YOUR VARIABLES ##########################################################################################################
####################################################################################################################################################################################################################################
# Stores all the moves in a list to restitute them one by one
allMoves = [UP, RIGHT, UP, RIGHT, UP, RIGHT, RIGHT, RIGHT, RIGHT, RIGHT, UP, UP, UP, UP, UP, RIGHT]
####################################################################################################################################################################################################################################
####################################################################################################### PRE-DEFINED FUNCTIONS ######################################################################################################
####################################################################################################################################################################################################################################
# Writes a message to the shell
# Use for debugging your program
# Channels stdout and stdin are captured to enable communication with the maze
# Do not edit this code
def debug (text) :
# Writes to the stderr channel
sys.stderr.write(str(text) + "\n")
sys.stderr.flush()
####################################################################################################################################################################################################################################
# Reads one line of information sent by the maze application
# This function is blocking, and will wait for a line to terminate
# The received information is automatically converted to the correct type
# Do not edit this code
def readFromPipe () :
# Reads from the stdin channel and returns the structure associated to the string
try :
text = sys.stdin.readline()
return ast.literal_eval(text.strip())
except :
os._exit(-1)
####################################################################################################################################################################################################################################
# Sends the text to the maze application
# Do not edit this code
def writeToPipe (text) :
# Writes to the stdout channel
sys.stdout.write(text)
sys.stdout.flush()
####################################################################################################################################################################################################################################
# Reads the initial maze information
# The function processes the text and returns the associated variables
# The dimensions of the maze are positive integers
# Maze map is a dictionary associating to a location its adjacent locations and the associated weights
# The preparation time gives the time during which 'initializationCode' can make computations before the game starts
# The turn time gives the time during which 'determineNextMove' can make computations before returning a decision
# Player locations are tuples (line, column)
# Coins are given as a list of locations where they appear
# A boolean indicates if the game is over
# Do not edit this code
def processInitialInformation () :
# We read from the pipe
data = readFromPipe()
return (data['mazeWidth'], data['mazeHeight'], data['mazeMap'], data['preparationTime'], data['turnTime'], data['playerLocation'], data['opponentLocation'], data['coins'], data['gameIsOver'])
####################################################################################################################################################################################################################################
# Reads the information after each player moved
# The maze map and allowed times are no longer provided since they do not change
# Do not edit this code
def processNextInformation () :
# We read from the pipe
data = readFromPipe()
return (data['playerLocation'], data['opponentLocation'], data['coins'], data['gameIsOver'])
####################################################################################################################################################################################################################################
########################################################################################################## YOUR FUNCTIONS ##########################################################################################################
####################################################################################################################################################################################################################################
# This is where you should write your code to do things during the initialization delay
# This function should not return anything, but should be used for a short preprocessing
# This function takes as parameters the dimensions and map of the maze, the time it is allowed for computing, the players' locations in the maze and the remaining coin locations
# Make sure to have a safety margin for the time to include processing times (communication etc.)
def initializationCode (mazeWidth, mazeHeight, mazeMap, timeAllowed, playerLocation, opponentLocation, coins) :
# Nothing to do
pass
####################################################################################################################################################################################################################################
# This is where you should write your code to determine the next direction
# This function should return one of the directions defined in the CONSTANTS section
# This function takes as parameters the dimensions and map of the maze, the time it is allowed for computing, the players' locations in the maze and the remaining coin locations
# Make sure to have a safety margin for the time to include processing times (communication etc.)
def determineNextMove (mazeWidth, mazeHeight, mazeMap, timeAllowed, playerLocation, opponentLocation, coins) :
# We return the next move as described by the list
global allMoves
nextMove = allMoves[0]
allMoves = allMoves[1:]
return nextMove
####################################################################################################################################################################################################################################
############################################################################################################# MAIN LOOP ############################################################################################################
####################################################################################################################################################################################################################################
# This is the entry point when executing this file
# We first send the name of the team to the maze
# The first message we receive from the maze includes its dimensions and map, the times allowed for the various steps, and the locations of the players and coins
# Then, at every loop iteration, we get the maze status and determine a move
# Do not edit this code
if __name__ == "__main__" :
# We send the team name
writeToPipe(TEAM_NAME + "\n")
# We process the initial information and have a delay to compute things using it
(mazeWidth, mazeHeight, mazeMap, preparationTime, turnTime, playerLocation, opponentLocation, coins, gameIsOver) = processInitialInformation()
initializationCode(mazeWidth, mazeHeight, mazeMap, preparationTime, playerLocation, opponentLocation, coins)
# We decide how to move and wait for the next step
while not gameIsOver :
(playerLocation, opponentLocation, coins, gameIsOver) = processNextInformation()
if gameIsOver :
break
nextMove = determineNextMove(mazeWidth, mazeHeight, mazeMap, turnTime, playerLocation, opponentLocation, coins)
writeToPipe(nextMove)
####################################################################################################################################################################################################################################
#################################################################################################################################################################################################################################### | mit | -5,020,732,711,863,126,000 | 63.982659 | 228 | 0.356641 | false |
invinst/ResponseBot | responsebot/responsebot_client.py | 1 | 12103 | from __future__ import absolute_import
from decorator import decorate
from tweepy.error import TweepError, RateLimitError
from responsebot.common.constants import TWITTER_PAGE_DOES_NOT_EXISTS_ERROR, TWITTER_TWEET_NOT_FOUND_ERROR, \
TWITTER_USER_NOT_FOUND_ERROR, TWITTER_DELETE_OTHER_USER_TWEET, TWITTER_ACCOUNT_SUSPENDED_ERROR,\
TWITTER_USER_IS_NOT_LIST_MEMBER_SUBSCRIBER, TWITTER_AUTOMATED_REQUEST_ERROR, TWITTER_OVER_CAPACITY_ERROR,\
TWITTER_DAILY_STATUS_UPDATE_LIMIT_ERROR, TWITTER_CHARACTER_LIMIT_ERROR_1, TWITTER_CHARACTER_LIMIT_ERROR_2, \
TWITTER_STATUS_DUPLICATE_ERROR
from responsebot.common.exceptions import APIQuotaError, AutomatedRequestError, OverCapacityError,\
DailyStatusUpdateError, CharacterLimitError, StatusDuplicateError
from responsebot.models import Tweet, User, List
from responsebot.utils.tweepy import tweepy_list_to_json
def api_error_handle(func):
def func_wrapper(f, *args, **kwargs):
try:
return f(*args, **kwargs)
except RateLimitError as e:
raise APIQuotaError(str(e))
except TweepError as e:
if e.api_code == TWITTER_AUTOMATED_REQUEST_ERROR:
raise AutomatedRequestError
elif e.api_code == TWITTER_OVER_CAPACITY_ERROR:
raise OverCapacityError
elif e.api_code in [TWITTER_CHARACTER_LIMIT_ERROR_1, TWITTER_CHARACTER_LIMIT_ERROR_2]:
raise CharacterLimitError
elif e.api_code == TWITTER_DAILY_STATUS_UPDATE_LIMIT_ERROR:
raise DailyStatusUpdateError
elif e.api_code == TWITTER_STATUS_DUPLICATE_ERROR:
raise StatusDuplicateError
else:
raise
return decorate(func, func_wrapper)
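# Editorial note, not part of the original module: api_error_handle is intended to wrap
# the Tweepy-backed methods defined further down in this file, translating TweepError
# codes into ResponseBot exceptions. A minimal usage sketch (create_list is one of the
# real methods below; the ellipsis stands in for its actual arguments):
#
#     @api_error_handle
#     def create_list(self, name, mode='public', description=None):
#         return List(tweepy_list_to_json(self._client.create_list(...)))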
class ResponseBotClient(object):
"""
Wrapper for all Twitter API clients.
"""
def __init__(self, client, config):
self._client = client
self._current_user = None
self.config = config
@property
def tweepy_api(self):
"""
Get the actual client object.
:return: the actual client object
"""
return self._client
def get_current_user(self):
if self._current_user is None:
self._current_user = User(self._client.me()._json)
return self._current_user
@api_error_handle
def tweet(self, text, in_reply_to=None, filename=None, file=None):
"""
Post a new tweet.
:param text: the text to post
:param in_reply_to: The ID of the tweet to reply to
:param filename: If `file` param is not provided, read file from this path
:param file: A file object, which will be used instead of opening `filename`. `filename` is still required, for
MIME type detection and to use as a form field in the POST data
:return: Tweet object
"""
if filename is None:
return Tweet(self._client.update_status(status=text, in_reply_to_status_id=in_reply_to)._json)
else:
return Tweet(self._client.update_with_media(filename=filename, file=file,
status=text, in_reply_to_status_id=in_reply_to)._json)
def retweet(self, id):
"""
Retweet a tweet.
:param id: ID of the tweet in question
:return: True if success, False otherwise
"""
try:
self._client.retweet(id=id)
return True
except TweepError as e:
if e.api_code == TWITTER_PAGE_DOES_NOT_EXISTS_ERROR:
return False
raise
def get_tweet(self, id):
"""
Get an existing tweet.
:param id: ID of the tweet in question
:return: Tweet object. None if not found
"""
try:
return Tweet(self._client.get_status(id=id)._json)
except TweepError as e:
if e.api_code == TWITTER_TWEET_NOT_FOUND_ERROR:
return None
raise
def get_user(self, id):
"""
Get a user's info.
:param id: ID of the user in question
:return: User object. None if not found
"""
try:
return User(self._client.get_user(user_id=id)._json)
except TweepError as e:
if e.api_code == TWITTER_USER_NOT_FOUND_ERROR:
return None
raise
def remove_tweet(self, id):
"""
Delete a tweet.
:param id: ID of the tweet in question
:return: True if success, False otherwise
"""
try:
self._client.destroy_status(id=id)
return True
except TweepError as e:
if e.api_code in [TWITTER_PAGE_DOES_NOT_EXISTS_ERROR, TWITTER_DELETE_OTHER_USER_TWEET]:
return False
raise
def follow(self, user_id, notify=False):
"""
Follow a user.
:param user_id: ID of the user in question
:param notify: whether to notify the user about the following
        :return: The user that was followed
"""
try:
return User(self._client.create_friendship(user_id=user_id, follow=notify)._json)
except TweepError as e:
if e.api_code in [TWITTER_ACCOUNT_SUSPENDED_ERROR]:
return self.get_user(user_id)
raise
def unfollow(self, user_id):
"""
        Unfollow a user.
        :param user_id: ID of the user in question
        :return: The user that was unfollowed
"""
return User(self._client.destroy_friendship(user_id=user_id)._json)
###################################################################################
# Lists
###################################################################################
@api_error_handle
def create_list(self, name, mode='public', description=None):
"""
Create a list
:param name: Name of the new list
:param mode: :code:`'public'` (default) or :code:`'private'`
:param description: Description of the new list
:return: The new list object
:rtype: :class:`~responsebot.models.List`
"""
return List(tweepy_list_to_json(self._client.create_list(name=name, mode=mode, description=description)))
@api_error_handle
def destroy_list(self, list_id):
"""
Destroy a list
:param list_id: list ID number
:return: The destroyed list object
:rtype: :class:`~responsebot.models.List`
"""
return List(tweepy_list_to_json(self._client.destroy_list(list_id=list_id)))
@api_error_handle
def update_list(self, list_id, name=None, mode=None, description=None):
"""
Update a list
:param list_id: list ID number
:param name: New name for the list
:param mode: :code:`'public'` (default) or :code:`'private'`
:param description: New description of the list
:return: The updated list object
:rtype: :class:`~responsebot.models.List`
"""
return List(tweepy_list_to_json(
self._client.update_list(list_id=list_id, name=name, mode=mode, description=description))
)
@api_error_handle
def lists(self):
"""
List user's lists
:return: list of :class:`~responsebot.models.List` objects
"""
return [List(tweepy_list_to_json(list)) for list in self._client.lists_all()]
@api_error_handle
def lists_memberships(self):
"""
        List the lists to which the user has been added
:return: list of :class:`~responsebot.models.List` objects
"""
return [List(tweepy_list_to_json(list)) for list in self._client.lists_memberships()]
@api_error_handle
def lists_subscriptions(self):
"""
        List the lists to which the user has subscribed
:return: list of :class:`~responsebot.models.List` objects
"""
return [List(tweepy_list_to_json(list)) for list in self._client.lists_subscriptions()]
@api_error_handle
def list_timeline(self, list_id, since_id=None, max_id=None, count=20):
"""
List the tweets of specified list.
:param list_id: list ID number
:param since_id: results will have ID greater than specified ID (more recent than)
:param max_id: results will have ID less than specified ID (older than)
:param count: number of results per page
:return: list of :class:`~responsebot.models.Tweet` objects
"""
statuses = self._client.list_timeline(list_id=list_id, since_id=since_id, max_id=max_id, count=count)
return [Tweet(tweet._json) for tweet in statuses]
@api_error_handle
def get_list(self, list_id):
"""
Get info of specified list
:param list_id: list ID number
:return: :class:`~responsebot.models.List` object
"""
return List(tweepy_list_to_json(self._client.get_list(list_id=list_id)))
@api_error_handle
def add_list_member(self, list_id, user_id):
"""
Add a user to list
:param list_id: list ID number
:param user_id: user ID number
:return: :class:`~responsebot.models.List` object
"""
return List(tweepy_list_to_json(self._client.add_list_member(list_id=list_id, user_id=user_id)))
@api_error_handle
def remove_list_member(self, list_id, user_id):
"""
Remove a user from a list
:param list_id: list ID number
:param user_id: user ID number
:return: :class:`~responsebot.models.List` object
"""
return List(tweepy_list_to_json(self._client.remove_list_member(list_id=list_id, user_id=user_id)))
@api_error_handle
def list_members(self, list_id):
"""
List users in a list
:param list_id: list ID number
:return: list of :class:`~responsebot.models.User` objects
"""
return [User(user._json) for user in self._client.list_members(list_id=list_id)]
@api_error_handle
def is_list_member(self, list_id, user_id):
"""
        Check if a user is a member of a list
        :param list_id: list ID number
        :param user_id: user ID number
        :return: :code:`True` if the user is a member of the list, :code:`False` otherwise
"""
try:
return bool(self._client.show_list_member(list_id=list_id, user_id=user_id))
except TweepError as e:
if e.api_code == TWITTER_USER_IS_NOT_LIST_MEMBER_SUBSCRIBER:
return False
raise
@api_error_handle
def subscribe_list(self, list_id):
"""
Subscribe to a list
:param list_id: list ID number
:return: :class:`~responsebot.models.List` object
"""
return List(tweepy_list_to_json(self._client.subscribe_list(list_id=list_id)))
@api_error_handle
def unsubscribe_list(self, list_id):
"""
        Unsubscribe from a list
:param list_id: list ID number
:return: :class:`~responsebot.models.List` object
"""
return List(tweepy_list_to_json(self._client.unsubscribe_list(list_id=list_id)))
@api_error_handle
def list_subscribers(self, list_id):
"""
List subscribers of a list
:param list_id: list ID number
        :return: list of :class:`~responsebot.models.User` objects
"""
return [User(user._json) for user in self._client.list_subscribers(list_id=list_id)]
@api_error_handle
def is_subscribed_list(self, list_id, user_id):
"""
        Check if the user is a subscriber of the specified list
        :param list_id: list ID number
        :param user_id: user ID number
        :return: :code:`True` if the user is subscribed to the list, :code:`False` otherwise
"""
try:
return bool(self._client.show_list_subscriber(list_id=list_id, user_id=user_id))
except TweepError as e:
if e.api_code == TWITTER_USER_IS_NOT_LIST_MEMBER_SUBSCRIBER:
return False
raise
| apache-2.0 | -2,195,622,971,623,110,000 | 33.58 | 119 | 0.592002 | false |
praekeltfoundation/certbot | marathon_acme/server.py | 1 | 3528 | import json
from klein import Klein
from twisted.internet.endpoints import serverFromString
from twisted.logger import Logger
from twisted.web.http import NOT_IMPLEMENTED, OK, SERVICE_UNAVAILABLE
from twisted.web.server import Site
def write_request_json(request, json_obj):
request.setHeader('Content-Type', 'application/json')
request.write(json.dumps(json_obj).encode('utf-8'))
class MarathonAcmeServer(object):
app = Klein()
log = Logger()
def __init__(self, responder_resource):
"""
:param responder_resource:
An ``IResponse`` used to respond to ACME HTTP challenge validation
requests.
"""
self.responder_resource = responder_resource
self.health_handler = None
def listen(self, reactor, endpoint_description):
"""
        Run the server, i.e. start listening for requests on the given endpoint.
:param reactor: The ``IReactorTCP`` to use.
:param endpoint_description:
The Twisted description for the endpoint to listen on.
:return:
A deferred that returns an object that provides ``IListeningPort``.
"""
endpoint = serverFromString(reactor, endpoint_description)
return endpoint.listen(Site(self.app.resource()))
@app.route('/.well-known/acme-challenge/', branch=True, methods=['GET'])
def acme_challenge(self, request):
"""
Respond to ACME challenge validation requests on
``/.well-known/acme-challenge/`` using the ACME responder resource.
"""
return self.responder_resource
@app.route('/.well-known/acme-challenge/ping', methods=['GET'])
def acme_challenge_ping(self, request):
"""
Respond to requests on ``/.well-known/acme-challenge/ping`` to debug
path routing issues.
"""
request.setResponseCode(OK)
write_request_json(request, {'message': 'pong'})
def set_health_handler(self, health_handler):
"""
Set the handler for the health endpoint.
:param health_handler:
The handler for health status requests. This must be a callable
that returns a Health object.
"""
self.health_handler = health_handler
@app.route('/health', methods=['GET'])
def health(self, request):
""" Listens to incoming health checks from Marathon on ``/health``. """
if self.health_handler is None:
return self._no_health_handler(request)
health = self.health_handler()
response_code = OK if health.healthy else SERVICE_UNAVAILABLE
request.setResponseCode(response_code)
write_request_json(request, health.json_message)
def _no_health_handler(self, request):
self.log.warn('Request to /health made but no handler is set')
request.setResponseCode(NOT_IMPLEMENTED)
write_request_json(request, {
'error': 'Cannot determine service health: no handler set'
})
class Health(object):
def __init__(self, healthy, json_message={}):
"""
Health objects store the current health status of the service.
:param bool healthy:
The service is either healthy (True) or unhealthy (False).
:param json_message:
An object that can be serialized as JSON that will be sent as a
message when the health status is requested.
"""
self.healthy = healthy
self.json_message = json_message
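# Editorial note, not part of the original module: a health handler is any callable
# returning a Health object, registered through MarathonAcmeServer.set_health_handler.
# A minimal sketch (the names `server` and `check_health` are examples only):
#
#     def check_health():
#         return Health(healthy=True, json_message={'status': 'ok'})
#
#     server = MarathonAcmeServer(responder_resource)
#     server.set_health_handler(check_health)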
| mit | 7,013,790,470,706,764,000 | 33.588235 | 79 | 0.637755 | false |
Udzu/pudzu | dataviz/markovtext.py | 1 | 6426 | import pickle
import seaborn as sns
import string
from pudzu.sandbox.markov import *
from pudzu.sandbox.bamboo import *
from pudzu.charts import *
from math import log
CORPUS = "wikienglish"
TITLE = "Letter and next-letter frequencies in English"
SUBTITLE = "measured across 1 million sentences from Wikipedia"
ENCODING = "utf-8"
LETTERS = string.ascii_lowercase + ' '
# Markov generators
def load_generator(n):
try:
logger.info("Loading ../corpora/{}_{}.p".format(CORPUS, n))
with open("../corpora/{}_{}.p".format(CORPUS, n), "rb") as f:
return pickle.load(f)
except:
logger.info("Training {} {}-grams".format(CORPUS, n))
markov = MarkovGenerator(n)
for f in tqdm.tqdm(CORPUS.split("-")):
markov.train_file("../corpora/"+f, encoding=ENCODING, normalise=partial(latin_normalise, letters=LETTERS))
logger.info("Saving to ../corpora/{}_{}.p".format(CORPUS, n))
with open("../corpora/{}_{}.p".format(CORPUS, n), "wb") as f:
pickle.dump(markov, f, pickle.HIGHEST_PROTOCOL)
return markov
g1 = load_generator(1)
g2 = load_generator(2)
# Grid chart
SCALE = 2
BOX_SIZE = 40 * SCALE
BIG_KEY = round(BOX_SIZE*1.5)
SMALL_KEY = round(BIG_KEY/2)
FONT_SIZE = round(18 * SCALE)
MAX_WIDTH = round(200 * SCALE)
MAX_WIDTH2 = round(280 * SCALE)
logger.info("Generating grid chart")
index = sorted([(x, g1.prob_dict[(x,)] / sum(g1.prob_dict.values())) for x in LETTERS if (x,) in g1.prob_dict], key=lambda p: p[1], reverse=True)
array = [[(y,n / sum(g1.markov_dict[(x,)].values())) for y,n in g1.markov_dict[(x,)].most_common()] for x,_ in index]
data = pd.DataFrame(array, index=index)
pone = tmap(RGBA, sns.color_palette("Reds", 8))
ptwo = tmap(RGBA, sns.color_palette("Blues", 8))
color_index = lambda p: 0 if p == 0 else clip(6 + int(log(p, 10) * 2), 0, 6)
def image_fn(pair, palette, row=None, size=BOX_SIZE):
if pair is None: return None
bg = palette[color_index(pair[1])]
img = Image.new("RGBA", (size,size), bg)
img.place(Image.from_text(pair[0], arial(size//2), "black", bg=bg), copy=False)
if row is not None and pair[0] != " ":
if not isinstance(row, str):
twogram = g2.markov_dict[(index[row][0], pair[0])].most_common()
row, _ = twogram[0][0], twogram[0][1] / sum(n for _,n in twogram)
img.place(Image.from_text(row, arial(round(size/3.5)), "black", bg=bg), align=(1,0), padding=(size//8,size//5), copy=False)
return img
grid = grid_chart(data, lambda p, r: image_fn(p, row=r, palette=ptwo), fg="black", bg="white", padding=round(SCALE), row_label=lambda i: image_fn(data.index[i], palette=pone))
# Main legend
type_boxes = Image.from_array([
[image_fn(('a', 0.01), pone, size=BIG_KEY),
Image.from_text("Letters and spaces sorted by overall frequency. Ignores case and accents.", arial(FONT_SIZE), padding=(BOX_SIZE//4,0), max_width=MAX_WIDTH)],
[image_fn(('n', 0.01), ptwo, row='d', size=BIG_KEY),
Image.from_text("Next letter sorted by frequency. Small letter is the most common third letter following the pair.", arial(FONT_SIZE), padding=(BOX_SIZE//4,0), max_width=MAX_WIDTH)]
], bg="white", xalign=0, padding=(0,BOX_SIZE//20))
type_leg = Image.from_column([Image.from_text("Colour key", arial(FONT_SIZE, bold=True)), type_boxes, Image.from_text("Blank letters indicate spaces.", arial(FONT_SIZE))], bg="white", xalign=0, padding=(0,BOX_SIZE//20))
color_from_index = lambda i: 10 ** ((i - 6) / 2)
color_label = lambda i: "{:.1%} to {:.1%}".format(color_from_index(i-1), color_from_index(i))
freq_boxes = Image.from_array([
[Image.new("RGBA", (SMALL_KEY,SMALL_KEY), "white" if i == 6 else pone[i]),
Image.new("RGBA", (SMALL_KEY,SMALL_KEY), ptwo[i]),
Image.from_text(color_label(i), arial(FONT_SIZE), padding=(BOX_SIZE//4,0))]
for i in reversed(range(0, 7))], bg="white", xalign=0)
freq_leg = Image.from_column([Image.from_text("Letter frequencies", arial(FONT_SIZE, bold=True)), freq_boxes], bg="white", xalign=0, padding=(0,BOX_SIZE//8))
legend_inner = Image.from_column([type_leg, freq_leg], bg="white", xalign=0, padding=BOX_SIZE//8)
legend = legend_inner.pad(SCALE, "black").pad((BOX_SIZE//2,0,BOX_SIZE//4,0), "white")
# Generated words
if CORPUS == "wikienglish":
words = ["bastrabot", "dithely", "foriticent", "gamplato", "calpereek", "amorth", "forliatitive", "asocult", "wasions", "quarm", "felogy", "winferlifterand", "loubing", "uniso", "fourn", "hise", "meembege", "whigand", "prouning", "guncelawits", "nown", "rectere", "abrip", "doesium"]
elif CORPUS == "wikifrench":
words = ["cillesil", "sulskini", "lidhemin", "plumeme", "bachogine", "crout", "taphie", "provicas", "copit", "odzzaccet", "extreiles", "pipiphien", "chetratagne", "outif", "suro", "extellages", "nans", "nutopune", "entote", "sporese", "zhiquis", "edes", "aliet", "randamelle"]
else:
words = [g2.render_word() for i in range(24)]
word_array = Image.from_array([
[Image.from_text(words[2*i], arial(FONT_SIZE, italics=True), fg="black", bg="white"),
Image.from_text(words[2*i+1], arial(FONT_SIZE, italics=True), fg="black", bg="white")] for i in range(len(words)//2)], bg="white", padding=(15,2)).pad(BOX_SIZE//8,"white")
word_title = Image.from_column([Image.from_text("Markov generators", arial(FONT_SIZE, bold=True)),
Image.from_text("The letters distributions in the chart can be used to generate pseudowords such as the ones below. A similar approach, at the word level, is used for online parody generators.", arial(FONT_SIZE),max_width=MAX_WIDTH2)], bg="white", xalign=0, padding=(0,BOX_SIZE//8))
word_box = Image.from_column([word_title, word_array], bg="white", padding=BOX_SIZE//8)
word_box = word_box.pad_to_aspect(legend_inner.width, word_box.height, align=0, bg="white").pad(SCALE, "white").pad((BOX_SIZE//2,0,BOX_SIZE//4,0), "white")
# Chart
chart = Image.from_row([grid, legend], bg="white", yalign=0)
chart = chart.place(word_box, align=1, padding=(0,BOX_SIZE))
title = Image.from_column([Image.from_text(TITLE, arial(BOX_SIZE, bold=True), bg="white"),
Image.from_text(SUBTITLE, arial(round(24 * SCALE), bold=True), bg="white")])
full = Image.from_column([title, chart], bg="white", padding=(0,BOX_SIZE//4))
full.save("output/markovtext_{}.png".format(CORPUS))
| mit | 4,370,538,101,888,572,400 | 53.396552 | 287 | 0.644569 | false |
brython-dev/brython | www/src/Lib/test/test_eof.py | 2 | 2490 | """test script for a few new invalid token catches"""
import sys
from test import support
from test.support import script_helper
import unittest
class EOFTestCase(unittest.TestCase):
def test_EOFC(self):
expect = "EOL while scanning string literal (<string>, line 1)"
try:
eval("""'this is a test\
""")
except SyntaxError as msg:
self.assertEqual(str(msg), expect)
else:
raise support.TestFailed
def test_EOFS(self):
expect = ("EOF while scanning triple-quoted string literal "
"(<string>, line 1)")
try:
eval("""'''this is a test""")
except SyntaxError as msg:
self.assertEqual(str(msg), expect)
else:
raise support.TestFailed
def test_eof_with_line_continuation(self):
expect = "unexpected EOF while parsing (<string>, line 1)"
try:
compile('"\\xhh" \\', '<string>', 'exec', dont_inherit=True)
except SyntaxError as msg:
self.assertEqual(str(msg), expect)
else:
raise support.TestFailed
def test_line_continuation_EOF(self):
"""A continuation at the end of input must be an error; bpo2180."""
expect = 'unexpected EOF while parsing (<string>, line 1)'
with self.assertRaises(SyntaxError) as excinfo:
exec('x = 5\\')
self.assertEqual(str(excinfo.exception), expect)
with self.assertRaises(SyntaxError) as excinfo:
exec('\\')
self.assertEqual(str(excinfo.exception), expect)
@unittest.skipIf(not sys.executable, "sys.executable required")
def test_line_continuation_EOF_from_file_bpo2180(self):
"""Ensure tok_nextc() does not add too many ending newlines."""
with support.temp_dir() as temp_dir:
file_name = script_helper.make_script(temp_dir, 'foo', '\\')
rc, out, err = script_helper.assert_python_failure(file_name)
self.assertIn(b'unexpected EOF while parsing', err)
self.assertIn(b'line 2', err)
self.assertIn(b'\\', err)
file_name = script_helper.make_script(temp_dir, 'foo', 'y = 6\\')
rc, out, err = script_helper.assert_python_failure(file_name)
self.assertIn(b'unexpected EOF while parsing', err)
self.assertIn(b'line 2', err)
self.assertIn(b'y = 6\\', err)
if __name__ == "__main__":
unittest.main()
| bsd-3-clause | 7,089,911,229,699,417,000 | 37.307692 | 77 | 0.590763 | false |
EmanueleCannizzaro/scons | test/Split.py | 1 | 2076 | #!/usr/bin/env python
#
# Copyright (c) 2001 - 2016 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "test/Split.py rel_2.5.1:3735:9dc6cee5c168 2016/11/03 14:02:02 bdbaddog"
import TestSCons
test = TestSCons.TestSCons()
test.write('SConstruct', """
env = Environment(BBB = 'bbb', CCC = 'ccc')
print Split('aaa')
print Split('aaa $BBB')
print env.Split('bbb $CCC')
print env.Split('$BBB ccc')
print Split(['ddd', 'eee'])
SConscript('SConscript')
""")
test.write('SConscript', """
env = Environment(FFF='fff', JJJ='jjj')
print env.Split('${FFF}.f')
print Split('ggg hhh')
print env.Split(['iii', '$JJJ'])
""")
expect = """\
['aaa']
['aaa', '$BBB']
['bbb', 'ccc']
['bbb', 'ccc']
['ddd', 'eee']
['fff.f']
['ggg', 'hhh']
['iii', 'jjj']
"""
test.run(arguments = ".",
stdout = test.wrap_stdout(read_str = expect,
build_str = "scons: `.' is up to date.\n"))
test.pass_test()
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
| mit | 3,093,363,460,903,866,000 | 29.086957 | 87 | 0.69027 | false |
adampresley/bottlepy-bootstrap | model/DateHelper.py | 1 | 2135 | from model.Service import Service
from datetime import tzinfo, timedelta, datetime
from dateutil import tz
class DateHelper(Service):
utc = tz.gettz("UTC")
pyToJsFormatMapping = {
"%m/%d/%Y": "MM/dd/yyyy",
"%d/%m/%Y": "dd/MM/yyyy",
"%Y-%m-%d": "yyyy-MM-dd"
}
def __init__(self, db, timezone = "UTC", dateFormat = "%m/%d/%Y", timeFormat = "%I:%M %p"):
self.db = db
self._timezone = timezone
self._dateFormat = dateFormat
self._timeFormat = timeFormat
def addDays(self, d, numDays = 1, format = "%Y-%m-%d"):
if not self.isDateType(d):
d = datetime.strptime(d, format)
newDate = d + timedelta(days = numDays)
return newDate
def dateFormat(self, d):
return self.utcToTimezone(d, self._timezone).strftime(self._dateFormat)
def dateTimeFormat(self, d):
return self.utcToTimezone(d, self._timezone).strftime("%s %s" % (self._dateFormat, self._timeFormat))
def isDateType(self, d):
result = True
try:
d.today()
except AttributeError as e:
result = False
return result
def localNow(self):
return self.utcToTimezone(datetime.now(self.utc), self._timezone)
def now(self):
return datetime.now(self.utc)
def pyToJsDateFormat(self, pyDateFormat):
return self.pyToJsFormatMapping[pyDateFormat]
def restDateFormat(self, d):
return d.strftime("%Y-%m-%d")
def restDateTime(self, d):
return d.strftime("%Y-%m-%d %H:%M")
def timeFormat(self, d):
return self.utcToTimezone(d, self._timezone).strftime(self._timeFormat)
def utcToTimezone(self, d, timezone):
targetTZ = tz.gettz(timezone)
d = d.replace(tzinfo = self.utc)
return d.astimezone(targetTZ)
def validateDateRange(self, start, end, format = "%Y-%m-%d"):
#
# Basically if the range between start and end is greater than 91
# days kick it back with today's date as default.
#
parsedStart = datetime.strptime(start, format)
parsedEnd = datetime.strptime(end, format)
delta = parsedEnd - parsedStart
newStart = start
newEnd = end
if delta.days > 91:
newStart = self.restDateFormat(self.localNow())
newEnd = self.restDateFormat(self.localNow())
return (newStart, newEnd)
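# Editorial note, not part of the original module: an illustrative usage sketch
# (the values below are examples only):
#
#     helper = DateHelper(db, timezone="America/Chicago")
#     helper.dateTimeFormat(helper.now())              # current UTC time rendered in the local zone
#     helper.validateDateRange("2013-01-01", "2013-06-30")
#     # -> both dates replaced by today's date, since the range exceeds 91 days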
| mit | 6,094,790,128,665,935,000 | 24.129412 | 103 | 0.685714 | false |
alexey4petrov/pythonFlu | Foam/Ref/OpenFOAM_interfaces.py | 1 | 13008 | ## pythonFlu - Python wrapping for OpenFOAM C++ API
## Copyright (C) 2010- Alexey Petrov
## Copyright (C) 2009-2010 Pebble Bed Modular Reactor (Pty) Limited (PBMR)
##
## This program is free software: you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation, either version 3 of the License, or
## (at your option) any later version.
##
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with this program. If not, see <http://www.gnu.org/licenses/>.
##
## See http://sourceforge.net/projects/pythonflu
##
## Author : Alexey PETROV, Andrey SIMURZIN
##
#--------------------------------------------------------------------------------------
attr2interface={ 'argList' : 'Foam.src.OpenFOAM.global_.argList.argList',
'messageStream' : 'Foam.src.OpenFOAM.db.error.messageStream.messageStream',
'ext_SeriousError' : 'Foam.src.OpenFOAM.db.error.messageStream.ext_SeriousError',
'ext_Warning' : 'Foam.src.OpenFOAM.db.error.messageStream.ext_Warning',
'ext_Info' : 'Foam.src.OpenFOAM.db.error.messageStream.ext_Info',
'Ostream' : 'Foam.src.OpenFOAM.db.IOstreams.IOstreams.Ostream.Ostream',
'nl' : 'Foam.src.OpenFOAM.db.IOstreams.IOstreams.Ostream.nl',
'tab' : 'Foam.src.OpenFOAM.db.IOstreams.IOstreams.Ostream.tab',
'endl' : 'Foam.src.OpenFOAM.db.IOstreams.IOstreams.Ostream.endl',
'Time': 'Foam.src.OpenFOAM.db.Time.ref_Time.Time',
'functionObject': 'Foam.src.OpenFOAM.db.Time.Time.functionObject',
'autoPtr_functionObject': 'Foam.src.OpenFOAM.db.Time.Time.autoPtr_functionObject',
'functionObjectList': 'Foam.src.OpenFOAM.db.Time.Time.functionObjectList',
'TConstructorToTableCounter_functionObject' : 'Foam.src.OpenFOAM.db.Time.Time.TConstructorToTableCounter_functionObject',
'functionObjectConstructorToTableBase_0' : 'Foam.src.OpenFOAM.db.Time.Time.functionObjectConstructorToTableBase_0',
'IOobject' : 'Foam.src.OpenFOAM.db.IOobject.IOobject',
'IOobjectList' : 'Foam.src.OpenFOAM.db.IOobjectList.IOobjectList',
'fileName' : 'Foam.src.OpenFOAM.primitives.strings.fileName.fileName',
'IOdictionary' : 'Foam.src.OpenFOAM.db.IOdictionary.IOdictionary',
'autoPtr_IOdictionary' : 'Foam.src.OpenFOAM.fields.tmp.autoPtr_IOdictionary.autoPtr_IOdictionary',
'dictionary' : 'Foam.src.OpenFOAM.db.dictionary.dictionary.dictionary',
'word' : 'Foam.src.OpenFOAM.primitives.strings.word.word',
'keyType' : 'Foam.src.OpenFOAM.primitives.strings.keyType.keyType',
'Switch' : 'Foam.src.OpenFOAM.db.Switch.Switch',
'dimensionedScalar' : 'Foam.src.OpenFOAM.dimensionedTypes.dimensionedScalar.dimensionedScalar',
'dimensionedVector' : 'Foam.src.OpenFOAM.dimensionedTypes.dimensionedVector.dimensionedVector',
'dimensionedSymmTensor' : 'Foam.src.OpenFOAM.dimensionedTypes.dimensionedSymmTensor.dimensionedSymmTensor',
'dimensionedTensor' : 'Foam.src.OpenFOAM.dimensionedTypes.dimensionedTensor.dimensionedTensor',
'dimensionSet' : 'Foam.src.OpenFOAM.dimensionSet.dimensionSet',
'dimless' : ' Foam.src.OpenFOAM.dimensionSets.dimless',
'dimMass' : ' Foam.src.OpenFOAM.dimensionSets.dimMass',
'dimLength' : ' Foam.src.OpenFOAM.dimensionSets.dimLength',
'dimTime' : ' Foam.src.OpenFOAM.dimensionSets.dimTime',
'dimTemperature' : ' Foam.src.OpenFOAM.dimensionSets.dimTemperature',
'dimMoles' : ' Foam.src.OpenFOAM.dimensionSets.dimMoles',
'dimCurrent' : ' Foam.src.OpenFOAM.dimensionSets.dimCurrent',
'dimLuminousIntensity' : ' Foam.src.OpenFOAM.dimensionSets.dimLuminousIntensity',
'dimArea' : ' Foam.src.OpenFOAM.dimensionSets.dimArea',
'dimVolume' : ' Foam.src.OpenFOAM.dimensionSets.dimVolume',
'dimVol' : ' Foam.src.OpenFOAM.dimensionSets.dimVol',
'dimDensity' : ' Foam.src.OpenFOAM.dimensionSets.dimDensity',
'dimForce' : ' Foam.src.OpenFOAM.dimensionSets.dimForce',
'dimEnergy' : ' Foam.src.OpenFOAM.dimensionSets.dimEnergy',
'dimPower' : ' Foam.src.OpenFOAM.dimensionSets.dimPower',
'dimVelocity' : ' Foam.src.OpenFOAM.dimensionSets.dimVelocity',
'dimAcceleration' : ' Foam.src.OpenFOAM.dimensionSets.dimAcceleration',
'dimPressure' : ' Foam.src.OpenFOAM.dimensionSets.dimPressure',
'dimGasConstant' : ' Foam.src.OpenFOAM.dimensionSets.dimGasConstant',
'dimSpecificHeatCapacity' : ' Foam.src.OpenFOAM.dimensionSets.dimSpecificHeatCapacity',
'scalar' : float,
'string' : str,
'GREAT' : 'Foam.src.OpenFOAM.primitives.scalar.GREAT',
'SMALL' : 'Foam.src.OpenFOAM.primitives.scalar.SMALL',
'ROOTVSMALL' : 'Foam.src.OpenFOAM.primitives.scalar.ROOTVSMALL',
'readScalar' : 'Foam.src.OpenFOAM.primitives.scalar.readScalar',
'readLabel' : 'Foam.src.OpenFOAM.primitives.label.readLabel',
'readInt' : 'Foam.src.OpenFOAM.primitives.int_.readInt',
'vector' : 'Foam.src.OpenFOAM.primitives.vector.vector',
'symmTensor' : 'Foam.src.OpenFOAM.primitives.symmTensor.symmTensor',
'sphericalTensor' : 'Foam.src.OpenFOAM.primitives.sphericalTensor.sphericalTensor',
'tensor' : 'Foam.src.OpenFOAM.primitives.tensor.tensor',
'readBool' : 'Foam.src.OpenFOAM.primitives.bool.readBool',
'one' : 'Foam.src.OpenFOAM.primitives.one.one',
'complex' : 'Foam.src.OpenFOAM.primitives.complex.complex',
'complexVector' : 'Foam.src.OpenFOAM.primitives.complexVector.complexVector',
'Random' : 'Foam.src.OpenFOAM.primitives.Random.Random',
'IFstream' : 'Foam.src.OpenFOAM.db.IOstreams.Fstreams.IFstream.IFstream',
'OFstream' : 'Foam.src.OpenFOAM.db.IOstreams.Fstreams.OFstream.OFstream',
'Pstream' : 'Foam.src.OpenFOAM.db.IOstreams.Pstreams.Pstream.Pstream',
'solution' : 'Foam.src.OpenFOAM.matrices.solution.solution',
'solution_upgradeSolverDict' : 'Foam.src.OpenFOAM.matrices.solution.solution_upgradeSolverDict',
'ext_solution' : 'Foam.src.OpenFOAM.matrices.ext_solution.ext_solution',
'PtrList_entry' : 'Foam.src.OpenFOAM.containers.Lists.PtrList.PtrList_entry.PtrList_entry',
'PtrList_TypeHolder' : 'Foam.src.OpenFOAM.containers.Lists.PtrList.PtrList_GenericType.PtrList_TypeHolder',
'PtrList_TypeBase' : 'Foam.src.OpenFOAM.containers.Lists.PtrList.PtrList_GenericType.PtrList_TypeBase',
'autoPtr_PtrList_TypeHolder' : 'Foam.src.OpenFOAM.containers.Lists.PtrList.PtrList_GenericType.autoPtr_PtrList_TypeHolder',
'PtrList_INewBase' : 'Foam.src.OpenFOAM.containers.Lists.PtrList.PtrList_GenericINew.PtrList_INewBase',
'PtrList_INewHolder' : 'Foam.src.OpenFOAM.containers.Lists.PtrList.PtrList_GenericINew.PtrList_INewHolder',
'PtrList_Generic' : 'Foam.src.OpenFOAM.containers.Lists.PtrList.PtrList_Generic.PtrList_Generic',
'uniformDimensionedVectorField' : 'Foam.src.OpenFOAM.fields.UniformDimensionedFields.UniformDimensionedVectorField.uniformDimensionedVectorField',
'PtrList_uniformDimensionedVectorField' : 'Foam.src.OpenFOAM.containers.Lists.PtrList.PtrList_UniformDimensionedVectorField.PtrList_uniformDimensionedVectorField',
'HashPtrTable_IOobject_word_string_hash' : 'Foam.src.OpenFOAM.containers.HashTables.HashPtrTable.HashPtrTable_IOobject_word_string_hash.HashPtrTable_IOobject_word_string_hash',
'HashTable_int_word_string_hash' : 'Foam.src.OpenFOAM.containers.HashTables.HashTable.HashTable_int_word_string_hash.HashTable_int_word_string_hash',
'autoPtr_polyPatch' : 'Foam.src.OpenFOAM.fields.tmp.autoPtr_polyPatch.autoPtr_polyPatch',
'polyPatch' : 'Foam.src.OpenFOAM.fields.tmp.autoPtr_polyPatch.polyPatch',
'scalarField' : 'Foam.src.OpenFOAM.fields.Fields.primitiveFields.scalarField',
'vectorField' : 'Foam.src.OpenFOAM.fields.Fields.primitiveFields.vectorField',
'sphericalTensorField' : 'Foam.src.OpenFOAM.fields.Fields.primitiveFields.sphericalTensorField',
'symmTensorField' : 'Foam.src.OpenFOAM.fields.Fields.primitiveFields.symmTensorField',
'tensorField' : 'Foam.src.OpenFOAM.fields.Fields.primitiveFields.tensorField',
'tmp_scalarField' : 'Foam.src.OpenFOAM.fields.Fields.primitiveFields.tmp_scalarField',
'tmp_vectorField' : 'Foam.src.OpenFOAM.fields.Fields.primitiveFields.tmp_vectorField',
'tmp_sphericalTensorField' : 'Foam.src.OpenFOAM.fields.Fields.primitiveFields.tmp_sphericalTensorField',
'tmp_symmTensorField' : 'Foam.src.OpenFOAM.fields.Fields.primitiveFields.tmp_symmTensorField',
'tmp_tensorField' : 'Foam.src.OpenFOAM.fields.Fields.primitiveFields.tmp_tensorField',
'IStringStream' : 'Foam.src.OpenFOAM.db.IOstreams.StringStreams.IStringStream.IStringStream',
'ITstream' : 'Foam.src.OpenFOAM.db.IOstreams.ITstream.ITstream',
'mapDistribute' : 'Foam.src.OpenFOAM.meshes.polyMesh.mapPolyMesh.mapDistribute.mapDistribute.mapDistribute',
'boolList' : 'Foam.src.OpenFOAM.containers.Lists.List.List_bool.List_bool',
'labelList' : 'Foam.src.OpenFOAM.containers.Lists.List.List_label.List_label',
'scalarList' : 'Foam.src.OpenFOAM.containers.Lists.List.List_scalar.List_scalar',
'tensorList' : 'Foam.src.OpenFOAM.containers.Lists.List.List_tensor.List_tensor',
'tokenList' : 'Foam.src.OpenFOAM.containers.Lists.List.List_token.List_token',
'vectorList' : 'Foam.src.OpenFOAM.containers.Lists.List.List_vector.List_vector',
'wordList' : 'Foam.src.OpenFOAM.containers.Lists.List.List_word.List_word',
'complexList' : 'Foam.src.OpenFOAM.containers.Lists.List.List_complex.List_complex',
'cellList' : 'Foam.src.OpenFOAM.containers.Lists.List.List_cell.List_cell',
'faceList' : 'Foam.src.OpenFOAM.containers.Lists.List.List_face.List_face',
'polyPatchListPtr' : 'Foam.src.OpenFOAM.containers.Lists.List.List_polyPatchPtr.List_polyPatchPtr',
'wordHashTable' : 'Foam.src.OpenFOAM.containers.HashTables.HashTable.HashTable_word_word_string_hash.HashTable_word_word_string_hash',
'I' : 'Foam.src.OpenFOAM.primitives.sphericalTensor.I',
'mag' : abs,
'oneField' : 'Foam.src.OpenFOAM.fields.Fields.oneField.oneField',
'oneFieldField' : 'Foam.src.OpenFOAM.fields.FieldFields.oneFieldField.oneFieldField',
'geometricOneField' : 'Foam.src.OpenFOAM.fields.GeometricFields.geometricOneField.geometricOneField',
'complexField' : 'Foam.src.OpenFOAM.fields.Fields.complexFields.complexField',
'ReImSum' : 'Foam.src.OpenFOAM.fields.Fields.complexFields.ReImSum',
'complexVectorField' : 'Foam.src.OpenFOAM.fields.Fields.complexFields.complexVectorField',
'tmp_complexField' : 'Foam.src.OpenFOAM.fields.tmp.tmp_complexField.tmp_complexField',
'tmp_complexVectorField' : 'Foam.src.OpenFOAM.fields.tmp.tmp_complexVectorField.tmp_complexVectorField',
'coordinateSystem' : 'Foam.src.meshTools.coordinateSystems.coordinateSystem.coordinateSystem',
'coordinateSystems' : 'Foam.src.meshTools.coordinateSystems.coordinateSystems.coordinateSystems',
'setRootCase' : 'Foam.OpenFOAM.include.setRootCase',
'createTime' : 'Foam.OpenFOAM.include.createTime',
'createMesh' : 'Foam.OpenFOAM.include.createMesh',
'createMeshNoClear' : 'Foam.OpenFOAM.include.createMeshNoClear',
'token' : 'Foam.src.OpenFOAM.db.IOstreams.ref_token.token' }
| gpl-3.0 | -5,322,187,146,659,395,000 | 84.019608 | 193 | 0.656596 | false |
diogo149/CauseEffectPairsPaper | score.py | 1 | 2288 | #!/bin/env python
import sys
import os
from sklearn.ensemble import GradientBoostingRegressor
from sklearn.linear_model import SGDRegressor
from boomlet.storage import joblib_load
from boomlet.transform.preprocessing import PercentileScaler
from autocause.challenge import target_score
CONFIGS_FOLDER = 'configs'
RESULTS_FILE = "results.txt"
CV_PERCENT = 0.2
y = None
def score(data, clf):
cv_size = int(y.shape[0] * CV_PERCENT)
train, valid = data[:-cv_size], data[-cv_size:]
    # reads the module-level global y
y_train, y_valid = y[:-cv_size], y[-cv_size:]
clf.fit(train, y_train)
pred = clf.predict(valid)
return target_score(y_valid, pred)
def gbm_score(data):
# using gbm settings optimized for speed
gbm = GradientBoostingRegressor(max_depth=2, max_features='sqrt', random_state=0)
return score(data, gbm)
def sgd_score(data):
# huber loss to prevent over/under-flow
sgd = SGDRegressor(loss='huber', random_state=0, n_iter=20, shuffle=True)
# squashing to prevent very high values from taking over
return score(PercentileScaler(squash=True).fit_transform(data), sgd)
def write_score(filename):
assert filename.endswith(".py.pkl")
assert filename.startswith(CONFIGS_FOLDER)
# initializing global y
global y
if y is None:
y = joblib_load("y.pkl")
data = joblib_load(filename)
assert data.shape[0] == y.shape[0]
gbm = gbm_score(data)
sgd = sgd_score(data)
results = "\t".join(map(str, [filename, data.shape[1], gbm, sgd]))
print(results)
with open(RESULTS_FILE, 'a') as outfile:
outfile.write(results)
outfile.write('\n')
def write_scores():
configs = os.listdir(CONFIGS_FOLDER)
pickles = filter(lambda x: x.endswith(".py.pkl"), configs)
pickle_paths = [os.path.join(CONFIGS_FOLDER, x) for x in pickles]
try:
with open(RESULTS_FILE) as infile:
lines = infile.readlines()
except IOError:
lines = []
old_paths = set([x.split('\t')[0] for x in lines])
new_paths = filter(lambda x: x not in old_paths, pickle_paths)
map(write_score, new_paths)
if __name__ == "__main__":
if len(sys.argv) > 1:
target = sys.argv[1]
write_score(target)
else:
write_scores()
| mit | 4,506,936,274,967,577,600 | 26.902439 | 85 | 0.659528 | false |
domeav/sonofages-agenda | agenda/events.py | 1 | 2899 | from agenda.model import Occurrence, Event, Venue, Tag, EventTag
from agenda.forms import EventForm
from flask import render_template, request, redirect, url_for
from flask_security import login_required
from datetime import datetime
from agenda import app
@app.route('/')
@app.route('/events/')
def events():
page = request.args.get('p', 1)
target_date = request.args.get('d', datetime.now())
occurrences = Occurrence.select()\
.where(Occurrence.start >= target_date)\
.paginate(page, 30)
return render_template('agenda.html', occurrences=occurrences)
@app.route('/event/<event_id>')
def event(event_id):
event = Event.get(Event.id == event_id)
return render_template('event.html', event=event)
@app.route('/event/edit/<event_id>')
@app.route('/event/edit/')
def edit_event(event_id=None):
event = None
if event_id:
event = Event.get(Event.id == event_id)
form = EventForm(obj=event)
form.set_venues(Venue.select())
    return render_template('event_edit.html', form=form,
                           tags=Tag.select(),
                           eventtags={et.tag.id for et in event.eventtags} if event else set())
@app.route('/event/save/', methods=['POST'])
def save_event():
form = EventForm()
form.set_venues(Venue.select())
if not form.validate_on_submit():
return render_template('event_edit.html', form=form)
if form.id.data:
event = Event.get(Event.id == form.id.data)
else:
event = Event()
event.title = form.title.data
event.contact = form.contact.data
event.description = form.description.data
if not event.creation:
event.creation = datetime.now()
event.set_image(form.pic.data, form.pic.data.filename)
event.save()
for entry in form.occurrences.entries:
if entry.data['id']:
occurrence = Occurrence.get(Occurrence.id == entry.data['id'])
else:
occurrence = Occurrence()
occurrence.start = entry.data['start']
occurrence.end = entry.data['end']
occurrence.event = event
if entry.data['venue_id'] != 0:
occurrence.venue_id = entry.data['venue_id']
else:
occurrence.venue_id = None
occurrence.save()
if entry.data['delete_gig']:
occurrence.delete_instance()
existing_tags = { et.tag_id: et for et in event.eventtags }
for key, value in request.form.items():
if key.startswith('tag-'):
tag_id = int(value)
if tag_id not in existing_tags:
et = EventTag(event=event, tag_id=tag_id)
et.save()
else:
del(existing_tags[tag_id])
for key, value in existing_tags.items():
value.delete_instance()
return redirect(url_for('event', event_id=event.id))
| mit | 1,869,846,341,948,956,400 | 34.353659 | 75 | 0.601932 | false |
endthestart/tinsparrow | tinsparrow/tinsparrow/views.py | 1 | 1746 | import os
from django.contrib import messages
from django.contrib.auth import logout as logout_user
from django.shortcuts import render_to_response, redirect
from django.template import RequestContext
from django import http
from django.shortcuts import get_object_or_404
from django.views.generic import TemplateView
from .models import Song
from .forms import LoginForm
def songfile(request, song_id):
song = get_object_or_404(Song, id=song_id)
song_data = open(os.path.join(song.path, song.filename)).read()
return http.HttpResponse(song_data, content_type=song.content_type)
def login(request, template_name='login.html'):
if request.user.is_authenticated():
return redirect('/')
if request.method == "POST":
form = LoginForm(request.POST)
if form.login(request):
messages.success(request, "You have successfully logged in.")
return redirect(request.POST.get('next', '/'))
else:
messages.error(request, "Your username and password do not match.")
else:
form = LoginForm()
return render_to_response(template_name, {'form': form, }, RequestContext(request))
def logout(request):
logout_user(request)
messages.success(request, "You have successfully logged out.")
return redirect('login')
class LibraryView(TemplateView):
template_name = "tinsparrow/library.html"
def get_context_data(self, **kwargs):
context = super(LibraryView, self).get_context_data(**kwargs)
return context
class LayoutView(TemplateView):
template_name = "tinsparrow/layout.html"
def get_context_data(self, **kwargs):
context = super(LayoutView, self).get_context_data(**kwargs)
return context
| mit | 6,564,142,620,204,948,000 | 30.178571 | 87 | 0.699885 | false |
aptivate/sarpaminfohub | django/sarpaminfohub/contactlist/custom_fields.py | 1 | 15793 | # -*- coding: utf-8 -*-
from django.utils.translation import ugettext as _
from django.db import models
#adapted from http://www.djangosnippets.org/snippets/494/
#using UN country and 3 char code list from http://unstats.un.org/unsd/methods/m49/m49alpha.htm
#correct as of 17th October 2008
COUNTRIES = (
('AFG', _('Afghanistan')),
('ALA', _('Aland Islands')),
('ALB', _('Albania')),
('DZA', _('Algeria')),
('ASM', _('American Samoa')),
('AND', _('Andorra')),
('AGO', _('Angola')),
('AIA', _('Anguilla')),
('ATA', _('Antarctica')),
('ATG', _('Antigua and Barbuda')),
('ARG', _('Argentina')),
('ARM', _('Armenia')),
('ABW', _('Aruba')),
('AUS', _('Australia')),
('AUT', _('Austria')),
('AZE', _('Azerbaijan')),
('BHS', _('Bahamas')),
('BHR', _('Bahrain')),
('BGD', _('Bangladesh')),
('BRB', _('Barbados')),
('BLR', _('Belarus')),
('BEL', _('Belgium')),
('BLZ', _('Belize')),
('BEN', _('Benin')),
('BMU', _('Bermuda')),
('BTN', _('Bhutan')),
('BOL', _('Bolivia')),
('BIH', _('Bosnia and Herzegovina')),
('BWA', _('Botswana')),
('BRA', _('Brazil')),
('IOT', _('British Indian Ocean Territory')),
('VGB', _('British Virgin Islands')),
('BRN', _('Brunei Darussalam')),
('BGR', _('Bulgaria')),
('BFA', _('Burkina Faso')),
('BDI', _('Burundi')),
('KHM', _('Cambodia')),
('CMR', _('Cameroon')),
('CAN', _('Canada')),
('CPV', _('Cape Verde')),
('CYM', _('Cayman Islands')),
('CAF', _('Central African Republic')),
('TCD', _('Chad')),
('CIL', _('Channel Islands')),
('CHL', _('Chile')),
('CHN', _('China')),
('HKG', _('China - Hong Kong')),
('MAC', _('China - Macao')),
('CXR', _('Christmas Island')),
('CCK', _('Cocos (Keeling) Islands')),
('COL', _('Colombia')),
('COM', _('Comoros')),
('COG', _('Congo')),
('COK', _('Cook Islands')),
('CRI', _('Costa Rica')),
('CIV', _('Cote d\'Ivoire')),
('HRV', _('Croatia')),
('CUB', _('Cuba')),
('CYP', _('Cyprus')),
('CZE', _('Czech Republic')),
('PRK', _('Democratic People\'s Republic of Korea')),
('COD', _('Democratic Republic of the Congo')),
('DNK', _('Denmark')),
('DJI', _('Djibouti')),
('DMA', _('Dominica')),
('DOM', _('Dominican Republic')),
('ECU', _('Ecuador')),
('EGY', _('Egypt')),
('SLV', _('El Salvador')),
('GNQ', _('Equatorial Guinea')),
('ERI', _('Eritrea')),
('EST', _('Estonia')),
('ETH', _('Ethiopia')),
('FRO', _('Faeroe Islands')),
('FLK', _('Falkland Islands (Malvinas)')),
('FJI', _('Fiji')),
('FIN', _('Finland')),
('FRA', _('France')),
('GUF', _('French Guiana')),
('PYF', _('French Polynesia')),
('ATF', _('French Southern Territories')),
('GAB', _('Gabon')),
('GMB', _('Gambia')),
('GEO', _('Georgia')),
('DEU', _('Germany')),
('GHA', _('Ghana')),
('GIB', _('Gibraltar')),
('GRC', _('Greece')),
('GRL', _('Greenland')),
('GRD', _('Grenada')),
('GLP', _('Guadeloupe')),
('GUM', _('Guam')),
('GTM', _('Guatemala')),
('GGY', _('Guernsey')),
('GIN', _('Guinea')),
('GNB', _('Guinea-Bissau')),
('GUY', _('Guyana')),
('HTI', _('Haiti')),
('VAT', _('Holy See (Vatican City)')),
('HND', _('Honduras')),
('HUN', _('Hungary')),
('ISL', _('Iceland')),
('IND', _('India')),
('IDN', _('Indonesia')),
('IRN', _('Iran')),
('IRQ', _('Iraq')),
('IRL', _('Ireland')),
('IMN', _('Isle of Man')),
('ISR', _('Israel')),
('ITA', _('Italy')),
('JAM', _('Jamaica')),
('JPN', _('Japan')),
('JEY', _('Jersey')),
('JOR', _('Jordan')),
('KAZ', _('Kazakhstan')),
('KEN', _('Kenya')),
('KIR', _('Kiribati')),
('KWT', _('Kuwait')),
('KGZ', _('Kyrgyzstan')),
('LAO', _('Lao People\'s Democratic Republic')),
('LVA', _('Latvia')),
('LBN', _('Lebanon')),
('LSO', _('Lesotho')),
('LBR', _('Liberia')),
('LBY', _('Libyan Arab Jamahiriya')),
('LIE', _('Liechtenstein')),
('LTU', _('Lithuania')),
('LUX', _('Luxembourg')),
('MKD', _('Macedonia')),
('MDG', _('Madagascar')),
('MWI', _('Malawi')),
('MYS', _('Malaysia')),
('MDV', _('Maldives')),
('MLI', _('Mali')),
('MLT', _('Malta')),
('MHL', _('Marshall Islands')),
('MTQ', _('Martinique')),
('MRT', _('Mauritania')),
('MUS', _('Mauritius')),
('MYT', _('Mayotte')),
('MEX', _('Mexico')),
('FSM', _('Micronesia, Federated States of')),
('MCO', _('Monaco')),
('MNG', _('Mongolia')),
('MNE', _('Montenegro')),
('MSR', _('Montserrat')),
('MAR', _('Morocco')),
('MOZ', _('Mozambique')),
('MMR', _('Myanmar')),
('NAM', _('Namibia')),
('NRU', _('Nauru')),
('NPL', _('Nepal')),
('NLD', _('Netherlands')),
('ANT', _('Netherlands Antilles')),
('NCL', _('New Caledonia')),
('NZL', _('New Zealand')),
('NIC', _('Nicaragua')),
('NER', _('Niger')),
('NGA', _('Nigeria')),
('NIU', _('Niue')),
('NFK', _('Norfolk Island')),
('MNP', _('Northern Mariana Islands')),
('NOR', _('Norway')),
('PSE', _('Occupied Palestinian Territory')),
('OMN', _('Oman')),
('PAK', _('Pakistan')),
('PLW', _('Palau')),
('PAN', _('Panama')),
('PNG', _('Papua New Guinea')),
('PRY', _('Paraguay')),
('PER', _('Peru')),
('PHL', _('Philippines')),
('PCN', _('Pitcairn')),
('POL', _('Poland')),
('PRT', _('Portugal')),
('PRI', _('Puerto Rico')),
('QAT', _('Qatar')),
('KOR', _('Republic of Korea')),
('MDA', _('Republic of Moldova')),
('REU', _('Reunion')),
('ROU', _('Romania')),
('RUS', _('Russian Federation')),
('RWA', _('Rwanda')),
('BLM', _('Saint-Barthelemy')),
('SHN', _('Saint Helena')),
('KNA', _('Saint Kitts and Nevis')),
('LCA', _('Saint Lucia')),
('MAF', _('Saint-Martin (French part)')),
('SPM', _('Saint Pierre and Miquelon')),
('VCT', _('Saint Vincent and the Grenadines')),
('WSM', _('Samoa')),
('SMR', _('San Marino')),
('STP', _('Sao Tome and Principe')),
('SAU', _('Saudi Arabia')),
('SEN', _('Senegal')),
('SRB', _('Serbia')),
('SYC', _('Seychelles')),
('SLE', _('Sierra Leone')),
('SGP', _('Singapore')),
('SVK', _('Slovakia')),
('SVN', _('Slovenia')),
('SLB', _('Solomon Islands')),
('SOM', _('Somalia')),
('ZAF', _('South Africa')),
('ESP', _('Spain')),
('LKA', _('Sri Lanka')),
('SDN', _('Sudan')),
('SUR', _('Suriname')),
('SJM', _('Svalbard and Jan Mayen Islands')),
('SWZ', _('Swaziland')),
('SWE', _('Sweden')),
('CHE', _('Switzerland')),
('SYR', _('Syrian Arab Republic')),
('TWN', _('Taiwan')),
('TJK', _('Tajikistan')),
('THA', _('Thailand')),
('TLS', _('Timor-Leste')),
('TGO', _('Togo')),
('TKL', _('Tokelau')),
('TON', _('Tonga')),
('TTO', _('Trinidad and Tobago')),
('TUN', _('Tunisia')),
('TUR', _('Turkey')),
('TKM', _('Turkmenistan')),
('TCA', _('Turks and Caicos Islands')),
('TUV', _('Tuvalu')),
('UGA', _('Uganda')),
('UKR', _('Ukraine')),
('ARE', _('United Arab Emirates')),
('GBR', _('United Kingdom')),
('TZA', _('United Republic of Tanzania')),
('USA', _('United States of America')),
('VIR', _('United States Virgin Islands')),
('URY', _('Uruguay')),
('UZB', _('Uzbekistan')),
('VUT', _('Vanuatu')),
('VEN', _('Venezuela (Bolivarian Republic of)')),
('VNM', _('Viet Nam')),
('WLF', _('Wallis and Futuna Islands')),
('ESH', _('Western Sahara')),
('YEM', _('Yemen')),
('ZMB', _('Zambia')),
('ZWE', _('Zimbabwe')),
)
COUNTRY_DICT = {
u'Afghanistan': 'AFG',
u'Aland Islands': 'ALA',
u'Albania': 'ALB',
u'Algeria': 'DZA',
u'American Samoa': 'ASM',
u'Andorra': 'AND',
u'Angola': 'AGO',
u'Anguilla': 'AIA',
u'Antarctica' : 'ATA',
u'Antigua and Barbuda': 'ATG',
u'Argentina': 'ARG',
u'Armenia': 'ARM',
u'Aruba': 'ABW',
u'Australia': 'AUS',
u'Austria': 'AUT',
u'Azerbaijan': 'AZE',
u'Bahamas': 'BHS',
u'Bahrain': 'BHR',
u'Bangladesh': 'BGD',
u'Barbados': 'BRB',
u'Belarus': 'BLR',
u'Belgium': 'BEL',
u'Belize': 'BLZ',
u'Benin': 'BEN',
u'Bermuda': 'BMU',
u'Bhutan': 'BTN',
u'Bolivia': 'BOL',
u'Bosnia and Herzegovina': 'BIH',
u'Botswana': 'BWA',
u'Brazil': 'BRA',
u'British Indian Ocean Territory': 'IOT',
u'British Virgin Islands': 'VGB',
u'Brunei Darussalam': 'BRN',
u'Bulgaria': 'BGR',
u'Burkina Faso': 'BFA',
u'Burundi': 'BDI',
u'Cambodia': 'KHM',
u'Cameroon': 'CMR',
u'Canada': 'CAN',
u'Cape Verde': 'CPV',
u'Cayman Islands': 'CYM',
u'Central African Republic': 'CAF',
u'Chad': 'TCD',
u'Channel Islands': 'CIL',
u'Chile': 'CHL',
u'China': 'CHN',
u'China - Hong Kong': 'HKG',
u'China - Macao': 'MAC',
u'Christmas Island': 'CXR',
u'Cocos (Keeling) Islands': 'CCK',
u'Colombia': 'COL',
u'Comoros': 'COM',
u'Congo': 'COG',
u'Cook Islands': 'COK',
u'Costa Rica': 'CRI',
u"Cote d'Ivoire": 'CIV',
u"Cote D'Ivoire (Ivory Coast)" : 'CIV',
u'Croatia': 'HRV',
u'Cuba': 'CUB',
u'Cyprus': 'CYP',
u'Czech Republic': 'CZE',
u"Democratic People's Republic of Korea": 'PRK',
u'Democratic Republic of the Congo': 'COD',
u'Denmark': 'DNK',
u'Djibouti': 'DJI',
u'Dominica': 'DMA',
u'Dominican Republic': 'DOM',
u'East Timor': 'TLS',
u'Ecuador': 'ECU',
u'Egypt': 'EGY',
u'El Salvador': 'SLV',
u'Equatorial Guinea': 'GNQ',
u'Eritrea': 'ERI',
u'Estonia': 'EST',
u'Ethiopia': 'ETH',
u'Faeroe Islands': 'FRO',
u'Falkland Islands (Malvinas)': 'FLK',
u'Faroe Islands': 'FRO',
u'Federated States of Micronesia': 'FSM',
u'Fiji': 'FJI',
u'Finland': 'FIN',
u'France': 'FRA',
u'French Guiana': 'GUF',
u'French Polynesia': 'PYF',
u'French Southern Territories': 'ATF',
u'Gabon': 'GAB',
u'Gambia': 'GMB',
u'Georgia': 'GEO',
u'Germany': 'DEU',
u'Ghana': 'GHA',
u'Gibraltar': 'GIB',
u'Greece': 'GRC',
u'Greenland': 'GRL',
u'Grenada': 'GRD',
u'Guadeloupe': 'GLP',
u'Guam': 'GUM',
u'Guatemala': 'GTM',
u'Guernsey': 'GGY',
u'Guinea': 'GIN',
u'Guinea-Bissau': 'GNB',
u'Guyana': 'GUY',
u'Haiti': 'HTI',
u'Holy See (Vatican City)': 'VAT',
u'Honduras': 'HND',
u'Hong Kong': 'HKG',
u'Hungary': 'HUN',
u'Iceland': 'ISL',
u'India': 'IND',
u'Indonesia': 'IDN',
u'Iran': 'IRN',
u'Iraq': 'IRQ',
u'Ireland': 'IRL',
u'Isle of Man': 'IMN',
u'Israel': 'ISR',
u'Italy': 'ITA',
u'Jamaica': 'JAM',
u'Japan': 'JPN',
u'Jersey': 'JEY',
u'Jordan': 'JOR',
u'Kazakhstan': 'KAZ',
u'Kenya': 'KEN',
u'Kiribati': 'KIR',
u'Korea': 'KOR',
u"Korea (North)": 'PRK',
u'Kuwait': 'KWT',
u'Kyrgyzstan': 'KGZ',
u"Lao People's Democratic Republic": 'LAO',
u"Laos": 'LAO',
u'Latvia': 'LVA',
u'Lebanon': 'LBN',
u'Lesotho': 'LSO',
u'Liberia': 'LBR',
u'Libya': 'LBY',
u'Libyan Arab Jamahiriya': 'LBY',
u'Liechtenstein': 'LIE',
u'Lithuania': 'LTU',
u'Luxembourg': 'LUX',
u'Macao': 'MAC',
u'Macedonia': 'MKD',
u'Madagascar': 'MDG',
u'Malawi': 'MWI',
u'Malaysia': 'MYS',
u'Maldives': 'MDV',
u'Mali': 'MLI',
u'Malta': 'MLT',
u'Marshall Islands': 'MHL',
u'Martinique': 'MTQ',
u'Mauritania': 'MRT',
u'Mauritius': 'MUS',
u'Mayotte': 'MYT',
u'Mexico': 'MEX',
u'Micronesia, Federated States of': 'FSM',
u'Moldova': 'MDA',
u'Monaco': 'MCO',
u'Mongolia': 'MNG',
u'Montenegro': 'MNE',
u'Montserrat': 'MSR',
u'Morocco': 'MAR',
u'Mozambique': 'MOZ',
u'Myanmar': 'MMR',
u'Namibia': 'NAM',
u'Nauru': 'NRU',
u'Nepal': 'NPL',
u'Netherlands': 'NLD',
u'Netherlands Antilles': 'ANT',
u'New Caledonia': 'NCL',
u'New Zealand': 'NZL',
u'Nicaragua': 'NIC',
u'Niger': 'NER',
u'Nigeria': 'NGA',
u'Niue': 'NIU',
u'Norfolk Island': 'NFK',
u'Northern Mariana Islands': 'MNP',
u'Norway': 'NOR',
u'Occupied Palestinian Territory': 'PSE',
u'Oman': 'OMN',
u'Pakistan': 'PAK',
u'Palau': 'PLW',
u'Palestinian Territory': 'PSE',
u'Panama': 'PAN',
u'Papua New Guinea': 'PNG',
u'Paraguay': 'PRY',
u'Peru': 'PER',
u'Philippines': 'PHL',
u'Pitcairn': 'PCN',
u'Poland': 'POL',
u'Portugal': 'PRT',
u'Puerto Rico': 'PRI',
u'Qatar': 'QAT',
u'Republic of Korea': 'KOR',
u'Republic of Moldova': 'MDA',
u'Reunion': 'REU',
u'Romania': 'ROU',
u'Russian Federation': 'RUS',
u'Rwanda': 'RWA',
u'Saint Helena': 'SHN',
u'Saint Kitts and Nevis': 'KNA',
u'Saint Lucia': 'LCA',
u'Saint Pierre and Miquelon': 'SPM',
u'Saint Vincent and the Grenadines': 'VCT',
u'Saint-Barthelemy': 'BLM',
u'Saint-Martin (French part)': 'MAF',
u'Samoa': 'WSM',
u'San Marino': 'SMR',
u'Sao Tome and Principe': 'STP',
u'Saudi Arabia': 'SAU',
u'Senegal': 'SEN',
u'Serbia': 'SRB',
u'Seychelles': 'SYC',
u'Sierra Leone': 'SLE',
u'Singapore': 'SGP',
u'Slovak Republic': 'SVK',
u'Slovakia': 'SVK',
u'Slovenia': 'SVN',
u'Solomon Islands': 'SLB',
u'Somalia': 'SOM',
u'South Africa': 'ZAF',
u'Spain': 'ESP',
u'Sri Lanka': 'LKA',
u'Sudan': 'SDN',
u'Suriname': 'SUR',
u'Svalbard and Jan Mayen': 'SJM',
u'Svalbard and Jan Mayen Islands': 'SJM',
u'Swaziland': 'SWZ',
u'Sweden': 'SWE',
u'Switzerland': 'CHE',
u'Syria': 'SYR',
u'Syrian Arab Republic': 'SYR',
u'Tajikistan': 'TJK',
u'Tanzania' : 'TZA',
u'Taiwan' : 'TWN',
u'Thailand': 'THA',
u'Timor-Leste': 'TLS',
u'Togo': 'TGO',
u'Tokelau': 'TKL',
u'Tonga': 'TON',
u'Trinidad and Tobago': 'TTO',
u'Tunisia': 'TUN',
u'Turkey': 'TUR',
u'Turkmenistan': 'TKM',
u'Turks and Caicos Islands': 'TCA',
u'Tuvalu': 'TUV',
u'Uganda': 'UGA',
u'Ukraine': 'UKR',
u'United Arab Emirates': 'ARE',
u'United Kingdom': 'GBR',
u'United Republic of Tanzania': 'TZA',
u'United States Virgin Islands': 'VIR',
u'United States': 'USA',
u'United States of America': 'USA',
u'Uruguay': 'URY',
u'Uzbekistan': 'UZB',
u'Vanuatu': 'VUT',
u'Vatican City State (Holy See)': 'VAT',
u'Venezuela': 'VEN',
u'Venezuela (Bolivarian Republic of)': 'VEN',
u'Viet Nam': 'VNM',
u'Vietnam': 'VNM',
u'Virgin Islands (British)': 'VGB',
u'Virgin Islands (U.S.)': 'VIR',
u'Wallis and Futuna': 'WLF',
u'Wallis and Futuna Islands': 'WLF',
u'Western Sahara': 'ESH',
u'Yemen': 'YEM',
u'Zambia': 'ZMB',
u'Zimbabwe': 'ZWE'
}
class CountryField(models.CharField):
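    """CharField storing a 3-letter country code, defaulting to the COUNTRIES choices above."""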
def __init__(self, *args, **kwargs):
kwargs.setdefault('max_length', 3)
kwargs.setdefault('choices', COUNTRIES)
super(CountryField, self).__init__(*args, **kwargs)
def get_internal_type(self):
return "CharField" | gpl-3.0 | 834,791,712,647,451,800 | 27.68985 | 95 | 0.467866 | false |
pschmitt/home-assistant | homeassistant/components/hunterdouglas_powerview/__init__.py | 2 | 6607 | """The Hunter Douglas PowerView integration."""
import asyncio
from datetime import timedelta
import logging
from aiopvapi.helpers.aiorequest import AioRequest
from aiopvapi.helpers.constants import ATTR_ID
from aiopvapi.helpers.tools import base64_to_unicode
from aiopvapi.rooms import Rooms
from aiopvapi.scenes import Scenes
from aiopvapi.shades import Shades
from aiopvapi.userdata import UserData
import async_timeout
import voluptuous as vol
from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry
from homeassistant.const import CONF_HOST
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers.aiohttp_client import async_get_clientsession
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from .const import (
COORDINATOR,
DEVICE_FIRMWARE,
DEVICE_INFO,
DEVICE_MAC_ADDRESS,
DEVICE_MODEL,
DEVICE_NAME,
DEVICE_REVISION,
DEVICE_SERIAL_NUMBER,
DOMAIN,
FIRMWARE_BUILD,
FIRMWARE_IN_USERDATA,
FIRMWARE_SUB_REVISION,
HUB_EXCEPTIONS,
HUB_NAME,
LEGACY_DEVICE_BUILD,
LEGACY_DEVICE_MODEL,
LEGACY_DEVICE_REVISION,
LEGACY_DEVICE_SUB_REVISION,
MAC_ADDRESS_IN_USERDATA,
MAINPROCESSOR_IN_USERDATA_FIRMWARE,
MODEL_IN_MAINPROCESSOR,
PV_API,
PV_ROOM_DATA,
PV_SCENE_DATA,
PV_SHADE_DATA,
PV_SHADES,
REVISION_IN_MAINPROCESSOR,
ROOM_DATA,
SCENE_DATA,
SERIAL_NUMBER_IN_USERDATA,
SHADE_DATA,
USER_DATA,
)
PARALLEL_UPDATES = 1
DEVICE_SCHEMA = vol.Schema(
{DOMAIN: vol.Schema({vol.Required(CONF_HOST): cv.string})}, extra=vol.ALLOW_EXTRA
)
def _has_all_unique_hosts(value):
"""Validate that each hub configured has a unique host."""
hosts = [device[CONF_HOST] for device in value]
schema = vol.Schema(vol.Unique())
schema(hosts)
return value
CONFIG_SCHEMA = vol.Schema(
{DOMAIN: vol.All(cv.ensure_list, [DEVICE_SCHEMA], _has_all_unique_hosts)},
extra=vol.ALLOW_EXTRA,
)
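# Example YAML accepted by CONFIG_SCHEMA above (a sketch only, assuming the
# DOMAIN constant imported from .const is "hunterdouglas_powerview" as the
# component path suggests; each hub entry must use a unique host):
#
#   hunterdouglas_powerview:
#     - host: 192.168.1.25
#     - host: 192.168.1.26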
PLATFORMS = ["cover", "scene", "sensor"]
_LOGGER = logging.getLogger(__name__)
async def async_setup(hass: HomeAssistant, hass_config: dict):
"""Set up the Hunter Douglas PowerView component."""
hass.data.setdefault(DOMAIN, {})
if DOMAIN not in hass_config:
return True
for conf in hass_config[DOMAIN]:
hass.async_create_task(
hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_IMPORT}, data=conf
)
)
return True
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry):
"""Set up Hunter Douglas PowerView from a config entry."""
config = entry.data
hub_address = config.get(CONF_HOST)
websession = async_get_clientsession(hass)
pv_request = AioRequest(hub_address, loop=hass.loop, websession=websession)
try:
async with async_timeout.timeout(10):
device_info = await async_get_device_info(pv_request)
async with async_timeout.timeout(10):
rooms = Rooms(pv_request)
room_data = _async_map_data_by_id((await rooms.get_resources())[ROOM_DATA])
async with async_timeout.timeout(10):
scenes = Scenes(pv_request)
scene_data = _async_map_data_by_id(
(await scenes.get_resources())[SCENE_DATA]
)
async with async_timeout.timeout(10):
shades = Shades(pv_request)
shade_data = _async_map_data_by_id(
(await shades.get_resources())[SHADE_DATA]
)
except HUB_EXCEPTIONS:
_LOGGER.error("Connection error to PowerView hub: %s", hub_address)
raise ConfigEntryNotReady
if not device_info:
_LOGGER.error("Unable to initialize PowerView hub: %s", hub_address)
raise ConfigEntryNotReady
async def async_update_data():
"""Fetch data from shade endpoint."""
async with async_timeout.timeout(10):
shade_entries = await shades.get_resources()
if not shade_entries:
raise UpdateFailed("Failed to fetch new shade data.")
return _async_map_data_by_id(shade_entries[SHADE_DATA])
coordinator = DataUpdateCoordinator(
hass,
_LOGGER,
name="powerview hub",
update_method=async_update_data,
update_interval=timedelta(seconds=60),
)
hass.data[DOMAIN][entry.entry_id] = {
PV_API: pv_request,
PV_ROOM_DATA: room_data,
PV_SCENE_DATA: scene_data,
PV_SHADES: shades,
PV_SHADE_DATA: shade_data,
COORDINATOR: coordinator,
DEVICE_INFO: device_info,
}
for component in PLATFORMS:
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(entry, component)
)
return True
async def async_get_device_info(pv_request):
"""Determine device info."""
userdata = UserData(pv_request)
resources = await userdata.get_resources()
userdata_data = resources[USER_DATA]
if FIRMWARE_IN_USERDATA in userdata_data:
main_processor_info = userdata_data[FIRMWARE_IN_USERDATA][
MAINPROCESSOR_IN_USERDATA_FIRMWARE
]
else:
# Legacy devices
main_processor_info = {
REVISION_IN_MAINPROCESSOR: LEGACY_DEVICE_REVISION,
FIRMWARE_SUB_REVISION: LEGACY_DEVICE_SUB_REVISION,
FIRMWARE_BUILD: LEGACY_DEVICE_BUILD,
MODEL_IN_MAINPROCESSOR: LEGACY_DEVICE_MODEL,
}
return {
DEVICE_NAME: base64_to_unicode(userdata_data[HUB_NAME]),
DEVICE_MAC_ADDRESS: userdata_data[MAC_ADDRESS_IN_USERDATA],
DEVICE_SERIAL_NUMBER: userdata_data[SERIAL_NUMBER_IN_USERDATA],
DEVICE_REVISION: main_processor_info[REVISION_IN_MAINPROCESSOR],
DEVICE_FIRMWARE: main_processor_info,
DEVICE_MODEL: main_processor_info[MODEL_IN_MAINPROCESSOR],
}
@callback
def _async_map_data_by_id(data):
"""Return a dict with the key being the id for a list of entries."""
return {entry[ATTR_ID]: entry for entry in data}
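# For example (illustrative data; ATTR_ID comes from aiopvapi and is assumed
# here to be the literal key "id"):
#   _async_map_data_by_id([{"id": 7, "type": 3}, {"id": 9, "type": 8}])
#   -> {7: {"id": 7, "type": 3}, 9: {"id": 9, "type": 8}}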
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry):
"""Unload a config entry."""
unload_ok = all(
await asyncio.gather(
*[
hass.config_entries.async_forward_entry_unload(entry, component)
for component in PLATFORMS
]
)
)
if unload_ok:
hass.data[DOMAIN].pop(entry.entry_id)
return unload_ok
| apache-2.0 | -8,324,187,766,200,835,000 | 28.895928 | 88 | 0.661268 | false |
unioslo/cerebrum | Cerebrum/modules/no/uio/AutoStud/Select.py | 1 | 21951 | # -*- coding: utf-8 -*-
# Copyright 2019 University of Oslo, Norway
#
# This file is part of Cerebrum.
#
# Cerebrum is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# Cerebrum is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Cerebrum; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""How this stuff works:
SelectTool.select_map_defs defines a mapping of studconfig.xml
select-tags to the corresponding matching class.
While parsing the studconfig.xml file,
ProfileDefinition.add_selection_criteria is called for each
select-criteria that points to that profile (selection_criterias).
Once parsing is complete, SelectTool iterates over all
ProfileDefinitions and calls SelectMap*.set_select_map where
appropriate, making SelectMap*._select_map look like:
{ <studieprogram="TVIJF">: <profile> }
"""
from __future__ import unicode_literals
import pprint
from six import python_2_unicode_compatible
pp = pprint.PrettyPrinter(indent=4)
@python_2_unicode_compatible
class SelectMapSuper(object):
"""SelectMap* provides the rules to match a <select> line in
studconfig.xml with a line from merged_persons.xml.
"""
def __init__(self):
self._select_map = {}
def __str__(self):
return "c={}, select_map: {}".format(
self.__class__.__name__, pp.pformat(self._select_map))
def _append_match(self, lst, profiles, nivakode=0):
if isinstance(profiles, (list, tuple)):
for p in profiles:
lst.append((p, nivakode))
else:
lst.append((profiles, nivakode))
def _normalize_nivakode(self, niva):
niva = int(niva)
if niva < 100: # Forkurs ol.
niva = 50
elif niva >= 100 and niva < 500: # Laveregrad, Cand.Mag, Bachelor
niva = 100
elif niva >= 500 and niva < 900: # Høyeregrad, Profesjon, hovedfag, master
niva = 500
elif niva >= 900: # PHD
niva = 900
return niva
@python_2_unicode_compatible
class SelectMapTag(SelectMapSuper):
"""Map studconfig.xml:
<select><aktiv studieprogram="JFM5-RV"/></select>
To:
<person><aktiv studieprogramkode="JFM5-RV"
studieretningkode="LATAM"/></person>
"""
def __init__(self, config_attr, match_tag, match_attr):
super(SelectMapTag, self).__init__()
if not isinstance(config_attr, (list, tuple)):
config_attr = [config_attr]
if not isinstance(match_attr, (list, tuple)):
match_attr = [match_attr]
assert len(config_attr) == len(match_attr)
self._config_attr = config_attr
self._match_tag = match_tag
self._match_attr = match_attr
def _append_match(self, lst, profiles, pdta):
nivakode = 0
if self._match_attr[0] == 'studieprogramkode':
nivakode = self._normalize_nivakode(
self._pc.autostud.studieprogramkode2info.get(
pdta['studieprogramkode'], {}).get('studienivakode', 0))
super(SelectMapTag, self)._append_match(lst, profiles,
nivakode=nivakode)
def set_select_map(self, select_attrs, profile):
# Build mapping: _select_map[key] = profile where key is
# the attribute-name used in the merged_persons file and value
# is fetched from the corresponding attr from the
# studconfig.xml file. This way we won't need to know the
# config_attr any more. A resulting key may look like:
#
# (('studieprogramkode', 'MNM2-ANMAT'), ('studieretningskode', 'CSC'))
for s_attr in select_attrs:
key = tuple([(self._match_attr[n], s_attr[self._config_attr[n]])
for n in range(len(self._config_attr))
if s_attr.has_key(self._config_attr[n])])
self._select_map[key] = profile
def get_matches(self, person_info, member_groups=None, person_affs=None):
matches = []
# Iterate over all person-info of this _match_tag, and find
# corresponding entries from _select_map.
#
# To have a match, all entries in a _select_map key must match.
# If the person_info entry does not have an entry of the
# corresponding type, a '*' in _select_map is required for match.
for pdta in person_info.get(self._match_tag, []):
for select_attrs, profile in self._select_map.items():
n_matches = 0
for s_attr, s_val in select_attrs:
if s_val == '*' or pdta.get(s_attr, None) == s_val:
n_matches += 1
else:
# not match
break
if n_matches == len(select_attrs):
self._logger.debug("OK: %s -> %s", select_attrs, profile)
self._append_match(matches, profile, pdta)
return matches
def __str__(self):
return "config_attr: {}, match_tag: {}, match_attr: {}, {}".format(
self._config_attr, self._match_tag, self._match_attr,
super(SelectMapTag, self).__str__())
class SelectMapAktivtSted(SelectMapSuper):
def __init__(self):
super(SelectMapAktivtSted, self).__init__()
def _append_match(self, lst, profiles, fs_info):
nivakode = self._normalize_nivakode(fs_info.get('studienivakode', 0))
super(SelectMapAktivtSted, self)._append_match(lst, profiles,
nivakode=nivakode)
def _get_steder(self, institusjon, stedkode, scope):
ret = []
sko = self._pc.lookup_helper.get_stedkode(stedkode, institusjon)
if scope == 'sub':
ret.extend(self._pc.lookup_helper.get_all_child_sko(sko))
else:
ret.append(stedkode)
return ret
def set_select_map(self, select_attrs, profile):
"""build a mapping:
'150000:185:sub:100:499': {
'nivaa_max': '499',
'nivaa_min': '100',
'profiles': [ Profile object(MNF_Laveregrad)],
'steder': [ '150000', .... ]
}"""
for s_criteria in select_attrs:
tmp = ":".join((s_criteria['stedkode'],
s_criteria['institusjon'],
s_criteria['scope'],
s_criteria.get('nivaa_min', ''),
s_criteria.get('nivaa_max', '')))
tmp = self._select_map.setdefault(tmp, {})
tmp.setdefault('profiles', []).append(profile)
if not tmp.has_key('steder'):
tmp['steder'] = self._get_steder(
s_criteria['institusjon'],
s_criteria['stedkode'],
s_criteria['scope'])
tmp['nivaa_min'] = s_criteria.get('nivaa_min', None)
tmp['nivaa_max'] = s_criteria.get('nivaa_max', None)
def get_matches(self, person_info, member_groups=None, person_affs=None):
matches = []
for fs_infodict, match_tag, col_postfix in (
(self._pc.autostud.studieprogramkode2info,
'studieprogramkode', '_studieansv'),
(self._pc.autostud.emnekode2info,
'emnekode', '_reglement')):
# self._logger.debug("Check with %s" % match_tag)
for pdta in person_info.get('aktiv', []):
if not pdta.has_key(match_tag):
continue # emnekode not set for some aktiv tags.
try:
fs_info = fs_infodict[pdta[match_tag]]
except KeyError:
self._logger.error("Ukjent: %s in %s" % (
match_tag, pdta))
continue
sko = "%02i%02i%02i" % (int(fs_info['faknr%s' % col_postfix]),
int(fs_info[
'instituttnr%s' % col_postfix]),
int(fs_info[
'gruppenr%s' % col_postfix]))
# self._logger.debug("Is %s in %s?" % (sko, self._select_map.values()))
for select_attrs in self._select_map.values():
if not sko in select_attrs['steder']:
continue
if ((select_attrs['nivaa_min'] and
int(fs_info['studienivakode']) < int(
select_attrs['nivaa_min'])) or
(select_attrs['nivaa_max'] and
int(fs_info['studienivakode']) > int(
select_attrs['nivaa_max']))):
continue
self._append_match(matches, select_attrs['profiles'],
fs_info)
return matches
class SelectMapEmnestudSted(SelectMapAktivtSted):
def get_matches(self, person_info, member_groups=None, person_affs=None):
# print "SelectMapEmnestudSted.get_matches"
matches = []
for fs_infodict, match_tag, col_postfix in (
(self._pc.autostud.emnekode2info,
'emnekode', '_reglement'),):
# print "***%s" % match_tag
for pdta in person_info.get('emnestud', []):
# print "***emnestud"
if not pdta.has_key(match_tag):
continue # emnekode not set for some aktiv tags.
try:
fs_info = fs_infodict[pdta[match_tag]]
except KeyError:
self._logger.error("Ukjent: %s in %s" % (
match_tag, pdta))
continue
sko = "%02i%02i%02i" % (int(fs_info['faknr%s' % col_postfix]),
int(fs_info[
'instituttnr%s' % col_postfix]),
int(fs_info[
'gruppenr%s' % col_postfix]))
# self._logger.debug("Is %s in %s?" % (sko, self._select_map.values()))
for select_attrs in self._select_map.values():
if not sko in select_attrs['steder']:
continue
self._append_match(matches, select_attrs['profiles'],
fs_info)
return matches
class SelectMapTilbudSted(SelectMapAktivtSted):
def set_select_map(self, select_attrs, profile):
"""build a mapping:
'150000:185:sub:100:499': {
'profiles': [ Profile object(MNF_Laveregrad)],
'steder': [ '150000', .... ]
}"""
for s_criteria in select_attrs:
tmp = ":".join((s_criteria['stedkode'],
s_criteria['institusjon'],
s_criteria['scope']))
tmp = self._select_map.setdefault(tmp, {})
tmp.setdefault('profiles', []).append(profile)
if not tmp.has_key('steder'):
tmp['steder'] = self._get_steder(
s_criteria['institusjon'],
s_criteria['stedkode'],
s_criteria['scope'])
def get_matches(self, person_info, member_groups=None, person_affs=None):
matches = []
for fs_infodict, match_tag, col_postfix in (
(self._pc.autostud.studieprogramkode2info,
'studieprogramkode', '_studieansv'),):
for pdta in person_info.get('tilbud', []):
if not pdta.has_key(match_tag):
continue # emnekode not set for some aktiv tags.
try:
fs_info = fs_infodict[pdta[match_tag]]
except KeyError:
self._logger.error("Ukjent: %s in %s" % (
match_tag, pdta))
continue
sko = "%02i%02i%02i" % (int(fs_info['faknr%s' % col_postfix]),
int(fs_info[
'instituttnr%s' % col_postfix]),
int(fs_info[
'gruppenr%s' % col_postfix]))
# self._logger.debug("Is %s in %s?" % (sko, self._select_map.values()))
for select_attrs in self._select_map.values():
if not sko in select_attrs['steder']:
continue
self._append_match(matches, select_attrs['profiles'],
fs_info)
return matches
class SelectMapEvuSted(SelectMapAktivtSted):
def set_select_map(self, select_attrs, profile):
self._logger.debug("EVU Map: %s -> %s", select_attrs, profile)
super(SelectMapEvuSted, self).set_select_map(select_attrs, profile)
def get_matches(self, person_info, member_groups=None, person_affs=None):
matches = []
for entry in person_info.get('evu', []):
sko = "%02i%02i%02i" % (int(entry['faknr_adm_ansvar']),
int(entry['instituttnr_adm_ansvar']),
int(entry['gruppenr_adm_ansvar']))
for select_attrs in self._select_map.values():
if sko in select_attrs['steder']:
                    # TBD: is there a nivåkode (level code) we can use here?
super(SelectMapAktivtSted, self)._append_match(
matches, select_attrs['profiles'])
return matches
class SelectMapAny(SelectMapSuper):
def set_select_map(self, select_attrs, profile):
if len(select_attrs) > 0:
self._select_map.setdefault('ALL', []).append(profile)
def get_matches(self, person_info, member_groups=None, person_affs=None):
matches = []
for p in self._select_map.get('ALL', []):
self._append_match(matches, p)
return matches
class SelectMapGroupMember(SelectMapSuper):
def set_select_map(self, select_attrs, profile):
for s_attr in select_attrs:
group_id = self._pc.lookup_helper.get_group(s_attr['navn'])
self._select_map.setdefault(group_id, []).append(profile)
def get_matches(self, person_info, member_groups=None, person_affs=None):
matches = []
if not member_groups:
return matches
for g in member_groups:
if self._select_map.has_key(g):
self._append_match(matches, self._select_map[g])
return matches
class SelectMapPersonAffiliation(SelectMapSuper):
def set_select_map(self, select_attrs, profile):
self._logger.debug("Paff Map: %s -> %s" % (select_attrs, profile))
for s_attr in select_attrs:
affiliation = self._pc.autostud.co.PersonAffiliation(
s_attr['affiliation'])
if 'status' not in s_attr:
key = int(affiliation)
self._select_map.setdefault(key, []).append(profile)
else:
aff_status = self._pc.autostud.co.PersonAffStatus(
affiliation, s_attr['status'])
key = (int(affiliation), int(aff_status))
self._select_map.setdefault(key, []).append(profile)
def get_matches(self, person_info, member_groups=None, person_affs=None):
# person_affs are tuples from
# process_students/ExistingPerson.get_affiliations()
matches = []
if not person_affs:
return matches
# try keys `(aff, status)` and `aff`
for p_aff in ([(x[0], x[2]) for x in person_affs] +
[x[0] for x in person_affs]):
if self._select_map.has_key(p_aff):
self._append_match(matches, self._select_map[p_aff])
return matches
class SelectTool(object):
select_map_defs = {
"aktiv": SelectMapTag(['studieprogram', 'studieretning'],
'aktiv',
['studieprogramkode', 'studieretningkode']),
"evu": SelectMapTag(['etterutdkurskode'],
'evu',
['etterutdkurskode']),
"fagperson": SelectMapTag(['instituttnr', 'gruppenr', 'faknr'],
'fagperson',
['instituttnr', 'gruppenr', 'faknr']),
"tilbud": SelectMapTag(['studieprogram', 'studieretning'],
'tilbud',
['studieprogramkode', 'studieretningkode']),
"studierett": SelectMapTag(
['studieprogram', 'studieretning', 'status'],
'opptak',
['studieprogramkode', 'studieretningkode', 'status']),
"privatist_studieprogram": SelectMapTag(
['studieprogram', 'studieretning'],
'privatist_studieprogram',
['studieprogramkode', 'studieretningkode']),
"drgrad": SelectMapTag(['studieprogram'],
'drgrad',
['studieprogramkode']),
"emne": SelectMapTag('emnekode', 'eksamen', 'emnekode'),
"privatist_emne": SelectMapTag('emnekode', 'privatist_emne',
'emnekode'),
"aktivt_sted": SelectMapAktivtSted(),
"tilbud_sted": SelectMapTilbudSted(),
"emnestud_sted": SelectMapEmnestudSted(),
"evu_sted": SelectMapEvuSted(),
"medlem_av_gruppe": SelectMapGroupMember(),
"person_affiliation": SelectMapPersonAffiliation(),
"match_any": SelectMapAny()
}
def __init__(self, profiles, logger, profile_config):
"""Make all SelectMap* instances aware of the ProfileDefinition
instances that points to them"""
self._logger = logger
self._pc = profile_config
for smd in self.select_map_defs.values():
smd._logger = logger
smd._pc = profile_config
for p in profiles:
for select_name, select_attrs in p.selection_criterias.items():
self._logger.debug(
"S: %s -> %s" % (select_name, select_attrs))
sm_obj = self.select_map_defs[select_name]
sm_obj.set_select_map(select_attrs, p)
def _matches_sort(self, x, y):
"""Sort by nivaakode (highest first), then by profile"""
if (x[1] == y[1]):
return cmp(x[0], y[0])
return cmp(y[1], x[1])
def _unique_extend(self, tgt_list, values, profile_name, nivaakode=0):
"""Append all v in values to tgt_list iff they are not already
there. We also store the nivakode for the first time the
        value was seen. We store the name of all profiles that have
        this value at this nivåkode.
"""
if not isinstance(values, (tuple, list)):
values = (values,)
for item in values:
if item not in [x[0] for x in tgt_list]:
tgt_list.append((item, nivaakode, [profile_name]))
else:
for tmp_item, tmp_nivaakode, tmp_profiles in tgt_list:
if (tmp_item == item and tmp_nivaakode == nivaakode and
profile_name not in tmp_profiles):
tmp_profiles.append(profile_name)
def get_person_match(self, person_info, member_groups=None,
person_affs=None):
"""Returns a dict where each key is a configuration item, such
as 'disk' for <disk>, and the value is a list of the relevant
profiles for the specified person. The profiles are sorted by
nivaakode.
"""
matches = []
for mtype, sm in self.select_map_defs.items():
tmp = sm.get_matches(person_info, member_groups=member_groups,
person_affs=person_affs)
self._logger.debug("check-type: %s -> %s", mtype, tmp)
matches.extend(tmp)
self._logger.debug("pre-priority filter: m= %s", matches)
if self._pc.using_priority:
# Only use matches at prioritylevel with lowest value
tmp = []
min_pri = None
for m in matches:
if min_pri is None or m[0].priority < min_pri:
min_pri = m[0].priority
for m in matches:
if m[0].priority == min_pri:
tmp.append(m)
self._logger.debug("Priority filter gave %i -> %i entries" % (
len(matches), len(tmp)))
matches = tmp
self._logger.debug("Matching settings: %s", matches)
# Sort matches on nivåkode, and remove duplicates
matches.sort(self._matches_sort)
matched_settings = {}
for match in matches:
profile, nivaakode = match
for k in profile._settings.keys():
for settings, actual_profile in profile.get_settings(k):
self._unique_extend(matched_settings.setdefault(k, []),
settings, actual_profile,
nivaakode=nivaakode)
return matches, matched_settings
| gpl-2.0 | 4,860,850,367,785,019,000 | 42.632207 | 87 | 0.528182 | false |
code-for-india/sahana_shelter_worldbank | private/templates/IFRC/config.py | 1 | 76404 | # -*- coding: utf-8 -*-
try:
# Python 2.7
from collections import OrderedDict
except:
# Python 2.6
from gluon.contrib.simplejson.ordered_dict import OrderedDict
from datetime import timedelta
from gluon import current
from gluon.storage import Storage
T = current.T
settings = current.deployment_settings
"""
Template settings for IFRC
"""
# =============================================================================
# System Settings
# -----------------------------------------------------------------------------
# Security Policy
settings.security.policy = 8 # Delegations
settings.security.map = True
# Authorization Settings
settings.auth.registration_requires_approval = True
settings.auth.registration_requires_verification = True
settings.auth.registration_requests_organisation = True
settings.auth.registration_organisation_required = True
settings.auth.registration_requests_site = True
settings.auth.registration_link_user_to = {"staff": T("Staff"),
"volunteer": T("Volunteer"),
"member": T("Member")
}
settings.auth.record_approval = True
# @ToDo: Should we fallback to organisation_id if site_id is None?
settings.auth.registration_roles = {"site_id": ["reader",
],
}
# Owner Entity
settings.auth.person_realm_human_resource_site_then_org = True
settings.auth.person_realm_member_org = True
def ifrc_realm_entity(table, row):
"""
Assign a Realm Entity to records
"""
tablename = table._tablename
# Do not apply realms for Master Data
    # @ToDo: Restore Realms and add role/functionality support for Master Data
if tablename in ("hrm_certificate",
"hrm_department",
"hrm_job_title",
"hrm_course",
"hrm_programme",
"member_membership_type",
"vol_award",
):
return None
db = current.db
s3db = current.s3db
# Entity reference fields
EID = "pe_id"
#OID = "organisation_id"
SID = "site_id"
#GID = "group_id"
PID = "person_id"
# Owner Entity Foreign Key
realm_entity_fks = dict(pr_contact = EID,
pr_contact_emergency = EID,
pr_physical_description = EID,
pr_address = EID,
pr_image = EID,
pr_identity = PID,
pr_education = PID,
pr_note = PID,
hrm_human_resource = SID,
inv_recv = SID,
inv_send = SID,
inv_track_item = "track_org_id",
inv_adj_item = "adj_id",
req_req_item = "req_id"
)
# Default Foreign Keys (ordered by priority)
default_fks = ("catalog_id",
"project_id",
"project_location_id"
)
# Link Tables
realm_entity_link_table = dict(
project_task = Storage(tablename = "project_task_project",
link_key = "task_id"
)
)
if tablename in realm_entity_link_table:
# Replace row with the record from the link table
link_table = realm_entity_link_table[tablename]
table = s3db[link_table.tablename]
rows = db(table[link_table.link_key] == row.id).select(table.id,
limitby=(0, 1))
if rows:
# Update not Create
row = rows.first()
# Check if there is a FK to inherit the realm_entity
realm_entity = 0
fk = realm_entity_fks.get(tablename, None)
fks = [fk]
fks.extend(default_fks)
for default_fk in fks:
if default_fk in table.fields:
fk = default_fk
# Inherit realm_entity from parent record
if fk == EID:
ftable = s3db.pr_person
query = (ftable[EID] == row[EID])
else:
ftablename = table[fk].type[10:] # reference tablename
ftable = s3db[ftablename]
query = (table.id == row.id) & \
(table[fk] == ftable.id)
record = db(query).select(ftable.realm_entity,
limitby=(0, 1)).first()
if record:
realm_entity = record.realm_entity
break
#else:
# Continue to loop through the rest of the default_fks
# Fall back to default get_realm_entity function
use_user_organisation = False
# Suppliers & Partners are owned by the user's organisation
if realm_entity == 0 and tablename == "org_organisation":
ott = s3db.org_organisation_type
query = (table.id == row.id) & \
(table.organisation_type_id == ott.id)
row = db(query).select(ott.name,
limitby=(0, 1)
).first()
if row and row.name != "Red Cross / Red Crescent":
use_user_organisation = True
# Groups are owned by the user's organisation
#elif tablename in ("pr_group",):
elif tablename == "pr_group":
use_user_organisation = True
user = current.auth.user
if use_user_organisation and user:
# @ToDo - this might cause issues if the user's org is different from the realm that gave them permissions to create the Org
realm_entity = s3db.pr_get_pe_id("org_organisation",
user.organisation_id)
return realm_entity
settings.auth.realm_entity = ifrc_realm_entity
# -----------------------------------------------------------------------------
# Pre-Populate
settings.base.prepopulate = ("IFRC", "IFRC_Train")
settings.base.system_name = T("Resource Management System")
settings.base.system_name_short = T("RMS")
# -----------------------------------------------------------------------------
# Theme (folder to use for views/layout.html)
settings.base.theme = "IFRC"
settings.base.xtheme = "IFRC/xtheme-ifrc.css"
settings.gis.map_height = 600
settings.gis.map_width = 869
# Display Resources recorded to Admin-Level Locations on the map
# @ToDo: Move into gis_config?
settings.gis.display_L0 = True
# -----------------------------------------------------------------------------
# L10n (Localization) settings
settings.L10n.languages = OrderedDict([
("en-gb", "English"),
("es", "Español"),
("km", "ភាសាខ្មែរ"), # Khmer
("ne", "नेपाली"), # Nepali
("prs", "دری"), # Dari
("ps", "پښتو"), # Pashto
("vi", "Tiếng Việt"), # Vietnamese
("zh-cn", "中文 (简体)"),
])
# Default Language
settings.L10n.default_language = "en-gb"
# Default timezone for users
settings.L10n.utc_offset = "UTC +0700"
# Number formats (defaults to ISO 31-0)
# Decimal separator for numbers (defaults to ,)
settings.L10n.decimal_separator = "."
# Thousands separator for numbers (defaults to space)
settings.L10n.thousands_separator = ","
# Unsortable 'pretty' date format (for use in English)
settings.L10n.date_format = "%d-%b-%Y"
# Make last name in person/user records mandatory
settings.L10n.mandatory_lastname = True
# Uncomment this to Translate Layer Names
settings.L10n.translate_gis_layer = True
# Translate Location Names
settings.L10n.translate_gis_location = True
# -----------------------------------------------------------------------------
# Finance settings
settings.fin.currencies = {
"AUD" : T("Australian Dollars"),
"CAD" : T("Canadian Dollars"),
"EUR" : T("Euros"),
"GBP" : T("Great British Pounds"),
"PHP" : T("Philippine Pesos"),
"CHF" : T("Swiss Francs"),
"USD" : T("United States Dollars"),
}
# -----------------------------------------------------------------------------
# Enable this for a UN-style deployment
#settings.ui.cluster = True
# Enable this to use the label 'Camp' instead of 'Shelter'
settings.ui.camp = True
# -----------------------------------------------------------------------------
# Filter Manager
settings.search.filter_manager = False
# -----------------------------------------------------------------------------
# Messaging
# Parser
settings.msg.parser = "IFRC"
# =============================================================================
# Module Settings
# -----------------------------------------------------------------------------
# Organisation Management
# Enable the use of Organisation Branches
settings.org.branches = True
# Set the length of the auto-generated org/site code; the default is 10
settings.org.site_code_len = 3
# Set the label for Sites
settings.org.site_label = "Office/Warehouse/Facility"
# Enable certain fields just for specific Organisations
ARCS = "Afghan Red Crescent Society"
BRCS = "Bangladesh Red Crescent Society"
CVTL = "Timor-Leste Red Cross Society (Cruz Vermelha de Timor-Leste)"
PMI = "Indonesian Red Cross Society (Pelang Merah Indonesia)"
PRC = "Philippine Red Cross"
VNRC = "Viet Nam Red Cross"
settings.org.dependent_fields = \
{"pr_person.middle_name" : (CVTL, VNRC),
"pr_person_details.mother_name" : (BRCS, ),
"pr_person_details.father_name" : (ARCS, BRCS),
"pr_person_details.affiliations" : (PRC, ),
"pr_person_details.company" : (PRC, ),
"vol_details.availability" : (VNRC, ),
"vol_details.card" : (ARCS, ),
"vol_volunteer_cluster.vol_cluster_type_id" : (PRC, ),
"vol_volunteer_cluster.vol_cluster_id" : (PRC, ),
"vol_volunteer_cluster.vol_cluster_position_id" : (PRC, ),
}
# -----------------------------------------------------------------------------
# Human Resource Management
# Uncomment to allow Staff & Volunteers to be registered without an email address
settings.hrm.email_required = False
# Uncomment to filter certificates by (root) Organisation & hence not allow Certificates from other orgs to be added to a profile (except by Admin)
settings.hrm.filter_certificates = True
# Uncomment to show the Organisation name in HR represents
settings.hrm.show_organisation = True
# Uncomment to allow HRs to have multiple Job Titles
settings.hrm.multiple_job_titles = True
# Uncomment to have each root Org use a different Job Title Catalog
settings.hrm.org_dependent_job_titles = True
# Uncomment to disable the use of HR Credentials
settings.hrm.use_credentials = False
# Uncomment to enable the use of HR Education
settings.hrm.use_education = True
# Custom label for Organisations in HR module
settings.hrm.organisation_label = "National Society / Branch"
# Uncomment to consolidate tabs into a single CV
settings.hrm.cv_tab = True
# Uncomment to consolidate tabs into Staff Record
settings.hrm.record_tab = True
# Uncomment to do a search for duplicates in the new AddPersonWidget2
settings.pr.lookup_duplicates = True
# RDRT
settings.deploy.hr_label = "Member"
# Enable the use of Organisation Regions
settings.org.regions = True
# Make Organisation Regions Hierarchical
settings.org.regions_hierarchical = True
# Uncomment to allow hierarchical categories of Skills, which each need their own set of competency levels.
settings.hrm.skill_types = True
# RDRT overrides these within controller:
# Uncomment to disable Staff experience
settings.hrm.staff_experience = False
# Uncomment to disable the use of HR Skills
settings.hrm.use_skills = False
# -----------------------------------------------------------------------------
def ns_only(f, required = True, branches = True, updateable=True):
"""
Function to configure an organisation_id field to be restricted to just NS/Branch
"""
# Label
if branches:
f.label = T("National Society / Branch")
else:
f.label = T("National Society")
# Requires
db = current.db
ttable = db.org_organisation_type
try:
type_id = db(ttable.name == "Red Cross / Red Crescent").select(ttable.id,
limitby=(0, 1)
).first().id
except:
# No IFRC prepop done - skip (e.g. testing impacts of CSS changes in this theme)
return
auth = current.auth
s3_has_role = auth.s3_has_role
Admin = s3_has_role("ADMIN")
if branches:
not_filterby = None
not_filter_opts = None
if Admin:
parent = True
else:
# @ToDo: Set the represent according to whether the user can see resources of just a single NS or multiple
# @ToDo: Consider porting this into core
user = auth.user
if user:
realms = user.realms
delegations = user.delegations
if realms:
parent = True
else:
parent = False
else:
parent = True
else:
# Keep the represent function as simple as possible
parent = False
btable = current.s3db.org_organisation_branch
rows = db(btable.deleted != True).select(btable.branch_id)
branches = [row.branch_id for row in rows]
not_filterby = "id"
not_filter_opts = branches
represent = current.s3db.org_OrganisationRepresent(parent=parent)
f.represent = represent
from s3.s3validators import IS_ONE_OF
requires = IS_ONE_OF(db, "org_organisation.id",
represent,
filterby = "organisation_type_id",
filter_opts = (type_id,),
not_filterby = not_filterby,
not_filter_opts=not_filter_opts,
updateable = updateable,
orderby = "org_organisation.name",
sort = True)
if not required:
from gluon import IS_EMPTY_OR
requires = IS_EMPTY_OR(requires)
f.requires = requires
# Dropdown not Autocomplete
f.widget = None
# Comment
if Admin or s3_has_role("ORG_ADMIN"):
# Need to do import after setting Theme
from s3layouts import S3AddResourceLink
from s3.s3navigation import S3ScriptItem
add_link = S3AddResourceLink(c="org",
f="organisation",
vars={"organisation.organisation_type_id$name":"Red Cross / Red Crescent"},
label=T("Create National Society"),
title=T("National Society"),
)
comment = f.comment
if not comment or isinstance(comment, S3AddResourceLink):
f.comment = add_link
elif isinstance(comment[1], S3ScriptItem):
# Don't overwrite scripts
f.comment[0] = add_link
else:
f.comment = add_link
else:
# Not allowed to add NS/Branch
f.comment = ""
# -----------------------------------------------------------------------------
def user_org_default_filter(selector, tablename=None):
"""
Default filter for organisation_id:
* Use the user's organisation if logged in and associated with an
organisation.
"""
auth = current.auth
user_org_id = auth.is_logged_in() and auth.user.organisation_id
if user_org_id:
return user_org_id
else:
# no default
return {}
# -----------------------------------------------------------------------------
def customise_asset_asset_resource(r, tablename):
s3db = current.s3db
table = s3db.asset_asset
# Organisation needs to be an NS/Branch
ns_only(table.organisation_id,
required = True,
branches = True,
)
# Custom CRUD Form to allow ad-hoc Kits & link to Teams
from s3.s3forms import S3SQLCustomForm, S3SQLInlineComponent
table.kit.readable = table.kit.writable = True
crud_form = S3SQLCustomForm("number",
"type",
"item_id",
"organisation_id",
"site_id",
"kit",
# If not ad-hoc Kit
"sn",
"supply_org_id",
"purchase_date",
"purchase_price",
"purchase_currency",
# If ad-hoc Kit
S3SQLInlineComponent(
"item",
label = T("Items"),
fields = ["item_id",
"quantity",
"sn",
# These are too wide for the screen & hence hide the AddResourceLinks
#"supply_org_id",
#"purchase_date",
#"purchase_price",
#"purchase_currency",
"comments",
],
),
S3SQLInlineComponent(
"group",
label = T("Team"),
fields = [("", "group_id")],
filterby = dict(field = "group_type",
options = 3
),
multiple = False,
),
"comments",
)
from s3.s3filter import S3OptionsFilter
filter_widgets = s3db.get_config(tablename, "filter_widgets")
filter_widgets.insert(-2, S3OptionsFilter("group.group_id",
label = T("Team"),
represent = "%(name)s",
hidden = True,
))
s3db.configure(tablename,
crud_form = crud_form,
)
settings.customise_asset_asset_resource = customise_asset_asset_resource
# -----------------------------------------------------------------------------
def customise_auth_user_controller(**attr):
"""
Customise admin/user() and default/user() controllers
"""
#if "arg" in attr and attr["arg"] == "register":
# Organisation needs to be an NS/Branch
ns_only(current.db.auth_user.organisation_id,
required = True,
branches = True,
updateable = False, # Need to see all Orgs in Registration screens
)
# Different settings for different NS
# Not possible for registration form, so fake with language!
root_org = current.auth.root_org_name()
if root_org == VNRC or current.session.s3.language == "vi":
# Too late to do via settings
#settings.org.site_label = "Office/Center"
current.db.auth_user.site_id.label = T("Office/Center")
return attr
settings.customise_auth_user_controller = customise_auth_user_controller
# -----------------------------------------------------------------------------
def customise_deploy_alert_resource(r, tablename):
current.s3db.deploy_alert_recipient.human_resource_id.label = T("Member")
settings.customise_deploy_alert_resource = customise_deploy_alert_resource
# -----------------------------------------------------------------------------
def customise_deploy_application_resource(r, tablename):
r.table.human_resource_id.label = T("Member")
settings.customise_deploy_application_resource = customise_deploy_application_resource
# -----------------------------------------------------------------------------
def _customise_assignment_fields(**attr):
MEMBER = T("Member")
from gluon.html import DIV
hr_comment = \
DIV(_class="tooltip",
_title="%s|%s" % (MEMBER,
current.messages.AUTOCOMPLETE_HELP))
from s3.s3validators import IS_ONE_OF
atable = current.s3db.deploy_assignment
atable.human_resource_id.label = MEMBER
atable.human_resource_id.comment = hr_comment
field = atable.job_title_id
field.comment = None
field.label = T("Sector")
field.requires = IS_ONE_OF(current.db, "hrm_job_title.id",
field.represent,
filterby = "type",
filter_opts = (4,),
)
return
# -----------------------------------------------------------------------------
def customise_deploy_assignment_controller(**attr):
s3db = current.s3db
table = s3db.deploy_assignment
# Labels
table.job_title_id.label = T("RDRT Type")
table.start_date.label = T("Deployment Date")
#table.end_date.label = T("EOM")
# List fields
list_fields = [(T("Mission"), "mission_id$name"),
(T("Appeal Code"), "mission_id$code"),
(T("Country"), "mission_id$location_id"),
(T("Disaster Type"), "mission_id$event_type_id"),
# @todo: replace by date of first alert?
(T("Date"), "mission_id$created_on"),
"job_title_id",
(T("Member"), "human_resource_id$person_id"),
(T("Deploying NS"), "human_resource_id$organisation_id"),
"start_date",
"end_date",
"appraisal.rating",
# @todo: Comments of the mission (=>XLS only)
]
# Report options
report_fact = [(T("Number of Deployments"), "count(human_resource_id)"),
(T("Average Rating"), "avg(appraisal.rating)"),
]
report_axis = [(T("Appeal Code"), "mission_id$code"),
(T("Country"), "mission_id$location_id"),
(T("Disaster Type"), "mission_id$event_type_id"),
(T("RDRT Type"), "job_title_id"),
(T("Deploying NS"), "human_resource_id$organisation_id"),
]
report_options = Storage(
rows=report_axis,
cols=report_axis,
fact=report_fact,
defaults=Storage(rows="mission_id$location_id",
cols="mission_id$event_type_id",
fact="count(human_resource_id)",
totals=True
)
)
s3db.configure("deploy_assignment",
list_fields = list_fields,
report_options = report_options,
)
# CRUD Strings
current.response.s3.crud_strings["deploy_assignment"] = Storage(
label_create = T("Add Deployment"),
title_display = T("Deployment Details"),
title_list = T("Deployments"),
title_update = T("Edit Deployment Details"),
title_upload = T("Import Deployments"),
label_list_button = T("List Deployments"),
label_delete_button = T("Delete Deployment"),
msg_record_created = T("Deployment added"),
msg_record_modified = T("Deployment Details updated"),
msg_record_deleted = T("Deployment deleted"),
msg_list_empty = T("No Deployments currently registered"))
_customise_assignment_fields()
# Restrict Location to just Countries
from s3.s3fields import S3Represent
field = s3db.deploy_mission.location_id
field.represent = S3Represent(lookup="gis_location", translate=True)
return attr
settings.customise_deploy_assignment_controller = customise_deploy_assignment_controller
# -----------------------------------------------------------------------------
def customise_deploy_mission_controller(**attr):
db = current.db
s3db = current.s3db
s3 = current.response.s3
MEMBER = T("Member")
from gluon.html import DIV
hr_comment = \
DIV(_class="tooltip",
_title="%s|%s" % (MEMBER,
current.messages.AUTOCOMPLETE_HELP))
table = s3db.deploy_mission
table.code.label = T("Appeal Code")
table.event_type_id.label = T("Disaster Type")
table.organisation_id.readable = table.organisation_id.writable = False
# Restrict Location to just Countries
from s3.s3fields import S3Represent
from s3.s3widgets import S3MultiSelectWidget
field = table.location_id
field.label = current.messages.COUNTRY
field.requires = s3db.gis_country_requires
field.widget = S3MultiSelectWidget(multiple=False)
field.represent = S3Represent(lookup="gis_location", translate=True)
rtable = s3db.deploy_response
rtable.human_resource_id.label = MEMBER
rtable.human_resource_id.comment = hr_comment
_customise_assignment_fields()
# Report options
report_fact = [(T("Number of Missions"), "count(id)"),
(T("Number of Countries"), "count(location_id)"),
(T("Number of Disaster Types"), "count(event_type_id)"),
(T("Number of Responses"), "sum(response_count)"),
(T("Number of Deployments"), "sum(hrquantity)"),
]
report_axis = ["code",
"location_id",
"event_type_id",
"status",
]
report_options = Storage(rows = report_axis,
cols = report_axis,
fact = report_fact,
defaults = Storage(rows = "location_id",
cols = "event_type_id",
fact = "sum(hrquantity)",
totals = True,
),
)
s3db.configure("deploy_mission",
report_options = report_options,
)
# CRUD Strings
s3.crud_strings["deploy_assignment"] = Storage(
label_create = T("New Deployment"),
title_display = T("Deployment Details"),
title_list = T("Deployments"),
title_update = T("Edit Deployment Details"),
title_upload = T("Import Deployments"),
label_list_button = T("List Deployments"),
label_delete_button = T("Delete Deployment"),
msg_record_created = T("Deployment added"),
msg_record_modified = T("Deployment Details updated"),
msg_record_deleted = T("Deployment deleted"),
msg_list_empty = T("No Deployments currently registered"))
# Custom prep
standard_prep = s3.prep
def custom_prep(r):
# Call standard prep
if callable(standard_prep):
result = standard_prep(r)
else:
result = True
if not r.component and r.method == "create":
# Org is always IFRC
otable = s3db.org_organisation
query = (otable.name == "International Federation of Red Cross and Red Crescent Societies")
organisation = db(query).select(otable.id,
limitby = (0, 1),
).first()
if organisation:
r.table.organisation_id.default = organisation.id
return result
s3.prep = custom_prep
return attr
settings.customise_deploy_mission_controller = customise_deploy_mission_controller
# -----------------------------------------------------------------------------
def poi_marker_fn(record):
"""
Function to decide which Marker to use for PoI KML export
"""
db = current.db
table = db.gis_poi_type
type = db(table.id == record.poi_type_id).select(table.name,
limitby=(0, 1)
).first()
if type:
marker = type.name.lower().replace(" ", "_")\
.replace("_cccm", "_CCCM")\
.replace("_nfi_", "_NFI_")\
.replace("_ngo_", "_NGO_")\
.replace("_wash", "_WASH")
marker = "OCHA/%s_40px.png" % marker
else:
# Fallback
marker = "marker_red.png"
return Storage(image=marker)
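# Worked example (sketch): a gis_poi_type named "Water Point" would yield the
# marker image "OCHA/water_point_40px.png"; if the type lookup fails, the
# generic "marker_red.png" is used instead.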
# -----------------------------------------------------------------------------
def customise_gis_poi_resource(r, tablename):
if r.representation == "kml":
# Custom Marker function
current.s3db.configure("gis_poi",
marker_fn = poi_marker_fn,
)
settings.customise_gis_poi_resource = customise_gis_poi_resource
# -----------------------------------------------------------------------------
def customise_hrm_certificate_controller(**attr):
# Organisation needs to be an NS/Branch
ns_only(current.s3db.hrm_certificate.organisation_id,
required = False,
branches = False,
)
return attr
settings.customise_hrm_certificate_controller = customise_hrm_certificate_controller
# -----------------------------------------------------------------------------
def customise_hrm_course_controller(**attr):
# Organisation needs to be an NS/Branch
ns_only(current.s3db.hrm_course.organisation_id,
required = False,
branches = False,
)
return attr
settings.customise_hrm_course_controller = customise_hrm_course_controller
# -----------------------------------------------------------------------------
def customise_hrm_credential_controller(**attr):
# Currently just used by RDRT
table = current.s3db.hrm_credential
field = table.job_title_id
field.comment = None
field.label = T("Sector")
from s3.s3validators import IS_ONE_OF
field.requires = IS_ONE_OF(current.db, "hrm_job_title.id",
field.represent,
filterby = "type",
filter_opts = (4,),
)
table.organisation_id.readable = table.organisation_id.writable = False
table.performance_rating.readable = table.performance_rating.writable = False
table.start_date.readable = table.start_date.writable = False
table.end_date.readable = table.end_date.writable = False
return attr
settings.customise_hrm_credential_controller = customise_hrm_credential_controller
# -----------------------------------------------------------------------------
def customise_hrm_department_controller(**attr):
# Organisation needs to be an NS/Branch
ns_only(current.s3db.hrm_department.organisation_id,
required = False,
branches = False,
)
return attr
settings.customise_hrm_department_controller = customise_hrm_department_controller
# -----------------------------------------------------------------------------
def customise_hrm_human_resource_controller(**attr):
# Default Filter
from s3 import s3_set_default_filter
s3_set_default_filter("~.organisation_id",
user_org_default_filter,
tablename = "hrm_human_resource")
arcs = False
vnrc = False
if current.request.controller == "vol":
# Special cases for different NS
root_org = current.auth.root_org_name()
if root_org == ARCS:
arcs = True
settings.L10n.mandatory_lastname = False
settings.hrm.use_code = True
settings.hrm.use_skills = True
settings.hrm.vol_active = True
elif root_org in (CVTL, PMI, PRC):
settings.hrm.vol_active = vol_active
elif root_org == VNRC:
vnrc = True
settings.pr.name_format = "%(last_name)s %(middle_name)s %(first_name)s"
# @ToDo: Make this use the same lookup as in ns_only to check if user can see HRs from multiple NS
settings.org.regions = False
#elif vnrc:
# settings.org.site_label = "Office/Center"
s3db = current.s3db
# Organisation needs to be an NS/Branch
ns_only(s3db.hrm_human_resource.organisation_id,
required = True,
branches = True,
)
s3 = current.response.s3
# Custom prep
standard_prep = s3.prep
def custom_prep(r):
# Call standard prep
if callable(standard_prep):
result = standard_prep(r)
else:
result = True
if arcs:
field = s3db.vol_details.card
field.readable = field.writable = True
elif vnrc:
field = r.table.job_title_id
field.readable = field.writable = False
return result
s3.prep = custom_prep
# Custom postp
standard_postp = s3.postp
def custom_postp(r, output):
# Call standard postp
if callable(standard_postp):
output = standard_postp(r, output)
if isinstance(output, dict):
if r.controller == "deploy" and \
"title" in output:
output["title"] = T("RDRT Members")
elif vnrc and \
r.method != "report" and \
"form" in output and \
(r.controller == "vol" or \
r.component_name == "human_resource"):
# Remove the injected Programme field
del output["form"][0].components[4]
del output["form"][0].components[4]
return output
s3.postp = custom_postp
return attr
settings.customise_hrm_human_resource_controller = customise_hrm_human_resource_controller
# -----------------------------------------------------------------------------
def customise_hrm_job_title_controller(**attr):
s3 = current.response.s3
table = current.s3db.hrm_job_title
controller = current.request.controller
if controller == "deploy":
# Filter to just deployables
s3.filter = (table.type == 4)
else:
# Organisation needs to be an NS/Branch
ns_only(table.organisation_id,
required = False,
branches = False,
)
# Custom prep
standard_prep = s3.prep
def custom_prep(r):
# Call standard prep
if callable(standard_prep):
result = standard_prep(r)
else:
result = True
if controller == "deploy":
field = table.type
field.default = 4
field.readable = field.writable = False
table.organisation_id.readable = False
table.organisation_id.writable = False
SECTOR = T("Sector")
ADD_SECTOR = T("Create Sector")
help = T("If you don't see the Sector in the list, you can add a new one by clicking link 'Create Sector'.")
s3.crud_strings["hrm_job_title"] = Storage(
label_create=T("Create Sector"),
title_display=T("Sector Details"),
title_list=T("Sectors"),
title_update=T("Edit Sector"),
label_list_button=T("List Sectors"),
label_delete_button=T("Delete Sector"),
msg_record_created=T("Sector added"),
msg_record_modified=T("Sector updated"),
msg_record_deleted=T("Sector deleted"),
msg_list_empty=T("No Sectors currently registered"))
return result
s3.prep = custom_prep
return attr
settings.customise_hrm_job_title_controller = customise_hrm_job_title_controller
# -----------------------------------------------------------------------------
def customise_hrm_programme_controller(**attr):
s3db = current.s3db
# Organisation needs to be an NS/Branch
ns_only(s3db.hrm_programme.organisation_id,
required = False,
branches = False,
)
# Special cases for different NS
root_org = current.auth.root_org_name()
if root_org == ARCS:
settings.L10n.mandatory_lastname = False
settings.hrm.vol_active = True
elif root_org in (CVTL, PMI, PRC):
settings.hrm.vol_active = vol_active
settings.hrm.vol_active_tooltip = "A volunteer is defined as active if they've participated in an average of 8 or more hours of Program work or Trainings per month in the last year"
elif root_org == VNRC:
settings.pr.name_format = "%(last_name)s %(middle_name)s %(first_name)s"
field = s3db.hrm_programme_hours.job_title_id
field.readable = field.writable = False
# @ToDo
# def vn_age_group(age):
# settings.pr.age_group = vn_age_group
return attr
settings.customise_hrm_programme_controller = customise_hrm_programme_controller
# -----------------------------------------------------------------------------
def customise_hrm_programme_hours_controller(**attr):
# Default Filter
from s3 import s3_set_default_filter
s3_set_default_filter("~.person_id$human_resource.organisation_id",
user_org_default_filter,
tablename = "hrm_programme_hours")
# Special cases for different NS
root_org = current.auth.root_org_name()
if root_org == ARCS:
settings.L10n.mandatory_lastname = False
settings.hrm.vol_active = True
elif root_org in (CVTL, PMI, PRC):
settings.hrm.vol_active = vol_active
elif root_org == VNRC:
settings.pr.name_format = "%(last_name)s %(middle_name)s %(first_name)s"
field = current.s3db.hrm_programme_hours.job_title_id
field.readable = field.writable = False
# Remove link to download Template
attr["csv_template"] = "hide"
return attr
settings.customise_hrm_programme_hours_controller = customise_hrm_programme_hours_controller
# -----------------------------------------------------------------------------
def customise_hrm_training_controller(**attr):
# Default Filter
from s3 import s3_set_default_filter
s3_set_default_filter("~.person_id$human_resource.organisation_id",
user_org_default_filter,
tablename = "hrm_training")
# Special cases for different NS
root_org = current.auth.root_org_name()
if root_org == ARCS:
settings.L10n.mandatory_lastname = False
settings.hrm.vol_active = True
elif root_org in (CVTL, PMI, PRC):
settings.hrm.vol_active = vol_active
elif root_org == VNRC:
settings.pr.name_format = "%(last_name)s %(middle_name)s %(first_name)s"
# Remove link to download Template
attr["csv_template"] = "hide"
return attr
settings.customise_hrm_training_controller = customise_hrm_training_controller
# -----------------------------------------------------------------------------
def customise_hrm_training_event_controller(**attr):
# Special cases for different NS
root_org = current.auth.root_org_name()
if root_org == ARCS:
settings.L10n.mandatory_lastname = False
settings.hrm.vol_active = True
elif root_org in (CVTL, PMI, PRC):
settings.hrm.vol_active = vol_active
elif root_org == VNRC:
settings.pr.name_format = "%(last_name)s %(middle_name)s %(first_name)s"
# Remove link to download Template
attr["csv_template"] = "hide"
return attr
settings.customise_hrm_training_event_controller = customise_hrm_training_event_controller
# -----------------------------------------------------------------------------
def customise_inv_warehouse_resource(r, tablename):
# Special cases for different NS
root_org = current.auth.root_org_name()
if root_org == "Australian Red Cross":
# AusRC use proper Logistics workflow
settings.inv.direct_stock_edits = False
settings.customise_inv_warehouse_resource = customise_inv_warehouse_resource
# -----------------------------------------------------------------------------
def customise_member_membership_controller(**attr):
# @ToDo: If these NS start using Membership module
#s3db = current.s3db
#
# Special cases for different NS
#root_org = current.auth.root_org_name()
#if root_org == ARCS:
# settings.L10n.mandatory_lastname = False
#elif root_org == VNRC:
# settings.pr.name_format = "%(last_name)s %(middle_name)s %(first_name)s"
# # Remove link to download Template
# attr["csv_template"] = "hide"
# Organisation needs to be an NS/Branch
ns_only(current.s3db.member_membership.organisation_id,
required = True,
branches = True,
)
return attr
settings.customise_member_membership_controller = customise_member_membership_controller
# -----------------------------------------------------------------------------
def customise_member_membership_type_controller(**attr):
# Organisation needs to be an NS/Branch
ns_only(current.s3db.member_membership_type.organisation_id,
required = False,
branches = False,
)
return attr
settings.customise_member_membership_type_controller = customise_member_membership_type_controller
# -----------------------------------------------------------------------------
def customise_org_office_controller(**attr):
# Organisation needs to be an NS/Branch
ns_only(current.s3db.org_office.organisation_id,
required = True,
branches = True,
)
return attr
settings.customise_org_office_controller = customise_org_office_controller
# -----------------------------------------------------------------------------
def customise_org_organisation_controller(**attr):
s3 = current.response.s3
# Custom prep
standard_prep = s3.prep
def custom_prep(r):
# Call standard prep
if callable(standard_prep):
result = standard_prep(r)
else:
result = True
if not r.component or r.component.name == "branch":
if r.interactive or r.representation == "aadata":
s3db = current.s3db
list_fields = ["id",
"name",
"acronym",
"organisation_type_id",
#(T("Sectors"), "sector.name"),
"country",
"website"
]
type_filter = r.get_vars.get("organisation.organisation_type_id$name",
None)
if type_filter:
type_names = type_filter.split(",")
if len(type_names) == 1:
# Strip Type from list_fields
list_fields.remove("organisation_type_id")
if type_filter == "Red Cross / Red Crescent":
# Modify filter_widgets
filter_widgets = s3db.get_config("org_organisation", "filter_widgets")
# Remove type (always 'RC')
filter_widgets.pop(1)
# Remove sector (not relevant)
filter_widgets.pop(1)
# Modify CRUD Strings
ADD_NS = T("Create National Society")
s3.crud_strings.org_organisation = Storage(
label_create=ADD_NS,
title_display=T("National Society Details"),
title_list=T("Red Cross & Red Crescent National Societies"),
title_update=T("Edit National Society"),
title_upload=T("Import Red Cross & Red Crescent National Societies"),
label_list_button=T("List Red Cross & Red Crescent National Societies"),
label_delete_button=T("Delete National Society"),
msg_record_created=T("National Society added"),
msg_record_modified=T("National Society updated"),
msg_record_deleted=T("National Society deleted"),
msg_list_empty=T("No Red Cross & Red Crescent National Societies currently registered")
)
# Add Region to list_fields
list_fields.insert(-1, "region_id")
# Region is required
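                            # (.other unwraps the IS_EMPTY_OR wrapper so the
                            # inner validator rejects empty values)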
r.table.region_id.requires = r.table.region_id.requires.other
else:
r.table.region_id.readable = r.table.region_id.writable = False
s3db.configure("org_organisation",
list_fields = list_fields,
)
if r.interactive:
r.table.country.label = T("Country")
from s3.s3forms import S3SQLCustomForm#, S3SQLInlineComponentCheckbox
crud_form = S3SQLCustomForm(
"name",
"acronym",
"organisation_type_id",
"region_id",
"country",
#S3SQLInlineComponentCheckbox(
# "sector",
# label = T("Sectors"),
# field = "sector_id",
# cols = 3,
#),
"phone",
"website",
"logo",
"comments",
)
s3db.configure("org_organisation", crud_form=crud_form)
return result
s3.prep = custom_prep
return attr
settings.customise_org_organisation_controller = customise_org_organisation_controller
# -----------------------------------------------------------------------------
def customise_pr_contact_resource(r, tablename):
# Special cases for different NS
root_org = current.auth.root_org_name()
if root_org == VNRC:
# Hard to translate in Vietnamese
current.s3db.pr_contact.value.label = ""
settings.customise_pr_contact_resource = customise_pr_contact_resource
# -----------------------------------------------------------------------------
def customise_pr_group_controller(**attr):
s3db = current.s3db
# Organisation needs to be an NS/Branch
table = s3db.org_organisation_team.organisation_id
ns_only(table,
required = False,
branches = True,
)
s3 = current.response.s3
# Custom prep
standard_prep = s3.prep
def custom_prep(r):
# Call standard prep
if callable(standard_prep):
result = standard_prep(r)
else:
result = True
if r.component_name == "group_membership":
# Special cases for different NS
root_org = current.auth.root_org_name()
if root_org == VNRC:
settings.pr.name_format = "%(last_name)s %(middle_name)s %(first_name)s"
# Update the represent as already set
s3db.pr_group_membership.person_id.represent = s3db.pr_PersonRepresent()
return result
s3.prep = custom_prep
return attr
settings.customise_pr_group_controller = customise_pr_group_controller
# =============================================================================
def vol_active(person_id):
"""
Whether a Volunteer counts as 'Active' based on the number of hours
they've done (both Trainings & Programmes) per month, averaged over
the last year.
        If nothing has been recorded for the last 3 months, don't penalise,
        as we assume that data entry simply hasn't been done yet.
@ToDo: This should be based on the HRM record, not Person record
- could be active with Org1 but not with Org2
@ToDo: allow to be calculated differently per-Org
"""
now = current.request.utcnow
# Time spent on Programme work
htable = current.s3db.hrm_programme_hours
query = (htable.deleted == False) & \
(htable.person_id == person_id) & \
(htable.date != None)
programmes = current.db(query).select(htable.hours,
htable.date,
orderby=htable.date)
if programmes:
# Ignore up to 3 months of records
three_months_prior = (now - timedelta(days=92))
end = max(programmes.last().date, three_months_prior.date())
last_year = end - timedelta(days=365)
# Is this the Volunteer's first year?
if programmes.first().date > last_year:
# Only start counting from their first month
start = programmes.first().date
else:
# Start from a year before the latest record
start = last_year
# Total hours between start and end
programme_hours = 0
for programme in programmes:
if programme.date >= start and programme.date <= end and programme.hours:
programme_hours += programme.hours
# Average hours per month
months = max(1, (end - start).days / 30.5)
average = programme_hours / months
# Active?
if average >= 8:
return True
else:
return False
else:
return False
# -----------------------------------------------------------------------------
def customise_pr_person_controller(**attr):
s3db = current.s3db
# Special cases for different NS
arcs = False
vnrc = False
root_org = current.auth.root_org_name()
if root_org == ARCS:
arcs = True
settings.L10n.mandatory_lastname = False
# Override what has been set in the model already
s3db.pr_person.last_name.requires = None
settings.hrm.use_code = True
settings.hrm.use_skills = True
settings.hrm.vol_active = True
elif root_org == PMI:
settings.hrm.use_skills = True
settings.hrm.staff_experience = "experience"
settings.hrm.vol_experience = "both"
settings.hrm.vol_active = vol_active
settings.hrm.vol_active_tooltip = "A volunteer is defined as active if they've participated in an average of 8 or more hours of Program work or Trainings per month in the last year"
elif root_org in (CVTL, PRC):
settings.hrm.vol_active = vol_active
settings.hrm.vol_active_tooltip = "A volunteer is defined as active if they've participated in an average of 8 or more hours of Program work or Trainings per month in the last year"
elif root_org == VNRC:
vnrc = True
# Remove 'Commune' level for Addresses
#gis = current.gis
#gis.get_location_hierarchy()
#try:
# gis.hierarchy_levels.pop("L3")
#except:
# # Must be already removed
# pass
settings.gis.postcode_selector = False # Needs to be done before prep as read during model load
settings.hrm.use_skills = True
settings.hrm.vol_experience = "both"
settings.pr.name_format = "%(last_name)s %(middle_name)s %(first_name)s"
try:
settings.modules.pop("asset")
except:
# Must be already removed
pass
if current.request.controller == "deploy":
# Replace default title in imports:
attr["retitle"] = lambda r: {"title": T("Import Members")} \
if r.method == "import" else None
s3 = current.response.s3
# Custom prep
standard_prep = s3.prep
def custom_prep(r):
# Call standard prep
if callable(standard_prep):
result = standard_prep(r)
else:
result = True
component_name = r.component_name
if component_name == "appraisal":
atable = r.component.table
atable.organisation_id.readable = atable.organisation_id.writable = False
# Organisation needs to be an NS
#ns_only(atable.organisation_id,
# required = True,
# branches = False,
# )
field = atable.supervisor_id
field.readable = field.writable = False
field = atable.job_title_id
field.comment = None
field.label = T("Sector") # RDRT-specific
from s3.s3validators import IS_ONE_OF
                field.requires = IS_ONE_OF(current.db, "hrm_job_title.id",
field.represent,
filterby = "type",
filter_opts = (4,),
)
elif r.method == "cv" or component_name == "education":
if vnrc:
# Don't enable Legacy Freetext field
# Hide the 'Name of Award' field
field = s3db.pr_education.award
field.readable = field.writable = False
elif arcs:
# Don't enable Legacy Freetext field
pass
else:
# Enable Legacy Freetext field
field = s3db.pr_education.level
field.readable = field.writable = True
field.label = T("Other Level")
field.comment = T("Use main dropdown whenever possible")
if arcs:
if not r.component:
s3db.pr_person_details.father_name.label = T("Name of Grandfather")
elif vnrc:
if not r.component:
# Use a free-text version of religion field
field = s3db.pr_person_details.religion_other
field.label = T("Religion")
field.readable = field.writable = True
# Also hide some other fields
from s3.s3forms import S3SQLCustomForm
crud_form = S3SQLCustomForm("first_name",
"middle_name",
"last_name",
"date_of_birth",
#"initials",
#"preferred_name",
#"local_name",
"gender",
"person_details.marital_status",
"person_details.nationality",
"person_details.religion_other",
"person_details.mother_name",
"person_details.father_name",
#"person_details.occupation",
#"person_details.company",
"person_details.affiliations",
"comments",
)
s3db.configure("pr_person",
crud_form = crud_form,
)
if r.method == "record" or \
component_name == "human_resource":
field = s3db.hrm_human_resource.job_title_id
field.readable = field.writable = False
field = s3db.hrm_programme_hours.job_title_id
field.readable = field.writable = False
elif component_name == "address":
settings.gis.building_name = False
settings.gis.latlon_selector = False
settings.gis.map_selector = False
elif component_name == "identity":
table = s3db.pr_identity
table.description.readable = False
table.description.writable = False
pr_id_type_opts = {1: T("Passport"),
2: T("National ID Card"),
}
from gluon.validators import IS_IN_SET
table.type.requires = IS_IN_SET(pr_id_type_opts,
zero=None)
elif component_name == "hours":
field = s3db.hrm_programme_hours.job_title_id
field.readable = field.writable = False
elif component_name == "physical_description":
# Add the less-specific blood types (as that's all the data currently available in some cases)
field = s3db.pr_physical_description.blood_type
from gluon.validators import IS_EMPTY_OR, IS_IN_SET
blood_type_opts = ("A+", "A-", "B+", "B-", "AB+", "AB-", "O+", "O-", "A", "B", "AB", "O")
field.requires = IS_EMPTY_OR(IS_IN_SET(blood_type_opts))
elif r.method == "cv" or component_name == "experience":
table = s3db.hrm_experience
# Use simple free-text variants
table.organisation.readable = True
table.organisation.writable = True
table.job_title.readable = True
table.job_title.writable = True
table.comments.label = T("Main Duties")
from s3.s3forms import S3SQLCustomForm
crud_form = S3SQLCustomForm("organisation",
"job_title",
"comments",
"start_date",
"end_date",
)
s3db.configure("hrm_experience",
crud_form = crud_form,
list_fields = ["id",
"organisation",
"job_title",
"comments",
"start_date",
"end_date",
],
)
return result
s3.prep = custom_prep
attr["rheader"] = lambda r, vnrc=vnrc: pr_rheader(r, vnrc)
if vnrc:
# Link to customised download Template
#attr["csv_template"] = ("../../themes/IFRC/formats", "volunteer_vnrc")
# Remove link to download Template
attr["csv_template"] = "hide"
return attr
settings.customise_pr_person_controller = customise_pr_person_controller
# -----------------------------------------------------------------------------
def pr_rheader(r, vnrc):
"""
Custom rheader for vol/person for vnrc
"""
if vnrc and current.request.controller == "vol":
# Simplify RHeader
settings.hrm.vol_experience = None
s3db = current.s3db
s3db.hrm_vars()
return s3db.hrm_rheader(r)
# -----------------------------------------------------------------------------
def customise_req_commit_controller(**attr):
# Request is mandatory
field = current.s3db.req_commit.req_id
field.requires = field.requires.other
return attr
settings.customise_req_commit_controller = customise_req_commit_controller
# -----------------------------------------------------------------------------
def customise_req_req_controller(**attr):
# Request is mandatory
field = current.s3db.req_commit.req_id
field.requires = field.requires.other
return attr
settings.customise_req_req_controller = customise_req_req_controller
# -----------------------------------------------------------------------------
def customise_survey_series_controller(**attr):
# Organisation needs to be an NS/Branch
ns_only(current.s3db.survey_series.organisation_id,
required = False,
branches = True,
)
return attr
settings.customise_survey_series_controller = customise_survey_series_controller
# -----------------------------------------------------------------------------
# Projects
# Uncomment this to use settings suitable for a global/regional organisation (e.g. DRR)
settings.project.mode_3w = True
# Uncomment this to use DRR (Disaster Risk Reduction) extensions
settings.project.mode_drr = True
# Uncomment this to use Codes for projects
settings.project.codes = True
# Uncomment this to call project locations 'Communities'
settings.project.community = True
# Uncomment this to enable Hazards in 3W projects
settings.project.hazards = True
# Uncomment this to use multiple Budgets per project
settings.project.multiple_budgets = True
# Uncomment this to use multiple Organisations per project
settings.project.multiple_organisations = True
# Uncomment this to enable Themes in 3W projects
settings.project.themes = True
# Uncomment this to customise
# Links to Filtered Components for Donors & Partners
settings.project.organisation_roles = {
1: T("Host National Society"),
2: T("Partner"),
3: T("Donor"),
#4: T("Customer"), # T("Beneficiary")?
#5: T("Supplier"),
9: T("Partner National Society"),
}
# -----------------------------------------------------------------------------
def customise_project_project_controller(**attr):
s3db = current.s3db
tablename = "project_project"
# Load normal model
table = s3db[tablename]
# @ToDo: S3SQLInlineComponent for Project orgs
# Get IDs for PartnerNS/Partner-Donor
# db = current.db
# ttable = db.org_organisation_type
# rows = db(ttable.deleted != True).select(ttable.id,
# ttable.name,
# )
# rc = []
# not_rc = []
# nappend = not_rc.append
# for row in rows:
# if row.name == "Red Cross / Red Crescent":
# rc.append(row.id)
# elif row.name == "Supplier":
# pass
# else:
# nappend(row.id)
# Custom Fields
# Organisation needs to be an NS (not a branch)
f = table.organisation_id
ns_only(f,
required = True,
branches = False,
)
f.label = T("Host National Society")
# Custom Crud Form
from s3.s3forms import S3SQLCustomForm, S3SQLInlineComponent, S3SQLInlineComponentCheckbox
crud_form = S3SQLCustomForm(
"organisation_id",
"name",
"code",
"description",
"status_id",
"start_date",
"end_date",
#S3SQLInlineComponent(
# "location",
# label = T("Countries"),
# fields = ["location_id"],
#),
# Outputs
S3SQLInlineComponent(
"output",
label = T("Outputs"),
#comment = "Bob",
fields = ["name", "status"],
),
S3SQLInlineComponentCheckbox(
"hazard",
label = T("Hazards"),
field = "hazard_id",
cols = 4,
translate = True,
),
S3SQLInlineComponentCheckbox(
"sector",
label = T("Sectors"),
field = "sector_id",
cols = 4,
translate = True,
),
S3SQLInlineComponentCheckbox(
"theme",
label = T("Themes"),
field = "theme_id",
cols = 4,
translate = True,
# Filter Theme by Sector
filter = {"linktable": "project_theme_sector",
"lkey": "theme_id",
"rkey": "sector_id",
},
script = '''
S3OptionsFilter({
'triggerName':'defaultsector-sector_id',
'targetName':'defaulttheme-theme_id',
'targetWidget':'defaulttheme-theme_id_widget',
'lookupResource':'theme',
'lookupURL':S3.Ap.concat('/project/theme_sector_widget?sector_ids='),
'getWidgetHTML':true,
'showEmptyField':false
})'''
),
"drr.hfa",
"objectives",
"human_resource_id",
# Disabled since we need organisation_id filtering to either organisation_type_id == RC or NOT
# & also hiding Branches from RCs
# Partner NS
# S3SQLInlineComponent(
# "organisation",
# name = "partnerns",
# label = T("Partner National Societies"),
# fields = ["organisation_id",
# "comments",
# ],
# Filter Organisation by Type
# filter = ["organisation_id": {"filterby": "organisation_type_id",
# "filterfor": rc,
# }],
# filterby = dict(field = "role",
# options = [9])
# ),
# Partner Orgs
# S3SQLInlineComponent(
# "organisation",
# name = "partner",
# label = T("Partner Organizations"),
# fields = ["organisation_id",
# "comments",
# ],
# Filter Organisation by Type
# filter = ["organisation_id": {"filterby": "organisation_type_id",
# "filterfor": not_rc,
# }],
# filterby = dict(field = "role",
# options = [2])
# ),
# Donors
# S3SQLInlineComponent(
# "organisation",
# name = "donor",
# label = T("Donor(s)"),
# fields = ["organisation_id",
# "amount",
# "currency"],
# Filter Organisation by Type
# filter = ["organisation_id": {"filterby": "organisation_type_id",
# "filterfor": not_rc,
# }],
# filterby = dict(field = "role",
# options = [3])
# ),
#"budget",
#"currency",
"comments",
)
s3db.configure(tablename,
crud_form = crud_form,
)
return attr
settings.customise_project_project_controller = customise_project_project_controller
# -----------------------------------------------------------------------------
def customise_project_location_resource(r, tablename):
from s3.s3forms import S3SQLCustomForm, S3SQLInlineComponentCheckbox
crud_form = S3SQLCustomForm(
"project_id",
"location_id",
# @ToDo: Grouped Checkboxes
S3SQLInlineComponentCheckbox(
"activity_type",
label = T("Activity Types"),
field = "activity_type_id",
cols = 3,
# Filter Activity Type by Sector
filter = {"linktable": "project_activity_type_sector",
"lkey": "activity_type_id",
"rkey": "sector_id",
"lookuptable": "project_project",
"lookupkey": "project_id",
},
translate = True,
),
"comments",
)
current.s3db.configure(tablename,
crud_form = crud_form,
)
settings.customise_project_location_resource = customise_project_location_resource
# -----------------------------------------------------------------------------
# Inventory Management
settings.inv.show_mode_of_transport = True
settings.inv.send_show_time_in = True
#settings.inv.collapse_tabs = True
# Uncomment if you need a simpler (but less accountable) process for managing stock levels
settings.inv.direct_stock_edits = True
# -----------------------------------------------------------------------------
# Request Management
# Uncomment to disable Inline Forms in Requests module
settings.req.inline_forms = False
settings.req.req_type = ["Stock"]
settings.req.use_commit = False
# Should Requests ask whether Transportation is required?
settings.req.ask_transport = True
# -----------------------------------------------------------------------------
def customise_vulnerability_data_resource(r, tablename):
# Date is required: We don't store modelled data
r.table.date.requires = r.table.date.requires.other
settings.customise_vulnerability_data_resource = customise_vulnerability_data_resource
# =============================================================================
# Template Modules
# Comment/uncomment modules here to disable/enable them
settings.modules = OrderedDict([
# Core modules which shouldn't be disabled
("default", Storage(
name_nice = "RMS",
restricted = False, # Use ACLs to control access to this module
access = None, # All Users (inc Anonymous) can see this module in the default menu & access the controller
module_type = None # This item is not shown in the menu
)),
("admin", Storage(
name_nice = T("Administration"),
#description = "Site Administration",
restricted = True,
access = "|1|", # Only Administrators can see this module in the default menu & access the controller
module_type = None # This item is handled separately for the menu
)),
("appadmin", Storage(
name_nice = T("Administration"),
#description = "Site Administration",
restricted = True,
module_type = None # No Menu
)),
("errors", Storage(
name_nice = T("Ticket Viewer"),
#description = "Needed for Breadcrumbs",
restricted = False,
module_type = None # No Menu
)),
("sync", Storage(
name_nice = T("Synchronization"),
#description = "Synchronization",
restricted = True,
access = "|1|", # Only Administrators can see this module in the default menu & access the controller
module_type = None # This item is handled separately for the menu
)),
("translate", Storage(
name_nice = T("Translation Functionality"),
#description = "Selective translation of strings based on module.",
module_type = None,
)),
# Uncomment to enable internal support requests
("support", Storage(
name_nice = T("Support"),
#description = "Support Requests",
restricted = True,
module_type = None # This item is handled separately for the menu
)),
("gis", Storage(
name_nice = T("Map"),
#description = "Situation Awareness & Geospatial Analysis",
restricted = True,
module_type = 6, # 6th item in the menu
)),
("pr", Storage(
name_nice = T("Person Registry"),
#description = "Central point to record details on People",
restricted = True,
access = "|1|", # Only Administrators can see this module in the default menu (access to controller is possible to all still)
module_type = 10
)),
("org", Storage(
name_nice = T("Organizations"),
#description = 'Lists "who is doing what & where". Allows relief agencies to coordinate their activities',
restricted = True,
module_type = 1
)),
# All modules below here should be possible to disable safely
("hrm", Storage(
name_nice = T("Staff"),
#description = "Human Resources Management",
restricted = True,
module_type = 2,
)),
("vol", Storage(
name_nice = T("Volunteers"),
#description = "Human Resources Management",
restricted = True,
module_type = 2,
)),
("doc", Storage(
name_nice = T("Documents"),
#description = "A library of digital resources, such as photos, documents and reports",
restricted = True,
module_type = 10,
)),
("msg", Storage(
name_nice = T("Messaging"),
#description = "Sends & Receives Alerts via Email & SMS",
restricted = True,
            # The user-visible functionality of this module isn't normally required. Rather, its main purpose is to be accessed from other modules.
module_type = None,
)),
("supply", Storage(
name_nice = T("Supply Chain Management"),
#description = "Used within Inventory Management, Request Management and Asset Management",
restricted = True,
module_type = None, # Not displayed
)),
("inv", Storage(
name_nice = T("Warehouses"),
#description = "Receiving and Sending Items",
restricted = True,
module_type = 4
)),
("asset", Storage(
name_nice = T("Assets"),
#description = "Recording and Assigning Assets",
restricted = True,
module_type = 5,
)),
("req", Storage(
name_nice = T("Requests"),
#description = "Manage requests for supplies, assets, staff or other resources. Matches against Inventories where supplies are requested.",
restricted = True,
module_type = 10,
)),
("project", Storage(
name_nice = T("Projects"),
#description = "Tracking of Projects, Activities and Tasks",
restricted = True,
module_type = 2
)),
("survey", Storage(
name_nice = T("Assessments"),
#description = "Create, enter, and manage surveys.",
restricted = True,
module_type = 5,
)),
("event", Storage(
name_nice = T("Events"),
#description = "Events",
restricted = True,
module_type = 10
)),
("irs", Storage(
name_nice = T("Incidents"),
#description = "Incident Reporting System",
restricted = True,
module_type = 10
)),
("member", Storage(
name_nice = T("Members"),
#description = "Membership Management System",
restricted = True,
module_type = 10,
)),
("deploy", Storage(
name_nice = T("Regional Disaster Response Teams"),
#description = "Alerting and Deployment of Disaster Response Teams",
restricted = True,
module_type = 10,
)),
("stats", Storage(
name_nice = T("Statistics"),
#description = "Manages statistics",
restricted = True,
module_type = None,
)),
("vulnerability", Storage(
name_nice = T("Vulnerability"),
#description = "Manages vulnerability indicators",
restricted = True,
module_type = 10,
)),
])
| mit | -5,644,412,904,426,701,000 | 37.11982 | 189 | 0.511329 | false |
liangdebin/tor | handlers/base.py | 1 | 2310 | import json
import tornado.web
import logging
logger = logging.getLogger('boilerplate.' + __name__)
class BaseHandler(tornado.web.RequestHandler):
"""A class to collect common handler methods - all other handlers should
subclass this one.
"""
def load_json(self):
"""Load JSON from the request body and store them in
self.request.arguments, like Tornado does by default for POSTed form
parameters.
If JSON cannot be decoded, raises an HTTPError with status 400.
"""
try:
self.request.arguments = json.loads(self.request.body)
except ValueError:
msg = "Could not decode JSON: %s" % self.request.body
logger.debug(msg)
raise tornado.web.HTTPError(400, msg)
def get_json_argument(self, name, default=None):
"""Find and return the argument with key 'name' from JSON request data.
Similar to Tornado's get_argument() method.
"""
if default is None:
default = self._ARG_DEFAULT
if not self.request.arguments:
self.load_json()
if name not in self.request.arguments:
if default is self._ARG_DEFAULT:
msg = "Missing argument '%s'" % name
logger.debug(msg)
raise tornado.web.HTTPError(400, msg)
logger.debug("Returning default argument %s, as we couldn't find "
"'%s' in %s" % (default, name, self.request.arguments))
return default
arg = self.request.arguments[name]
logger.debug("Found '%s': %s in JSON arguments" % (name, arg))
return arg
def write_error(self, status_code, **kwargs):
self.write("Gosh darnit, user! You caused a %d error." % status_code)
        self.write("<br/>")
        self.write("%s" % (kwargs,))
# self.write(json_encode(kwargs))
def prepare(self):
        self.set_header("Power by", "blueblue")
# self.write("Gosh darnit, user! ")
pass
def on_finish(self):
pass
def get(self):
self.req()
def post(self):
self.req()
def req(self):
# self.ROOT = settings.ROOT
# self.MEDIA_ROOT = settings.MEDIA_ROOT
# self.TEMPLATE_ROOT = settings.MEDIA_ROOT
pass
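
# Usage sketch (illustrative only, not part of the original project): a
# subclass that reads a JSON body argument via get_json_argument() and
# echoes it back as JSON.
class EchoHandler(BaseHandler):
    def post(self):
        name = self.get_json_argument("name", default="anonymous")
        self.write({"hello": name})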
| mit | -580,859,130,668,034,600 | 33.477612 | 79 | 0.58658 | false |
WhatDo/FlowFairy | examples/sine_fix/dilated3.py | 1 | 2514 | import tensorflow as tf
import tensorflow.contrib.slim as slim
from flowfairy.conf import settings
from util import lrelu, conv2d, maxpool2d, embedding, avgpool2d, GLU, causal_GLU
from functools import partial
import ops
discrete_class = settings.DISCRETE_CLASS
batch_size = settings.BATCH_SIZE
samplerate = sr = settings.SAMPLERATE
embedding_size = settings.EMBEDDING_SIZE
num_classes = settings.CLASS_COUNT
def broadcast(l, emb):
sh = l.get_shape().as_list()[1]
emb = emb[:, None, None, :]
emb = tf.tile(emb, (1,sh,1,1))
return tf.concat([l, emb], 3)
# Create model
def conv_net(x, cls, dropout, is_training=False):
xs = tf.expand_dims(x, -1)
xs = tf.expand_dims(xs, -1)
conv1 = causal_GLU(xs, 4, [128, 1], scope='conv1_1', normalizer_fn=slim.batch_norm, normalizer_params={'is_training': is_training, 'decay': 0.9})
print('conv1', conv1)
conv1_d1 = GLU(conv1, 4, [128, 1], scope='conv1_d1')
print('conv1_d1 ', conv1_d1)
# Parallel
conv1_d2 = GLU(conv1, 4, [128, 1], rate=2, scope='conv1_d2')
print('conv1_d2 ', conv1_d2)
conv1_d4 = GLU(conv1, 4, [128, 1], rate=4, scope='conv1_d4')
print('conv1_d4 ', conv1_d4)
conv1 = tf.concat([conv1_d1, conv1_d2, conv1_d4], 3)
print('conv1_concat', conv1)
#conv1 = GLU(conv1, 4, [256, 1], scope='conv1_2')
#with tf.name_scope('embedding'):
#convblock 2
conv2 = GLU(conv1, 8, [128, 1], scope='conv2_1')
conv2 = GLU(conv2, 8, [128, 1], scope='conv2_2')
conv2 = slim.max_pool2d(conv2, [2,1])
print('conv2: ', conv2)
with tf.variable_scope('embedding'):
emb1 = embedding(cls, embedding_size, num_classes)
embedded = broadcast(conv2, emb1)
print('embedded:', embedded)
#convblock 3
conv3 = GLU(embedded, 16, [128, 1], scope='conv3_1')
conv3 = GLU(conv3, 16, [128, 1], scope='conv3_2')
print('conv3: ', conv3)
#convblock 4
conv4 = tf.depth_to_space(conv3, 4) #upconv
print('d2sp: ', conv4)
conv4 = tf.reshape(conv4, shape=[-1, sr, 1, 8]) # reshape upconvolution to have proper shape
conv4 = GLU(conv4, 16, [128, 1], scope='conv4_1')
#convblock 5
    conv4 = tf.concat([conv4, conv1], 3)  # <- U-Net-like skip connection: concat the first block's features with the last
conv4 = GLU(conv4, 16, [128, 1], scope='conv4_2')
print('conv4: ', conv4)
conv5 = GLU(conv4, discrete_class, [2,1], scope='conv5')
print('conv5: ', conv5)
#out
out = tf.reshape(conv5, [-1, sr, discrete_class])
print('out: ', out)
return out
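
# Shape sketch (assumptions, for illustration only): x is expected to be a
# float32 batch of raw samples [batch_size, samplerate] and cls an int32
# class id per example [batch_size]; conv_net then returns per-sample
# logits of shape [batch_size, samplerate, discrete_class], e.g.:
#
#   x = tf.placeholder(tf.float32, [batch_size, samplerate])
#   cls = tf.placeholder(tf.int32, [batch_size])
#   logits = conv_net(x, cls, dropout=1.0, is_training=True)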
| mit | 379,616,175,697,035,800 | 29.289157 | 149 | 0.624503 | false |
anthill-services/anthill-common | anthill/common/social/google.py | 1 | 5627 |
import tornado.httpclient
import ujson
import jwt
import abc
from urllib import parse
from .. import admin as a
from .. social import SocialNetworkAPI, APIError, AuthResponse, SocialPrivateKey
class GoogleAPI(SocialNetworkAPI, metaclass=abc.ABCMeta):
GOOGLE_OAUTH = "https://www.googleapis.com/oauth2/"
NAME = "google"
def __init__(self, cache):
super(GoogleAPI, self).__init__(GoogleAPI.NAME, cache)
# noinspection PyMethodMayBeStatic
def __parse_friend__(self, friend):
try:
return {
"id": friend["id"],
"avatar": friend["image"]["url"],
"profile": friend["url"],
"display_name": friend["displayName"]
}
except KeyError:
return None
async def api_auth(self, gamespace, code, redirect_uri):
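        # OAuth2 authorization-code exchange: swap the one-time "code" for an
        # access token (and, if granted, a refresh token), then decode the
        # id_token JWT without verification just to read the stable Google
        # user id ("sub") for the username.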
private_key = await self.get_private_key(gamespace)
fields = {
"code": code,
"client_id": private_key.app_id,
"client_secret": private_key.app_secret,
"redirect_uri": redirect_uri,
"grant_type": "authorization_code",
"access_type": "offline"
}
try:
response = await self.api_post("token", fields)
except tornado.httpclient.HTTPError as e:
raise APIError(
e.code,
e.response.body if hasattr(e.response, "body") else str(e))
else:
payload = ujson.loads(response.body)
refresh_token = payload.get("refresh_token", None)
access_token = payload["access_token"]
expires_in = payload["expires_in"]
id_token = payload["id_token"]
user_info = jwt.decode(id_token, verify=False)
username = user_info["sub"]
result = AuthResponse(
access_token=access_token,
expires_in=expires_in,
refresh_token=refresh_token,
username=username,
import_social=True)
return result
async def api_get(self, operation, fields, v="v4", **kwargs):
fields.update(**kwargs)
result = await self.client.fetch(
GoogleAPI.GOOGLE_OAUTH + v + "/" + operation + "?" +
parse.urlencode(fields))
return result
async def api_get_user_info(self, access_token=None):
try:
response = await self.api_get(
"userinfo",
{},
v="v2",
access_token=access_token)
except tornado.httpclient.HTTPError as e:
raise APIError(e.code, e.response.body)
else:
data = ujson.loads(response.body)
return GoogleAPI.process_user_info(data)
async def api_post(self, operation, fields, v="v4", **kwargs):
fields.update(**kwargs)
result = await self.client.fetch(
GoogleAPI.GOOGLE_OAUTH + v + "/" + operation,
method="POST",
body=parse.urlencode(fields))
return result
async def api_refresh_token(self, refresh_token, gamespace):
private_key = await self.get_private_key(gamespace)
try:
response = await self.api_post("token", {
"client_id": private_key.app_id,
"client_secret": private_key.app_secret,
"refresh_token": refresh_token,
"grant_type": "refresh_token"
})
except tornado.httpclient.HTTPError as e:
raise APIError(e.code, e.response.body)
else:
data = ujson.loads(response.body)
return data
async def get(self, url, headers=None, **kwargs):
result = await self.client.fetch(
url + "?" + parse.urlencode(kwargs),
headers=headers)
return result
@staticmethod
def process_user_info(data):
return {
"name": data["name"],
"avatar": data["picture"],
"language": data["locale"],
"email": data["email"]
}
def has_private_key(self):
return True
def new_private_key(self, data):
return GooglePrivateKey(data)
class GooglePrivateKey(SocialPrivateKey):
def __init__(self, key):
super(GooglePrivateKey, self).__init__(key)
self.app_secret = self.data["web"]["client_secret"] if self.data else None
self.app_id = self.data["web"]["client_id"] if self.data else None
def get_app_id(self):
return self.app_id
def dump(self):
return {
"web": {
"client_secret": self.app_secret,
"client_id": self.app_id
}
}
def has_ui(self):
return True
def get(self):
return {
"app_secret": self.app_secret,
"app_id": self.app_id
}
def render(self):
return {
"app_id": a.field(
"Client ID", "text", "primary", "non-empty",
order=1,
description="Client ID from Google's project Credentials, "
"see <a href=\"https://console.developers.google.com/apis/credentials\">Google "
"Credentials</a>"),
"app_secret": a.field(
"Client Secret", "text", "primary", "non-empty",
order=2,
description="Same as above, but called \"Client Secret\"")
}
def update(self, app_secret, app_id, **ignored):
self.app_secret = app_secret
self.app_id = app_id
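
# Usage sketch (illustrative only: the gamespace id, authorization code and
# redirect URI are placeholders, and reading the token as an attribute of
# AuthResponse is an assumption): exchange the code returned by Google's
# consent screen for tokens, then fetch the user's profile.
#
# async def sign_in(api: GoogleAPI):
#     auth = await api.api_auth(gamespace=1,
#                               code="<code from consent screen>",
#                               redirect_uri="https://example.com/callback")
#     return await api.api_get_user_info(access_token=auth.access_token)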
| mit | -8,178,881,027,545,703,000 | 28.772487 | 108 | 0.53421 | false |