text | meta
---|---|
"""
Test for io_services_test_stubs.py
"""
try:
import asyncio
except ImportError:
asyncio = None
import sys
import threading
import unittest
import tornado.ioloop
import twisted.internet.reactor
from pika.adapters import select_connection
from tests.stubs.io_services_test_stubs import IOServicesTestStubs
# Suppress invalid-name, since our test names are descriptive and quite long
# pylint: disable=C0103
# Suppress missing-docstring to allow test method names to be printed by our
# test runner
# pylint: disable=C0111
# Tornado does some magic that substitutes the class dynamically
_TORNADO_IO_LOOP = tornado.ioloop.IOLoop()
_TORNADO_IOLOOP_CLASS = _TORNADO_IO_LOOP.__class__
_TORNADO_IO_LOOP.close()
del _TORNADO_IO_LOOP
_SUPPORTED_LOOP_CLASSES = {
select_connection.IOLoop,
_TORNADO_IOLOOP_CLASS,
}
if asyncio is not None:
if sys.platform == 'win32':
asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())
_SUPPORTED_LOOP_CLASSES.add(asyncio.get_event_loop().__class__)
class TestStartCalledFromOtherThreadAndWithVaryingNativeLoops(
unittest.TestCase,
IOServicesTestStubs):
_native_loop_classes = None
@classmethod
def setUpClass(cls):
cls._native_loop_classes = set()
# AssertionError: Expected these 3 native I/O loop classes from IOServicesTestStubs:
# {<class 'asyncio.windows_events.ProactorEventLoop'>, <class 'tornado.platform.asyncio.AsyncIOLoop'>, <class 'pika.adapters.select_connection.IOLoop'>}
# but got these 3:
# {<class 'asyncio.windows_events._WindowsSelectorEventLoop'>, <class 'tornado.platform.asyncio.AsyncIOLoop'>, <class 'pika.adapters.select_connection.IOLoop'>}
@classmethod
def tearDownClass(cls):
# Now check against what was made available to us by
# IOServicesTestStubs
if cls._native_loop_classes != _SUPPORTED_LOOP_CLASSES:
raise AssertionError(
'Expected these {} native I/O loop classes from '
'IOServicesTestStubs: {!r}, but got these {}: {!r}'.format(
len(_SUPPORTED_LOOP_CLASSES),
_SUPPORTED_LOOP_CLASSES,
len(cls._native_loop_classes),
cls._native_loop_classes))
def setUp(self):
self._runner_thread_id = threading.current_thread().ident
def start(self):
nbio = self.create_nbio()
native_loop = nbio.get_native_ioloop()
self.assertIsNotNone(self._native_loop)
self.assertIs(native_loop, self._native_loop)
self._native_loop_classes.add(native_loop.__class__)
# Check that we're called from a different thread than the one that
# set up this test.
self.assertNotEqual(threading.current_thread().ident,
self._runner_thread_id)
# And make sure the loop actually works using this rudimentary test
nbio.add_callback_threadsafe(nbio.stop)
nbio.run()
| {
"content_hash": "d2240351984c4b4a78c6ffb1dafc3cbb",
"timestamp": "",
"source": "github",
"line_count": 93,
"max_line_length": 165,
"avg_line_length": 32.32258064516129,
"alnum_prop": 0.6783100465735197,
"repo_name": "pika/pika",
"id": "70857a1a68a3110fa6dc28009b72841e70b28fd5",
"size": "3006",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "tests/unit/io_services_test_stubs_test.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "1278693"
}
],
"symlink_target": ""
} |
from operator import attrgetter
from random import shuffle
class Point:
def __init__(self, x, y):
self.x, self.y = x, y
def sort1(points):
points.sort(key = lambda p: p.x)
def sort2(points):
points.sort(key = attrgetter("x"))
if __name__ == "__main__":
from timeit import Timer
points1 = [Point(x, 2 * x) for x in range(100)]
points2 = points1[:]
num_times = 10000
t1 = Timer("sort1(points1)", "from __main__ import sort1, points1")
print t1.timeit(num_times)
t2 = Timer("sort2(points2)", "from __main__ import sort2, points2")
print t2.timeit(num_times)
| {
"content_hash": "d6f8c4ab997f86db00d5a8a00052c664",
"timestamp": "",
"source": "github",
"line_count": 26,
"max_line_length": 71,
"avg_line_length": 23.653846153846153,
"alnum_prop": 0.616260162601626,
"repo_name": "tebeka/pythonwise",
"id": "375e1206d45ddeca79aaf6881dded3b3c1a405f4",
"size": "638",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "attr.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "AppleScript",
"bytes": "419"
},
{
"name": "Assembly",
"bytes": "130"
},
{
"name": "Awk",
"bytes": "94"
},
{
"name": "C",
"bytes": "3348"
},
{
"name": "CSS",
"bytes": "7156"
},
{
"name": "Dockerfile",
"bytes": "691"
},
{
"name": "Go",
"bytes": "17160"
},
{
"name": "HTML",
"bytes": "28603"
},
{
"name": "JavaScript",
"bytes": "75641"
},
{
"name": "Jupyter Notebook",
"bytes": "542450"
},
{
"name": "Makefile",
"bytes": "2242"
},
{
"name": "Mako",
"bytes": "795"
},
{
"name": "Python",
"bytes": "1039734"
},
{
"name": "Shell",
"bytes": "23126"
},
{
"name": "TeX",
"bytes": "257"
},
{
"name": "Vim script",
"bytes": "785"
}
],
"symlink_target": ""
} |
_A=None
from ._compat import filename_to_ui,get_text_stderr
from .utils import echo
def _join_param_hints(param_hint):
A=param_hint
if isinstance(A,(tuple,list)):return ' / '.join((repr(B)for B in A))
return A
class ClickException(Exception):
exit_code=1
def __init__(B,message):A=message;super().__init__(A);B.message=A
def format_message(A):return A.message
def __str__(A):return A.message
def show(B,file=_A):
A=file
if A is _A:A=get_text_stderr()
echo(f"Error: {B.format_message()}",file=A)
class UsageError(ClickException):
exit_code=2
def __init__(A,message,ctx=_A):ClickException.__init__(A,message);A.ctx=ctx;A.cmd=A.ctx.command if A.ctx else _A
def show(A,file=_A):
B=file
if B is _A:B=get_text_stderr()
C=_A;D=''
if A.cmd is not _A and A.cmd.get_help_option(A.ctx)is not _A:D=f"Try '{A.ctx.command_path} {A.ctx.help_option_names[0]}' for help.\n"
if A.ctx is not _A:C=A.ctx.color;echo(f"{A.ctx.get_usage()}\n{D}",file=B,color=C)
echo(f"Error: {A.format_message()}",file=B,color=C)
class BadParameter(UsageError):
def __init__(A,message,ctx=_A,param=_A,param_hint=_A):UsageError.__init__(A,message,ctx);A.param=param;A.param_hint=param_hint
def format_message(A):
if A.param_hint is not _A:B=A.param_hint
elif A.param is not _A:B=A.param.get_error_hint(A.ctx)
else:return f"Invalid value: {A.message}"
B=_join_param_hints(B);return f"Invalid value for {B}: {A.message}"
class MissingParameter(BadParameter):
def __init__(A,message=_A,ctx=_A,param=_A,param_hint=_A,param_type=_A):BadParameter.__init__(A,message,ctx,param,param_hint);A.param_type=param_type
def format_message(A):
if A.param_hint is not _A:B=A.param_hint
elif A.param is not _A:B=A.param.get_error_hint(A.ctx)
else:B=_A
B=_join_param_hints(B);D=A.param_type
if D is _A and A.param is not _A:D=A.param.param_type_name
C=A.message
if A.param is not _A:
E=A.param.type.get_missing_message(A.param)
if E:
if C:C+=f". {E}"
else:C=E
F=f" {B}"if B else'';return f"Missing {D}{F}.{' 'if C else''}{C or''}"
def __str__(A):
if A.message is _A:B=A.param.name if A.param else _A;return f"missing parameter: {B}"
else:return A.message
class NoSuchOption(UsageError):
def __init__(A,option_name,message=_A,possibilities=_A,ctx=_A):
C=option_name;B=message
if B is _A:B=f"no such option: {C}"
UsageError.__init__(A,B,ctx);A.option_name=C;A.possibilities=possibilities
def format_message(A):
B=[A.message]
if A.possibilities:
if len(A.possibilities)==1:B.append(f"Did you mean {A.possibilities[0]}?")
else:C=sorted(A.possibilities);B.append(f"(Possible options: {', '.join(C)})")
return ' '.join(B)
class BadOptionUsage(UsageError):
def __init__(A,option_name,message,ctx=_A):UsageError.__init__(A,message,ctx);A.option_name=option_name
class BadArgumentUsage(UsageError):
def __init__(A,message,ctx=_A):UsageError.__init__(A,message,ctx)
class FileError(ClickException):
def __init__(A,filename,hint=_A):
C=filename;B=hint;D=filename_to_ui(C)
if B is _A:B='unknown error'
ClickException.__init__(A,B);A.ui_filename=D;A.filename=C
def format_message(A):return f"Could not open file {A.ui_filename}: {A.message}"
class Abort(RuntimeError):0
class Exit(RuntimeError):
__slots__='exit_code',
def __init__(A,code=0):A.exit_code=code | {
"content_hash": "d827c2318c0d34d960665fcf6bddb394",
"timestamp": "",
"source": "github",
"line_count": 76,
"max_line_length": 149,
"avg_line_length": 43.38157894736842,
"alnum_prop": 0.6803154382772217,
"repo_name": "rochacbruno/dynaconf",
"id": "6cc018904802873a4aa0a3bcfb456fea42ca3e6c",
"size": "3297",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "dynaconf/vendor/click/exceptions.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "2867"
},
{
"name": "Makefile",
"bytes": "11505"
},
{
"name": "Python",
"bytes": "1438471"
},
{
"name": "Shell",
"bytes": "14740"
}
],
"symlink_target": ""
} |
"""Utility to generate the header files for BOOST_METAPARSE_STRING"""
# Copyright Abel Sinkovics ([email protected]) 2016.
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or copy at
# http://www.boost.org/LICENSE_1_0.txt)
import argparse
import math
import os
import sys
VERSION = 1
class Namespace(object):
"""Generate namespace definition"""
def __init__(self, out_f, names):
self.out_f = out_f
self.names = names
def begin(self):
"""Generate the beginning part"""
self.out_f.write('\n')
for depth, name in enumerate(self.names):
self.out_f.write(
'{0}namespace {1}\n{0}{{\n'.format(self.prefix(depth), name)
)
def end(self):
"""Generate the closing part"""
for depth in xrange(len(self.names) - 1, -1, -1):
self.out_f.write('{0}}}\n'.format(self.prefix(depth)))
def prefix(self, depth=None):
"""Returns the prefix of a given depth. Returns the prefix code inside
the namespace should use when depth is None."""
if depth is None:
depth = len(self.names)
return ' ' * depth
def __enter__(self):
self.begin()
return self
def __exit__(self, typ, value, traceback):
self.end()
def write_autogen_info(out_f):
"""Write the comment about the file being autogenerated"""
out_f.write(
'\n'
'// This is an automatically generated header file.\n'
'// Generated with the tools/string_headers.py utility of\n'
'// Boost.Metaparse\n'
)
class IncludeGuard(object):
"""Generate include guards"""
def __init__(self, out_f, name):
self.out_f = out_f
self.name = name.upper()
def begin(self):
"""Generate the beginning part"""
name = 'BOOST_METAPARSE_V1_IMPL_{0}_HPP'.format(self.name)
self.out_f.write('#ifndef {0}\n#define {0}\n'.format(name))
write_autogen_info(self.out_f)
def end(self):
"""Generate the closing part"""
self.out_f.write('\n#endif\n')
def __enter__(self):
self.begin()
return self
def __exit__(self, typ, value, traceback):
self.end()
def macro_name(name):
"""Generate the full macro name"""
return 'BOOST_METAPARSE_V{0}_{1}'.format(VERSION, name)
def define_macro(out_f, (name, args, body), undefine=False, check=True):
"""Generate a macro definition or undefinition"""
if undefine:
out_f.write(
'#undef {0}\n'
.format(macro_name(name))
)
else:
if len(args) > 0:
arg_list = '({0})'.format(', '.join(args))
else:
arg_list = ''
if check:
out_f.write(
'#ifdef {0}\n'
'# error {0} already defined.\n'
'#endif\n'
.format(macro_name(name))
)
out_f.write(
'#define {0}{1} {2}\n'.format(macro_name(name), arg_list, body)
)
def filename(out_dir, name, undefine=False):
"""Generate the filename"""
if undefine:
prefix = 'undef_'
else:
prefix = ''
return os.path.join(out_dir, '{0}{1}.hpp'.format(prefix, name.lower()))
def length_limits(max_length_limit, length_limit_step):
"""Generates the length limits"""
string_len = len(str(max_length_limit))
return [
str(i).zfill(string_len) for i in
xrange(
length_limit_step,
max_length_limit + length_limit_step - 1,
length_limit_step
)
]
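# For illustration: length_limits(512, 128) returns ['128', '256', '384', '512'] --
# every multiple of the step up to the maximum, zero-padded to the width of
# str(max_length_limit).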
def unique_names(count):
"""Generate count unique variable name"""
return ('C{0}'.format(i) for i in xrange(0, count))
def generate_take(out_f, steps, line_prefix):
"""Generate the take function"""
out_f.write(
'{0}constexpr inline int take(int n_)\n'
'{0}{{\n'
'{0} return {1} 0 {2};\n'
'{0}}}\n'
'\n'.format(
line_prefix,
''.join('n_ >= {0} ? {0} : ('.format(s) for s in steps),
')' * len(steps)
)
)
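# For illustration, generate_take(out_f, [4, 2, 1], '') emits (modulo whitespace):
#   constexpr inline int take(int n_)
#   { return n_ >= 4 ? 4 : (n_ >= 2 ? 2 : (n_ >= 1 ? 1 : ( 0 ))); }
# i.e. take() rounds n_ down to the largest step not exceeding it (or 0).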
def generate_make_string(out_f, max_step):
"""Generate the make_string template"""
steps = [2 ** n for n in xrange(int(math.log(max_step, 2)), -1, -1)]
with Namespace(
out_f,
['boost', 'metaparse', 'v{0}'.format(VERSION), 'impl']
) as nsp:
generate_take(out_f, steps, nsp.prefix())
out_f.write(
'{0}template <int LenNow, int LenRemaining, char... Cs>\n'
'{0}struct make_string;\n'
'\n'
'{0}template <char... Cs>'
' struct make_string<0, 0, Cs...> : string<> {{}};\n'
.format(nsp.prefix())
)
disable_sun = False
for i in reversed(steps):
if i > 64 and not disable_sun:
out_f.write('#ifndef __SUNPRO_CC\n')
disable_sun = True
out_f.write(
'{0}template <int LenRemaining,{1}char... Cs>'
' struct make_string<{2},LenRemaining,{3}Cs...> :'
' concat<string<{4}>,'
' typename make_string<take(LenRemaining),'
'LenRemaining-take(LenRemaining),Cs...>::type> {{}};\n'
.format(
nsp.prefix(),
''.join('char {0},'.format(n) for n in unique_names(i)),
i,
''.join('{0},'.format(n) for n in unique_names(i)),
','.join(unique_names(i))
)
)
if disable_sun:
out_f.write('#endif\n')
def generate_string(out_dir, limits):
"""Generate string.hpp"""
max_limit = max((int(v) for v in limits))
with open(filename(out_dir, 'string'), 'wb') as out_f:
with IncludeGuard(out_f, ''):
out_f.write(
'\n'
'#include <boost/metaparse/v{0}/impl/concat.hpp>\n'
'#include <boost/preprocessor/cat.hpp>\n'
.format(VERSION)
)
generate_make_string(out_f, 512)
out_f.write(
'\n'
'#ifndef BOOST_METAPARSE_LIMIT_STRING_SIZE\n'
'# error BOOST_METAPARSE_LIMIT_STRING_SIZE not defined\n'
'#endif\n'
'\n'
'#if BOOST_METAPARSE_LIMIT_STRING_SIZE > {0}\n'
'# error BOOST_METAPARSE_LIMIT_STRING_SIZE is greater than'
' {0}. To increase the limit run tools/string_headers.py of'
' Boost.Metaparse against your Boost headers.\n'
'#endif\n'
'\n'
.format(max_limit)
)
define_macro(out_f, (
'STRING',
['s'],
'{0}::make_string< '
'{0}::take(sizeof(s)-1), sizeof(s)-1-{0}::take(sizeof(s)-1),'
'BOOST_PP_CAT({1}, BOOST_METAPARSE_LIMIT_STRING_SIZE)(s)'
'>::type'
.format(
'::boost::metaparse::v{0}::impl'.format(VERSION),
macro_name('I')
)
))
out_f.write('\n')
for limit in xrange(0, max_limit + 1):
out_f.write(
'#define {0} {1}\n'
.format(
macro_name('I{0}'.format(limit)),
macro_name('INDEX_STR{0}'.format(
min(int(l) for l in limits if int(l) >= limit)
))
)
)
out_f.write('\n')
prev_macro = None
prev_limit = 0
for length_limit in (int(l) for l in limits):
this_macro = macro_name('INDEX_STR{0}'.format(length_limit))
out_f.write(
'#define {0}(s) {1}{2}\n'
.format(
this_macro,
'{0}(s),'.format(prev_macro) if prev_macro else '',
','.join(
'{0}((s), {1})'
.format(macro_name('STRING_AT'), i)
for i in xrange(prev_limit, length_limit)
)
)
)
prev_macro = this_macro
prev_limit = length_limit
def positive_integer(value):
"""Throws when the argument is not a positive integer"""
val = int(value)
if val > 0:
return val
else:
raise argparse.ArgumentTypeError("A positive number is expected")
def existing_path(value):
"""Throws when the path does not exist"""
if os.path.exists(value):
return value
else:
raise argparse.ArgumentTypeError("Path {0} not found".format(value))
def main():
"""The main function of the script"""
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument(
'--boost_dir',
required=False,
type=existing_path,
help='The path to the include/boost directory of Metaparse'
)
parser.add_argument(
'--max_length_limit',
required=False,
default=2048,
type=positive_integer,
help='The maximum supported length limit'
)
parser.add_argument(
'--length_limit_step',
required=False,
default=128,
type=positive_integer,
help='The longest step at which headers are generated'
)
args = parser.parse_args()
if args.boost_dir is None:
tools_path = os.path.dirname(os.path.abspath(__file__))
boost_dir = os.path.join(
os.path.dirname(tools_path),
'include',
'boost'
)
else:
boost_dir = args.boost_dir
if args.max_length_limit < 1:
sys.stderr.write('Invalid maximum length limit')
sys.exit(-1)
generate_string(
os.path.join(boost_dir, 'metaparse', 'v{0}'.format(VERSION), 'impl'),
length_limits(args.max_length_limit, args.length_limit_step)
)
if __name__ == '__main__':
main()
| {
"content_hash": "7de114809af33be820fe4b00d4c6d49f",
"timestamp": "",
"source": "github",
"line_count": 341,
"max_line_length": 78,
"avg_line_length": 30.00293255131965,
"alnum_prop": 0.49819177011044863,
"repo_name": "jmanday/Master",
"id": "2676f3f21e92b443edde8bafda3dcc8124cf79c0",
"size": "10249",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "TFM/library/boost_1_63_0/libs/metaparse/tools/string_headers.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "309067"
},
{
"name": "Batchfile",
"bytes": "71697"
},
{
"name": "C",
"bytes": "3962470"
},
{
"name": "C#",
"bytes": "125762"
},
{
"name": "C++",
"bytes": "216284659"
},
{
"name": "CMake",
"bytes": "1594049"
},
{
"name": "CSS",
"bytes": "1737798"
},
{
"name": "CWeb",
"bytes": "174166"
},
{
"name": "Clojure",
"bytes": "1487"
},
{
"name": "Cuda",
"bytes": "1741779"
},
{
"name": "DIGITAL Command Language",
"bytes": "6246"
},
{
"name": "Fortran",
"bytes": "1856"
},
{
"name": "HLSL",
"bytes": "3314"
},
{
"name": "HTML",
"bytes": "192312054"
},
{
"name": "IDL",
"bytes": "28"
},
{
"name": "Java",
"bytes": "1111092"
},
{
"name": "JavaScript",
"bytes": "1906363"
},
{
"name": "Lex",
"bytes": "1231"
},
{
"name": "M4",
"bytes": "29689"
},
{
"name": "Makefile",
"bytes": "8410569"
},
{
"name": "Max",
"bytes": "36857"
},
{
"name": "Objective-C",
"bytes": "12659"
},
{
"name": "Objective-C++",
"bytes": "211927"
},
{
"name": "PHP",
"bytes": "140802"
},
{
"name": "Pascal",
"bytes": "26079"
},
{
"name": "Perl",
"bytes": "54411"
},
{
"name": "PowerShell",
"bytes": "16406"
},
{
"name": "Python",
"bytes": "2808348"
},
{
"name": "QML",
"bytes": "593"
},
{
"name": "QMake",
"bytes": "16692"
},
{
"name": "R",
"bytes": "69855"
},
{
"name": "Rebol",
"bytes": "354"
},
{
"name": "Roff",
"bytes": "5189"
},
{
"name": "Ruby",
"bytes": "9652"
},
{
"name": "Scala",
"bytes": "5683"
},
{
"name": "Shell",
"bytes": "416161"
},
{
"name": "Tcl",
"bytes": "1172"
},
{
"name": "TeX",
"bytes": "1096187"
},
{
"name": "XSLT",
"bytes": "553585"
},
{
"name": "Yacc",
"bytes": "19623"
}
],
"symlink_target": ""
} |
import os
from south.db import db
from south.v2 import DataMigration
from django.conf import settings
from django.core.files import File
from django.db import models
from cmsplugin_gallery.models import Image
class Migration(DataMigration):
classes = (models.ImageField, models.FileField, )
fields = ('src', )
def resave_files(self, object):
for field in self.fields:
field_callable = getattr(object, field)
absfilepath = os.path.join(settings.MEDIA_ROOT, field_callable.name)
filename = os.path.basename(absfilepath)
f = File(open(absfilepath, 'rb'))  # binary mode: these are image/file fields
field_callable.save(filename, f)
f.close()
def forwards(self, orm):
objects = orm['cmsplugin_gallery.Image'].objects.all()
for object in objects:
self.resave_files(object)
backwards = forwards
models = {
'cms.cmsplugin': {
'Meta': {'object_name': 'CMSPlugin'},
'creation_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'max_length': '15', 'db_index': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.CMSPlugin']", 'null': 'True', 'blank': 'True'}),
'placeholder': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.Placeholder']", 'null': 'True'}),
'plugin_type': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'}),
'position': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'})
},
'cms.placeholder': {
'Meta': {'object_name': 'Placeholder'},
'default_width': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'slot': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'})
},
'cmsplugin_gallery.galleryplugin': {
'Meta': {'object_name': 'GalleryPlugin', 'db_table': "'cmsplugin_galleryplugin'", '_ormbases': ['cms.CMSPlugin']},
'cmsplugin_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['cms.CMSPlugin']", 'unique': 'True', 'primary_key': 'True'}),
'template': ('django.db.models.fields.CharField', [], {'default': "'cmsplugin_gallery/gallery.html'", 'max_length': '255'})
},
'cmsplugin_gallery.image': {
'Meta': {'ordering': "('inline_ordering_position',)", 'object_name': 'Image'},
'alt': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'gallery': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cmsplugin_gallery.GalleryPlugin']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'inline_ordering_position': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'src': ('django.db.models.fields.files.ImageField', [], {'max_length': '100'}),
'src_height': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True'}),
'src_width': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'})
}
}
complete_apps = ['cmsplugin_gallery']
| {
"content_hash": "5e2bc7991cb686e0acbbe51badcd00e2",
"timestamp": "",
"source": "github",
"line_count": 71,
"max_line_length": 156,
"avg_line_length": 56.901408450704224,
"alnum_prop": 0.5809405940594059,
"repo_name": "heckfi/cmsplugin_gallery_filer",
"id": "18850cd2d6841f7564a0cc91c3f590f47f497669",
"size": "4076",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "cmsplugin_gallery/migrations/0003_move_images_to_media_path.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "85233"
},
{
"name": "Shell",
"bytes": "497"
}
],
"symlink_target": ""
} |
"""Radio auto updater."""
# Original code by Mikie (https://github.com/Mikie-Ghost/)
import radio
from radio import RUNDIR, logger, DATA_DIR
import urllib2, tarfile, os, shutil, platform, subprocess, re
from flask import json
# define master repo as user and branch in github repo
user = 'mrkipling'
branch = 'master'
def joinRundir(path):
"""Join rundir with 'path'"""
return os.path.join(RUNDIR, path)
# file containing the currently installed version hash
version_file = os.path.join(DATA_DIR, 'Version.txt')
def writeVersion(hash):
"""Write hash to version file"""
f = open(version_file, 'w')
f.write(hash)
f.close()
def latestCommit():
"""Get SHA hash from latest commit"""
url = 'https://api.github.com/repos/%s/radio/commits/%s' % (user, branch)
result = urllib2.urlopen(url).read()
git = json.JSONDecoder().decode(result)
return git['sha']
def commitsBehind():
"""Calculate how many commits are missing"""
url = 'https://api.github.com/repos/%s/radio/compare/%s...%s' % (user, radio.CURRENT_COMMIT, radio.LATEST_COMMIT)
result = urllib2.urlopen(url).read()
git = json.JSONDecoder().decode(result)
return git['total_commits']
def checkGithub():
"""Check github repo for updates"""
logger.log('UPDATER :: Checking for updates', 'INFO')
try:
radio.LATEST_COMMIT = latestCommit()
if radio.FIRST_RUN:
radio.CURRENT_COMMIT = radio.LATEST_COMMIT
writeVersion(radio.CURRENT_COMMIT)
except:
logger.log('UPDATER :: Could not get latest commit from github', 'WARNING')
if radio.CURRENT_COMMIT:
try:
radio.COMMITS_BEHIND = commitsBehind()
except:
logger.log('UPDATER :: Could not get commits behind from github', 'WARNING')
if radio.COMMITS_BEHIND >= 1:
logger.log('UPDATER :: Update available, you are %i commits behind' % radio.COMMITS_BEHIND, 'INFO')
radio.COMMITS_COMPARE_URL = 'https://github.com/%s/radio/compare/%s...%s' % (user, radio.CURRENT_COMMIT, radio.LATEST_COMMIT)
elif radio.COMMITS_BEHIND == 0:
logger.log('UPDATER :: Up to date', 'INFO')
elif radio.COMMITS_BEHIND == -1:
logger.log('UPDATER :: Unknown version. Please run the updater', 'INFO')
else:
logger.log('UPDATER :: Unknown version. Please run the updater', 'INFO')
return radio.COMMITS_BEHIND
def RemoveUpdateFiles():
"""Remove the downloaded new version"""
logger.log('UPDATER :: Removing update files', 'INFO')
tar_file = joinRundir('radio.tar.gz')
update_folder = joinRundir('radio-update')
try:
if os.path.exists(tar_file):
logger.log('UPDATER :: Removing %s' % tar_file, 'DEBUG')
os.remove(tar_file)
except:
logger.log('UPDATER :: Could not remove %s' % tar_file, 'WARNING')
try:
if os.path.exists(update_folder):
logger.log('UPDATER :: Removing %s' % update_folder, 'DEBUG')
shutil.rmtree(update_folder)
except:
logger.log('UPDATER :: Could not remove %s' % update_folder, 'WARNING')
return
def Update():
"""Update radio installation"""
if radio.USE_GIT:
update = gitUpdate()
if update == 'complete':
return True
else:
logger.log('Git update failed, attempting tarball update', 'INFO')
tar_file = joinRundir('radio.tar.gz')
update_folder = joinRundir('radio-update')
# Download repo
try:
logger.log('UPDATER :: Downloading update file to %s' % tar_file, 'DEBUG')
url = urllib2.urlopen('https://github.com/%s/radio/tarball/%s' % (user, branch))
f = open(tar_file, 'wb')
f.write(url.read())
f.close()
except:
logger.log('UPDATER :: Failed to download update file', 'WARNING')
RemoveUpdateFiles()
return False
# Write new hash to file
try:
logger.log('UPDATER :: Writing new hash to %s' % version_file, 'DEBUG')
writeVersion(radio.LATEST_COMMIT)
except:
logger.log('UPDATER :: Failed to write new hash to version file', 'WARNING')
RemoveUpdateFiles()
return False
# Extract to temp folder
try:
logger.log('UPDATER :: Extracting %s' % tar_file, 'DEBUG')
tar = tarfile.open(tar_file)
tar.extractall(update_folder)
tar.close()
except:
logger.log('Failed to extract update file', 'WARNING')
RemoveUpdateFiles()
return False
# Overwrite old files with new ones
root_src_dir = os.path.join(update_folder, '%s-radio-%s' % (user, radio.LATEST_COMMIT[:7]))
try:
logger.log('UPDATER :: Overwriting old files', 'DEBUG')
for src_dir, dirs, files in os.walk(root_src_dir):
dst_dir = src_dir.replace(root_src_dir, RUNDIR)
if not os.path.exists(dst_dir):
os.mkdir(dst_dir)
for file_ in files:
src_file = os.path.join(src_dir, file_)
dst_file = os.path.join(dst_dir, file_)
if os.path.exists(dst_file):
os.remove(dst_file)
shutil.move(src_file, dst_dir)
except:
logger.log('UPDATER :: Failed to overwrite old files', 'WARNING')
RemoveUpdateFiles()
return False
# Clean up
RemoveUpdateFiles()
radio.CURRENT_COMMIT = radio.LATEST_COMMIT
radio.COMMITS_BEHIND = 0
return True
def runGit(args):
"""Run git command with args as arguments"""
git_locations = ['git']
if platform.system().lower() == 'darwin':
git_locations.append('/usr/local/git/bin/git')
output = err = None
for cur_git in git_locations:
cmd = cur_git + ' ' + args
try:
logger.log('UPDATER :: Trying to execute: "' + cmd + '" with shell in ' + RUNDIR, 'DEBUG')
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, shell=True, cwd=RUNDIR)
output, err = p.communicate()
logger.log('UPDATER :: Git output: ' + output, 'DEBUG')
except OSError:
logger.log('UPDATER :: Command ' + cmd + ' didn\'t work, couldn\'t find git', 'WARNING')
continue
if 'not found' in output or "not recognized as an internal or external command" in output:
logger.log('UPDATER :: Unable to find git with command ' + cmd, 'WARNING')
output = None
elif 'fatal:' in output or err:
logger.log('UPDATER :: Git returned bad info. Are you sure this is a git installation?', 'WARNING')
output = None
elif output:
break
return (output, err)
def gitCurrentVersion():
"""Get version hash for local installation"""
output, err = runGit('rev-parse HEAD')
if not output:
logger.log('UPDATER :: Couldn\'t find latest installed version with git', 'WARNING')
radio.USE_GIT = False
return None
current_commit = output.strip()
if not re.match('^[a-z0-9]+$', current_commit):
logger.log('UPDATER :: Git output doesn\'t look like a hash, not using it', 'WARNING')
return None
writeVersion(current_commit)
return
def gitUpdate():
"""Update radio using git"""
output, err = runGit('pull origin %s' % branch)
if not output:
logger.log('Couldn\'t download latest version', 'ERROR')
radio.USE_GIT = False
return 'failed'
for line in output.split('\n'):
if 'Already up-to-date.' in line:
logger.log('UPDATER :: Already up to date', 'INFO')
logger.log('UPDATER :: Git output: ' + str(output), 'DEBUG')
return 'complete'
elif 'Aborting' in line:
logger.log('UPDATER :: Unable to update from git: ' + line, 'ERROR')
logger.log('UPDATER :: Output: ' + str(output), 'DEBUG')
radio.USE_GIT = False
return 'failed'
radio.CURRENT_COMMIT = radio.LATEST_COMMIT
writeVersion(radio.LATEST_COMMIT)
radio.COMMITS_BEHIND = 0
return 'complete'
| {
"content_hash": "0e92ddde7871b92e6b8a7aeae0d9fc05",
"timestamp": "",
"source": "github",
"line_count": 253,
"max_line_length": 137,
"avg_line_length": 32.21343873517787,
"alnum_prop": 0.6024539877300613,
"repo_name": "hephaestus9/Radio",
"id": "0f4a9fc959fc0db3fd29706c5f7894dda770adad",
"size": "8174",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "radio/updater.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "1449"
},
{
"name": "CSS",
"bytes": "337010"
},
{
"name": "JavaScript",
"bytes": "539084"
},
{
"name": "PHP",
"bytes": "701"
},
{
"name": "Python",
"bytes": "4900942"
}
],
"symlink_target": ""
} |
import theano
import theano.tensor as T
import numpy as np
from ..utils.theano_utils import shared_zeros, floatX, shared_scalar
from .. import activations, initializations
from theano.sandbox.rng_mrg import MRG_RandomStreams as RandomStreams
#srng = RandomStreams(seed=np.random.randint(10e6))
srng = RandomStreams()
import pdb
"""
# Core Layer Module: The key component
- get_output(): call get_input(), then compute the forward pass and return the layer output
- get_input(): call the previous layer's forward function; this takes care of the real theano graph construction
- connect(): linked-list like structure; set a pointer (called self.previous) to the previous layer
- get_params(): return the parameters for weight updates
Recursive calling: get_output() <- get_input() <- previous-layer.get_output()
You only need to call the last layer's get_output(); then every layer's forward pass will be called
"""
class Layer(object):
"""
abstract class of layer
"""
def __init__(self):
self.params = []
self.regs = []
def get_output(self,train=False):
raise NotImplementedError
def get_input(self,train=False):
"""
Key function to connect layers and compute forward-pass
"""
if hasattr(self, 'previous'):
return self.previous.get_output(train)
else:
return self.input
def get_params(self):
return self.params
def get_regs(self):
return self.regs
def get_param_vals(self):
"""
get layer parameter values (tensor -> numpy)
"""
param_vals = []
for p in self.params:
param_vals.append(p.get_value())
return param_vals
def set_param_vals(self, param_vals):
for (p, pval) in zip(self.params, param_vals):
if p.eval().shape != pval.shape:
raise Exception("[Error] in set_param_vals: input numpy params has different shape of model params")
p.set_value(floatX(pval))
def connect(self, layer):
self.previous = layer
class AffineLayer(Layer):
"""
Affine (fully connected) layer
"""
def __init__(self, nb_input, nb_output, init='normal',
activation='linear', reg_W=0.001, reg_b=0.,w_scale=1e-5):
super(AffineLayer, self).__init__()
self.init = initializations.get(init)
self.activation = activations.get(activation)
self.nb_input = nb_input
self.nb_output = nb_output
# this symbolic variable will be used if this is the first layer
self.input = T.matrix('input',dtype=theano.config.floatX)
self.W = self.init((self.nb_input, self.nb_output),w_scale)
self.b = shared_zeros((self.nb_output))
self.reg_W = shared_scalar(reg_W)
self.reg_b = shared_scalar(reg_b)
self.params = [self.W, self.b]
self.regs = [self.reg_W, self.reg_b]
# forward pass for the affine layer
def get_output(self,train=False):
X = self.get_input(train)
return self.activation(T.dot(X,self.W) + self.b)
# forward pass without activation
# This is used to get score before softmax
def get_output_score(self, train=False):
X = self.get_input(train)
return T.dot(X,self.W) + self.b
def get_config(self):
return {'name': self.__class__.__name__,
'nb_input': self.nb_input,
'nb_output': self.nb_output,
'init': self.init.__name__,
'activation': self.activation.__name__,
'reg_W': self.reg_W}
class Dropout(Layer):
"""
Dropout Layer
Reference: http://www.cs.toronto.edu/~rsalakhu/papers/srivastava14a.pdf
"""
def __init__(self, p, nb_input=None, uncertainty = False):
"""
p: floatX, fraction of units to drop; the higher this value, the more units are dropped
uncertainty: Yarin Gal's Gaussian process uncertainty estimation
Reference: http://mlg.eng.cam.ac.uk/yarin/blog_3d801aa532c1ce.html
"""
super(Dropout, self).__init__()
self.p = p
self.uncertainty = uncertainty
self.nb_input = nb_input
#self.nb_output = nb_output
def get_output(self, train=False):
X = self.get_input(train)
assert self.p >= 0
retain_prob = 1. - self.p
if train:
X *= srng.binomial(X.shape, p=retain_prob, dtype=theano.config.floatX) / retain_prob
elif not train and self.uncertainty:
X *= srng.binomial(X.shape, p=retain_prob, dtype=theano.config.floatX) / retain_prob
return X
def get_config(self):
return {"name":self.__class__.__name__,
"p":self.p}
| {
"content_hash": "e5d45ca438efdb80033d40ee9c333149",
"timestamp": "",
"source": "github",
"line_count": 148,
"max_line_length": 116,
"avg_line_length": 32.33108108108108,
"alnum_prop": 0.6033437826541275,
"repo_name": "shenxudeu/deuNN",
"id": "dae8e479d785e3986844a76c82aa83680d253b3f",
"size": "4785",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "deuNet/layers/core.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "70698"
},
{
"name": "Python",
"bytes": "207679"
}
],
"symlink_target": ""
} |
import sys
from nb.classifier import Classifier
from nb.trainer import Trainer
from support.loader import Loader
from support.writer import Writer
from support.utils import Utils
def main():
print "### NBAC - Naive Bayes Ads Classifier ###","\n"
# Check if the training set limit was given by argv
if len(sys.argv) != 2:
print "Usage: python nbac.py <training-set-limit>"
# exit
return -1
# Initiate Utilities
utils = Utils()
utils.startTimer()
# Initiate Loader
loader = Loader()
# Get the training set limit
limit = sys.argv[1]
# Load all data
data = loader.load(limit)
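# Each entry of `data` is expected to be (class_name, training_texts, test_texts);
# the indexing _class[0] / _class[1] / _class[2] below relies on that layout.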
# Check if data is invalid
if data is None:
print "Exiting..."
utils.stopTimer()
exit()
# Initiate Trainer
trainer = Trainer()
for _class in data:
# get the training set
_trainingData = _class[1]
# get the class name
print "Training data... class:", _class[0]
for _info in _trainingData:
# training data
trainer.train(_info, _class[0])
# Initiate Classifier
classifier = Classifier(trainer.feature, sum([len(x[1]) for x in data]))
for _class in data:
# get the test set
_testData = _class[2]
# get the class name
print "Classifying data... class:", _class[0]
for _info in _testData:
# classify data
_classification = classifier.classify(_info)
# get the suggested class
_suggestedClass = _classification[0][0]
# 1:text, 2:original class, 3:suggested class, 4:classification is correct?
classifier.setResult(_info[:47], _class[0], _suggestedClass, _class[0] == _suggestedClass)
# Print result
classifier.printSummary()
# Save summary in result
classifier.setSummary()
# Save data
writer = Writer()
writer.save(classifier.result)
utils.stopTimer()
# END
if __name__ == "__main__":
main()
| {
"content_hash": "d1efb282324eb8999f381dca66e85eb1",
"timestamp": "",
"source": "github",
"line_count": 75,
"max_line_length": 102,
"avg_line_length": 26.933333333333334,
"alnum_prop": 0.5985148514851485,
"repo_name": "marcusmachado/nbac",
"id": "a234b74b70532b4fbf4b4a8c530762562d7715ac",
"size": "2043",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "nbac.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "20045"
}
],
"symlink_target": ""
} |
import unittest
import mock
import base64
import troposphere
from stacker.lookups.handlers.file import parameterized_codec, handler
class TestFileTranslator(unittest.TestCase):
def test_parameterized_codec_b64(self):
expected = {
'Fn::Base64': {
'Fn::Join': [
'',
['Test ', {'Ref': 'Interpolation'}, ' Here']
]
}
}
self.assertEqual(
expected,
parameterized_codec('Test {{Interpolation}} Here', True).data
)
def test_parameterized_codec_plain(self):
expected = {
'Fn::Join': ['', ['Test ', {'Ref': 'Interpolation'}, ' Here']]
}
self.assertEqual(
expected,
parameterized_codec('Test {{Interpolation}} Here', False).data
)
def test_file_loaded(self):
with mock.patch('stacker.lookups.handlers.file.read_value_from_path',
return_value='') as amock:
handler('plain:file://tmp/test')
amock.assert_called_with('file://tmp/test')
def test_handler_plain(self):
expected = 'Hello, world'
with mock.patch('stacker.lookups.handlers.file.read_value_from_path',
return_value=expected):
out = handler('plain:file://tmp/test')
self.assertEqual(expected, out)
def test_handler_b64(self):
expected = 'Hello, world'
with mock.patch('stacker.lookups.handlers.file.read_value_from_path',
return_value=expected):
out = handler('base64:file://tmp/test')
self.assertEqual(expected, base64.b64decode(out))
def test_handler_parameterized(self):
expected = 'Hello, world'
with mock.patch('stacker.lookups.handlers.file.read_value_from_path',
return_value=expected):
out = handler('parameterized:file://tmp/test')
self.assertEqual(troposphere.GenericHelperFn, type(out))
def test_handler_parameterized_b64(self):
expected = 'Hello, world'
with mock.patch('stacker.lookups.handlers.file.read_value_from_path',
return_value=expected):
out = handler('parameterized-b64:file://tmp/test')
self.assertEqual(troposphere.Base64, type(out))
def test_unknown_codec(self):
expected = 'Hello, world'
with mock.patch('stacker.lookups.handlers.file.read_value_from_path',
return_value=expected):
with self.assertRaises(KeyError):
handler('bad:file://tmp/test')
| {
"content_hash": "2c783063f99227dddc0e79bb79921047",
"timestamp": "",
"source": "github",
"line_count": 72,
"max_line_length": 77,
"avg_line_length": 36.958333333333336,
"alnum_prop": 0.5704622322435174,
"repo_name": "mhahn/stacker",
"id": "bf7074b4020f181b953f4b8492a0a0ff6a3567b0",
"size": "2661",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "stacker/tests/lookups/handlers/test_file.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Makefile",
"bytes": "187"
},
{
"name": "Python",
"bytes": "250329"
},
{
"name": "Shell",
"bytes": "216"
}
],
"symlink_target": ""
} |
import pygame
def component_Gravity(GameObject, GameState):
GameObject.velocity[1] += 0.1
def component_Movement(GameObject, GameState):
GameObject.pos[0] += GameObject.velocity[0]
GameObject.pos[1] += GameObject.velocity[1]
# added max speed
if GameObject.velocity[0] < -3:
GameObject.velocity[0] = -3
if GameObject.velocity[0] > 3:
GameObject.velocity[0] = 3
def component_AI(GameObject, GameState):
GameObject.velocity[0] -= 0.1
def component_Collidable(GameObject, GameState):
for target in GameState.GameObjects:
if target == GameObject:
# do not perform collision detection against the same object
continue
box1 = GameObject.image.get_rect()
box1 = box1.move(GameObject.pos)
box2 = target.image.get_rect()
box2 = box2.move(target.pos)
if box1.colliderect(box2):
if 'event_collide' in dir(target):
target.event_collide(GameObject)
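# Components are plain functions with the signature (GameObject, GameState).
# An object opts in to a behaviour by appending the function to its
# self.components list (see Hero/Enemy below); GameObject.update() then runs
# every attached component once per frame.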
class GameObject:
def __init__(self, image):
image = pygame.image.load(image).convert()
pos = [0, 0]
image.set_colorkey([255, 128, 255])
self.image = image
self.pos = pos
self.components = []
def update(self, GameState):
for component in self.components:
component(self, GameState)
def draw(self, surface):
surface.blit(self.image, self.pos)
class Hero(GameObject):
def __init__(self):
GameObject.__init__(self, 'snake.png')
self.velocity = [0, 0]
self.components.append(component_Gravity)
self.components.append(component_Movement)
self.components.append(component_Collidable)
# You can move the jumping code to a component, to make the code cleaner
self.isJumping = False
self.maxJumpPower = 5
self.jumpPower = self.maxJumpPower
def update(self, GameState):
GameObject.update(self, GameState)
keystate = pygame.key.get_pressed()
#if GameState.joystick.get_axis(0) < -0.2:
if keystate[pygame.K_LEFT]:
self.velocity[0] -= 1 / 4
#if GameState.joystick.get_axis(0) > 0.2:
if keystate[pygame.K_RIGHT]:
self.velocity[0] += 1 / 4
if keystate[pygame.K_SPACE]:
if self.jumpPower > 0:
self.velocity[1] -= 0.1
if self.jumpPower == self.maxJumpPower:
self.isJumping = True
self.velocity[1] = -7
elif self.jumpPower > 0:
self.velocity[1] -= 0.2
self.jumpPower -= 1
def draw(self, surface):
GameObject.draw(self, surface)
# debug so we can see the collision boxes
Debug = False
if Debug:
box1 = self.image.get_rect()
box1 = box1.move(self.pos)
pygame.draw.rect(surface, [255, 0, 0], box1, 1)
def event_collide(self, target):
self.jumpPower = self.maxJumpPower
class Enemy(GameObject):
def __init__(self):
GameObject.__init__(self, 'moongoose.png')
self.pos[0] = 500
self.velocity = [0, 0]
self.components.append(component_Gravity)
self.components.append(component_Movement)
self.components.append(component_Collidable)
self.components.append(component_AI)
class Platform(GameObject):
def __init__(self, pos):
GameObject.__init__(self, 'RTS_Crate.png')
self.pos = pos
def draw(self, surface):
GameObject.draw(self, surface)
# debug so we can see the collision boxes
Debug = False
if Debug:
box2 = self.image.get_rect()
box2 = box2.move(self.pos)
padding = 10
box2 = box2.inflate(-padding, -padding)
pygame.draw.rect(surface, [255, 0, 0], box2, 10)
def event_collide(self, target):
if 'isJumping' in dir(target):
# ignore collisions with the platform on the first frame of jumping
if target.isJumping:
target.isJumping = False
return
box1 = target.image.get_rect()
box1 = box1.move(target.pos)
box2 = self.image.get_rect()
box2 = box2.move(self.pos)
padding = 10
box2 = box2.inflate(-padding, -padding)
# Another fix for the character passing through the platform is to check
# against the bottom. Notice the bug that this fix causes. There are ways
# to get perfect collision, but I'll leave that as your homework.
if box1.bottom <= box2.bottom:
target.pos[1] = box2.top - box1.height
target.velocity[1] = 0
elif box1.right > box2.left and box1.right < box2.right:
target.pos[0] = box2.left - box1.width
target.velocity[0] = 0
elif box1.left < box2.right and box1.left > box2.left:
target.pos[0] = box2.right
target.velocity[0] = 0
| {
"content_hash": "d4269f03853e85118e48e9fbbbf742d5",
"timestamp": "",
"source": "github",
"line_count": 158,
"max_line_length": 82,
"avg_line_length": 33.63291139240506,
"alnum_prop": 0.5540082800150545,
"repo_name": "MrValdez/PyCon-2015-Gamedev-talk",
"id": "9f825f33159607775cffd6698bb23db545b34976",
"size": "5314",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "gameobjects.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "7967"
}
],
"symlink_target": ""
} |
class ContentTypeMiddleware(object):
def __init__(self, get_response):
self.get_response = get_response
# One-time configuration and initialization.
def __call__(self, request):
# Code to be executed for each request before
# the view (and later middleware) are called.
response = self.get_response(request)
if request.path.endswith('.hlt.gz'):
response._headers['content-type'] = ('Content-Type', 'application/json')
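# Note: this pokes at the private `_headers` dict, which works on the Django
# version this project targets; on newer Django the supported spelling is
# `response['Content-Type'] = 'application/json'`.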
# Code to be executed for each request/response after
# the view is called.
return response
| {
"content_hash": "881842b7d4b0253195ce3a8e611876ae",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 84,
"avg_line_length": 33.72222222222222,
"alnum_prop": 0.6342668863261944,
"repo_name": "nmalaguti/mini-halite",
"id": "a4fa27a8f5e5b65d2b0882542bb2479375cf8046",
"size": "607",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tournament/middleware/content_type.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "848"
},
{
"name": "HTML",
"bytes": "11899"
},
{
"name": "JavaScript",
"bytes": "43179"
},
{
"name": "Python",
"bytes": "21587"
}
],
"symlink_target": ""
} |
import os
import sys
import json
import string
import random
import hashlib
import unittest
from cStringIO import StringIO
root_path = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
sys.path.append(root_path)
sys.path.append(os.path.join(root_path, 'lib'))
os.environ['SETTINGS_FLAVOR'] = 'test'
import registry
class TestCase(unittest.TestCase):
def __init__(self, *args, **kwargs):
unittest.TestCase.__init__(self, *args, **kwargs)
registry.app.testing = True
self.http_client = registry.app.test_client()
def gen_random_string(self, length=16):
return ''.join([random.choice(string.ascii_uppercase + string.digits)
for x in range(length)]).lower()
def upload_image(self, image_id, parent_id, layer):
json_obj = {
'id': image_id
}
if parent_id:
json_obj['parent'] = parent_id
json_data = json.dumps(json_obj)
h = hashlib.sha256(json_data + '\n')
h.update(layer)
layer_checksum = 'sha256:{0}'.format(h.hexdigest())
resp = self.http_client.put('/v1/images/{0}/json'.format(image_id),
headers={
'X-Docker-Checksum': layer_checksum
},
data=json_data)
self.assertEqual(resp.status_code, 200, resp.data)
# Make sure I cannot download the image before push is complete
resp = self.http_client.get('/v1/images/{0}/json'.format(image_id))
self.assertEqual(resp.status_code, 400, resp.data)
layer_file = StringIO(layer)
resp = self.http_client.put('/v1/images/{0}/layer'.format(image_id),
input_stream=layer_file)
layer_file.close()
self.assertEqual(resp.status_code, 200, resp.data)
resp = self.http_client.get('/v1/images/{0}/json'.format(image_id))
self.assertEqual(resp.headers.get('x-docker-size'), str(len(layer)))
self.assertEqual(resp.status_code, 200, resp.data)
self.assertEqual(resp.headers['x-docker-checksum'], layer_checksum)
| {
"content_hash": "bac623138907c13f0264986d4745e0f3",
"timestamp": "",
"source": "github",
"line_count": 57,
"max_line_length": 77,
"avg_line_length": 38.45614035087719,
"alnum_prop": 0.5857664233576643,
"repo_name": "airbnb/docker-registry",
"id": "e0b64edd692aadcfaae5342f0567313f0a7ea69b",
"size": "2193",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/base.py",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
import os
import nbformat
from generate_contents import iter_notebooks, NOTEBOOK_DIR
def fix_kernelspec():
for nb_name in iter_notebooks():
nb_file = os.path.join(NOTEBOOK_DIR, nb_name)
nb = nbformat.read(nb_file, as_version=4)
print("- Updating kernelspec for {0}".format(nb_name))
nb['metadata']['kernelspec']['display_name'] = 'Python 3'
nb['metadata']['kernelspec']['name'] = 'python3'
nbformat.write(nb, nb_file)
if __name__ == '__main__':
fix_kernelspec()
| {
"content_hash": "4aa13d40f8fc46667d916398de1d52af",
"timestamp": "",
"source": "github",
"line_count": 21,
"max_line_length": 65,
"avg_line_length": 25.047619047619047,
"alnum_prop": 0.6216730038022814,
"repo_name": "mbeyeler/opencv-machine-learning",
"id": "1472925d41d3ba7ddba4365b180d2df06538108d",
"size": "526",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tools/fix_kernelspec.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "5239940"
},
{
"name": "Python",
"bytes": "6239"
}
],
"symlink_target": ""
} |
from django.conf.urls import patterns, include, url
from django.contrib import admin
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'finanzas.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^admin/', include(admin.site.urls)),
url(r'^api/', include('finanzas.api.urls'))
)
| {
"content_hash": "e9f0c8b928e34ca92ac03dedb0b1eff2",
"timestamp": "",
"source": "github",
"line_count": 11,
"max_line_length": 53,
"avg_line_length": 29.545454545454547,
"alnum_prop": 0.6369230769230769,
"repo_name": "jualjiman/finanzas",
"id": "1dfbcf9a930bd058f415f89ead8e8e66b956a32d",
"size": "325",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/finanzas/urls.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "13393"
},
{
"name": "Shell",
"bytes": "4964"
}
],
"symlink_target": ""
} |
from django.shortcuts import render
from django.template.loader import get_template
from django.http import HttpResponse
from datetime import datetime
# Create your views here.
def index(request, tvno='0'):
tv_list = [{'name':'CCTV News', 'tvcode':'yPhFG2I0dE0'},
{'name':'CCTV中文国际', 'tvcode':'E1DTZBy4xr4'},]
template = get_template('index.html')
now = datetime.now()
hour = now.timetuple().tm_hour
tvno = tvno
tv = tv_list[int(tvno)]
html = template.render(locals())
return HttpResponse(html)
def engtv(request, tvno='0'):
tv_list = [{'name':'SkyNews', 'tvcode':'y60wDzZt8yg'},
{'name':'Euro News', 'tvcode':'mWdKb7255Bs'},
{'name':'India News', 'tvcode':'oMncjfIE-ZU'},
{'name':'CCTV', 'tvcode':'wuzZYzSoEEU'},]
template = get_template('engtv.html')
now = datetime.now()
tvno = tvno
tv = tv_list[int(tvno)]
html = template.render(locals())
return HttpResponse(html)
| {
"content_hash": "5fc36d97894f85006f06708cae6bfee3",
"timestamp": "",
"source": "github",
"line_count": 33,
"max_line_length": 57,
"avg_line_length": 27.363636363636363,
"alnum_prop": 0.6799557032115172,
"repo_name": "lichengshuang/python",
"id": "ea4b82308fc942dd22c57330ac2bb1465b19d3ee",
"size": "936",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "django/djangoStudy/mtv/views.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "321"
},
{
"name": "HTML",
"bytes": "68150"
},
{
"name": "Python",
"bytes": "420936"
},
{
"name": "Shell",
"bytes": "76121"
},
{
"name": "Vim script",
"bytes": "27690"
}
],
"symlink_target": ""
} |
'''
Defines the base class for any training pipeline
'''
import ecto
class TrainerBase(object):
"""
This is a base class for a training pipeline: you don't need to have your pipeline cell inherit from that class
but if you do, it will be listed as an official training pipeline
You need to call the BlackBox constructor in your __init__ first and then this function. Typically, your __init__ is
>>> class Foo(ecto.BlackBox, TrainerBase):
>>> def __init__(self, *args, **kwargs):
>>> ecto.BlackBox.__init__(self, *args, **kwargs)
>>> TrainerBase.__init__(self)
"""
pass
| {
"content_hash": "baf3dd3585e7203d988c40ac399b2062",
"timestamp": "",
"source": "github",
"line_count": 17,
"max_line_length": 120,
"avg_line_length": 38.05882352941177,
"alnum_prop": 0.6290571870170015,
"repo_name": "WalkingMachine/sara_commun",
"id": "c8cead7465084940fdfe83184af3cef09d0e2a95",
"size": "647",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "wm_ork/object_recognition_core/python/object_recognition_core/pipelines/training.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CMake",
"bytes": "6113"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
import datetime
import os
import tokenize
import unittest
from django.core.validators import RegexValidator, EmailValidator
from django.db import models, migrations
from django.db.migrations.writer import MigrationWriter, SettingsReference
from django.test import TestCase
from django.conf import settings
from django.utils import datetime_safe, six
from django.utils.deconstruct import deconstructible
from django.utils.translation import ugettext_lazy as _
from django.utils.timezone import get_default_timezone
import custom_migration_operations.operations
import custom_migration_operations.more_operations
class TestModel1(object):
def upload_to(self):
return "somewhere dynamic"
thing = models.FileField(upload_to=upload_to)
class WriterTests(TestCase):
"""
Tests the migration writer (makes migration files from Migration instances)
"""
def safe_exec(self, string, value=None):
l = {}
try:
exec(string, globals(), l)
except Exception as e:
if value:
self.fail("Could not exec %r (from value %r): %s" % (string.strip(), value, e))
else:
self.fail("Could not exec %r: %s" % (string.strip(), e))
return l
def serialize_round_trip(self, value):
string, imports = MigrationWriter.serialize(value)
return self.safe_exec("%s\ntest_value_result = %s" % ("\n".join(imports), string), value)['test_value_result']
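# e.g. MigrationWriter.serialize(datetime.date(2014, 3, 31)) should give the pair
# ("datetime.date(2014, 3, 31)", {'import datetime'}); exec'ing the joined imports
# plus the expression reproduces an equal value (see the datetime cases below).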
def assertSerializedEqual(self, value):
self.assertEqual(self.serialize_round_trip(value), value)
def assertSerializedResultEqual(self, value, target):
self.assertEqual(MigrationWriter.serialize(value), target)
def assertSerializedFieldEqual(self, value):
new_value = self.serialize_round_trip(value)
self.assertEqual(value.__class__, new_value.__class__)
self.assertEqual(value.max_length, new_value.max_length)
self.assertEqual(value.null, new_value.null)
self.assertEqual(value.unique, new_value.unique)
def test_serialize(self):
"""
Tests various different forms of the serializer.
This does not care about formatting, just that the parsed result is
correct, so we always exec() the result and check that.
"""
# Basic values
self.assertSerializedEqual(1)
self.assertSerializedEqual(None)
self.assertSerializedEqual(b"foobar")
string, imports = MigrationWriter.serialize(b"foobar")
self.assertEqual(string, "b'foobar'")
self.assertSerializedEqual("föobár")
string, imports = MigrationWriter.serialize("foobar")
self.assertEqual(string, "'foobar'")
self.assertSerializedEqual({1: 2})
self.assertSerializedEqual(["a", 2, True, None])
self.assertSerializedEqual(set([2, 3, "eighty"]))
self.assertSerializedEqual({"lalalala": ["yeah", "no", "maybe"]})
self.assertSerializedEqual(_('Hello'))
# Functions
with six.assertRaisesRegex(self, ValueError, 'Cannot serialize function: lambda'):
self.assertSerializedEqual(lambda x: 42)
self.assertSerializedEqual(models.SET_NULL)
string, imports = MigrationWriter.serialize(models.SET(42))
self.assertEqual(string, 'models.SET(42)')
self.serialize_round_trip(models.SET(42))
# Datetime stuff
self.assertSerializedEqual(datetime.datetime.utcnow())
self.assertSerializedEqual(datetime.datetime.utcnow)
self.assertSerializedEqual(datetime.datetime.today())
self.assertSerializedEqual(datetime.datetime.today)
self.assertSerializedEqual(datetime.date.today())
self.assertSerializedEqual(datetime.date.today)
with self.assertRaises(ValueError):
self.assertSerializedEqual(datetime.datetime(2012, 1, 1, 1, 1, tzinfo=get_default_timezone()))
safe_date = datetime_safe.date(2014, 3, 31)
string, imports = MigrationWriter.serialize(safe_date)
self.assertEqual(string, repr(datetime.date(2014, 3, 31)))
self.assertEqual(imports, {'import datetime'})
safe_datetime = datetime_safe.datetime(2014, 3, 31, 16, 4, 31)
string, imports = MigrationWriter.serialize(safe_datetime)
self.assertEqual(string, repr(datetime.datetime(2014, 3, 31, 16, 4, 31)))
self.assertEqual(imports, {'import datetime'})
# Classes
validator = RegexValidator(message="hello")
string, imports = MigrationWriter.serialize(validator)
self.assertEqual(string, "django.core.validators.RegexValidator(message='hello')")
self.serialize_round_trip(validator)
validator = EmailValidator(message="hello") # Test with a subclass.
string, imports = MigrationWriter.serialize(validator)
self.assertEqual(string, "django.core.validators.EmailValidator(message='hello')")
self.serialize_round_trip(validator)
validator = deconstructible(path="custom.EmailValidator")(EmailValidator)(message="hello")
string, imports = MigrationWriter.serialize(validator)
self.assertEqual(string, "custom.EmailValidator(message='hello')")
# Django fields
self.assertSerializedFieldEqual(models.CharField(max_length=255))
self.assertSerializedFieldEqual(models.TextField(null=True, blank=True))
# Setting references
self.assertSerializedEqual(SettingsReference(settings.AUTH_USER_MODEL, "AUTH_USER_MODEL"))
self.assertSerializedResultEqual(
SettingsReference("someapp.model", "AUTH_USER_MODEL"),
(
"settings.AUTH_USER_MODEL",
set(["from django.conf import settings"]),
)
)
self.assertSerializedResultEqual(
((x, x * x) for x in range(3)),
(
"((0, 0), (1, 1), (2, 4))",
set(),
)
)
def test_serialize_empty_nonempty_tuple(self):
"""
Ticket #22679: makemigrations generates invalid code for (an empty
tuple) default_permissions = ()
"""
empty_tuple = ()
one_item_tuple = ('a',)
many_items_tuple = ('a', 'b', 'c')
self.assertSerializedEqual(empty_tuple)
self.assertSerializedEqual(one_item_tuple)
self.assertSerializedEqual(many_items_tuple)
@unittest.skipUnless(six.PY2, "Only applies on Python 2")
def test_serialize_direct_function_reference(self):
"""
Ticket #22436: You cannot use a function straight from its body
(e.g. define the method and use it in the same body)
"""
with self.assertRaises(ValueError):
self.serialize_round_trip(TestModel1.thing)
def test_serialize_local_function_reference(self):
"""
        Neither py2 nor py3 can serialize a reference in a local scope.
"""
class TestModel2(object):
def upload_to(self):
return "somewhere dynamic"
thing = models.FileField(upload_to=upload_to)
with self.assertRaises(ValueError):
self.serialize_round_trip(TestModel2.thing)
def test_simple_migration(self):
"""
Tests serializing a simple migration.
"""
fields = {
'charfield': models.DateTimeField(default=datetime.datetime.utcnow),
'datetimefield': models.DateTimeField(default=datetime.datetime.utcnow),
}
options = {
'verbose_name': 'My model',
'verbose_name_plural': 'My models',
}
migration = type(str("Migration"), (migrations.Migration,), {
"operations": [
migrations.CreateModel("MyModel", tuple(fields.items()), options, (models.Model,)),
migrations.CreateModel("MyModel2", tuple(fields.items()), bases=(models.Model,)),
migrations.CreateModel(name="MyModel3", fields=tuple(fields.items()), options=options, bases=(models.Model,)),
migrations.DeleteModel("MyModel"),
migrations.AddField("OtherModel", "datetimefield", fields["datetimefield"]),
],
"dependencies": [("testapp", "some_other_one")],
})
writer = MigrationWriter(migration)
output = writer.as_string()
# It should NOT be unicode.
self.assertIsInstance(output, six.binary_type, "Migration as_string returned unicode")
# We don't test the output formatting - that's too fragile.
# Just make sure it runs for now, and that things look alright.
result = self.safe_exec(output)
self.assertIn("Migration", result)
        # In order to preserve compatibility with Python 3.2, the unicode
        # literal prefix shouldn't be added to strings.
tokens = tokenize.generate_tokens(six.StringIO(str(output)).readline)
for token_type, token_source, (srow, scol), __, line in tokens:
if token_type == tokenize.STRING:
self.assertFalse(
token_source.startswith('u'),
"Unicode literal prefix found at %d:%d: %r" % (
srow, scol, line.strip()
)
)
def test_migration_path(self):
test_apps = [
'migrations.migrations_test_apps.normal',
'migrations.migrations_test_apps.with_package_model',
'migrations.migrations_test_apps.without_init_file',
]
base_dir = os.path.dirname(os.path.dirname(__file__))
for app in test_apps:
with self.modify_settings(INSTALLED_APPS={'append': app}):
migration = migrations.Migration('0001_initial', app.split('.')[-1])
expected_path = os.path.join(base_dir, *(app.split('.') + ['migrations', '0001_initial.py']))
writer = MigrationWriter(migration)
self.assertEqual(writer.path, expected_path)
def test_custom_operation(self):
migration = type(str("Migration"), (migrations.Migration,), {
"operations": [
custom_migration_operations.operations.TestOperation(),
custom_migration_operations.operations.CreateModel(),
migrations.CreateModel("MyModel", (), {}, (models.Model,)),
custom_migration_operations.more_operations.TestOperation()
],
"dependencies": []
})
writer = MigrationWriter(migration)
output = writer.as_string()
result = self.safe_exec(output)
self.assertIn("custom_migration_operations", result)
self.assertNotEqual(
result['custom_migration_operations'].operations.TestOperation,
result['custom_migration_operations'].more_operations.TestOperation
)
| {
"content_hash": "d83b678e29d3793108e15d752ebe7955",
"timestamp": "",
"source": "github",
"line_count": 246,
"max_line_length": 126,
"avg_line_length": 44.28048780487805,
"alnum_prop": 0.6346277425869825,
"repo_name": "aleksandra-tarkowska/django",
"id": "8f8437faf92696acc263c586094d782e013e8d12",
"size": "10919",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/migrations/test_writer.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
} |
"""Simple benchmark for Marshmallow serialization of a moderately complex object.
Uses the `timeit` module to benchmark serializing an object through Marshmallow.
"""
from __future__ import print_function, unicode_literals, division
import argparse
import cProfile
import gc
import timeit
import time
from marshmallow import Schema, fields, ValidationError, pre_load
# Custom validator
def must_not_be_blank(data):
if not data:
raise ValidationError('Data not provided.')
class AuthorSchema(Schema):
id = fields.Int(dump_only=True)
first = fields.Str()
last = fields.Str()
book_count = fields.Float()
age = fields.Float()
address = fields.Str()
full_name = fields.Method('full_name')
def full_name(self, obj):
return obj.first + ' ' + obj.last
def format_name(self, author):
return "{0}, {1}".format(author.last, author.first)
class QuoteSchema(Schema):
id = fields.Int(dump_only=True)
author = fields.Nested(AuthorSchema, validate=must_not_be_blank)
content = fields.Str(required=True, validate=must_not_be_blank)
posted_at = fields.Int(dump_only=True)
book_name = fields.Str()
page_number = fields.Float()
line_number = fields.Float()
col_number = fields.Float()
# Allow client to pass author's full name in request body
# e.g. {"author': 'Tim Peters"} rather than {"first": "Tim", "last": "Peters"}
@pre_load
def process_author(self, data):
author_name = data.get('author')
if author_name:
first, last = author_name.split(' ')
author_dict = dict(first=first, last=last)
else:
author_dict = {}
data['author'] = author_dict
return data
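# --- Illustrative sketch (not part of the original benchmark) ---
# The pre_load hook above reshapes {"author": "Tim Peters"} into a nested
# dict before schema loading. The same transformation, standalone, assuming
# the full name contains exactly one space:
def _split_author_demo(data):
    author_name = data.get('author')
    if author_name:
        first, last = author_name.split(' ')
        data['author'] = dict(first=first, last=last)
    else:
        data['author'] = {}
    return data
# _split_author_demo({'author': 'Tim Peters', 'content': 'Hi'})
# -> {'author': {'first': 'Tim', 'last': 'Peters'}, 'content': 'Hi'}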
class Author(object):
def __init__(self, id, first, last, book_count, age, address):
self.id = id
self.first = first
self.last = last
self.book_count = book_count
self.age = age
self.address = address
class Quote(object):
def __init__(self, id, author, content, posted_at, book_name, page_number,
line_number, col_number):
self.id = id
self.author = author
self.content = content
self.posted_at = posted_at
self.book_name = book_name
self.page_number = page_number
self.line_number = line_number
self.col_number = col_number
def run_timeit(quotes, iterations, repeat, profile=False):
quotes_schema = QuoteSchema(many=True)
if profile:
profile = cProfile.Profile()
profile.enable()
gc.collect()
best = min(timeit.repeat(lambda: quotes_schema.dump(quotes),
'gc.enable()',
number=iterations,
repeat=repeat))
if profile:
profile.disable()
profile.dump_stats('marshmallow.pprof')
usec = best * 1e6 / iterations
return usec
def main():
parser = argparse.ArgumentParser(description='Runs a benchmark of Marshmallow.')
parser.add_argument('--iterations', type=int, default=1000,
help='Number of iterations to run per test.')
parser.add_argument('--repeat', type=int, default=5,
help='Number of times to repeat the performance test. The minimum will '
'be used.')
parser.add_argument('--object-count', type=int, default=20,
help='Number of objects to dump.')
parser.add_argument('--profile', action='store_true',
help='Whether or not to profile Marshmallow while running the benchmark.')
args = parser.parse_args()
quotes = []
for i in range(args.object_count):
quotes.append(
Quote(i, Author(i, 'Foo', 'Bar', 42, 66, '123 Fake St'),
'Hello World', time.time(), 'The World', 34, 3, 70)
)
print('Benchmark Result: {0:.2f} usec/dump'.format(
run_timeit(quotes, args.iterations, args.repeat, profile=args.profile)))
if __name__ == '__main__':
main()
| {
"content_hash": "95377726e12edc2db7ebffd21ec9f512",
"timestamp": "",
"source": "github",
"line_count": 130,
"max_line_length": 98,
"avg_line_length": 31.49230769230769,
"alnum_prop": 0.6042989741084513,
"repo_name": "xLegoz/marshmallow",
"id": "062d247224cf956dfe40a5208d092fdd777a9e85",
"size": "4094",
"binary": false,
"copies": "1",
"ref": "refs/heads/dev",
"path": "performance/benchmark.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "403680"
}
],
"symlink_target": ""
} |
"""Helper functions for progress callbacks."""
import logging
import sys
from gslib.util import MakeHumanReadable
from gslib.util import UTF8
# Default upper and lower bounds for progress callback frequency.
_START_BYTES_PER_CALLBACK = 1024*64
_MAX_BYTES_PER_CALLBACK = 1024*1024*100
# Max width of URL to display in progress indicator. Wide enough to allow
# 15 chars for x/y display on an 80 char wide terminal.
MAX_PROGRESS_INDICATOR_COLUMNS = 65
class ProgressCallbackWithBackoff(object):
"""Makes progress callbacks with exponential backoff to a maximum value.
This prevents excessive log message output.
"""
def __init__(self, total_size, callback_func,
start_bytes_per_callback=_START_BYTES_PER_CALLBACK,
max_bytes_per_callback=_MAX_BYTES_PER_CALLBACK,
calls_per_exponent=10):
"""Initializes the callback with backoff.
Args:
total_size: Total bytes to process. If this is None, size is not known
at the outset.
callback_func: Func of (int: processed_so_far, int: total_bytes)
used to make callbacks.
start_bytes_per_callback: Lower bound of bytes per callback.
max_bytes_per_callback: Upper bound of bytes per callback.
calls_per_exponent: Number of calls to make before reducing rate.
"""
self._bytes_per_callback = start_bytes_per_callback
self._callback_func = callback_func
self._calls_per_exponent = calls_per_exponent
self._max_bytes_per_callback = max_bytes_per_callback
self._total_size = total_size
self._bytes_processed_since_callback = 0
self._callbacks_made = 0
self._total_bytes_processed = 0
def Progress(self, bytes_processed):
"""Tracks byte processing progress, making a callback if necessary."""
self._bytes_processed_since_callback += bytes_processed
if (self._bytes_processed_since_callback > self._bytes_per_callback or
(self._total_bytes_processed + self._bytes_processed_since_callback >=
self._total_size and self._total_size is not None)):
self._total_bytes_processed += self._bytes_processed_since_callback
# TODO: We check if >= total_size and truncate because JSON uploads count
# headers+metadata during their send progress. If the size is unknown,
# we can't do this and the progress message will make it appear that we
# send more than the original stream.
if self._total_size is not None:
bytes_sent = min(self._total_bytes_processed, self._total_size)
else:
bytes_sent = self._total_bytes_processed
self._callback_func(bytes_sent, self._total_size)
self._bytes_processed_since_callback = 0
self._callbacks_made += 1
if self._callbacks_made > self._calls_per_exponent:
self._bytes_per_callback = min(self._bytes_per_callback * 2,
self._max_bytes_per_callback)
self._callbacks_made = 0
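# --- Illustrative sketch (not part of gslib) ---
# Exercising the backoff above: feeding many equal-sized Progress() updates
# produces progressively fewer callbacks as the byte threshold doubles every
# `calls_per_exponent` calls. All sizes below are arbitrary example values.
def _progress_backoff_demo():
  calls = []
  callback = ProgressCallbackWithBackoff(
      total_size=1024 * 1024,
      callback_func=lambda done, total: calls.append((done, total)),
      start_bytes_per_callback=1024,
      calls_per_exponent=2)
  for _ in range(256):
    callback.Progress(4096)
  # Far fewer than 256 callbacks were made thanks to the exponential backoff.
  return calls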
def ConstructAnnounceText(operation_name, url_string):
"""Constructs announce text for ongoing operations on url_to_display.
This truncates the text to a maximum of MAX_PROGRESS_INDICATOR_COLUMNS.
Thus, concurrent output (gsutil -m) leaves progress counters in a readable
(fixed) position.
Args:
operation_name: String describing the operation, i.e.
'Uploading' or 'Hashing'.
url_string: String describing the file/object being processed.
Returns:
Formatted announce text for outputting operation progress.
"""
# Operation name occupies 11 characters (enough for 'Downloading'), plus a
  # space. The rest is used for url_string. If a longer operation name is
# used, it will be truncated. We can revisit this size if we need to support
# a longer operation, but want to make sure the terminal output is meaningful.
justified_op_string = operation_name[:11].ljust(12)
start_len = len(justified_op_string)
end_len = len(': ')
if (start_len + len(url_string) + end_len >
MAX_PROGRESS_INDICATOR_COLUMNS):
ellipsis_len = len('...')
url_string = '...%s' % url_string[
-(MAX_PROGRESS_INDICATOR_COLUMNS - start_len - end_len - ellipsis_len):]
base_announce_text = '%s%s:' % (justified_op_string, url_string)
format_str = '{0:%ds}' % MAX_PROGRESS_INDICATOR_COLUMNS
return format_str.format(base_announce_text.encode(UTF8))
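# --- Illustrative sketch (not part of gslib) ---
# ConstructAnnounceText keeps progress lines at a fixed width: short URLs are
# padded and long ones are truncated with a leading ellipsis. The URLs below
# are made-up examples.
def _announce_text_demo():
  short_text = ConstructAnnounceText('Hashing', 'gs://bucket/small.txt')
  long_text = ConstructAnnounceText('Downloading', 'gs://bucket/' + 'x' * 200)
  # Both lines occupy the same MAX_PROGRESS_INDICATOR_COLUMNS width.
  return len(short_text), len(long_text)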
class FileProgressCallbackHandler(object):
"""Outputs progress info for large operations like file copy or hash."""
def __init__(self, announce_text, logger, start_byte=0,
override_total_size=None):
"""Initializes the callback handler.
Args:
announce_text: String describing the operation.
logger: For outputting log messages.
start_byte: The beginning of the file component, if one is being used.
override_total_size: The size of the file component, if one is being used.
"""
self._announce_text = announce_text
self._logger = logger
self._start_byte = start_byte
self._override_total_size = override_total_size
# Ensures final newline is written once even if we get multiple callbacks.
self._last_byte_written = False
# Function signature is in boto callback format, which cannot be changed.
def call(self, # pylint: disable=invalid-name
last_byte_processed,
total_size):
"""Prints an overwriting line to stderr describing the operation progress.
Args:
last_byte_processed: The last byte processed in the file. For file
components, this number should be in the range
[start_byte:start_byte + override_total_size].
total_size: Total size of the ongoing operation.
"""
if not self._logger.isEnabledFor(logging.INFO) or self._last_byte_written:
return
if self._override_total_size:
total_size = self._override_total_size
if total_size:
total_size_string = '/%s' % MakeHumanReadable(total_size)
else:
total_size_string = ''
# Use sys.stderr.write instead of self.logger.info so progress messages
# output on a single continuously overwriting line.
# TODO: Make this work with logging.Logger.
sys.stderr.write('%s%s%s \r' % (
self._announce_text,
MakeHumanReadable(last_byte_processed - self._start_byte),
total_size_string))
if total_size and last_byte_processed - self._start_byte == total_size:
self._last_byte_written = True
sys.stderr.write('\n')
| {
"content_hash": "99c1bf97377c9e3324dec96acfdd31de",
"timestamp": "",
"source": "github",
"line_count": 157,
"max_line_length": 80,
"avg_line_length": 41.34394904458599,
"alnum_prop": 0.6827915575412109,
"repo_name": "sahiljain/catapult",
"id": "69ee3ed725d62eebbcb658cef5a83703fcae553c",
"size": "7111",
"binary": false,
"copies": "12",
"ref": "refs/heads/master",
"path": "third_party/gsutil/gslib/progress_callback.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "3598"
},
{
"name": "C++",
"bytes": "6390"
},
{
"name": "CSS",
"bytes": "24751"
},
{
"name": "HTML",
"bytes": "14570791"
},
{
"name": "JavaScript",
"bytes": "511007"
},
{
"name": "Python",
"bytes": "5842419"
},
{
"name": "Shell",
"bytes": "2834"
}
],
"symlink_target": ""
} |
import os
import time
import random
import threading
import unittest
from pyspark import SparkContext, SparkConf
class PinThreadTests(unittest.TestCase):
    # These tests are in a separate class because they use the
    # 'PYSPARK_PIN_THREAD' environment variable to test the thread-pinning feature.
@classmethod
def setUpClass(cls):
cls.old_pin_thread = os.environ.get("PYSPARK_PIN_THREAD")
os.environ["PYSPARK_PIN_THREAD"] = "true"
cls.sc = SparkContext('local[4]', cls.__name__, conf=SparkConf())
@classmethod
def tearDownClass(cls):
cls.sc.stop()
if cls.old_pin_thread is not None:
os.environ["PYSPARK_PIN_THREAD"] = cls.old_pin_thread
else:
del os.environ["PYSPARK_PIN_THREAD"]
def test_pinned_thread(self):
threads = []
exceptions = []
property_name = "test_property_%s" % PinThreadTests.__name__
jvm_thread_ids = []
for i in range(10):
def test_local_property():
jvm_thread_id = self.sc._jvm.java.lang.Thread.currentThread().getId()
jvm_thread_ids.append(jvm_thread_id)
                # A property set in this thread should later be read back
                # unchanged from within the same thread.
self.sc.setLocalProperty(property_name, str(i))
# 5 threads, 1 second sleep. 5 threads without a sleep.
time.sleep(i % 2)
try:
assert self.sc.getLocalProperty(property_name) == str(i)
# Each command might create a thread in multi-threading mode in Py4J.
# This assert makes sure that the created thread is being reused.
assert jvm_thread_id == self.sc._jvm.java.lang.Thread.currentThread().getId()
except Exception as e:
exceptions.append(e)
threads.append(threading.Thread(target=test_local_property))
for t in threads:
t.start()
for t in threads:
t.join()
for e in exceptions:
raise e
        # There should be 10 distinct JVM threads because there are 10 Python threads.
assert len(set(jvm_thread_ids)) == 10
def test_multiple_group_jobs(self):
# SPARK-22340 Add a mode to pin Python thread into JVM's
group_a = "job_ids_to_cancel"
group_b = "job_ids_to_run"
threads = []
thread_ids = range(4)
thread_ids_to_cancel = [i for i in thread_ids if i % 2 == 0]
thread_ids_to_run = [i for i in thread_ids if i % 2 != 0]
        # A list which records whether each job was cancelled.
        # The index of the list is the index of the thread the job runs in.
is_job_cancelled = [False for _ in thread_ids]
def run_job(job_group, index):
"""
            Executes a job with the group ``job_group``. Each job runs a task
            that sleeps for 15 seconds and then exits.
"""
try:
self.sc.setJobGroup(job_group, "test rdd collect with setting job group")
self.sc.parallelize([15]).map(lambda x: time.sleep(x)).collect()
is_job_cancelled[index] = False
except Exception:
# Assume that exception means job cancellation.
is_job_cancelled[index] = True
# Test if job succeeded when not cancelled.
run_job(group_a, 0)
self.assertFalse(is_job_cancelled[0])
# Run jobs
for i in thread_ids_to_cancel:
t = threading.Thread(target=run_job, args=(group_a, i))
t.start()
threads.append(t)
for i in thread_ids_to_run:
t = threading.Thread(target=run_job, args=(group_b, i))
t.start()
threads.append(t)
# Wait to make sure all jobs are executed.
time.sleep(3)
# And then, cancel one job group.
self.sc.cancelJobGroup(group_a)
# Wait until all threads launching jobs are finished.
for t in threads:
t.join()
for i in thread_ids_to_cancel:
self.assertTrue(
is_job_cancelled[i],
"Thread {i}: Job in group A was not cancelled.".format(i=i))
for i in thread_ids_to_run:
self.assertFalse(
is_job_cancelled[i],
"Thread {i}: Job in group B did not succeeded.".format(i=i))
if __name__ == "__main__":
import unittest
from pyspark.tests.test_pin_thread import *
try:
import xmlrunner
testRunner = xmlrunner.XMLTestRunner(output='target/test-reports', verbosity=2)
except ImportError:
testRunner = None
unittest.main(testRunner=testRunner, verbosity=2)
| {
"content_hash": "f88a79e203ffaa1c00b1a5716da08ecc",
"timestamp": "",
"source": "github",
"line_count": 140,
"max_line_length": 97,
"avg_line_length": 34.114285714285714,
"alnum_prop": 0.5722361809045227,
"repo_name": "skonto/spark",
"id": "657d129fe63bbca908d3c2705459a86e674dc455",
"size": "5560",
"binary": false,
"copies": "11",
"ref": "refs/heads/master",
"path": "python/pyspark/tests/test_pin_thread.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ANTLR",
"bytes": "46871"
},
{
"name": "Batchfile",
"bytes": "31352"
},
{
"name": "C",
"bytes": "1493"
},
{
"name": "CSS",
"bytes": "26599"
},
{
"name": "Dockerfile",
"bytes": "8863"
},
{
"name": "HTML",
"bytes": "70407"
},
{
"name": "HiveQL",
"bytes": "1823701"
},
{
"name": "Java",
"bytes": "4054268"
},
{
"name": "JavaScript",
"bytes": "201603"
},
{
"name": "Makefile",
"bytes": "9397"
},
{
"name": "PLpgSQL",
"bytes": "257276"
},
{
"name": "PowerShell",
"bytes": "3867"
},
{
"name": "Python",
"bytes": "2976348"
},
{
"name": "R",
"bytes": "1186688"
},
{
"name": "Roff",
"bytes": "15633"
},
{
"name": "SQLPL",
"bytes": "9325"
},
{
"name": "Scala",
"bytes": "30321574"
},
{
"name": "Shell",
"bytes": "201878"
},
{
"name": "TSQL",
"bytes": "438358"
},
{
"name": "Thrift",
"bytes": "67610"
},
{
"name": "q",
"bytes": "146878"
}
],
"symlink_target": ""
} |
import copy
from .base import BaseField, MAPPING_TYPES, SEQUENCE_TYPES
__all__ = ['StringField', 'ListField', 'DictField',
'GroupsField', 'ExtrasField']
class StringField(BaseField):
default = None
def validate(self, instance, name, value):
if value is None:
if self.required:
raise TypeError("Required field cannot be None")
return None
if not isinstance(value, basestring):
raise TypeError("Invalid type for string field {0!r}: {1!r}"
.format(name, type(value)))
return value
class BoolField(BaseField):
default = False
def validate(self, instance, name, value):
if value is False or value is None:
return False
if value is True:
return True
raise TypeError("Invalid type for boolean field {0!r}: {1!r}"
.format(name, type(value)))
class IntegerField(BaseField):
default = 0
def validate(self, instance, name, value):
if value is None:
if self.required:
raise TypeError("Required field cannot be None")
return None
if not isinstance(value, int):
raise TypeError("Invalid type for integer field {0!r}: {1!r}"
.format(name, type(value)))
return value
class MutableFieldMixin(object):
def get(self, instance, name):
"""
When getting a mutable object, we need to make a copy,
in order to make sure we are still able to detect changes.
"""
if name not in instance._updates:
if name not in instance._values:
instance._values[name] = self.get_default()
value = instance._values[name]
            # To be extra safe, make a copy here, even for
            # default values, which might get shared.
instance._updates[name] = copy.deepcopy(
self.validate(instance, name, value))
return instance._updates[name]
def serialize(self, instance, name):
# Copy to prevent unwanted mutation
return copy.deepcopy(self.get(instance, name))
def is_modified(self, instance, name):
if name not in instance._updates:
return False
# Otherwise, compare values to check whether
# field has been modified.
if name in instance._values:
default = instance._values[name]
else:
default = self.get_default()
return default != instance._updates[name]
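# --- Illustrative, standalone sketch (not library code) ---
# The change-detection idea used by MutableFieldMixin above: keep the stored
# value untouched, hand callers a deep copy, and treat the field as modified
# once the two diverge.
def _copy_on_get_demo():
    stored = {'tags': ['a', 'b']}
    working = copy.deepcopy(stored)
    working['tags'].append('c')   # the caller mutates only the copy
    return stored != working      # True -> the field counts as modified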
class ListField(MutableFieldMixin, BaseField):
default = staticmethod(lambda: [])
def validate(self, instance, name, value):
value = super(ListField, self).validate(instance, name, value)
if not isinstance(value, SEQUENCE_TYPES):
raise ValueError("{0} must be a list".format(name))
return value
class SetField(MutableFieldMixin, BaseField):
default = staticmethod(lambda: [])
def validate(self, instance, name, value):
value = super(SetField, self).validate(instance, name, value)
if isinstance(value, set):
return value
if not isinstance(value, SEQUENCE_TYPES):
raise ValueError("{0} must be a set or list".format(name))
return set(value)
def serialize(self, instance, name):
return copy.deepcopy(list(self.get(instance, name)))
class DictField(MutableFieldMixin, BaseField):
default = staticmethod(lambda: {})
def validate(self, instance, name, value):
value = super(DictField, self).validate(instance, name, value)
if not isinstance(value, MAPPING_TYPES):
raise ValueError("{0} must be a dict".format(name))
return value
class GroupsField(SetField):
def validate(self, instance, name, value):
value = super(GroupsField, self).validate(instance, name, value)
if not all(isinstance(x, basestring) for x in value):
raise ValueError("{0} must be a list of strings".format(name))
return value
class ExtrasField(DictField):
def validate(self, instance, name, value):
return super(ExtrasField, self).validate(instance, name, value)
def is_equivalent(self, instance, name, other, ignore_key=True):
        # Equivalence checking for extras is tricky:
        # a missing key must compare equal to a key explicitly set to None,
        # as CKAN treats the two the same.
if ignore_key and self.is_key:
return True
def _remove_null(dct):
return dict((k, v) for k, v in dct.iteritems() if v is not None)
# Just perform simple comparison between values
myvalue = getattr(instance, name)
othervalue = getattr(other, name)
if myvalue is None:
myvalue = self.get_default()
if othervalue is None:
othervalue = self.get_default()
return _remove_null(myvalue) == _remove_null(othervalue)
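# --- Illustrative, standalone sketch (not library code) ---
# The null-stripping comparison used by ExtrasField.is_equivalent above: a
# missing key and a key explicitly set to None compare as equivalent.
def _extras_equivalence_demo():
    left = {'theme': 'transport', 'notes': None}
    right = {'theme': 'transport'}
    def _remove_null(dct):
        return dict((k, v) for k, v in dct.items() if v is not None)
    return _remove_null(left) == _remove_null(right)   # True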
| {
"content_hash": "e0132392f317ef8ff1a8375e7b511f8c",
"timestamp": "",
"source": "github",
"line_count": 158,
"max_line_length": 76,
"avg_line_length": 31.67721518987342,
"alnum_prop": 0.6107892107892108,
"repo_name": "opendatatrentino/ckan-api-client",
"id": "a47f7d073b437ec60202c4c91f9dd98f9378385b",
"size": "5005",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ckan_api_client/objects/fields.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Makefile",
"bytes": "7801"
},
{
"name": "Python",
"bytes": "250289"
},
{
"name": "Shell",
"bytes": "11387"
}
],
"symlink_target": ""
} |
from .sub_resource import SubResource
class VirtualNetworkGatewayIPConfiguration(SubResource):
"""IP configuration for virtual network gateway.
Variables are only populated by the server, and will be ignored when
sending a request.
:param id: Resource ID.
:type id: str
:param private_ip_allocation_method: The private IP allocation method.
Possible values are: 'Static' and 'Dynamic'. Possible values include:
'Static', 'Dynamic'
:type private_ip_allocation_method: str or
~azure.mgmt.network.v2017_06_01.models.IPAllocationMethod
:param subnet: The reference of the subnet resource.
:type subnet: ~azure.mgmt.network.v2017_06_01.models.SubResource
:param public_ip_address: The reference of the public IP resource.
:type public_ip_address:
~azure.mgmt.network.v2017_06_01.models.SubResource
:ivar provisioning_state: The provisioning state of the public IP
resource. Possible values are: 'Updating', 'Deleting', and 'Failed'.
:vartype provisioning_state: str
:param name: The name of the resource that is unique within a resource
group. This name can be used to access the resource.
:type name: str
:param etag: A unique read-only string that changes whenever the resource
is updated.
:type etag: str
"""
_validation = {
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'private_ip_allocation_method': {'key': 'properties.privateIPAllocationMethod', 'type': 'str'},
'subnet': {'key': 'properties.subnet', 'type': 'SubResource'},
'public_ip_address': {'key': 'properties.publicIPAddress', 'type': 'SubResource'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
}
def __init__(self, **kwargs):
super(VirtualNetworkGatewayIPConfiguration, self).__init__(**kwargs)
self.private_ip_allocation_method = kwargs.get('private_ip_allocation_method', None)
self.subnet = kwargs.get('subnet', None)
self.public_ip_address = kwargs.get('public_ip_address', None)
self.provisioning_state = None
self.name = kwargs.get('name', None)
self.etag = kwargs.get('etag', None)
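# --- Illustrative sketch (not part of the SDK) ---
# The model is purely kwargs-driven; the resource IDs below are hypothetical
# placeholders, and SubResource is assumed to accept an `id` keyword as the
# other models in this package do.
def _example_ip_configuration():
    return VirtualNetworkGatewayIPConfiguration(
        name='gw-ipconfig-1',
        private_ip_allocation_method='Dynamic',
        subnet=SubResource(id='<gateway-subnet-resource-id>'),
        public_ip_address=SubResource(id='<public-ip-resource-id>'))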
| {
"content_hash": "0dd6e04e4dfc1d99449c03ffa031e3ad",
"timestamp": "",
"source": "github",
"line_count": 54,
"max_line_length": 103,
"avg_line_length": 43.907407407407405,
"alnum_prop": 0.6613243357233235,
"repo_name": "lmazuel/azure-sdk-for-python",
"id": "96fdc0bec279b91ceb37ebfb8e74cffa5a07efe4",
"size": "2845",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "azure-mgmt-network/azure/mgmt/network/v2017_06_01/models/virtual_network_gateway_ip_configuration.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "42572767"
}
],
"symlink_target": ""
} |
import six
from heat.common import exception
from heat.common.i18n import _
from heat.engine import properties
from heat.engine import resource
class NeutronSecurityGroup(object):
def __init__(self, sg):
self.sg = sg
self.client = sg.client('neutron')
self.plugin = sg.client_plugin('neutron')
def _convert_to_neutron_rule(self, sg_rule):
return {
'direction': sg_rule['direction'],
'ethertype': 'IPv4',
'remote_ip_prefix': sg_rule.get(self.sg.RULE_CIDR_IP),
'port_range_min': sg_rule.get(self.sg.RULE_FROM_PORT),
'port_range_max': sg_rule.get(self.sg.RULE_TO_PORT),
'protocol': sg_rule.get(self.sg.RULE_IP_PROTOCOL),
'remote_group_id': sg_rule.get(
self.sg.RULE_SOURCE_SECURITY_GROUP_ID),
'security_group_id': self.sg.resource_id
}
def _res_rules_to_common(self, api_rules):
rules = {}
for nr in api_rules:
rule = {}
rule[self.sg.RULE_FROM_PORT] = nr['port_range_min']
rule[self.sg.RULE_TO_PORT] = nr['port_range_max']
rule[self.sg.RULE_IP_PROTOCOL] = nr['protocol']
rule['direction'] = nr['direction']
rule[self.sg.RULE_CIDR_IP] = nr['remote_ip_prefix']
rule[self.sg.RULE_SOURCE_SECURITY_GROUP_ID
] = nr['remote_group_id']
rules[nr['id']] = rule
return rules
def _prop_rules_to_common(self, props, direction):
rules = []
prs = props.get(direction) or []
for pr in prs:
rule = dict(pr)
rule.pop(self.sg.RULE_SOURCE_SECURITY_GROUP_OWNER_ID)
# Neutron only accepts positive ints
from_port = pr.get(self.sg.RULE_FROM_PORT)
if from_port is not None:
from_port = int(from_port)
if from_port < 0:
from_port = None
rule[self.sg.RULE_FROM_PORT] = from_port
to_port = pr.get(self.sg.RULE_TO_PORT)
if to_port is not None:
to_port = int(to_port)
if to_port < 0:
to_port = None
rule[self.sg.RULE_TO_PORT] = to_port
if (pr.get(self.sg.RULE_FROM_PORT) is None and
pr.get(self.sg.RULE_TO_PORT) is None):
rule[self.sg.RULE_CIDR_IP] = None
else:
rule[self.sg.RULE_CIDR_IP] = pr.get(self.sg.RULE_CIDR_IP)
# Neutron understands both names and ids
rule[self.sg.RULE_SOURCE_SECURITY_GROUP_ID] = (
pr.get(self.sg.RULE_SOURCE_SECURITY_GROUP_ID) or
pr.get(self.sg.RULE_SOURCE_SECURITY_GROUP_NAME)
)
rule.pop(self.sg.RULE_SOURCE_SECURITY_GROUP_NAME)
rules.append(rule)
return rules
def create(self):
sec = self.client.create_security_group({'security_group': {
'name': self.sg.physical_resource_name(),
'description': self.sg.properties[self.sg.GROUP_DESCRIPTION]}
})['security_group']
self.sg.resource_id_set(sec['id'])
self.delete_default_egress_rules(sec)
if self.sg.properties[self.sg.SECURITY_GROUP_INGRESS]:
rules_in = self._prop_rules_to_common(
self.sg.properties, self.sg.SECURITY_GROUP_INGRESS)
for rule in rules_in:
rule['direction'] = 'ingress'
self.create_rule(rule)
if self.sg.properties[self.sg.SECURITY_GROUP_EGRESS]:
rules_e = self._prop_rules_to_common(
self.sg.properties, self.sg.SECURITY_GROUP_EGRESS)
for rule in rules_e:
rule['direction'] = 'egress'
self.create_rule(rule)
def create_rule(self, rule):
try:
self.client.create_security_group_rule({
'security_group_rule':
self._convert_to_neutron_rule(rule)
})
except Exception as ex:
            # ignore the error if the rule already exists
if not self.plugin.is_conflict(ex):
raise
def delete(self):
if self.sg.resource_id is not None:
try:
sec = self.client.show_security_group(
self.sg.resource_id)['security_group']
except Exception as ex:
self.plugin.ignore_not_found(ex)
else:
for rule in sec['security_group_rules']:
self.delete_rule(rule['id'])
with self.plugin.ignore_not_found:
self.client.delete_security_group(self.sg.resource_id)
def delete_rule(self, rule_id):
with self.plugin.ignore_not_found:
self.client.delete_security_group_rule(rule_id)
def delete_default_egress_rules(self, sec):
"""Delete the default rules which allow all egress traffic."""
if self.sg.properties[self.sg.SECURITY_GROUP_EGRESS]:
for rule in sec['security_group_rules']:
if rule['direction'] == 'egress':
self.client.delete_security_group_rule(rule['id'])
def update(self, props):
sec = self.client.show_security_group(
self.sg.resource_id)['security_group']
existing = self._res_rules_to_common(
sec['security_group_rules'])
updated = {}
updated[self.sg.SECURITY_GROUP_EGRESS
] = self._prop_rules_to_common(
props, self.sg.SECURITY_GROUP_EGRESS)
updated[self.sg.SECURITY_GROUP_INGRESS
] = self._prop_rules_to_common(
props, self.sg.SECURITY_GROUP_INGRESS)
ids, new = self.diff_rules(existing, updated)
for id in ids:
self.delete_rule(id)
for rule in new:
self.create_rule(rule)
def diff_rules(self, existing, updated):
for rule in updated[self.sg.SECURITY_GROUP_EGRESS]:
rule['direction'] = 'egress'
for rule in updated[self.sg.SECURITY_GROUP_INGRESS]:
rule['direction'] = 'ingress'
updated_rules = list(six.itervalues(updated))
updated_all = updated_rules[0] + updated_rules[1]
ids_to_delete = [id for id, rule in existing.items()
if rule not in updated_all]
rules_to_create = [rule for rule in updated_all
if rule not in six.itervalues(existing)]
return ids_to_delete, rules_to_create
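# --- Illustrative, standalone sketch (not part of Heat) ---
# The delete/create split performed by diff_rules() above: existing rules that
# are absent from the updated set get deleted, and updated rules that are not
# already present get created. The rule contents below are made-up examples.
def _rule_diff_demo():
    existing = {'rule-1': {'protocol': 'tcp', 'port': 22},
                'rule-2': {'protocol': 'tcp', 'port': 80}}
    updated = [{'protocol': 'tcp', 'port': 22},
               {'protocol': 'udp', 'port': 53}]
    ids_to_delete = [rule_id for rule_id, rule in existing.items()
                     if rule not in updated]
    rules_to_create = [rule for rule in updated
                       if rule not in existing.values()]
    # -> (['rule-2'], [{'protocol': 'udp', 'port': 53}])
    return ids_to_delete, rules_to_create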
class SecurityGroup(resource.Resource):
PROPERTIES = (
GROUP_DESCRIPTION, VPC_ID, SECURITY_GROUP_INGRESS,
SECURITY_GROUP_EGRESS,
) = (
'GroupDescription', 'VpcId', 'SecurityGroupIngress',
'SecurityGroupEgress',
)
_RULE_KEYS = (
RULE_CIDR_IP, RULE_FROM_PORT, RULE_TO_PORT, RULE_IP_PROTOCOL,
RULE_SOURCE_SECURITY_GROUP_ID, RULE_SOURCE_SECURITY_GROUP_NAME,
RULE_SOURCE_SECURITY_GROUP_OWNER_ID,
) = (
'CidrIp', 'FromPort', 'ToPort', 'IpProtocol',
'SourceSecurityGroupId', 'SourceSecurityGroupName',
'SourceSecurityGroupOwnerId',
)
_rule_schema = {
RULE_CIDR_IP: properties.Schema(
properties.Schema.STRING
),
RULE_FROM_PORT: properties.Schema(
properties.Schema.STRING
),
RULE_TO_PORT: properties.Schema(
properties.Schema.STRING
),
RULE_IP_PROTOCOL: properties.Schema(
properties.Schema.STRING
),
RULE_SOURCE_SECURITY_GROUP_ID: properties.Schema(
properties.Schema.STRING
),
RULE_SOURCE_SECURITY_GROUP_NAME: properties.Schema(
properties.Schema.STRING
),
RULE_SOURCE_SECURITY_GROUP_OWNER_ID: properties.Schema(
properties.Schema.STRING,
implemented=False
),
}
properties_schema = {
GROUP_DESCRIPTION: properties.Schema(
properties.Schema.STRING,
_('Description of the security group.'),
required=True
),
VPC_ID: properties.Schema(
properties.Schema.STRING,
_('Physical ID of the VPC. Not implemented.')
),
SECURITY_GROUP_INGRESS: properties.Schema(
properties.Schema.LIST,
schema=properties.Schema(
properties.Schema.MAP,
_('List of security group ingress rules.'),
schema=_rule_schema,
),
update_allowed=True
),
SECURITY_GROUP_EGRESS: properties.Schema(
properties.Schema.LIST,
schema=properties.Schema(
properties.Schema.MAP,
_('List of security group egress rules.'),
schema=_rule_schema,
),
update_allowed=True
),
}
def handle_create(self):
NeutronSecurityGroup(self).create()
def handle_delete(self):
NeutronSecurityGroup(self).delete()
def handle_update(self, json_snippet, tmpl_diff, prop_diff):
if (self.SECURITY_GROUP_INGRESS in prop_diff or
self.SECURITY_GROUP_EGRESS in prop_diff):
props = json_snippet.properties(self.properties_schema,
self.context)
NeutronSecurityGroup(self).update(props)
class SecurityGroupNotFound(exception.HeatException):
msg_fmt = _('Security Group "%(group_name)s" not found')
def resource_mapping():
return {
'AWS::EC2::SecurityGroup': SecurityGroup,
}
| {
"content_hash": "9d637849db279e6f6b3d9dd4f15c9296",
"timestamp": "",
"source": "github",
"line_count": 263,
"max_line_length": 74,
"avg_line_length": 36.71863117870723,
"alnum_prop": 0.5595940768354561,
"repo_name": "noironetworks/heat",
"id": "38b298e8fae07578f31274e3a7cbc9e772759b9c",
"size": "10232",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "heat/engine/resources/aws/ec2/security_group.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "8804896"
},
{
"name": "Shell",
"bytes": "64533"
}
],
"symlink_target": ""
} |
from . import number_types as N
from .number_types import (UOffsetTFlags, SOffsetTFlags, VOffsetTFlags)
from . import encode
from . import packer
from . import compat
from .compat import range_func
from .compat import memoryview_type
from .compat import import_numpy, NumpyRequiredForThisFeature
np = import_numpy()
## @file
## @addtogroup flatbuffers_python_api
## @{
## @cond FLATBUFFERS_INTERNAL
class OffsetArithmeticError(RuntimeError):
"""
Error caused by an Offset arithmetic error. Probably caused by bad
writing of fields. This is considered an unreachable situation in
normal circumstances.
"""
pass
class IsNotNestedError(RuntimeError):
"""
Error caused by using a Builder to write Object data when not inside
an Object.
"""
pass
class IsNestedError(RuntimeError):
"""
Error caused by using a Builder to begin an Object when an Object is
already being built.
"""
pass
class StructIsNotInlineError(RuntimeError):
"""
Error caused by using a Builder to write a Struct at a location that
is not the current Offset.
"""
pass
class BuilderSizeError(RuntimeError):
"""
Error caused by causing a Builder to exceed the hardcoded limit of 2
gigabytes.
"""
pass
class BuilderNotFinishedError(RuntimeError):
"""
Error caused by not calling `Finish` before calling `Output`.
"""
pass
# VtableMetadataFields is the count of metadata fields in each vtable.
VtableMetadataFields = 2
## @endcond
class Builder(object):
""" A Builder is used to construct one or more FlatBuffers.
Typically, Builder objects will be used from code generated by the `flatc`
compiler.
A Builder constructs byte buffers in a last-first manner for simplicity and
performance during reading.
Internally, a Builder is a state machine for creating FlatBuffer objects.
It holds the following internal state:
- Bytes: an array of bytes.
- current_vtable: a list of integers.
- vtables: a list of vtable entries (i.e. a list of list of integers).
Attributes:
Bytes: The internal `bytearray` for the Builder.
finished: A boolean determining if the Builder has been finalized.
"""
    ## @cond FLATBUFFERS_INTERNAL
__slots__ = ("Bytes", "current_vtable", "head", "minalign", "objectEnd",
"vtables", "nested", "finished")
"""Maximum buffer size constant, in bytes.
    Builder will never allow its buffer to grow over this size.
Currently equals 2Gb.
"""
MAX_BUFFER_SIZE = 2**31
## @endcond
def __init__(self, initialSize):
"""Initializes a Builder of size `initial_size`.
The internal buffer is grown as needed.
"""
if not (0 <= initialSize <= Builder.MAX_BUFFER_SIZE):
msg = "flatbuffers: Cannot create Builder larger than 2 gigabytes."
raise BuilderSizeError(msg)
self.Bytes = bytearray(initialSize)
## @cond FLATBUFFERS_INTERNAL
self.current_vtable = None
self.head = UOffsetTFlags.py_type(initialSize)
self.minalign = 1
self.objectEnd = None
self.vtables = []
self.nested = False
## @endcond
self.finished = False
def Output(self):
"""Return the portion of the buffer that has been used for writing data.
This is the typical way to access the FlatBuffer data inside the
builder. If you try to access `Builder.Bytes` directly, you would need
to manually index it with `Head()`, since the buffer is constructed
backwards.
It raises BuilderNotFinishedError if the buffer has not been finished
with `Finish`.
"""
if not self.finished:
raise BuilderNotFinishedError()
return self.Bytes[self.Head():]
## @cond FLATBUFFERS_INTERNAL
def StartObject(self, numfields):
"""StartObject initializes bookkeeping for writing a new object."""
self.assertNotNested()
# use 32-bit offsets so that arithmetic doesn't overflow.
self.current_vtable = [0 for _ in range_func(numfields)]
self.objectEnd = self.Offset()
self.nested = True
def WriteVtable(self):
"""
WriteVtable serializes the vtable for the current object, if needed.
Before writing out the vtable, this checks pre-existing vtables for
equality to this one. If an equal vtable is found, point the object to
the existing vtable and return.
Because vtable values are sensitive to alignment of object data, not
all logically-equal vtables will be deduplicated.
A vtable has the following format:
<VOffsetT: size of the vtable in bytes, including this value>
<VOffsetT: size of the object in bytes, including the vtable offset>
<VOffsetT: offset for a field> * N, where N is the number of fields
in the schema for this type. Includes deprecated fields.
Thus, a vtable is made of 2 + N elements, each VOffsetT bytes wide.
An object has the following format:
<SOffsetT: offset to this object's vtable (may be negative)>
<byte: data>+
"""
# Prepend a zero scalar to the object. Later in this function we'll
# write an offset here that points to the object's vtable:
self.PrependSOffsetTRelative(0)
objectOffset = self.Offset()
existingVtable = None
# Trim trailing 0 offsets.
while self.current_vtable and self.current_vtable[-1] == 0:
self.current_vtable.pop()
# Search backwards through existing vtables, because similar vtables
# are likely to have been recently appended. See
# BenchmarkVtableDeduplication for a case in which this heuristic
# saves about 30% of the time used in writing objects with duplicate
# tables.
i = len(self.vtables) - 1
while i >= 0:
# Find the other vtable, which is associated with `i`:
vt2Offset = self.vtables[i]
vt2Start = len(self.Bytes) - vt2Offset
vt2Len = encode.Get(packer.voffset, self.Bytes, vt2Start)
metadata = VtableMetadataFields * N.VOffsetTFlags.bytewidth
vt2End = vt2Start + vt2Len
vt2 = self.Bytes[vt2Start+metadata:vt2End]
# Compare the other vtable to the one under consideration.
# If they are equal, store the offset and break:
if vtableEqual(self.current_vtable, objectOffset, vt2):
existingVtable = vt2Offset
break
i -= 1
if existingVtable is None:
# Did not find a vtable, so write this one to the buffer.
            # Write out the current vtable in reverse, because
# serialization occurs in last-first order:
i = len(self.current_vtable) - 1
while i >= 0:
off = 0
if self.current_vtable[i] != 0:
# Forward reference to field;
# use 32bit number to ensure no overflow:
off = objectOffset - self.current_vtable[i]
self.PrependVOffsetT(off)
i -= 1
# The two metadata fields are written last.
# First, store the object bytesize:
objectSize = UOffsetTFlags.py_type(objectOffset - self.objectEnd)
self.PrependVOffsetT(VOffsetTFlags.py_type(objectSize))
# Second, store the vtable bytesize:
vBytes = len(self.current_vtable) + VtableMetadataFields
vBytes *= N.VOffsetTFlags.bytewidth
self.PrependVOffsetT(VOffsetTFlags.py_type(vBytes))
# Next, write the offset to the new vtable in the
# already-allocated SOffsetT at the beginning of this object:
objectStart = SOffsetTFlags.py_type(len(self.Bytes) - objectOffset)
encode.Write(packer.soffset, self.Bytes, objectStart,
SOffsetTFlags.py_type(self.Offset() - objectOffset))
# Finally, store this vtable in memory for future
# deduplication:
self.vtables.append(self.Offset())
else:
# Found a duplicate vtable.
objectStart = SOffsetTFlags.py_type(len(self.Bytes) - objectOffset)
self.head = UOffsetTFlags.py_type(objectStart)
# Write the offset to the found vtable in the
# already-allocated SOffsetT at the beginning of this object:
encode.Write(packer.soffset, self.Bytes, self.Head(),
SOffsetTFlags.py_type(existingVtable - objectOffset))
self.current_vtable = None
return objectOffset
def EndObject(self):
"""EndObject writes data necessary to finish object construction."""
self.assertNested()
self.nested = False
return self.WriteVtable()
def growByteBuffer(self):
"""Doubles the size of the byteslice, and copies the old data towards
the end of the new buffer (since we build the buffer backwards)."""
if len(self.Bytes) == Builder.MAX_BUFFER_SIZE:
msg = "flatbuffers: cannot grow buffer beyond 2 gigabytes"
raise BuilderSizeError(msg)
newSize = min(len(self.Bytes) * 2, Builder.MAX_BUFFER_SIZE)
if newSize == 0:
newSize = 1
bytes2 = bytearray(newSize)
bytes2[newSize-len(self.Bytes):] = self.Bytes
self.Bytes = bytes2
## @endcond
def Head(self):
"""Get the start of useful data in the underlying byte buffer.
Note: unlike other functions, this value is interpreted as from the
left.
"""
## @cond FLATBUFFERS_INTERNAL
return self.head
## @endcond
## @cond FLATBUFFERS_INTERNAL
def Offset(self):
"""Offset relative to the end of the buffer."""
return UOffsetTFlags.py_type(len(self.Bytes) - self.Head())
def Pad(self, n):
"""Pad places zeros at the current offset."""
for i in range_func(n):
self.Place(0, N.Uint8Flags)
def Prep(self, size, additionalBytes):
"""
        Prep prepares to write an element of `size` after `additionalBytes`
        have been written, e.g. if you write a string, you need to align
        such that the int length field is aligned to SizeInt32, and the string
data follows it directly.
If all you need to do is align, `additionalBytes` will be 0.
"""
# Track the biggest thing we've ever aligned to.
if size > self.minalign:
self.minalign = size
# Find the amount of alignment needed such that `size` is properly
# aligned after `additionalBytes`:
alignSize = (~(len(self.Bytes) - self.Head() + additionalBytes)) + 1
alignSize &= (size - 1)
# Reallocate the buffer if needed:
while self.Head() < alignSize+size+additionalBytes:
oldBufSize = len(self.Bytes)
self.growByteBuffer()
updated_head = self.head + len(self.Bytes) - oldBufSize
self.head = UOffsetTFlags.py_type(updated_head)
self.Pad(alignSize)
def PrependSOffsetTRelative(self, off):
"""
PrependSOffsetTRelative prepends an SOffsetT, relative to where it
will be written.
"""
# Ensure alignment is already done:
self.Prep(N.SOffsetTFlags.bytewidth, 0)
if not (off <= self.Offset()):
msg = "flatbuffers: Offset arithmetic error."
raise OffsetArithmeticError(msg)
off2 = self.Offset() - off + N.SOffsetTFlags.bytewidth
self.PlaceSOffsetT(off2)
## @endcond
def PrependUOffsetTRelative(self, off):
"""Prepends an unsigned offset into vector data, relative to where it
will be written.
"""
# Ensure alignment is already done:
self.Prep(N.UOffsetTFlags.bytewidth, 0)
if not (off <= self.Offset()):
msg = "flatbuffers: Offset arithmetic error."
raise OffsetArithmeticError(msg)
off2 = self.Offset() - off + N.UOffsetTFlags.bytewidth
self.PlaceUOffsetT(off2)
## @cond FLATBUFFERS_INTERNAL
def StartVector(self, elemSize, numElems, alignment):
"""
StartVector initializes bookkeeping for writing a new vector.
A vector has the following format:
- <UOffsetT: number of elements in this vector>
- <T: data>+, where T is the type of elements of this vector.
"""
self.assertNotNested()
self.nested = True
self.Prep(N.Uint32Flags.bytewidth, elemSize*numElems)
self.Prep(alignment, elemSize*numElems) # In case alignment > int.
return self.Offset()
## @endcond
def EndVector(self, vectorNumElems):
"""EndVector writes data necessary to finish vector construction."""
self.assertNested()
## @cond FLATBUFFERS_INTERNAL
self.nested = False
## @endcond
# we already made space for this, so write without PrependUint32
self.PlaceUOffsetT(vectorNumElems)
return self.Offset()
def CreateString(self, s, encoding='utf-8', errors='strict'):
"""CreateString writes a null-terminated byte string as a vector."""
self.assertNotNested()
## @cond FLATBUFFERS_INTERNAL
self.nested = True
## @endcond
if isinstance(s, compat.string_types):
x = s.encode(encoding, errors)
elif isinstance(s, compat.binary_types):
x = s
else:
raise TypeError("non-string passed to CreateString")
self.Prep(N.UOffsetTFlags.bytewidth, (len(x)+1)*N.Uint8Flags.bytewidth)
self.Place(0, N.Uint8Flags)
        # Use the encoded byte length; for non-ASCII text it can differ from len(s).
        l = UOffsetTFlags.py_type(len(x))
## @cond FLATBUFFERS_INTERNAL
self.head = UOffsetTFlags.py_type(self.Head() - l)
## @endcond
self.Bytes[self.Head():self.Head()+l] = x
return self.EndVector(len(x))
def CreateByteVector(self, x):
"""CreateString writes a byte vector."""
self.assertNotNested()
## @cond FLATBUFFERS_INTERNAL
self.nested = True
## @endcond
if not isinstance(x, compat.binary_types):
raise TypeError("non-byte vector passed to CreateByteVector")
self.Prep(N.UOffsetTFlags.bytewidth, len(x)*N.Uint8Flags.bytewidth)
l = UOffsetTFlags.py_type(len(x))
## @cond FLATBUFFERS_INTERNAL
self.head = UOffsetTFlags.py_type(self.Head() - l)
## @endcond
self.Bytes[self.Head():self.Head()+l] = x
return self.EndVector(len(x))
def CreateNumpyVector(self, x):
"""CreateNumpyVector writes a numpy array into the buffer."""
if np is None:
# Numpy is required for this feature
raise NumpyRequiredForThisFeature("Numpy was not found.")
if not isinstance(x, np.ndarray):
raise TypeError("non-numpy-ndarray passed to CreateNumpyVector")
if x.dtype.kind not in ['b', 'i', 'u', 'f']:
raise TypeError("numpy-ndarray holds elements of unsupported datatype")
if x.ndim > 1:
raise TypeError("multidimensional-ndarray passed to CreateNumpyVector")
self.StartVector(x.itemsize, x.size, x.dtype.alignment)
# Ensure little endian byte ordering
if x.dtype.str[0] == "<":
x_lend = x
else:
x_lend = x.byteswap(inplace=False)
# Calculate total length
l = UOffsetTFlags.py_type(x_lend.itemsize * x_lend.size)
## @cond FLATBUFFERS_INTERNAL
self.head = UOffsetTFlags.py_type(self.Head() - l)
## @endcond
# tobytes ensures c_contiguous ordering
self.Bytes[self.Head():self.Head()+l] = x_lend.tobytes(order='C')
return self.EndVector(x.size)
## @cond FLATBUFFERS_INTERNAL
def assertNested(self):
"""
Check that we are in the process of building an object.
"""
if not self.nested:
raise IsNotNestedError()
def assertNotNested(self):
"""
Check that no other objects are being built while making this
object. If not, raise an exception.
"""
if self.nested:
raise IsNestedError()
def assertStructIsInline(self, obj):
"""
Structs are always stored inline, so need to be created right
where they are used. You'll get this error if you created it
elsewhere.
"""
N.enforce_number(obj, N.UOffsetTFlags)
if obj != self.Offset():
msg = ("flatbuffers: Tried to write a Struct at an Offset that "
"is different from the current Offset of the Builder.")
raise StructIsNotInlineError(msg)
def Slot(self, slotnum):
"""
Slot sets the vtable key `voffset` to the current location in the
buffer.
"""
self.assertNested()
self.current_vtable[slotnum] = self.Offset()
## @endcond
def __Finish(self, rootTable, sizePrefix):
"""Finish finalizes a buffer, pointing to the given `rootTable`."""
N.enforce_number(rootTable, N.UOffsetTFlags)
prepSize = N.UOffsetTFlags.bytewidth
if sizePrefix:
prepSize += N.Int32Flags.bytewidth
self.Prep(self.minalign, prepSize)
self.PrependUOffsetTRelative(rootTable)
if sizePrefix:
size = len(self.Bytes) - self.Head()
N.enforce_number(size, N.Int32Flags)
self.PrependInt32(size)
self.finished = True
return self.Head()
def Finish(self, rootTable):
"""Finish finalizes a buffer, pointing to the given `rootTable`."""
return self.__Finish(rootTable, False)
def FinishSizePrefixed(self, rootTable):
"""
Finish finalizes a buffer, pointing to the given `rootTable`,
with the size prefixed.
"""
return self.__Finish(rootTable, True)
## @cond FLATBUFFERS_INTERNAL
def Prepend(self, flags, off):
self.Prep(flags.bytewidth, 0)
self.Place(off, flags)
def PrependSlot(self, flags, o, x, d):
N.enforce_number(x, flags)
N.enforce_number(d, flags)
if x != d:
self.Prepend(flags, x)
self.Slot(o)
def PrependBoolSlot(self, *args): self.PrependSlot(N.BoolFlags, *args)
def PrependByteSlot(self, *args): self.PrependSlot(N.Uint8Flags, *args)
def PrependUint8Slot(self, *args): self.PrependSlot(N.Uint8Flags, *args)
def PrependUint16Slot(self, *args): self.PrependSlot(N.Uint16Flags, *args)
def PrependUint32Slot(self, *args): self.PrependSlot(N.Uint32Flags, *args)
def PrependUint64Slot(self, *args): self.PrependSlot(N.Uint64Flags, *args)
def PrependInt8Slot(self, *args): self.PrependSlot(N.Int8Flags, *args)
def PrependInt16Slot(self, *args): self.PrependSlot(N.Int16Flags, *args)
def PrependInt32Slot(self, *args): self.PrependSlot(N.Int32Flags, *args)
def PrependInt64Slot(self, *args): self.PrependSlot(N.Int64Flags, *args)
def PrependFloat32Slot(self, *args): self.PrependSlot(N.Float32Flags,
*args)
def PrependFloat64Slot(self, *args): self.PrependSlot(N.Float64Flags,
*args)
def PrependUOffsetTRelativeSlot(self, o, x, d):
"""
PrependUOffsetTRelativeSlot prepends an UOffsetT onto the object at
vtable slot `o`. If value `x` equals default `d`, then the slot will
be set to zero and no other data will be written.
"""
if x != d:
self.PrependUOffsetTRelative(x)
self.Slot(o)
def PrependStructSlot(self, v, x, d):
"""
        PrependStructSlot prepends a struct onto the object at vtable slot `v`.
Structs are stored inline, so nothing additional is being added.
In generated code, `d` is always 0.
"""
N.enforce_number(d, N.UOffsetTFlags)
if x != d:
self.assertStructIsInline(x)
self.Slot(v)
## @endcond
def PrependBool(self, x):
"""Prepend a `bool` to the Builder buffer.
Note: aligns and checks for space.
"""
self.Prepend(N.BoolFlags, x)
def PrependByte(self, x):
"""Prepend a `byte` to the Builder buffer.
Note: aligns and checks for space.
"""
self.Prepend(N.Uint8Flags, x)
def PrependUint8(self, x):
"""Prepend an `uint8` to the Builder buffer.
Note: aligns and checks for space.
"""
self.Prepend(N.Uint8Flags, x)
def PrependUint16(self, x):
"""Prepend an `uint16` to the Builder buffer.
Note: aligns and checks for space.
"""
self.Prepend(N.Uint16Flags, x)
def PrependUint32(self, x):
"""Prepend an `uint32` to the Builder buffer.
Note: aligns and checks for space.
"""
self.Prepend(N.Uint32Flags, x)
def PrependUint64(self, x):
"""Prepend an `uint64` to the Builder buffer.
Note: aligns and checks for space.
"""
self.Prepend(N.Uint64Flags, x)
def PrependInt8(self, x):
"""Prepend an `int8` to the Builder buffer.
Note: aligns and checks for space.
"""
self.Prepend(N.Int8Flags, x)
def PrependInt16(self, x):
"""Prepend an `int16` to the Builder buffer.
Note: aligns and checks for space.
"""
self.Prepend(N.Int16Flags, x)
def PrependInt32(self, x):
"""Prepend an `int32` to the Builder buffer.
Note: aligns and checks for space.
"""
self.Prepend(N.Int32Flags, x)
def PrependInt64(self, x):
"""Prepend an `int64` to the Builder buffer.
Note: aligns and checks for space.
"""
self.Prepend(N.Int64Flags, x)
def PrependFloat32(self, x):
"""Prepend a `float32` to the Builder buffer.
Note: aligns and checks for space.
"""
self.Prepend(N.Float32Flags, x)
def PrependFloat64(self, x):
"""Prepend a `float64` to the Builder buffer.
Note: aligns and checks for space.
"""
self.Prepend(N.Float64Flags, x)
##############################################################
## @cond FLATBUFFERS_INTERNAL
def PrependVOffsetT(self, x): self.Prepend(N.VOffsetTFlags, x)
def Place(self, x, flags):
"""
Place prepends a value specified by `flags` to the Builder,
without checking for available space.
"""
N.enforce_number(x, flags)
self.head = self.head - flags.bytewidth
encode.Write(flags.packer_type, self.Bytes, self.Head(), x)
def PlaceVOffsetT(self, x):
"""PlaceVOffsetT prepends a VOffsetT to the Builder, without checking
for space.
"""
N.enforce_number(x, N.VOffsetTFlags)
self.head = self.head - N.VOffsetTFlags.bytewidth
encode.Write(packer.voffset, self.Bytes, self.Head(), x)
def PlaceSOffsetT(self, x):
"""PlaceSOffsetT prepends a SOffsetT to the Builder, without checking
for space.
"""
N.enforce_number(x, N.SOffsetTFlags)
self.head = self.head - N.SOffsetTFlags.bytewidth
encode.Write(packer.soffset, self.Bytes, self.Head(), x)
def PlaceUOffsetT(self, x):
"""PlaceUOffsetT prepends a UOffsetT to the Builder, without checking
for space.
"""
N.enforce_number(x, N.UOffsetTFlags)
self.head = self.head - N.UOffsetTFlags.bytewidth
encode.Write(packer.uoffset, self.Bytes, self.Head(), x)
## @endcond
## @cond FLATBUFFERS_INTERNAL
def vtableEqual(a, objectStart, b):
"""vtableEqual compares an unwritten vtable to a written vtable."""
N.enforce_number(objectStart, N.UOffsetTFlags)
if len(a) * N.VOffsetTFlags.bytewidth != len(b):
return False
for i, elem in enumerate(a):
x = encode.Get(packer.voffset, b, i * N.VOffsetTFlags.bytewidth)
# Skip vtable entries that indicate a default value.
if x == 0 and elem == 0:
pass
else:
y = objectStart - elem
if x != y:
return False
return True
## @endcond
## @}
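# --- Illustrative sketch (not flatc-generated code) ---
# Minimal end-to-end use of Builder without generated helpers: build a table
# holding a single string field. The slot number 0 is an arbitrary example.
def _builder_demo():
    b = Builder(64)
    name = b.CreateString("flatbuffers")
    b.StartObject(1)                       # begin a table with one field
    b.PrependUOffsetTRelativeSlot(0, name, 0)
    table = b.EndObject()
    b.Finish(table)
    return b.Output()                      # the finished FlatBuffer bytes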
| {
"content_hash": "b518c7f777d5f4132e38eeb678a2aee7",
"timestamp": "",
"source": "github",
"line_count": 741,
"max_line_length": 83,
"avg_line_length": 33.225371120107965,
"alnum_prop": 0.6128350934199838,
"repo_name": "hgl888/flatbuffers",
"id": "1e96d6fe72e517a81427e31b80af32e12b7cb179",
"size": "25217",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "python/flatbuffers/builder.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "842"
},
{
"name": "C#",
"bytes": "79002"
},
{
"name": "C++",
"bytes": "387760"
},
{
"name": "CMake",
"bytes": "7654"
},
{
"name": "Go",
"bytes": "87282"
},
{
"name": "Java",
"bytes": "54572"
},
{
"name": "Makefile",
"bytes": "9140"
},
{
"name": "Protocol Buffer",
"bytes": "1012"
},
{
"name": "Python",
"bytes": "95339"
},
{
"name": "Shell",
"bytes": "23876"
}
],
"symlink_target": ""
} |
import socket, struct, sys, time
from logging import getLogger, DEBUG
from qpid import compat
from qpid import sasl
from qpid.concurrency import synchronized
from qpid.datatypes import RangedSet, Serial
from qpid.framing import OpEncoder, SegmentEncoder, FrameEncoder, \
FrameDecoder, SegmentDecoder, OpDecoder
from qpid.messaging import address, transports
from qpid.messaging.constants import UNLIMITED, REJECTED, RELEASED
from qpid.messaging.exceptions import *
from qpid.messaging.message import get_codec, Disposition, Message
from qpid.messaging.endpoints import MangledString
from qpid.ops import *
from qpid.selector import Selector
from qpid.util import URL, default,get_client_properties_with_defaults
from qpid.validator import And, Context, List, Map, Types, Values
from threading import Condition, Thread
log = getLogger("qpid.messaging")
rawlog = getLogger("qpid.messaging.io.raw")
opslog = getLogger("qpid.messaging.io.ops")
def addr2reply_to(addr):
name, subject, options = address.parse(addr)
if options:
type = options.get("node", {}).get("type")
else:
type = None
if type == "topic":
return ReplyTo(name, subject)
else:
return ReplyTo(None, name)
def reply_to2addr(reply_to):
if reply_to.exchange in (None, ""):
return reply_to.routing_key
elif reply_to.routing_key is None:
return "%s; {node: {type: topic}}" % reply_to.exchange
else:
return "%s/%s; {node: {type: topic}}" % (reply_to.exchange, reply_to.routing_key)
class Attachment:
def __init__(self, target):
self.target = target
# XXX
DURABLE_DEFAULT=False
# XXX
class Pattern:
"""
The pattern filter matches the supplied wildcard pattern against a
message subject.
"""
def __init__(self, value):
self.value = value
# XXX: this should become part of the driver
def _bind(self, sst, exchange, queue):
from qpid.ops import ExchangeBind
sst.write_cmd(ExchangeBind(exchange=exchange, queue=queue,
binding_key=self.value.replace("*", "#")))
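# Illustrative sketch (not part of the original module): the binding key swaps
# the '*' wildcard for the AMQP 0-10 topic exchange's '#'. Names are made up.
#
#   Pattern("usa.*")._bind(sst, "amq.topic", "my-queue")
#   # issues ExchangeBind(exchange="amq.topic", queue="my-queue",
#   #                     binding_key="usa.#")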
SUBJECT_DEFAULTS = {
"topic": "#"
}
def noop(): pass
def sync_noop(): pass
class SessionState:
def __init__(self, driver, session, name, channel):
self.driver = driver
self.session = session
self.name = name
self.channel = channel
self.detached = False
self.committing = False
self.aborting = False
# sender state
self.sent = Serial(0)
self.acknowledged = RangedSet()
self.actions = {}
self.min_completion = self.sent
self.max_completion = self.sent
self.results = {}
self.need_sync = False
# receiver state
self.received = None
self.executed = RangedSet()
# XXX: need to periodically exchange completion/known_completion
self.destinations = {}
def write_query(self, query, handler, obj):
id = self.sent
self.write_cmd(query, lambda: handler(self.results.pop(id), obj))
def apply_overrides(self, cmd, overrides):
for k, v in overrides.items():
cmd[k.replace('-', '_')] = v
def write_cmd(self, cmd, action=noop, overrides=None, sync=True):
if overrides:
self.apply_overrides(cmd, overrides)
if action != noop:
cmd.sync = sync
if self.detached:
raise Exception("detached")
cmd.id = self.sent
self.sent += 1
self.actions[cmd.id] = action
self.max_completion = cmd.id
self.write_op(cmd)
self.need_sync = not cmd.sync
def write_cmds(self, cmds, action=noop):
if cmds:
for cmd in cmds[:-1]:
self.write_cmd(cmd)
self.write_cmd(cmds[-1], action)
else:
action()
def write_op(self, op):
op.channel = self.channel
self.driver.write_op(op)
POLICIES = Values("always", "sender", "receiver", "never")
RELIABILITY = Values("unreliable", "at-most-once", "at-least-once",
"exactly-once")
DECLARE = Map({}, restricted=False)
BINDINGS = List(Map({
"exchange": Types(basestring),
"queue": Types(basestring),
"key": Types(basestring),
"arguments": Map({}, restricted=False)
}))
COMMON_OPTS = {
"create": POLICIES,
"delete": POLICIES,
"assert": POLICIES,
"node": Map({
"type": Values("queue", "topic"),
"durable": Types(bool),
"x-declare": DECLARE,
"x-bindings": BINDINGS
}),
"link": Map({
"name": Types(basestring),
"durable": Types(bool),
"reliability": RELIABILITY,
"x-declare": DECLARE,
"x-bindings": BINDINGS,
"x-subscribe": Map({}, restricted=False)
})
}
RECEIVE_MODES = Values("browse", "consume")
SOURCE_OPTS = COMMON_OPTS.copy()
SOURCE_OPTS.update({
"mode": RECEIVE_MODES
})
TARGET_OPTS = COMMON_OPTS.copy()
class LinkIn:
ADDR_NAME = "source"
DIR_NAME = "receiver"
VALIDATOR = Map(SOURCE_OPTS)
def init_link(self, sst, rcv, _rcv):
_rcv.destination = str(rcv.id)
sst.destinations[_rcv.destination] = _rcv
_rcv.draining = False
_rcv.bytes_open = False
_rcv.on_unlink = []
def do_link(self, sst, rcv, _rcv, type, subtype, action):
link_opts = _rcv.options.get("link", {})
if type == "topic":
default_reliability = "unreliable"
else:
default_reliability = "at-least-once"
reliability = link_opts.get("reliability", default_reliability)
declare = link_opts.get("x-declare", {})
subscribe = link_opts.get("x-subscribe", {})
acq_mode = acquire_mode.pre_acquired
if reliability in ("unreliable", "at-most-once"):
rcv._accept_mode = accept_mode.none
else:
rcv._accept_mode = accept_mode.explicit
if type == "topic":
default_name = "%s.%s" % (rcv.session.name, _rcv.destination)
_rcv._queue = link_opts.get("name", default_name)
sst.write_cmd(QueueDeclare(queue=_rcv._queue,
durable=link_opts.get("durable", False),
exclusive=True,
auto_delete=(reliability == "unreliable")),
overrides=declare)
if declare.get("exclusive", True): _rcv.on_unlink = [QueueDelete(_rcv._queue)]
subject = _rcv.subject or SUBJECT_DEFAULTS.get(subtype)
bindings = get_bindings(link_opts, _rcv._queue, _rcv.name, subject)
if not bindings:
sst.write_cmd(ExchangeBind(_rcv._queue, _rcv.name, subject))
elif type == "queue":
_rcv._queue = _rcv.name
if _rcv.options.get("mode", "consume") == "browse":
acq_mode = acquire_mode.not_acquired
bindings = get_bindings(link_opts, queue=_rcv._queue)
sst.write_cmds(bindings)
sst.write_cmd(MessageSubscribe(queue=_rcv._queue,
destination=_rcv.destination,
acquire_mode = acq_mode,
accept_mode = rcv._accept_mode),
overrides=subscribe)
sst.write_cmd(MessageSetFlowMode(_rcv.destination, flow_mode.credit), action)
def do_unlink(self, sst, rcv, _rcv, action=noop):
link_opts = _rcv.options.get("link", {})
reliability = link_opts.get("reliability")
cmds = [MessageCancel(_rcv.destination)]
cmds.extend(_rcv.on_unlink)
msgs = [] #release back messages for the closing receiver
msg = rcv.session._pop(rcv)
while msg is not None:
msgs.append(msg)
msg = rcv.session._pop(rcv)
if len(msgs) > 0:
ids = RangedSet(*[m._transfer_id for m in msgs])
log.debug("releasing back messages: %s, as receiver is closing", ids)
cmds.append(MessageRelease(ids, True))
sst.write_cmds(cmds, action)
def del_link(self, sst, rcv, _rcv):
del sst.destinations[_rcv.destination]
class LinkOut:
ADDR_NAME = "target"
DIR_NAME = "sender"
VALIDATOR = Map(TARGET_OPTS)
def init_link(self, sst, snd, _snd):
_snd.closing = False
_snd.pre_ack = False
def do_link(self, sst, snd, _snd, type, subtype, action):
link_opts = _snd.options.get("link", {})
reliability = link_opts.get("reliability", "at-least-once")
_snd.pre_ack = reliability in ("unreliable", "at-most-once")
if type == "topic":
_snd._exchange = _snd.name
_snd._routing_key = _snd.subject
bindings = get_bindings(link_opts, exchange=_snd.name, key=_snd.subject)
elif type == "queue":
_snd._exchange = ""
_snd._routing_key = _snd.name
bindings = get_bindings(link_opts, queue=_snd.name)
sst.write_cmds(bindings, action)
def do_unlink(self, sst, snd, _snd, action=noop):
action()
def del_link(self, sst, snd, _snd):
pass
class Cache:
def __init__(self, ttl):
self.ttl = ttl
self.entries = {}
def __setitem__(self, key, value):
self.entries[key] = time.time(), value
def __getitem__(self, key):
tstamp, value = self.entries[key]
if time.time() - tstamp >= self.ttl:
del self.entries[key]
raise KeyError(key)
else:
return value
def __delitem__(self, key):
del self.entries[key]
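# Illustrative sketch (not part of the original module): Cache is a lazily
# purged TTL map, used below for address resolution results. Values made up.
#
#   c = Cache(ttl=60)
#   c["amq.topic"] = ("topic", "topic")
#   c["amq.topic"]        # -> ("topic", "topic") while younger than 60 seconds
#   # a lookup at or after 60 seconds deletes the entry and raises KeyError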
# XXX
HEADER="!4s4B"
EMPTY_DP = DeliveryProperties()
EMPTY_MP = MessageProperties()
SUBJECT = "qpid.subject"
CLOSED = "CLOSED"
READ_ONLY = "READ_ONLY"
WRITE_ONLY = "WRITE_ONLY"
OPEN = "OPEN"
class Driver:
def __init__(self, connection):
self.connection = connection
self.log_id = "%x" % id(self.connection)
self._lock = self.connection._lock
self._selector = Selector.default()
self._attempts = 0
self._delay = self.connection.reconnect_interval_min
self._reconnect_log = self.connection.reconnect_log
self._host = 0
self._retrying = False
self._next_retry = None
self._transport = None
self._timeout = None
self.engine = None
def _next_host(self):
urls = [URL(u) for u in self.connection.reconnect_urls]
hosts = [(self.connection.host, default(self.connection.port, 5672))] + \
[(u.host, default(u.port, 5672)) for u in urls]
if self._host >= len(hosts):
self._host = 0
self._last_host = hosts[self._host]
if self._host == 0:
self._attempts += 1
self._host = self._host + 1
return self._last_host
def _num_hosts(self):
return len(self.connection.reconnect_urls) + 1
@synchronized
def wakeup(self):
self.dispatch()
self._selector.wakeup()
def start(self):
self._selector.register(self)
def stop(self):
self._selector.unregister(self)
if self._transport:
self.st_closed()
def fileno(self):
return self._transport.fileno()
@synchronized
def reading(self):
"""Called by the Selector I/O thread to determine if the driver needs to
wait on the arrival of network data (call self.readable() callback)
"""
return self._transport is not None and \
self._transport.reading(True)
@synchronized
def writing(self):
"""Called by the Selector I/O thread to determine if it should block
waiting for output bandwidth (call the self.writeable() callback)
"""
return self._transport is not None and \
self._transport.writing(self.engine.pending())
@synchronized
def timing(self):
"""Called by the Selector I/O thread to determine if it should wake up the
    driver (call the timeout() callback).
"""
return self._timeout
@synchronized
def abort(self, exc, info):
"""Called if the Selector I/O thread hits an unrecoverable error and fails.
"""
try:
self.connection.error = exc
log.error("I/O Thread Fatal error: %s\n%s" % (str(exc), info))
except:
pass
def _check_retry_ok(self):
"""We consider a reconnect to have suceeded only when we have received
open-ok from the peer.
If we declared success as soon as the transport connected, then we could get
into an infinite heartbeat loop if the remote process is hung and never
sends us any data. We would fail the connection after 2 missed heartbeats,
reconnect the transport, declare the reconnect ok, then fail again after 2
missed heartbeats and so on.
"""
if self._retrying and self.engine._connected: # Means we have received open-ok.
if self._reconnect_log:
log.warn("reconnect succeeded: %s:%s", *self._last_host)
self._next_retry = None
self._attempts = 0
self._delay = self.connection.reconnect_interval_min
self._retrying = False
@synchronized
def readable(self):
try:
data = self._transport.recv(64*1024)
if data is None:
return
elif data:
rawlog.debug("READ[%s]: %r", self.log_id, data)
self.engine.write(data)
self._check_retry_ok()
else:
self.close_engine()
except socket.error, e:
self.close_engine(ConnectionError(text=str(e)))
self.update_status()
self._notify()
def _notify(self):
if self.connection.error:
self.connection._condition.gc()
self.connection._waiter.notifyAll()
def close_engine(self, e=None):
if e is None:
e = ConnectionError(text="connection aborted")
if (self.connection.reconnect and
(self.connection.reconnect_limit is None or
self.connection.reconnect_limit <= 0 or
self._attempts <= self.connection.reconnect_limit)):
if self._host < self._num_hosts():
delay = 0
else:
delay = self._delay
self._delay = min(2*self._delay,
self.connection.reconnect_interval_max)
self._next_retry = time.time() + delay
if self._reconnect_log:
log.warn("recoverable error[attempt %s]: %s" % (self._attempts, e))
if delay > 0:
log.warn("sleeping %s seconds" % delay)
self._retrying = True
self.engine.close()
else:
self.engine.close(e)
self.schedule()
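  # Illustrative sketch (not part of the original module): with several
  # reconnect_urls configured, reconnect_interval_min=1 and
  # reconnect_interval_max=8 (made-up values), hosts within a single pass are
  # retried with no delay, and the wait between full passes over the host list
  # grows 1, 2, 4, 8, 8, ... seconds.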
def update_status(self):
if not self.engine: return False
status = self.engine.status()
return getattr(self, "st_%s" % status.lower())()
def st_closed(self):
# XXX: this log statement seems to sometimes hit when the socket is not connected
# XXX: rawlog.debug("CLOSE[%s]: %s", self.log_id, self._socket.getpeername())
if self._transport: self._transport.close()
self._transport = None
self.engine = None
return True
def st_open(self):
return False
@synchronized
def writeable(self):
notify = False
try:
n = self._transport.send(self.engine.peek())
if n == 0: return
sent = self.engine.read(n)
rawlog.debug("SENT[%s]: %r", self.log_id, sent)
except socket.error, e:
self.close_engine(e)
notify = True
if self.update_status() or notify:
self._notify()
@synchronized
def timeout(self):
self.dispatch()
self.update_status()
self._notify()
self.schedule()
def schedule(self):
times = []
if self.connection.heartbeat:
times.append(time.time() + self.connection.heartbeat)
if self._next_retry:
times.append(self._next_retry)
if times:
self._timeout = min(times)
else:
self._timeout = None
def dispatch(self):
try:
if self._transport is None:
if self.connection._connected and not self.connection.error:
self.connect()
else:
self.engine.dispatch()
except HeartbeatTimeout, e:
self.close_engine(e)
except ContentError, e:
msg = compat.format_exc()
self.connection.error = ContentError(text=msg)
except:
# XXX: Does socket get leaked if this occurs?
msg = compat.format_exc()
self.connection.error = InternalError(text=msg)
def connect(self):
if self._retrying and time.time() < self._next_retry:
return
try:
# XXX: should make this non blocking
host, port = self._next_host()
if self._retrying and self._reconnect_log:
log.warn("trying: %s:%s", host, port)
self.engine = Engine(self.connection)
self.engine.open()
rawlog.debug("OPEN[%s]: %s:%s", self.log_id, host, port)
trans = transports.TRANSPORTS.get(self.connection.transport)
if trans:
self._transport = trans(self.connection, host, port)
else:
raise ConnectError("no such transport: %s" % self.connection.transport)
self.schedule()
except socket.error, e:
self.close_engine(ConnectError(text=str(e)))
DEFAULT_DISPOSITION = Disposition(None)
def get_bindings(opts, queue=None, exchange=None, key=None):
bindings = opts.get("x-bindings", [])
cmds = []
for b in bindings:
exchange = b.get("exchange", exchange)
queue = b.get("queue", queue)
key = b.get("key", key)
args = b.get("arguments", {})
cmds.append(ExchangeBind(queue, exchange, key, args))
return cmds
CONNECTION_ERRS = {
  # anything not here (i.e. everything right now) will default to
# connection error
}
SESSION_ERRS = {
# anything not here will default to session error
error_code.unauthorized_access: UnauthorizedAccess,
error_code.not_found: NotFound,
error_code.resource_locked: ReceiverError,
error_code.resource_limit_exceeded: TargetCapacityExceeded,
error_code.internal_error: ServerError
}
class Engine:
def __init__(self, connection):
self.connection = connection
self.log_id = "%x" % id(self.connection)
self._closing = False
self._connected = False
self._reconnecting = bool(connection.sessions)
self._attachments = {}
self._in = LinkIn()
self._out = LinkOut()
    self.channel_max = 65536
self._channels = 0
self._sessions = {}
self.address_cache = Cache(self.connection.address_ttl)
self._status = CLOSED
self._buf = ""
self._hdr = ""
# Set _last_in and _last_out here so heartbeats will be timed from the
# beginning of connection if no data is sent/received.
self._last_in = time.time()
self._last_out = time.time()
self._op_enc = OpEncoder()
self._seg_enc = SegmentEncoder()
self._frame_enc = FrameEncoder()
self._frame_dec = FrameDecoder()
self._seg_dec = SegmentDecoder()
self._op_dec = OpDecoder()
self._sasl = sasl.Client()
if self.connection.username:
self._sasl.setAttr("username", self.connection.username)
if self.connection.password:
self._sasl.setAttr("password", self.connection.password)
if self.connection.host:
self._sasl.setAttr("host", self.connection.host)
self._sasl.setAttr("service", self.connection.sasl_service)
if self.connection.sasl_min_ssf is not None:
self._sasl.setAttr("minssf", self.connection.sasl_min_ssf)
if self.connection.sasl_max_ssf is not None:
self._sasl.setAttr("maxssf", self.connection.sasl_max_ssf)
self._sasl.init()
self._sasl_encode = False
self._sasl_decode = False
def _reset(self):
self.connection._transport_connected = False
for ssn in self.connection.sessions.values():
for m in ssn.acked + ssn.unacked + ssn.incoming:
m._transfer_id = None
for snd in ssn.senders:
snd.linked = False
for rcv in ssn.receivers:
rcv.impending = rcv.received
rcv.linked = False
def status(self):
return self._status
def write(self, data):
self._last_in = time.time()
try:
if self._sasl_decode:
data = self._sasl.decode(data)
if len(self._hdr) < 8:
r = 8 - len(self._hdr)
self._hdr += data[:r]
data = data[r:]
if len(self._hdr) == 8:
self.do_header(self._hdr)
self._frame_dec.write(data)
self._seg_dec.write(*self._frame_dec.read())
self._op_dec.write(*self._seg_dec.read())
for op in self._op_dec.read():
self.assign_id(op)
opslog.debug("RCVD[%s]: %r", self.log_id, op)
op.dispatch(self)
self.dispatch()
except MessagingError, e:
self.close(e)
except:
self.close(InternalError(text=compat.format_exc()))
def close(self, e=None):
self._reset()
# We cannot re-establish transactional sessions, they must be aborted.
# We could re-do transactional enqueues, but not dequeues.
for ssn in self.connection.sessions.values():
if ssn.transactional:
if ssn.committing:
ssn.error = TransactionUnknown(text="Transaction outcome unknown due to transport failure")
else:
ssn.error = TransactionAborted(text="Transaction aborted due to transport failure")
ssn.closed = True
if e:
self.connection.error = e
self._status = CLOSED
def assign_id(self, op):
if isinstance(op, Command):
sst = self.get_sst(op)
op.id = sst.received
sst.received += 1
def pending(self):
return len(self._buf)
def read(self, n):
result = self._buf[:n]
self._buf = self._buf[n:]
return result
def peek(self):
return self._buf
def write_op(self, op):
opslog.debug("SENT[%s]: %r", self.log_id, op)
self._op_enc.write(op)
self._seg_enc.write(*self._op_enc.read())
self._frame_enc.write(*self._seg_enc.read())
bytes = self._frame_enc.read()
if self._sasl_encode:
bytes = self._sasl.encode(bytes)
self._buf += bytes
self._last_out = time.time()
def do_header(self, hdr):
cli_major = 0; cli_minor = 10
magic, _, _, major, minor = struct.unpack(HEADER, hdr)
if major != cli_major or minor != cli_minor:
raise VersionError(text="client: %s-%s, server: %s-%s" %
(cli_major, cli_minor, major, minor))
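  # Illustrative sketch (not part of the original module): the 8-byte protocol
  # header written by open() and checked by do_header(), packed with
  # HEADER ("!4s4B"):
  #
  #   struct.pack(HEADER, "AMQP", 1, 1, 0, 10)   # -> 'AMQP\x01\x01\x00\n'
  #
  # do_header() accepts a peer header only when (major, minor) == (0, 10).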
def do_connection_start(self, start):
if self.connection.sasl_mechanisms:
permitted = self.connection.sasl_mechanisms.split()
mechs = [m for m in start.mechanisms if m in permitted]
else:
mechs = start.mechanisms
try:
mech, initial = self._sasl.start(" ".join(mechs))
except sasl.SASLError, e:
if "ANONYMOUS" not in mechs and self.connection.username is None:
_text="Anonymous connections disabled, missing credentials"
else:
_text=str(e)
raise AuthenticationFailure(text=_text)
client_properties = get_client_properties_with_defaults(provided_client_properties=self.connection.client_properties);
self.write_op(ConnectionStartOk(client_properties=client_properties,
mechanism=mech, response=initial))
def do_connection_secure(self, secure):
resp = self._sasl.step(secure.challenge)
self.write_op(ConnectionSecureOk(response=resp))
def do_connection_tune(self, tune):
# XXX: is heartbeat protocol specific?
if tune.channel_max is not None:
self.channel_max = tune.channel_max
self.write_op(ConnectionTuneOk(heartbeat=self.connection.heartbeat,
channel_max=self.channel_max))
self.write_op(ConnectionOpen())
self._sasl_encode = True
def do_connection_open_ok(self, open_ok):
self.connection.auth_username = self._sasl.auth_username()
self._connected = True
self._sasl_decode = True
self.connection._transport_connected = True
def do_connection_heartbeat(self, hrt):
pass
def do_connection_close(self, close):
self.write_op(ConnectionCloseOk())
if close.reply_code != close_code.normal:
exc = CONNECTION_ERRS.get(close.reply_code, ConnectionError)
self.connection.error = exc(close.reply_code, close.reply_text)
# XXX: should we do a half shutdown on the socket here?
# XXX: we really need to test this, we may end up reporting a
# connection abort after this, if we were to do a shutdown on read
# and stop reading, then we wouldn't report the abort, that's
# probably the right thing to do
def do_connection_close_ok(self, close_ok):
self.close()
def do_session_attached(self, atc):
pass
def do_session_command_point(self, cp):
sst = self.get_sst(cp)
sst.received = cp.command_id
def do_session_completed(self, sc):
sst = self.get_sst(sc)
for r in sc.commands:
sst.acknowledged.add(r.lower, r.upper)
if not sc.commands.empty():
while sst.min_completion in sc.commands:
if sst.actions.has_key(sst.min_completion):
sst.actions.pop(sst.min_completion)()
sst.min_completion += 1
  def do_session_known_completed(self, kcmp):
sst = self.get_sst(kcmp)
executed = RangedSet()
for e in sst.executed.ranges:
for ke in kcmp.ranges:
if e.lower in ke and e.upper in ke:
break
else:
executed.add_range(e)
    sst.executed = executed
def do_session_flush(self, sf):
sst = self.get_sst(sf)
if sf.expected:
if sst.received is None:
exp = None
else:
exp = RangedSet(sst.received)
sst.write_op(SessionExpected(exp))
if sf.confirmed:
sst.write_op(SessionConfirmed(sst.executed))
if sf.completed:
sst.write_op(SessionCompleted(sst.executed))
def do_session_request_timeout(self, rt):
sst = self.get_sst(rt)
sst.write_op(SessionTimeout(timeout=0))
def do_execution_result(self, er):
sst = self.get_sst(er)
sst.results[er.command_id] = er.value
sst.executed.add(er.id)
def do_execution_exception(self, ex):
sst = self.get_sst(ex)
exc = SESSION_ERRS.get(ex.error_code, SessionError)
sst.session.error = exc(ex.error_code, ex.description)
def dispatch(self):
if not self.connection._connected and not self._closing and self._status != CLOSED:
self.disconnect()
if self._connected and not self._closing:
for ssn in self.connection.sessions.values():
self.attach(ssn)
self.process(ssn)
# We need to check heartbeat even if not self._connected since we may have
# heartbeat timeout before receiving an open-ok
if self.connection.heartbeat and self._status != CLOSED and not self._closing:
now = time.time()
if now - self._last_in > 2*self.connection.heartbeat:
raise HeartbeatTimeout(text="heartbeat timeout")
# Only send heartbeats if we are connected.
if self._connected and now - self._last_out >= self.connection.heartbeat/2.0:
self.write_op(ConnectionHeartbeat())
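  # Illustrative sketch (not part of the original module): with
  # connection.heartbeat = 10 seconds (a made-up value), dispatch() raises
  # HeartbeatTimeout after more than 20 seconds without inbound data and, while
  # connected, writes a ConnectionHeartbeat once at least 5 seconds have passed
  # since the last outbound data.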
def open(self):
self._reset()
self._status = OPEN
self._buf += struct.pack(HEADER, "AMQP", 1, 1, 0, 10)
def disconnect(self):
self.write_op(ConnectionClose(close_code.normal))
self._closing = True
def attach(self, ssn):
if ssn.closed: return
sst = self._attachments.get(ssn)
if sst is None:
for i in xrange(0, self.channel_max):
if not self._sessions.has_key(i):
ch = i
break
else:
raise RuntimeError("all channels used")
sst = SessionState(self, ssn, ssn.name, ch)
sst.write_op(SessionAttach(name=ssn.name, force=self._reconnecting))
sst.write_op(SessionCommandPoint(sst.sent, 0))
self._reconnecting = False
sst.outgoing_idx = 0
sst.acked = []
sst.acked_idx = 0
if ssn.transactional:
sst.write_cmd(TxSelect())
self._attachments[ssn] = sst
self._sessions[sst.channel] = sst
for snd in ssn.senders:
self.link(snd, self._out, snd.target)
for rcv in ssn.receivers:
self.link(rcv, self._in, rcv.source)
if sst is not None and ssn.closing and not sst.detached:
sst.detached = True
sst.write_op(SessionDetach(name=ssn.name))
def get_sst(self, op):
return self._sessions[op.channel]
def do_session_detached(self, dtc):
sst = self._sessions.pop(dtc.channel)
ssn = sst.session
del self._attachments[ssn]
ssn.closed = True
def do_session_detach(self, dtc):
sst = self.get_sst(dtc)
sst.write_op(SessionDetached(name=dtc.name))
self.do_session_detached(dtc)
def link(self, lnk, dir, addr):
sst = self._attachments.get(lnk.session)
_lnk = self._attachments.get(lnk)
if _lnk is None and not lnk.closed:
_lnk = Attachment(lnk)
_lnk.closing = False
dir.init_link(sst, lnk, _lnk)
err = self.parse_address(_lnk, dir, addr) or self.validate_options(_lnk, dir)
if err:
lnk.error = err
lnk.closed = True
return
def linked():
lnk.linked = True
def resolved(type, subtype):
dir.do_link(sst, lnk, _lnk, type, subtype, linked)
self.resolve_declare(sst, _lnk, dir.DIR_NAME, resolved)
self._attachments[lnk] = _lnk
if lnk.linked and lnk.closing and not lnk.closed:
if not _lnk.closing:
def unlinked():
dir.del_link(sst, lnk, _lnk)
del self._attachments[lnk]
lnk.closed = True
if _lnk.options.get("delete") in ("always", dir.DIR_NAME):
dir.do_unlink(sst, lnk, _lnk)
requested_type = _lnk.options.get("node", {}).get("type")
self.delete(sst, _lnk.name, unlinked, node_type=requested_type)
else:
dir.do_unlink(sst, lnk, _lnk, unlinked)
_lnk.closing = True
elif not lnk.linked and lnk.closing and not lnk.closed:
if lnk.error: lnk.closed = True
def parse_address(self, lnk, dir, addr):
if addr is None:
return MalformedAddress(text="%s is None" % dir.ADDR_NAME)
else:
try:
lnk.name, lnk.subject, lnk.options = address.parse(addr)
# XXX: subject
if lnk.options is None:
lnk.options = {}
if isinstance(addr, MangledString):
lnk.options['create'] = "always"
if 'node' not in lnk.options:
lnk.options['node'] = {}
if 'x-declare' not in lnk.options['node']:
lnk.options['node']['x-declare'] = {}
xdeclare = lnk.options['node']['x-declare']
if 'auto-delete' not in xdeclare:
xdeclare['auto-delete'] = "True"
if 'exclusive' not in xdeclare:
xdeclare['exclusive'] = "True"
except address.LexError, e:
return MalformedAddress(text=str(e))
except address.ParseError, e:
return MalformedAddress(text=str(e))
def validate_options(self, lnk, dir):
ctx = Context()
err = dir.VALIDATOR.validate(lnk.options, ctx)
if err: return InvalidOption(text="error in options: %s" % err)
def resolve_declare(self, sst, lnk, dir, action):
declare = lnk.options.get("create") in ("always", dir)
assrt = lnk.options.get("assert") in ("always", dir)
requested_type = lnk.options.get("node", {}).get("type")
def do_resolved(type, subtype):
err = None
if type is None:
if declare:
err = self.declare(sst, lnk, action, True)
else:
err = NotFound(text="no such %s: %s" % (requested_type or "queue", lnk.name))
else:
if assrt:
expected = lnk.options.get("node", {}).get("type")
if expected and type != expected:
if declare:
err = self.declare(sst, lnk, action, True)
else:
err = AssertionFailed(text="expected %s, got %s" % (expected, type))
if "node" in lnk.options and "x-bindings" in lnk.options["node"]:
err = self.declare(sst, lnk, action, False)
if err is None:
action(type, subtype)
if err:
tgt = lnk.target
tgt.error = err
del self._attachments[tgt]
tgt.closed = True
return
self.resolve(sst, lnk.name, do_resolved, node_type=requested_type, force=declare)
def resolve(self, sst, name, action, force=False, node_type=None, delete=False):
if not force and not node_type:
try:
type, subtype = self.address_cache[name]
action(type, subtype)
return
except KeyError:
pass
args = { "topic":None, "queue":None }
def do_result(r, obj):
args[obj] = r
def do_action():
er = args["topic"]
qr = args["queue"]
if node_type == "topic" and er and not er.not_found:
type, subtype = "topic", er.type
elif node_type == "queue" and qr and qr.queue:
type, subtype = "queue", None
elif (er and er.not_found) and qr and not qr.queue:
type, subtype = None, None
elif (qr and qr.queue):
if node_type == "topic" and force:
type, subtype = None, None
else:
type, subtype = "queue", None
elif (er and not er.not_found):
if node_type == "queue" and force:
type, subtype = None, None
else:
type, subtype = "topic", er.type
elif er:
if er.not_found:
type, subtype = None, None
else:
type, subtype = "topic", er.type
else:
type, subtype = None, None
if type is not None:
self.address_cache[name] = (type, subtype)
action(type, subtype)
def do_result_and_action(r, obj):
do_result(r, obj)
do_action()
    if (node_type is None): # we don't know the type, let's check with the broker
sst.write_query(ExchangeQuery(name), do_result, "topic")
sst.write_query(QueueQuery(name), do_result_and_action, "queue")
    elif force and not delete: # we forcefully declare the known type, don't ask the broker
do_action()
elif node_type == "topic":
sst.write_query(ExchangeQuery(name), do_result_and_action, "topic")
else:
sst.write_query(QueueQuery(name), do_result_and_action, "queue")
def declare(self, sst, lnk, action, create_node):
name = lnk.name
props = lnk.options.get("node", {})
durable = props.get("durable", DURABLE_DEFAULT)
type = props.get("type", "queue")
declare = props.get("x-declare", {})
cmd = None
if type == "topic":
if create_node: cmd = ExchangeDeclare(exchange=name, durable=durable)
bindings = get_bindings(props, exchange=name)
elif type == "queue":
if create_node: cmd = QueueDeclare(queue=name, durable=durable)
bindings = get_bindings(props, queue=name)
else:
raise ValueError(type)
if cmd is not None:
sst.apply_overrides(cmd, declare)
if type == "topic":
if cmd.type is None:
cmd.type = "topic"
subtype = cmd.type
else:
subtype = None
cmds = [cmd]
else:
cmds = []
cmds.extend(bindings)
def declared():
if create_node:
self.address_cache[name] = (type, subtype)
action(type, subtype)
sst.write_cmds(cmds, declared)
def delete(self, sst, name, action, node_type=None):
def deleted():
del self.address_cache[name]
action()
def do_delete(type, subtype):
if type == "topic":
sst.write_cmd(ExchangeDelete(name), deleted)
elif type == "queue":
sst.write_cmd(QueueDelete(name), deleted)
elif type is None:
action()
else:
raise ValueError(type)
self.resolve(sst, name, do_delete, force=True, node_type=node_type, delete=True)
def process(self, ssn):
if ssn.closed or ssn.closing: return
sst = self._attachments[ssn]
while sst.outgoing_idx < len(ssn.outgoing):
msg = ssn.outgoing[sst.outgoing_idx]
snd = msg._sender
# XXX: should check for sender error here
_snd = self._attachments.get(snd)
if _snd and snd.linked:
self.send(snd, msg)
sst.outgoing_idx += 1
else:
break
for snd in ssn.senders:
      # XXX: should include snd.acked in this
if snd.synced >= snd.queued and sst.need_sync:
sst.write_cmd(ExecutionSync(), sync_noop)
for rcv in ssn.receivers:
self.process_receiver(rcv)
if ssn.acked:
messages = ssn.acked[sst.acked_idx:]
if messages:
ids = RangedSet()
disposed = [(DEFAULT_DISPOSITION, [])]
acked = []
for m in messages:
# XXX: we're ignoring acks that get lost when disconnected,
          # could we deal with this via some message-id based purge?
if m._transfer_id is None:
acked.append(m)
continue
ids.add(m._transfer_id)
if m._receiver._accept_mode is accept_mode.explicit:
disp = m._disposition or DEFAULT_DISPOSITION
last, msgs = disposed[-1]
if disp.type is last.type and disp.options == last.options:
msgs.append(m)
else:
disposed.append((disp, [m]))
else:
acked.append(m)
for range in ids:
sst.executed.add_range(range)
sst.write_op(SessionCompleted(sst.executed))
def ack_acker(msgs):
def ack_ack():
for m in msgs:
ssn.acked.remove(m)
sst.acked_idx -= 1
# XXX: should this check accept_mode too?
if not ssn.transactional:
sst.acked.remove(m)
return ack_ack
for disp, msgs in disposed:
if not msgs: continue
if disp.type is None:
op = MessageAccept
elif disp.type is RELEASED:
op = MessageRelease
elif disp.type is REJECTED:
op = MessageReject
sst.write_cmd(op(RangedSet(*[m._transfer_id for m in msgs]),
**disp.options),
ack_acker(msgs))
if log.isEnabledFor(DEBUG):
for m in msgs:
log.debug("SACK[%s]: %s, %s", ssn.log_id, m, m._disposition)
sst.acked.extend(messages)
sst.acked_idx += len(messages)
ack_acker(acked)()
if ssn.committing and not sst.committing:
def commit_ok():
del sst.acked[:]
ssn.committing = False
ssn.committed = True
ssn.aborting = False
ssn.aborted = False
sst.committing = False
sst.write_cmd(TxCommit(), commit_ok)
sst.committing = True
if ssn.aborting and not sst.aborting:
sst.aborting = True
def do_rb():
messages = sst.acked + ssn.unacked + ssn.incoming
ids = RangedSet(*[m._transfer_id for m in messages])
for range in ids:
sst.executed.add_range(range)
sst.write_op(SessionCompleted(sst.executed))
sst.write_cmd(MessageRelease(ids, True))
sst.write_cmd(TxRollback(), do_rb_ok)
def do_rb_ok():
del ssn.incoming[:]
del ssn.unacked[:]
del sst.acked[:]
for rcv in ssn.receivers:
rcv.impending = rcv.received
rcv.returned = rcv.received
# XXX: do we need to update granted here as well?
for rcv in ssn.receivers:
self.process_receiver(rcv)
ssn.aborting = False
ssn.aborted = True
ssn.committing = False
ssn.committed = False
sst.aborting = False
for rcv in ssn.receivers:
_rcv = self._attachments[rcv]
sst.write_cmd(MessageStop(_rcv.destination))
sst.write_cmd(ExecutionSync(), do_rb)
def grant(self, rcv):
sst = self._attachments[rcv.session]
_rcv = self._attachments.get(rcv)
if _rcv is None or not rcv.linked or _rcv.closing or _rcv.draining:
return
if rcv.granted is UNLIMITED:
if rcv.impending is UNLIMITED:
delta = 0
else:
delta = UNLIMITED
elif rcv.impending is UNLIMITED:
delta = -1
else:
delta = max(rcv.granted, rcv.received) - rcv.impending
if delta is UNLIMITED:
if not _rcv.bytes_open:
sst.write_cmd(MessageFlow(_rcv.destination, credit_unit.byte, UNLIMITED.value))
_rcv.bytes_open = True
sst.write_cmd(MessageFlow(_rcv.destination, credit_unit.message, UNLIMITED.value))
rcv.impending = UNLIMITED
elif delta > 0:
if not _rcv.bytes_open:
sst.write_cmd(MessageFlow(_rcv.destination, credit_unit.byte, UNLIMITED.value))
_rcv.bytes_open = True
sst.write_cmd(MessageFlow(_rcv.destination, credit_unit.message, delta))
rcv.impending += delta
elif delta < 0 and not rcv.draining:
_rcv.draining = True
def do_stop():
rcv.impending = rcv.received
_rcv.draining = False
_rcv.bytes_open = False
self.grant(rcv)
sst.write_cmd(MessageStop(_rcv.destination), do_stop)
if rcv.draining:
_rcv.draining = True
def do_flush():
rcv.impending = rcv.received
rcv.granted = rcv.impending
_rcv.draining = False
_rcv.bytes_open = False
rcv.draining = False
sst.write_cmd(MessageFlush(_rcv.destination), do_flush)
def process_receiver(self, rcv):
if rcv.closed: return
self.grant(rcv)
def send(self, snd, msg):
sst = self._attachments[snd.session]
_snd = self._attachments[snd]
if msg.subject is None or _snd._exchange == "":
rk = _snd._routing_key
else:
rk = msg.subject
if msg.subject is None:
subject = _snd.subject
else:
subject = msg.subject
# XXX: do we need to query to figure out how to create the reply-to interoperably?
if msg.reply_to:
rt = addr2reply_to(msg.reply_to)
else:
rt = None
content_encoding = msg.properties.get("x-amqp-0-10.content-encoding")
dp = DeliveryProperties(routing_key=rk)
mp = MessageProperties(message_id=msg.id,
user_id=msg.user_id,
reply_to=rt,
correlation_id=msg.correlation_id,
app_id = msg.properties.get("x-amqp-0-10.app-id"),
content_type=msg.content_type,
content_encoding=content_encoding,
application_headers=msg.properties)
if subject is not None:
if mp.application_headers is None:
mp.application_headers = {}
mp.application_headers[SUBJECT] = subject
if msg.durable is not None:
if msg.durable:
dp.delivery_mode = delivery_mode.persistent
else:
dp.delivery_mode = delivery_mode.non_persistent
if msg.priority is not None:
dp.priority = msg.priority
if msg.ttl is not None:
dp.ttl = long(msg.ttl*1000)
enc, dec = get_codec(msg.content_type)
try:
body = enc(msg.content)
except AttributeError, e:
# convert to non-blocking EncodeError
raise EncodeError(e)
# XXX: this is not safe for out of order, can this be triggered by pre_ack?
def msg_acked():
# XXX: should we log the ack somehow too?
snd.acked += 1
m = snd.session.outgoing.pop(0)
sst.outgoing_idx -= 1
log.debug("RACK[%s]: %s", sst.session.log_id, msg)
assert msg == m
xfr = MessageTransfer(destination=_snd._exchange, headers=(dp, mp),
payload=body)
if _snd.pre_ack:
sst.write_cmd(xfr)
else:
sst.write_cmd(xfr, msg_acked, sync=msg._sync)
log.debug("SENT[%s]: %s", sst.session.log_id, msg)
if _snd.pre_ack:
msg_acked()
def do_message_transfer(self, xfr):
sst = self.get_sst(xfr)
ssn = sst.session
msg = self._decode(xfr)
rcv = sst.destinations[xfr.destination].target
msg._receiver = rcv
if rcv.closing or rcv.closed: # release message to a closing receiver
ids = RangedSet(*[msg._transfer_id])
log.debug("releasing back %s message: %s, as receiver is closing", ids, msg)
sst.write_cmd(MessageRelease(ids, True))
return
if rcv.impending is not UNLIMITED:
assert rcv.received < rcv.impending, "%s, %s" % (rcv.received, rcv.impending)
rcv.received += 1
log.debug("RCVD[%s]: %s", ssn.log_id, msg)
ssn._notify_message_received(msg)
def _decode(self, xfr):
dp = EMPTY_DP
mp = EMPTY_MP
for h in xfr.headers:
if isinstance(h, DeliveryProperties):
dp = h
elif isinstance(h, MessageProperties):
mp = h
ap = mp.application_headers
enc, dec = get_codec(mp.content_type)
try:
content = dec(xfr.payload)
except Exception, e:
raise DecodeError(e)
msg = Message(content)
msg.id = mp.message_id
if ap is not None:
msg.subject = ap.get(SUBJECT)
msg.user_id = mp.user_id
if mp.reply_to is not None:
msg.reply_to = reply_to2addr(mp.reply_to)
msg.correlation_id = mp.correlation_id
if dp.delivery_mode is not None:
msg.durable = dp.delivery_mode == delivery_mode.persistent
msg.priority = dp.priority
if dp.ttl is not None:
msg.ttl = dp.ttl/1000.0
msg.redelivered = dp.redelivered
msg.properties = mp.application_headers or {}
if mp.app_id is not None:
msg.properties["x-amqp-0-10.app-id"] = mp.app_id
if mp.content_encoding is not None:
msg.properties["x-amqp-0-10.content-encoding"] = mp.content_encoding
if dp.routing_key is not None:
msg.properties["x-amqp-0-10.routing-key"] = dp.routing_key
if dp.timestamp is not None:
msg.properties["x-amqp-0-10.timestamp"] = dp.timestamp
msg.content_type = mp.content_type
msg._transfer_id = xfr.id
return msg
| {
"content_hash": "d7e077f9862b638d6af71301d68d6b75",
"timestamp": "",
"source": "github",
"line_count": 1432,
"max_line_length": 122,
"avg_line_length": 31.225558659217878,
"alnum_prop": 0.6180476350218047,
"repo_name": "mbroadst/debian-qpid-python",
"id": "146b8188ab8bebed2d3af26f6bfa6c3667af91cd",
"size": "45505",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "qpid/messaging/driver.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "990553"
}
],
"symlink_target": ""
} |
from django.db import models
from pdc.apps.common.validators import validate_sigkey
def get_cached_id(cls, cache_field, value, create=False):
"""cached `value` to database `id`"""
if not value:
return None
result = cls.CACHE.get(value, None)
if result is None:
if create:
obj, _ = cls.objects.get_or_create(**{cache_field: value})
else:
obj = cls.objects.get(**{cache_field: value})
cls.CACHE[value] = obj.id
result = obj.id
return result
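# Illustrative sketch (not part of the original module): get_cached_id memoizes
# natural-key -> primary-key lookups on the model class. Using the SigKey model
# defined below and a made-up key:
#
#   SigKey.get_cached_id("0123456789abcdef", create=True)
#   # first call hits the database (get_or_create) and caches the id in
#   # SigKey.CACHE; repeated calls with the same key_id return the cached id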
class Arch(models.Model):
name = models.CharField(max_length=50, unique=True)
class Meta:
pass
def __unicode__(self):
return u"%s" % (self.name, )
def export(self):
# FIXME: export has been deprecated, use serializer instead.
return {"name": self.name}
class SigKey(models.Model):
key_id = models.CharField(max_length=20, unique=True, validators=[validate_sigkey])
name = models.CharField(max_length=50, blank=True, null=True, unique=True)
description = models.CharField(max_length=100, blank=True)
def __unicode__(self):
return u"%s" % self.key_id
CACHE = {}
@classmethod
def get_cached_id(cls, value, create=False):
"""cached `key_id` to `id`"""
return get_cached_id(cls, "key_id", value, create=create)
def export(self):
return {
"key_id": self.key_id,
"name": self.name,
"description": self.description,
}
class Label(models.Model):
"""
Record label/tag with its name and description.
"""
name = models.CharField(max_length=100, unique=True)
description = models.CharField(max_length=500)
def __unicode__(self):
return u'%s' % self.name
    # FIXME: Compatible with ChangeSetMixin which still uses export function to record changeset
def export(self, fields=None):
_fields = ['name', 'description'] if fields is None else fields
result = dict()
if 'name' in _fields:
result['name'] = self.name
if 'description' in _fields:
result['description'] = self.description
return result
| {
"content_hash": "7a9a8a1143c98ac59401c44631a7fbdf",
"timestamp": "",
"source": "github",
"line_count": 76,
"max_line_length": 100,
"avg_line_length": 29.32894736842105,
"alnum_prop": 0.5930910722296994,
"repo_name": "tzhaoredhat/automation",
"id": "0ad4dfbd1124d7f3762344678f26006e4731ad7c",
"size": "2364",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "pdc/apps/common/models.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "1767"
},
{
"name": "HTML",
"bytes": "49433"
},
{
"name": "JavaScript",
"bytes": "6629"
},
{
"name": "Makefile",
"bytes": "2828"
},
{
"name": "Python",
"bytes": "1190922"
},
{
"name": "Shell",
"bytes": "94"
}
],
"symlink_target": ""
} |
import logging
from datetime import datetime, timedelta
from datetime import time as dttime
import time
import math
# Our imports
from emission.core.get_database import get_section_db, get_uuid_db
import emission.analysis.result.carbon as carbon
import emission.core.common as common
import emission.net.api.stats as stats
from emission.core.wrapper.user import User
from emission.analysis.result import userclient
# sb375 is a weekly goal - we convert it to daily by dividing by 7
sb375DailyGoal = 40.142892/7
# BEGIN: Code to get and set client specific fields in the profile (currentScore and previousScore)
def getStoredScore(user):
profile = user.getProfile()
if profile is None:
return (0, 0)
currScore = profile.get('currentScore', 0)
prevScore = profile.get('previousScore', 0)
return (prevScore, currScore)
def setScores(user, prevScore, newScore):
logging.debug("Changing score for user %s from %s to %s" % (user.uuid, prevScore, newScore))
user.setClientSpecificProfileFields({'previousScore': prevScore,
'currentScore': newScore})
# TODO: Add a server side stat here so that we can know for sure how the score varies over time
# END: Code to get and set client specific fields in the profile (currentScore and previousScore)
# Returns the components on which the score is based. These will be combined in
# getScore later, but it is useful to see them to decide how to set up the
# weights
def getScoreComponents(user_uuid, start, end):
# The score is based on the following components:
    # - Percentage of trips classified. We are not auto-classifying high
    #   confidence trips, so don't need to handle those here
    # - How far the user's footprint is above their optimal footprint
    # - How far the user's footprint is below an "all drive" footprint
    # - How far the user's footprint is below the SB375 daily goal
user = User.fromUUID(user_uuid)
pctClassified = common.getClassifiedRatio(user_uuid, start, end)
(myModeShareCount, avgModeShareCount,
myModeShareDistance, avgModeShareDistance,
myModeCarbonFootprint, avgModeCarbonFootprint,
myModeCarbonFootprintNoLongMotorized, avgModeCarbonFootprintNoLongMotorized,
myOptimalCarbonFootprint, avgOptimalCarbonFootprint,
myOptimalCarbonFootprintNoLongMotorized, avgOptimalCarbonFootprintNoLongMotorized) = carbon.getFootprintCompareForRange(user.uuid, start, end)
carbon.delLongMotorizedModes(myModeShareDistance)
myAllDrive = carbon.getAllDrive(user.uuid, myModeShareDistance)
myCarbonFootprintSum = sum(myModeCarbonFootprintNoLongMotorized.values())
myOptimalFootprintSum = sum(myOptimalCarbonFootprintNoLongMotorized.values())
logging.debug("myCarbonFootprintSum = %s, myOptimalFootprintSum = %s, myAllDrive = %s" %
(myCarbonFootprintSum, myOptimalFootprintSum, myAllDrive))
handleZero = lambda x, y: 0 if y == 0 else float(x)/y
components = [pctClassified,
handleZero(myCarbonFootprintSum - myOptimalFootprintSum, myOptimalFootprintSum),
handleZero(myAllDrive - myCarbonFootprintSum, myAllDrive),
handleZero(sb375DailyGoal - myCarbonFootprintSum, sb375DailyGoal)]
return components
def calcScore(componentArr):
[pctClassified, mineMinusOptimal, allDriveMinusMine, sb375DailyGoal] = componentArr
  # We want the ratio between the four components to be 5 : 3 : 2 : 1
  # (pctClassified : allDriveMinusMine : mineMinusOptimal : sb375DailyGoal)
# Let's just convert everything to percentages to keep the ratios consistent
# Also, we subtract the mineMinusOptimal term, since being way above optimal
# should lower your score
return 50 * pctClassified + 30 * allDriveMinusMine - 20 * mineMinusOptimal + 10 * sb375DailyGoal
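# Illustrative worked example (not part of the original module), with made-up
# component values in calcScore's expected order
# [pctClassified, mineMinusOptimal, allDriveMinusMine, sb375DailyGoal]:
#
#   calcScore([0.8, 0.1, 0.5, 0.2])
#   # = 50*0.8 + 30*0.5 - 20*0.1 + 10*0.2 = 55.0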
def getScore(user_uuid, start, end):
components = getScoreComponents(user_uuid, start, end)
[pctClassified, mineMinusOptimal, allDriveMinusMine, sb375DailyGoal] = components
stats.storeResultEntry(user_uuid, stats.STAT_PCT_CLASSIFIED, time.time(), pctClassified)
stats.storeResultEntry(user_uuid, stats.STAT_MINE_MINUS_OPTIMAL, time.time(), mineMinusOptimal)
stats.storeResultEntry(user_uuid, stats.STAT_ALL_DRIVE_MINUS_MINE, time.time(), allDriveMinusMine)
stats.storeResultEntry(user_uuid, stats.STAT_SB375_DAILY_GOAL, time.time(), sb375DailyGoal)
return calcScore(components)
# Ok so this is a bit tricky to get right.
# We want this to be in increments of a day, so that the SB375 target
# calculation makes sense.
# The high level plan is to run it around midnight everyday and update the
# score based on the day that just passed. But what exactly does "around
# midnight" mean?
# It can't be just before midnight, because then we will skip trips around midnight
# I guess we start right after midnight
# Couple of other challenges:
# - What to do about trips that span days?
# - What to do about people in other time zones who have a different midnight?
# In order to handle both of these, we should really have task scheduled from
# the app instead of a cronjob, and have it track the last time it was run. But
# let's do the cheap solution for now so that we know whether it works at all.
def updateScore(user_uuid):
today = datetime.now().date()
updateScoreForDay(user_uuid, today)
def updateScoreForDay(user_uuid, today):
yesterday = today - timedelta(days = 1)
dayBeforeYesterday = today - timedelta(days = 2)
dayBeforeYesterdayStart = datetime.combine(dayBeforeYesterday, dttime.min)
yesterdayStart = datetime.combine(yesterday, dttime.min)
todayStart = datetime.combine(today, dttime.min)
user = User.fromUUID(user_uuid)
(discardedScore, prevScore) = getStoredScore(user)
# Using score from dayBeforeYesterday instead of yesterday because there is
# currently a significant lag in the time for e-mission to prompt for
# entries, so people might not confirm yesterday's trips until sometime
# today, which means that it won't be counted in their score
newScore = prevScore + getScore(user_uuid, dayBeforeYesterdayStart, yesterdayStart)
if newScore < 0:
newScore = 0
stats.storeResultEntry(user_uuid, stats.STAT_GAME_SCORE, time.time(), newScore)
setScores(user, prevScore, newScore)
def getLevel(score):
if score < 100:
level = 1
sublevel = math.floor(score / 20) + 1
elif score < 1000:
level = 2
sublevel = math.floor(score / 200) + 1
elif score < 10000:
level = 3
sublevel = math.floor(score / 2000) + 1
else:
# Off the charts, stay at the top image
level = 3
sublevel = 5
return (level, sublevel)
def getFileName(level, sublevel):
return "level_%s_%s.png" % (int(level), int(sublevel))
def getResult(user_uuid):
# This is in here, as opposed to the top level as recommended by the PEP
# because then we don't have to worry about loading bottle in the unit tests
from bottle import template
(prevScore, currScore) = getStoredScore(User.fromUUID(user_uuid))
(level, sublevel) = getLevel(currScore)
otherCurrScoreList = []
for user_uuid_dict in get_uuid_db().find({}, {'uuid': 1, '_id': 0}):
(currPrevScore, currCurrScore) = getStoredScore(User.fromUUID(user_uuid_dict['uuid']))
otherCurrScoreList.append(currCurrScore)
otherCurrScoreList.sort()
renderedTemplate = template("clients/leaderboard/result_template.html",
level_picture_filename = getFileName(level, sublevel),
prevScore = prevScore,
currScore = currScore,
otherCurrScoreList = otherCurrScoreList)
return renderedTemplate
# These are copy/pasted from our first client, the carshare study
def getSectionFilter(uuid):
# We are not planning to do any filtering for this study. Bring on the worst!
return []
def clientSpecificSetters(uuid, sectionId, predictedModeMap):
return None
def getClientConfirmedModeField():
return None
def runBackgroundTasks(uuid):
today = datetime.now().date()
runBackgroundTasksForDay(uuid, today)
def runBackgroundTasksForDay(uuid, today):
updateScoreForDay(uuid, today)
| {
"content_hash": "e13d4120ee2408e118e1aeb069b4766d",
"timestamp": "",
"source": "github",
"line_count": 176,
"max_line_length": 145,
"avg_line_length": 44.92613636363637,
"alnum_prop": 0.7395978247122803,
"repo_name": "joshzarrabi/e-mission-server",
"id": "c07edc23d875a98259c90cc85cfe1a16d4db9b99",
"size": "7926",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "emission/clients/leaderboard/leaderboard.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "445"
},
{
"name": "CSS",
"bytes": "12835"
},
{
"name": "HTML",
"bytes": "50997"
},
{
"name": "JavaScript",
"bytes": "3507788"
},
{
"name": "Python",
"bytes": "1190346"
},
{
"name": "Shell",
"bytes": "1191"
},
{
"name": "Smarty",
"bytes": "3456"
}
],
"symlink_target": ""
} |
"""
Created on Mon Feb 14, 2011
@author:Isabel Restrepo
A script to run (fast) k-means on J sets of random subsamples
"""
import os;
import dbrec3d_batch
import multiprocessing
import Queue
import time
import random
import optparse
import sys
from numpy import log, ceil
from xml.etree.ElementTree import ElementTree
import glob
#time.sleep(30);
class dbvalue:
def __init__(self, index, type):
self.id = index # unsigned integer
self.type = type # string
class bof_job():
def __init__(self, cm_i_file, CM_set, max_it, fm_i_file):
self.cm_i_file = cm_i_file;
self.CM_set = CM_set;
self.max_it = max_it;
self.fm_i_file = fm_i_file;
def execute_bof_jobs(jobs, num_procs=4):
work_queue=multiprocessing.Queue();
result_queue=multiprocessing.Queue();
for job in jobs:
work_queue.put(job)
for i in range(num_procs):
worker= bof_worker(work_queue,result_queue)
worker.start();
print("worker with name ",worker.name," started!")
# collect the results off the queue
#important: having a result queue makes the execute_jobs wait for all jobs in the queue before exiting
#results = []
# while len(results) < len(jobs):
# result = result_queue.get()
# results.append(result)
#
# return results
class bof_worker(multiprocessing.Process):
def __init__(self,work_queue,result_queue):
# base class initialization
multiprocessing.Process.__init__(self)
# job management stuff
self.work_queue = work_queue
self.result_queue = result_queue
self.kill_received = False
def run(self):
while not self.kill_received:
# get a task
try:
job = self.work_queue.get_nowait()
except Queue.Empty:
break
start_time = time.time();
dbrec3d_batch.set_stdout('logs/log_' + str(os.getpid())+ ".txt");
dbrec3d_batch.init_process("bofKMeansOnVectorProcess");
dbrec3d_batch.set_input_string(0, job.cm_i_file);
dbrec3d_batch.set_input_from_db(1, job.CM_set);
dbrec3d_batch.set_input_unsigned(2, job.max_it);
dbrec3d_batch.set_input_string(3, job.fm_i_file);
dbrec3d_batch.run_process();
dbrec3d_batch.clear();
dbrec3d_batch.reset_stdout();
print ("Runing time for worker:", self.name)
print(time.time() - start_time);
#output exit code in this case
#important: having a result queue makes the execute_jobs wait for all jobs in the queue before exiting
#self.result_queue.put(0);
#*******************The Main Algorithm ************************#
if __name__=="__main__":
dbrec3d_batch.register_processes();
dbrec3d_batch.register_datatypes();
#Parse inputs
parser = optparse.OptionParser(description='bof Statistics Pass 0');
parser.add_option('--k_means_dir', action="store", dest="k_means_dir");
parser.add_option('--num_cores', action="store", dest="num_cores", type="int", default=4);
parser.add_option('--max_it', action="store", dest="max_it", type="int", default=100);
options, args = parser.parse_args()
    k_means_dir = options.k_means_dir; #path where all CM_i means are saved and where the output FM_i will be written to
num_cores = options.num_cores;
max_it = options.max_it;
if not os.path.isdir(k_means_dir +"/"):
print "Invalid init_k_means Dir"
sys.exit(-1);
CM_path = k_means_dir + "/CM";
if not os.path.isdir(CM_path +"/"):
print "Invalid CM Dir"
sys.exit(-1);
CM_files = glob.glob1(CM_path, 'CM*');
FM_path = k_means_dir + "/FM";
if not os.path.isdir(FM_path +"/"):
os.mkdir(FM_path +"/");
start_time = time.time();
#Combine all CM_i means into one set CM to be passed for k-means
mean_file_sfx = CM_path + "/CM_" ;
dbrec3d_batch.init_process("bofCombineMeansProcess");
dbrec3d_batch.set_input_string(0, mean_file_sfx);
dbrec3d_batch.run_process();
(id, type) = dbrec3d_batch.commit_output(0);
CM_set= dbvalue(id, type);
#Begin multiprocessing
job_list=[];
#Enqueue jobs
for CM_file in CM_files:
cm_file = CM_path + "/" + CM_file;
fm_file = FM_path + "/FM" + CM_file.strip('CM');
current_job = bof_job(cm_file, CM_set, max_it, fm_file);
job_list.append(current_job);
execute_bof_jobs(job_list, num_cores);
| {
"content_hash": "df8f6b7a3c13bf53e111029988b4bd39",
"timestamp": "",
"source": "github",
"line_count": 155,
"max_line_length": 119,
"avg_line_length": 29.98709677419355,
"alnum_prop": 0.5935886402753873,
"repo_name": "mirestrepo/voxels-at-lems",
"id": "5b82268404b68de2498f5346f9a3fd0e961323eb",
"size": "4690",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "dbrec3d/bof/pca/learn_codebook/k_means/k_means_on_CM_means.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "1426982"
},
{
"name": "Shell",
"bytes": "360033"
},
{
"name": "TeX",
"bytes": "568"
},
{
"name": "nesC",
"bytes": "374"
}
],
"symlink_target": ""
} |
try:
from setuptools import setup
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup
with open("README.pypi.rst") as readmeFile:
long_description = readmeFile.read()
install_requires = []
with open("requirements.txt") as requirementsFile:
for line in requirementsFile:
line = line.strip()
if len(line) == 0:
continue
if line[0] == '#':
continue
if line.find('-c constraints.txt') == -1:
pinnedVersion = line.split()[0]
install_requires.append(pinnedVersion)
dependency_links = []
try:
with open("constraints.txt") as constraintsFile:
for line in constraintsFile:
line = line.strip()
if len(line) == 0:
continue
if line[0] == '#':
continue
dependency_links.append(line)
except EnvironmentError:
print('No constraints file found, proceeding without '
'creating dependency links.')
setup(
name="celldb",
description="Functional genomics database python client.",
packages=["celldb.client"],
namespace_packages=["celldb"],
zip_safe=False,
url="https://github.com/david4096/celldb",
    version="2.5",
entry_points={
'console_scripts': []
},
long_description=long_description,
install_requires=install_requires,
dependency_links=dependency_links,
license='Apache License 2.0',
include_package_data=True,
author="David Steinberg",
author_email="[email protected]",
classifiers=[
'Intended Audience :: Science/Research',
'License :: OSI Approved :: Apache Software License',
'Natural Language :: English',
'Programming Language :: Python :: 2.7',
'Topic :: Scientific/Engineering :: Bio-Informatics',
],
keywords=['genomics'],
# Use setuptools_scm to set the version number automatically from Git
# setup_requires=['setuptools_scm'],
)
| {
"content_hash": "efe23430320894ff75ad1736eb67b936",
"timestamp": "",
"source": "github",
"line_count": 65,
"max_line_length": 73,
"avg_line_length": 31.06153846153846,
"alnum_prop": 0.6235760277365032,
"repo_name": "david4096/celldb",
"id": "77588e01bd7bb32878d4e3ae0794eb23bb041d9a",
"size": "2176",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "13629046"
},
{
"name": "Python",
"bytes": "27287"
}
],
"symlink_target": ""
} |
import logging
import tempfile
from telemetry import benchmark
from telemetry.core import bitmap
from telemetry.core import exceptions
from telemetry.core import util
from telemetry.core import video
from telemetry.core.platform import tracing_category_filter
from telemetry.timeline import model
from telemetry.unittest import tab_test_case
def _IsDocumentVisible(tab):
return not tab.EvaluateJavaScript('document.hidden || document.webkitHidden')
class FakePlatformBackend(object):
def __init__(self):
self.platform = FakePlatform()
def DidStartBrowser(self, _, _2):
pass
def WillCloseBrowser(self, _, _2):
pass
class FakePlatform(object):
def __init__(self):
self._is_video_capture_running = False
#pylint: disable=W0613
def StartVideoCapture(self, min_bitrate_mbps):
self._is_video_capture_running = True
def StopVideoCapture(self):
self._is_video_capture_running = False
return video.Video(tempfile.NamedTemporaryFile())
@property
def is_video_capture_running(self):
return self._is_video_capture_running
class TabTest(tab_test_case.TabTestCase):
def testNavigateAndWaitForCompleteState(self):
self._tab.Navigate(self.UrlOfUnittestFile('blank.html'))
self._tab.WaitForDocumentReadyStateToBeComplete()
def testNavigateAndWaitForInteractiveState(self):
self._tab.Navigate(self.UrlOfUnittestFile('blank.html'))
self._tab.WaitForDocumentReadyStateToBeInteractiveOrBetter()
def testTabBrowserIsRightBrowser(self):
self.assertEquals(self._tab.browser, self._browser)
def testRendererCrash(self):
self.assertRaises(exceptions.TabCrashException,
lambda: self._tab.Navigate('chrome://crash',
timeout=5))
@benchmark.Enabled('has tabs')
def testActivateTab(self):
util.WaitFor(lambda: _IsDocumentVisible(self._tab), timeout=5)
new_tab = self._browser.tabs.New()
new_tab.Navigate('about:blank')
util.WaitFor(lambda: _IsDocumentVisible(new_tab), timeout=5)
self.assertFalse(_IsDocumentVisible(self._tab))
self._tab.Activate()
util.WaitFor(lambda: _IsDocumentVisible(self._tab), timeout=5)
self.assertFalse(_IsDocumentVisible(new_tab))
def testTabUrl(self):
self.assertEquals(self._tab.url, 'about:blank')
url = self.UrlOfUnittestFile('blank.html')
self._tab.Navigate(url)
self.assertEquals(self._tab.url, url)
def testIsTimelineRecordingRunningTab(self):
self.assertFalse(self._tab.is_timeline_recording_running)
self._tab.StartTimelineRecording()
self.assertTrue(self._tab.is_timeline_recording_running)
self._tab.StopTimelineRecording()
self.assertFalse(self._tab.is_timeline_recording_running)
#pylint: disable=W0212
def testIsVideoCaptureRunning(self):
original_platform_backend = self._tab.browser._platform_backend
try:
self._tab.browser._platform_backend = FakePlatformBackend()
self.assertFalse(self._tab.is_video_capture_running)
self._tab.StartVideoCapture(min_bitrate_mbps=2)
self.assertTrue(self._tab.is_video_capture_running)
self.assertIsNotNone(self._tab.StopVideoCapture())
self.assertFalse(self._tab.is_video_capture_running)
finally:
self._tab.browser._platform_backend = original_platform_backend
def testHighlight(self):
self.assertEquals(self._tab.url, 'about:blank')
self._browser.StartTracing()
self._tab.Highlight(bitmap.WEB_PAGE_TEST_ORANGE)
self._tab.ClearHighlight(bitmap.WEB_PAGE_TEST_ORANGE)
trace_data = self._browser.StopTracing()
timeline_model = model.TimelineModel(trace_data)
renderer_thread = timeline_model.GetRendererThreadFromTabId(
self._tab.id)
found_video_start_event = False
for event in renderer_thread.async_slices:
if event.name == '__ClearHighlight.video_capture_start':
found_video_start_event = True
break
self.assertTrue(found_video_start_event)
@benchmark.Enabled('has tabs')
def testGetRendererThreadFromTabId(self):
self.assertEquals(self._tab.url, 'about:blank')
# Create 3 tabs. The third tab is closed before we call StartTracing.
first_tab = self._tab
second_tab = self._browser.tabs.New()
second_tab.Navigate('about:blank')
second_tab.WaitForDocumentReadyStateToBeInteractiveOrBetter()
third_tab = self._browser.tabs.New()
third_tab.Navigate('about:blank')
third_tab.WaitForDocumentReadyStateToBeInteractiveOrBetter()
third_tab.Close()
self._browser.StartTracing(
tracing_category_filter.CreateNoOverheadFilter())
first_tab.ExecuteJavaScript('console.time("first-tab-marker");')
first_tab.ExecuteJavaScript('console.timeEnd("first-tab-marker");')
second_tab.ExecuteJavaScript('console.time("second-tab-marker");')
second_tab.ExecuteJavaScript('console.timeEnd("second-tab-marker");')
trace_data = self._browser.StopTracing()
timeline_model = model.TimelineModel(trace_data)
# Assert that the renderer_thread of the first tab contains
# 'first-tab-marker'.
renderer_thread = timeline_model.GetRendererThreadFromTabId(
first_tab.id)
first_tab_markers = [
renderer_thread.IterAllSlicesOfName('first-tab-marker')]
self.assertEquals(1, len(first_tab_markers))
# Close second tab and assert that the renderer_thread of the second tab
# contains 'second-tab-marker'.
second_tab.Close()
renderer_thread = timeline_model.GetRendererThreadFromTabId(
second_tab.id)
second_tab_markers = [
renderer_thread.IterAllSlicesOfName('second-tab-marker')]
self.assertEquals(1, len(second_tab_markers))
# Third tab wasn't available when we start tracing, so there is no
    # renderer_thread corresponding to it in the trace.
self.assertIs(None, timeline_model.GetRendererThreadFromTabId(third_tab.id))
class GpuTabTest(tab_test_case.TabTestCase):
@classmethod
def CustomizeBrowserOptions(cls, options):
options.AppendExtraBrowserArgs('--enable-gpu-benchmarking')
# Test flaky on mac: http://crbug.com/358664
@benchmark.Disabled('android', 'mac')
def testScreenshot(self):
if not self._tab.screenshot_supported:
logging.warning('Browser does not support screenshots, skipping test.')
return
self.Navigate('green_rect.html')
pixel_ratio = self._tab.EvaluateJavaScript('window.devicePixelRatio || 1')
screenshot = self._tab.Screenshot(5)
assert screenshot
screenshot.GetPixelColor(0 * pixel_ratio, 0 * pixel_ratio).AssertIsRGB(
0, 255, 0, tolerance=2)
screenshot.GetPixelColor(31 * pixel_ratio, 31 * pixel_ratio).AssertIsRGB(
0, 255, 0, tolerance=2)
screenshot.GetPixelColor(32 * pixel_ratio, 32 * pixel_ratio).AssertIsRGB(
255, 255, 255, tolerance=2)
| {
"content_hash": "bbbf188655a068dba33a059e6872e464",
"timestamp": "",
"source": "github",
"line_count": 182,
"max_line_length": 80,
"avg_line_length": 37.48351648351648,
"alnum_prop": 0.7229551451187335,
"repo_name": "sencha/chromium-spacewalk",
"id": "2f5cb694fd2dce479096443caac086777ffffc39",
"size": "6985",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "tools/telemetry/telemetry/core/tab_unittest.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
} |
"""
WSGI config for happening project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
from dj_static import Cling, MediaCling
from os.path import join, dirname, abspath
from dotenv import load_dotenv
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "happening.settings")
dotenv_path = join(dirname(abspath(__file__)), '../.env')
if os.path.isfile(dotenv_path):
load_dotenv(dotenv_path)
application = Cling(MediaCling(get_wsgi_application()))
| {
"content_hash": "ee79de243abbe992b73b6bda48c30a81",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 78,
"avg_line_length": 29.40909090909091,
"alnum_prop": 0.7604327666151468,
"repo_name": "jscott1989/happening",
"id": "edee64e3e3d679ceb53e0116afcca90fc00bf7fa",
"size": "647",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/happening/wsgi.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "48575"
},
{
"name": "CoffeeScript",
"bytes": "1402"
},
{
"name": "HTML",
"bytes": "219438"
},
{
"name": "JavaScript",
"bytes": "32186"
},
{
"name": "Python",
"bytes": "421487"
},
{
"name": "Shell",
"bytes": "2955"
}
],
"symlink_target": ""
} |
from django.db import models
class User(models.Model):
    username = models.TextField()
| {
"content_hash": "a69534d1b44a08be5f9474c68372f73b",
"timestamp": "",
"source": "github",
"line_count": 4,
"max_line_length": 28,
"avg_line_length": 20.5,
"alnum_prop": 0.7560975609756098,
"repo_name": "crystal-gen/django",
"id": "bbed3d864611591095973ec2accc448332db06ab",
"size": "170",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/test/django-test/out/models.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CoffeeScript",
"bytes": "37"
},
{
"name": "Handlebars",
"bytes": "265"
},
{
"name": "Python",
"bytes": "170"
}
],
"symlink_target": ""
} |
from .server import *
from .client import *
| {
"content_hash": "e790729cfb89948668ff2e5853b94fe7",
"timestamp": "",
"source": "github",
"line_count": 2,
"max_line_length": 21,
"avg_line_length": 22,
"alnum_prop": 0.7272727272727273,
"repo_name": "maxpumperla/elephas",
"id": "ad9c21ab41ddcb20ad648ec2cae3c27ea5d8651a",
"size": "44",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "elephas/parameter/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Dockerfile",
"bytes": "1781"
},
{
"name": "Python",
"bytes": "102076"
},
{
"name": "Shell",
"bytes": "232"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('game', '0006_game'),
]
operations = [
migrations.AddField(
model_name='game',
name='desc_en',
field=models.TextField(default='Something about the game'),
preserve_default=False,
),
migrations.AddField(
model_name='game',
name='desc_ru',
field=models.TextField(default=u'\u0427\u0442\u043e-\u043d\u0438\u0431\u0443\u0434\u044c \u043e \u0441\u043e\u0440\u0435\u0432\u043d\u043e\u0432\u0430\u043d\u0438\u0438'),
preserve_default=False,
),
migrations.AddField(
model_name='game',
name='is_school',
field=models.BooleanField(default=False),
preserve_default=True,
),
]
| {
"content_hash": "e1c10f678d6e2310b5a10c838535a644",
"timestamp": "",
"source": "github",
"line_count": 31,
"max_line_length": 183,
"avg_line_length": 29.806451612903224,
"alnum_prop": 0.5811688311688312,
"repo_name": "stefantsov/blackbox3",
"id": "42b0accb3a9a7a512bc23bc726044647caa333a6",
"size": "948",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "game/migrations/0007_auto_20150908_1346.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "2241"
},
{
"name": "HTML",
"bytes": "39706"
},
{
"name": "JavaScript",
"bytes": "133810"
},
{
"name": "Python",
"bytes": "95948"
}
],
"symlink_target": ""
} |
from __future__ import division
import glob
import logging
import os
import sys
import time
from optparse import OptionParser
SCRIPT_DIR = os.path.abspath(os.path.dirname(__file__))
ROOT_DIR = os.path.dirname(os.path.dirname(SCRIPT_DIR))
DEFAULT_LIBDIR_GLOB = os.path.join(ROOT_DIR, 'lib', 'py', 'build', 'lib.*')
class TestHandler(object):
def testVoid(self):
if options.verbose > 1:
logging.info('testVoid()')
def testString(self, str):
if options.verbose > 1:
logging.info('testString(%s)' % str)
return str
def testBool(self, boolean):
if options.verbose > 1:
logging.info('testBool(%s)' % str(boolean).lower())
return boolean
def testByte(self, byte):
if options.verbose > 1:
logging.info('testByte(%d)' % byte)
return byte
def testI16(self, i16):
if options.verbose > 1:
logging.info('testI16(%d)' % i16)
return i16
def testI32(self, i32):
if options.verbose > 1:
logging.info('testI32(%d)' % i32)
return i32
def testI64(self, i64):
if options.verbose > 1:
logging.info('testI64(%d)' % i64)
return i64
def testDouble(self, dub):
if options.verbose > 1:
logging.info('testDouble(%f)' % dub)
return dub
def testBinary(self, thing):
if options.verbose > 1:
logging.info('testBinary()') # TODO: hex output
return thing
def testStruct(self, thing):
if options.verbose > 1:
logging.info('testStruct({%s, %s, %s, %s})' % (thing.string_thing, thing.byte_thing, thing.i32_thing, thing.i64_thing))
return thing
def testException(self, arg):
# if options.verbose > 1:
logging.info('testException(%s)' % arg)
if arg == 'Xception':
raise Xception(errorCode=1001, message=arg)
elif arg == 'TException':
raise TException(message='This is a TException')
def testMultiException(self, arg0, arg1):
if options.verbose > 1:
logging.info('testMultiException(%s, %s)' % (arg0, arg1))
if arg0 == 'Xception':
raise Xception(errorCode=1001, message='This is an Xception')
elif arg0 == 'Xception2':
raise Xception2(
errorCode=2002,
struct_thing=Xtruct(string_thing='This is an Xception2'))
return Xtruct(string_thing=arg1)
def testOneway(self, seconds):
if options.verbose > 1:
logging.info('testOneway(%d) => sleeping...' % seconds)
time.sleep(seconds / 3) # be quick
if options.verbose > 1:
logging.info('done sleeping')
def testNest(self, thing):
if options.verbose > 1:
logging.info('testNest(%s)' % thing)
return thing
def testMap(self, thing):
if options.verbose > 1:
logging.info('testMap(%s)' % thing)
return thing
def testStringMap(self, thing):
if options.verbose > 1:
logging.info('testStringMap(%s)' % thing)
return thing
def testSet(self, thing):
if options.verbose > 1:
logging.info('testSet(%s)' % thing)
return thing
def testList(self, thing):
if options.verbose > 1:
logging.info('testList(%s)' % thing)
return thing
def testEnum(self, thing):
if options.verbose > 1:
logging.info('testEnum(%s)' % thing)
return thing
def testTypedef(self, thing):
if options.verbose > 1:
logging.info('testTypedef(%s)' % thing)
return thing
def testMapMap(self, thing):
if options.verbose > 1:
logging.info('testMapMap(%s)' % thing)
return {
-4: {
-4: -4,
-3: -3,
-2: -2,
-1: -1,
},
4: {
4: 4,
3: 3,
2: 2,
1: 1,
},
}
def testInsanity(self, argument):
if options.verbose > 1:
logging.info('testInsanity(%s)' % argument)
return {
1: {
2: argument,
3: argument,
},
2: {6: Insanity()},
}
def testMulti(self, arg0, arg1, arg2, arg3, arg4, arg5):
if options.verbose > 1:
logging.info('testMulti(%s)' % [arg0, arg1, arg2, arg3, arg4, arg5])
return Xtruct(string_thing='Hello2',
byte_thing=arg0, i32_thing=arg1, i64_thing=arg2)
def main(options):
    # set up the protocol factory from the --protocol option
prot_factories = {
'binary': TBinaryProtocol.TBinaryProtocolFactory,
'accel': TBinaryProtocol.TBinaryProtocolAcceleratedFactory,
'compact': TCompactProtocol.TCompactProtocolFactory,
'json': TJSONProtocol.TJSONProtocolFactory,
}
pfactory_cls = prot_factories.get(options.proto, None)
if pfactory_cls is None:
raise AssertionError('Unknown --protocol option: %s' % options.proto)
pfactory = pfactory_cls()
try:
pfactory.string_length_limit = options.string_limit
pfactory.container_length_limit = options.container_limit
except:
        # Ignore errors for those protocols that do not support length limit
pass
# get the server type (TSimpleServer, TNonblockingServer, etc...)
if len(args) > 1:
raise AssertionError('Only one server type may be specified, not multiple types.')
server_type = args[0]
# Set up the handler and processor objects
handler = TestHandler()
processor = ThriftTest.Processor(handler)
# Handle THttpServer as a special case
if server_type == 'THttpServer':
server = THttpServer.THttpServer(processor, ('', options.port), pfactory)
server.serve()
sys.exit(0)
# set up server transport and transport factory
abs_key_path = os.path.join(os.path.dirname(SCRIPT_DIR), 'keys', 'server.pem')
host = None
if options.ssl:
from thrift.transport import TSSLSocket
transport = TSSLSocket.TSSLServerSocket(host, options.port, certfile=abs_key_path)
else:
transport = TSocket.TServerSocket(host, options.port)
tfactory = TTransport.TBufferedTransportFactory()
    if options.trans == 'buffered':
        tfactory = TTransport.TBufferedTransportFactory()
    elif options.trans == 'framed':
        tfactory = TTransport.TFramedTransportFactory()
    elif not options.trans:
        # no --transport given; fall back to the buffered transport
        tfactory = TTransport.TBufferedTransportFactory()
    else:
        raise AssertionError('Unknown --transport option: %s' % options.trans)
# if --zlib, then wrap server transport, and use a different transport factory
if options.zlib:
transport = TZlibTransport.TZlibTransport(transport) # wrap with zlib
tfactory = TZlibTransport.TZlibTransportFactory()
# do server-specific setup here:
if server_type == "TNonblockingServer":
server = TNonblockingServer.TNonblockingServer(processor, transport, inputProtocolFactory=pfactory)
elif server_type == "TProcessPoolServer":
import signal
from thrift.server import TProcessPoolServer
server = TProcessPoolServer.TProcessPoolServer(processor, transport, tfactory, pfactory)
server.setNumWorkers(5)
def set_alarm():
def clean_shutdown(signum, frame):
for worker in server.workers:
if options.verbose > 0:
logging.info('Terminating worker: %s' % worker)
worker.terminate()
if options.verbose > 0:
logging.info('Requesting server to stop()')
try:
server.stop()
except:
pass
signal.signal(signal.SIGALRM, clean_shutdown)
signal.alarm(4)
set_alarm()
else:
# look up server class dynamically to instantiate server
ServerClass = getattr(TServer, server_type)
server = ServerClass(processor, transport, tfactory, pfactory)
# enter server main loop
server.serve()
if __name__ == '__main__':
parser = OptionParser()
parser.add_option('--libpydir', type='string', dest='libpydir',
help='include this directory to sys.path for locating library code')
parser.add_option('--genpydir', type='string', dest='genpydir',
default='gen-py',
help='include this directory to sys.path for locating generated code')
parser.add_option("--port", type="int", dest="port",
help="port number for server to listen on")
parser.add_option("--zlib", action="store_true", dest="zlib",
help="use zlib wrapper for compressed transport")
parser.add_option("--ssl", action="store_true", dest="ssl",
help="use SSL for encrypted transport")
parser.add_option('-v', '--verbose', action="store_const",
dest="verbose", const=2,
help="verbose output")
parser.add_option('-q', '--quiet', action="store_const",
dest="verbose", const=0,
help="minimal output")
parser.add_option('--protocol', dest="proto", type="string",
help="protocol to use, one of: accel, binary, compact, json")
parser.add_option('--transport', dest="trans", type="string",
help="transport to use, one of: buffered, framed")
parser.add_option('--container-limit', dest='container_limit', type='int', default=None)
parser.add_option('--string-limit', dest='string_limit', type='int', default=None)
parser.set_defaults(port=9090, verbose=1, proto='binary')
options, args = parser.parse_args()
# Print TServer log to stdout so that the test-runner can redirect it to log files
logging.basicConfig(level=options.verbose)
sys.path.insert(0, os.path.join(SCRIPT_DIR, options.genpydir))
if options.libpydir:
sys.path.insert(0, glob.glob(options.libpydir)[0])
else:
sys.path.insert(0, glob.glob(DEFAULT_LIBDIR_GLOB)[0])
from ThriftTest import ThriftTest
from ThriftTest.ttypes import Xtruct, Xception, Xception2, Insanity
from thrift.Thrift import TException
from thrift.transport import TTransport
from thrift.transport import TSocket
from thrift.transport import TZlibTransport
from thrift.protocol import TBinaryProtocol
from thrift.protocol import TCompactProtocol
from thrift.protocol import TJSONProtocol
from thrift.server import TServer, TNonblockingServer, THttpServer
sys.exit(main(options))
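# Illustrative invocation only (not part of the original script); the server
# class name is a positional argument and the option values below are assumed
# examples rather than project requirements:
#   python TestServer.py --port 9090 --protocol binary --transport buffered TSimpleServer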
| {
"content_hash": "2cf5649d66b3d6732ff2b76ec55ecf16",
"timestamp": "",
"source": "github",
"line_count": 298,
"max_line_length": 125,
"avg_line_length": 33.25167785234899,
"alnum_prop": 0.6562720758906045,
"repo_name": "reTXT/thrift",
"id": "f12a9fe76229b81b803275ce71e6bdf0e5e575af",
"size": "10717",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/py/TestServer.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "ASP",
"bytes": "890"
},
{
"name": "ActionScript",
"bytes": "75532"
},
{
"name": "Batchfile",
"bytes": "5757"
},
{
"name": "C",
"bytes": "680503"
},
{
"name": "C#",
"bytes": "405991"
},
{
"name": "C++",
"bytes": "3919518"
},
{
"name": "CMake",
"bytes": "98961"
},
{
"name": "CSS",
"bytes": "1070"
},
{
"name": "D",
"bytes": "645069"
},
{
"name": "Dart",
"bytes": "146402"
},
{
"name": "Emacs Lisp",
"bytes": "5361"
},
{
"name": "Erlang",
"bytes": "310672"
},
{
"name": "Go",
"bytes": "451086"
},
{
"name": "HTML",
"bytes": "23089"
},
{
"name": "Haskell",
"bytes": "122881"
},
{
"name": "Haxe",
"bytes": "304443"
},
{
"name": "Java",
"bytes": "964893"
},
{
"name": "JavaScript",
"bytes": "352684"
},
{
"name": "LLVM",
"bytes": "15884"
},
{
"name": "Lua",
"bytes": "48477"
},
{
"name": "Makefile",
"bytes": "15010"
},
{
"name": "OCaml",
"bytes": "39241"
},
{
"name": "Objective-C",
"bytes": "154184"
},
{
"name": "PHP",
"bytes": "314576"
},
{
"name": "Pascal",
"bytes": "387563"
},
{
"name": "Perl",
"bytes": "119756"
},
{
"name": "Python",
"bytes": "357482"
},
{
"name": "Ruby",
"bytes": "392021"
},
{
"name": "Shell",
"bytes": "28875"
},
{
"name": "Smalltalk",
"bytes": "22944"
},
{
"name": "Swift",
"bytes": "31618"
},
{
"name": "Thrift",
"bytes": "311211"
},
{
"name": "VimL",
"bytes": "2846"
},
{
"name": "Yacc",
"bytes": "26807"
}
],
"symlink_target": ""
} |
import json
from rest_framework import serializers
from rest_framework.exceptions import ValidationError
from common.serializers import AbstractFieldsMixin, ContactSerializer
from common.models import Contact, ContactType
from .models import (
CommunityHealthUnit,
CommunityHealthWorker,
CommunityHealthWorkerContact,
Status,
CommunityHealthUnitContact,
CHUService,
CHURating,
ChuUpdateBuffer
)
class ChuUpdateBufferSerializer(
AbstractFieldsMixin, serializers.ModelSerializer):
class Meta:
model = ChuUpdateBuffer
class CHUServiceSerializer(AbstractFieldsMixin, serializers.ModelSerializer):
class Meta(object):
model = CHUService
class CommunityHealthWorkerSerializer(
AbstractFieldsMixin, serializers.ModelSerializer):
name = serializers.CharField(read_only=True)
class Meta(object):
model = CommunityHealthWorker
read_only_fields = ('health_unit_approvals',)
class CommunityHealthWorkerPostSerializer(
AbstractFieldsMixin, serializers.ModelSerializer):
name = serializers.CharField(read_only=True)
class Meta(object):
model = CommunityHealthWorker
exclude = ('health_unit',)
class CommunityHealthUnitSerializer(
AbstractFieldsMixin, serializers.ModelSerializer):
status_name = serializers.ReadOnlyField(source="status.name")
health_unit_workers = serializers.ReadOnlyField(source='workers')
facility_name = serializers.ReadOnlyField(source='facility.name')
facility_ward = serializers.ReadOnlyField(source='facility.ward.name')
facility_subcounty = serializers.ReadOnlyField(
source='facility.ward.constituency.name')
facility_county = serializers.ReadOnlyField(
source='facility.ward.constituency.county.name')
ward_code = serializers.ReadOnlyField(source='facility.ward.code')
lat_long = serializers.ReadOnlyField(source='facility.lat_long')
contacts = serializers.ReadOnlyField()
pending_updates = serializers.ReadOnlyField()
latest_update = serializers.ReadOnlyField(source='latest_update.id')
avg_rating = serializers.ReadOnlyField(source='average_rating')
number_of_ratings = serializers.ReadOnlyField(source='rating_count')
inlined_errors = {}
class Meta(object):
model = CommunityHealthUnit
read_only_fields = ('code', )
def get_basic_updates(self, chu_instance, validated_data):
updates = self.initial_data
if ('facility' in self.initial_data and
validated_data['facility'] != chu_instance):
updates['facility'] = {
"facility_id": str(validated_data['facility'].id),
"facility_name": validated_data['facility'].name,
}
if ('status' in self.initial_data and
validated_data['status'] != chu_instance.status):
updates['status'] = {
'status_id': str(validated_data['status'].id),
'status_name': validated_data['status'].name
}
updates.pop('health_unit_workers', None)
updates.pop('contacts', None)
return json.dumps(updates)
def buffer_updates(
self, validated_data, chu_instance, chews=None, contacts=None, ):
try:
update = ChuUpdateBuffer.objects.get(
health_unit=chu_instance,
is_approved=False, is_rejected=False)
except ChuUpdateBuffer.DoesNotExist:
update = ChuUpdateBuffer.objects.create(
health_unit=chu_instance,
created_by_id=self.context['request'].user.id,
updated_by_id=self.context['request'].user.id,
is_new=True)
basic_updates = self.get_basic_updates(chu_instance, validated_data)
update.basic = basic_updates if basic_updates \
and not update.basic else update.basic
if chews:
for chew in chews:
chew.pop('created', None)
chew.pop('updated', None)
chew.pop('updated_by', None)
chew.pop('created_by', None)
chews = json.dumps(chews)
update.workers = chews
if contacts:
for contact in contacts:
contact_type = ContactType.objects.get(
id=contact['contact_type'])
contact['contact_type_name'] = contact_type.name
contacts = json.dumps(contacts)
update.contacts = contacts
update.save()
def _ensure_all_chew_required_provided(self, chew):
        if 'first_name' not in chew or 'last_name' not in chew:
self.inlined_errors.update({
"Community Health Worker": [
"Ensure the CHEW first name and last name are provided"
]
})
def _validate_chew(self, chews):
for chew in chews:
self._ensure_all_chew_required_provided(chew)
def save_chew(self, instance, chews, context):
for chew in chews:
chew_id = chew.pop('id', None)
if chew_id:
chew_obj = CommunityHealthWorker.objects.get(id=chew_id)
chew_obj.first_name = chew['first_name']
chew_obj.last_name = chew['last_name']
chew_obj.is_incharge = chew['is_incharge']
chew_obj.save()
else:
chew['health_unit'] = instance.id
chew_data = CommunityHealthWorkerSerializer(
data=chew, context=context)
chew_data.save() if chew_data.is_valid() else None
def _validate_contacts(self, contacts):
for contact in contacts:
if 'contact' not in contact or 'contact_type' not in contact:
self.inlined_errors.update(
{
"contact": [
"Contact type of contact field is missing from"
" the payload"]
}
)
continue
try:
ContactType.objects.get(id=contact['contact_type'])
except (ContactType.DoesNotExist, ValueError):
self.inlined_errors.update(
{
"contact": ["The provided contact_type does not exist"]
}
)
def create_contact(self, contact_data):
try:
if 'id' in contact_data:
contact = Contact.objects.get(
id=contact_data['id']
)
contact.contact = contact_data['contact']
contact.contact_type_id = contact_data['contact_type']
contact.save()
return contact
else:
contact = Contact.objects.get(
contact=contact_data['contact']
)
return contact
except Contact.DoesNotExist:
contact = ContactSerializer(
data=contact_data, context=self.context)
return contact.save() if contact.is_valid() else \
self.inlined_errors.update(contact.errors)
def create_chu_contacts(self, instance, contacts, validated_data):
for contact_data in contacts:
contact = self.create_contact(contact_data)
health_unit_contact_data_unadit = {
"contact": contact.id,
"health_unit": instance.id
}
try:
CommunityHealthUnitContact.objects.get(
contact_id=contact.id, health_unit_id=instance.id)
except CommunityHealthUnitContact.DoesNotExist:
chu_contact = CommunityHealthUnitContactSerializer(
data=health_unit_contact_data_unadit,
context=self.context)
chu_contact.save() if chu_contact.is_valid() else None
def create(self, validated_data):
self.inlined_errors = {}
chews = self.initial_data.pop('health_unit_workers', [])
contacts = self.initial_data.pop('contacts', [])
self._validate_contacts(contacts)
self._validate_chew(chews)
if not self.inlined_errors:
validated_data.pop('health_unit_workers', None)
chu = super(CommunityHealthUnitSerializer, self).create(
validated_data)
self.save_chew(chu, chews, self.context)
self.create_chu_contacts(chu, contacts, validated_data)
return chu
else:
raise ValidationError(self.inlined_errors)
def update(self, instance, validated_data):
self.inlined_errors = {}
chews = self.initial_data.pop('health_unit_workers', [])
contacts = self.initial_data.pop('contacts', [])
chu = CommunityHealthUnit.objects.get(id=instance.id)
self._validate_contacts(contacts)
self._validate_chew(chews)
if not self.inlined_errors:
if chu.is_approved and not instance.is_rejected:
self.buffer_updates(validated_data, instance, chews, contacts)
return instance
super(CommunityHealthUnitSerializer, self).update(
instance, validated_data)
self.save_chew(instance, chews, self.context)
self.create_chu_contacts(instance, contacts, validated_data)
return instance
else:
raise ValidationError(self.inlined_errors)
class CommunityHealthWorkerContactSerializer(
AbstractFieldsMixin, serializers.ModelSerializer):
class Meta(object):
model = CommunityHealthWorkerContact
class StatusSerializer(AbstractFieldsMixin, serializers.ModelSerializer):
class Meta(object):
model = Status
class CommunityHealthUnitContactSerializer(
AbstractFieldsMixin, serializers.ModelSerializer):
class Meta(object):
model = CommunityHealthUnitContact
class CHURatingSerializer(AbstractFieldsMixin, serializers.ModelSerializer):
facility_name = serializers.ReadOnlyField(source='chu.facility.name')
facility_id = serializers.ReadOnlyField(source='chu.facility.id')
chu_name = serializers.ReadOnlyField(source='chu.name')
class Meta(object):
model = CHURating
| {
"content_hash": "55dce8f4d0aa5ce53a877e0fd60ce9b7",
"timestamp": "",
"source": "github",
"line_count": 286,
"max_line_length": 79,
"avg_line_length": 36.24825174825175,
"alnum_prop": 0.6066364425581171,
"repo_name": "Nyto035/Konza_backend",
"id": "65ad302c7a1768270f70442c37f8223d524a45af",
"size": "10367",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "chul/serializers.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "34"
},
{
"name": "HTML",
"bytes": "55286"
},
{
"name": "JavaScript",
"bytes": "1285"
},
{
"name": "PLpgSQL",
"bytes": "4394"
},
{
"name": "Python",
"bytes": "1030238"
},
{
"name": "Ruby",
"bytes": "1251"
},
{
"name": "Shell",
"bytes": "1455"
}
],
"symlink_target": ""
} |
import datetime
import json
import unittest
from psycopg2.extras import DateRange, DateTimeTZRange, NumericRange
from django import forms
from django.contrib.postgres import fields as pg_fields, forms as pg_forms
from django.contrib.postgres.validators import (
RangeMaxValueValidator, RangeMinValueValidator,
)
from django.core import exceptions, serializers
from django.db import connection
from django.test import TestCase
from django.utils import timezone
from .models import RangesModel
def skipUnlessPG92(test):
if not connection.vendor == 'postgresql':
return unittest.skip('PostgreSQL required')(test)
PG_VERSION = connection.pg_version
if PG_VERSION < 90200:
return unittest.skip('PostgreSQL >= 9.2 required')(test)
return test
@skipUnlessPG92
class TestSaveLoad(TestCase):
def test_all_fields(self):
now = timezone.now()
instance = RangesModel(
ints=NumericRange(0, 10),
bigints=NumericRange(10, 20),
floats=NumericRange(20, 30),
timestamps=DateTimeTZRange(now - datetime.timedelta(hours=1), now),
dates=DateRange(now.date() - datetime.timedelta(days=1), now.date()),
)
instance.save()
loaded = RangesModel.objects.get()
self.assertEqual(instance.ints, loaded.ints)
self.assertEqual(instance.bigints, loaded.bigints)
self.assertEqual(instance.floats, loaded.floats)
self.assertEqual(instance.timestamps, loaded.timestamps)
self.assertEqual(instance.dates, loaded.dates)
def test_range_object(self):
r = NumericRange(0, 10)
instance = RangesModel(ints=r)
instance.save()
loaded = RangesModel.objects.get()
self.assertEqual(r, loaded.ints)
def test_tuple(self):
instance = RangesModel(ints=(0, 10))
instance.save()
loaded = RangesModel.objects.get()
self.assertEqual(NumericRange(0, 10), loaded.ints)
def test_range_object_boundaries(self):
r = NumericRange(0, 10, '[]')
instance = RangesModel(floats=r)
instance.save()
loaded = RangesModel.objects.get()
self.assertEqual(r, loaded.floats)
self.assertTrue(10 in loaded.floats)
def test_unbounded(self):
r = NumericRange(None, None, '()')
instance = RangesModel(floats=r)
instance.save()
loaded = RangesModel.objects.get()
self.assertEqual(r, loaded.floats)
def test_empty(self):
r = NumericRange(empty=True)
instance = RangesModel(ints=r)
instance.save()
loaded = RangesModel.objects.get()
self.assertEqual(r, loaded.ints)
def test_null(self):
instance = RangesModel(ints=None)
instance.save()
loaded = RangesModel.objects.get()
self.assertEqual(None, loaded.ints)
@skipUnlessPG92
class TestQuerying(TestCase):
@classmethod
def setUpTestData(cls):
cls.objs = [
RangesModel.objects.create(ints=NumericRange(0, 10)),
RangesModel.objects.create(ints=NumericRange(5, 15)),
RangesModel.objects.create(ints=NumericRange(None, 0)),
RangesModel.objects.create(ints=NumericRange(empty=True)),
RangesModel.objects.create(ints=None),
]
def test_exact(self):
self.assertSequenceEqual(
RangesModel.objects.filter(ints__exact=NumericRange(0, 10)),
[self.objs[0]],
)
def test_isnull(self):
self.assertSequenceEqual(
RangesModel.objects.filter(ints__isnull=True),
[self.objs[4]],
)
def test_isempty(self):
self.assertSequenceEqual(
RangesModel.objects.filter(ints__isempty=True),
[self.objs[3]],
)
def test_contains(self):
self.assertSequenceEqual(
RangesModel.objects.filter(ints__contains=8),
[self.objs[0], self.objs[1]],
)
def test_contains_range(self):
self.assertSequenceEqual(
RangesModel.objects.filter(ints__contains=NumericRange(3, 8)),
[self.objs[0]],
)
def test_contained_by(self):
self.assertSequenceEqual(
RangesModel.objects.filter(ints__contained_by=NumericRange(0, 20)),
[self.objs[0], self.objs[1], self.objs[3]],
)
def test_overlap(self):
self.assertSequenceEqual(
RangesModel.objects.filter(ints__overlap=NumericRange(3, 8)),
[self.objs[0], self.objs[1]],
)
def test_fully_lt(self):
self.assertSequenceEqual(
RangesModel.objects.filter(ints__fully_lt=NumericRange(5, 10)),
[self.objs[2]],
)
def test_fully_gt(self):
self.assertSequenceEqual(
RangesModel.objects.filter(ints__fully_gt=NumericRange(5, 10)),
[],
)
def test_not_lt(self):
self.assertSequenceEqual(
RangesModel.objects.filter(ints__not_lt=NumericRange(5, 10)),
[self.objs[1]],
)
def test_not_gt(self):
self.assertSequenceEqual(
RangesModel.objects.filter(ints__not_gt=NumericRange(5, 10)),
[self.objs[0], self.objs[2]],
)
def test_adjacent_to(self):
self.assertSequenceEqual(
RangesModel.objects.filter(ints__adjacent_to=NumericRange(0, 5)),
[self.objs[1], self.objs[2]],
)
def test_startswith(self):
self.assertSequenceEqual(
RangesModel.objects.filter(ints__startswith=0),
[self.objs[0]],
)
def test_endswith(self):
self.assertSequenceEqual(
RangesModel.objects.filter(ints__endswith=0),
[self.objs[2]],
)
def test_startswith_chaining(self):
self.assertSequenceEqual(
RangesModel.objects.filter(ints__startswith__gte=0),
[self.objs[0], self.objs[1]],
)
@skipUnlessPG92
class TestSerialization(TestCase):
test_data = (
'[{"fields": {"ints": "{\\"upper\\": 10, \\"lower\\": 0, '
'\\"bounds\\": \\"[)\\"}", "floats": "{\\"empty\\": true}", '
'"bigints": null, "timestamps": null, "dates": null}, '
'"model": "postgres_tests.rangesmodel", "pk": null}]'
)
def test_dumping(self):
instance = RangesModel(ints=NumericRange(0, 10), floats=NumericRange(empty=True))
data = serializers.serialize('json', [instance])
dumped = json.loads(data)
dumped[0]['fields']['ints'] = json.loads(dumped[0]['fields']['ints'])
check = json.loads(self.test_data)
check[0]['fields']['ints'] = json.loads(check[0]['fields']['ints'])
self.assertEqual(dumped, check)
def test_loading(self):
instance = list(serializers.deserialize('json', self.test_data))[0].object
self.assertEqual(instance.ints, NumericRange(0, 10))
self.assertEqual(instance.floats, NumericRange(empty=True))
self.assertEqual(instance.dates, None)
class TestValidators(TestCase):
def test_max(self):
validator = RangeMaxValueValidator(5)
validator(NumericRange(0, 5))
with self.assertRaises(exceptions.ValidationError) as cm:
validator(NumericRange(0, 10))
self.assertEqual(cm.exception.messages[0], 'Ensure that this range is completely less than or equal to 5.')
self.assertEqual(cm.exception.code, 'max_value')
def test_min(self):
validator = RangeMinValueValidator(5)
validator(NumericRange(10, 15))
with self.assertRaises(exceptions.ValidationError) as cm:
validator(NumericRange(0, 10))
self.assertEqual(cm.exception.messages[0], 'Ensure that this range is completely greater than or equal to 5.')
self.assertEqual(cm.exception.code, 'min_value')
class TestFormField(TestCase):
def test_valid_integer(self):
field = pg_forms.IntegerRangeField()
value = field.clean(['1', '2'])
self.assertEqual(value, NumericRange(1, 2))
def test_valid_floats(self):
field = pg_forms.FloatRangeField()
value = field.clean(['1.12345', '2.001'])
self.assertEqual(value, NumericRange(1.12345, 2.001))
def test_valid_timestamps(self):
field = pg_forms.DateTimeRangeField()
value = field.clean(['01/01/2014 00:00:00', '02/02/2014 12:12:12'])
lower = datetime.datetime(2014, 1, 1, 0, 0, 0)
upper = datetime.datetime(2014, 2, 2, 12, 12, 12)
self.assertEqual(value, DateTimeTZRange(lower, upper))
def test_valid_dates(self):
field = pg_forms.DateRangeField()
value = field.clean(['01/01/2014', '02/02/2014'])
lower = datetime.date(2014, 1, 1)
upper = datetime.date(2014, 2, 2)
self.assertEqual(value, DateRange(lower, upper))
def test_using_split_datetime_widget(self):
class SplitDateTimeRangeField(pg_forms.DateTimeRangeField):
base_field = forms.SplitDateTimeField
class SplitForm(forms.Form):
field = SplitDateTimeRangeField()
form = SplitForm()
self.assertHTMLEqual(str(form), '''
<tr>
<th>
<label for="id_field_0">Field:</label>
</th>
<td>
<input id="id_field_0_0" name="field_0_0" type="text" />
<input id="id_field_0_1" name="field_0_1" type="text" />
<input id="id_field_1_0" name="field_1_0" type="text" />
<input id="id_field_1_1" name="field_1_1" type="text" />
</td>
</tr>
''')
form = SplitForm({
'field_0_0': '01/01/2014',
'field_0_1': '00:00:00',
'field_1_0': '02/02/2014',
'field_1_1': '12:12:12',
})
self.assertTrue(form.is_valid())
lower = datetime.datetime(2014, 1, 1, 0, 0, 0)
upper = datetime.datetime(2014, 2, 2, 12, 12, 12)
self.assertEqual(form.cleaned_data['field'], DateTimeTZRange(lower, upper))
def test_none(self):
field = pg_forms.IntegerRangeField(required=False)
value = field.clean(['', ''])
self.assertEqual(value, None)
def test_rendering(self):
class RangeForm(forms.Form):
ints = pg_forms.IntegerRangeField()
self.assertHTMLEqual(str(RangeForm()), '''
<tr>
<th><label for="id_ints_0">Ints:</label></th>
<td>
<input id="id_ints_0" name="ints_0" type="number" />
<input id="id_ints_1" name="ints_1" type="number" />
</td>
</tr>
''')
def test_lower_bound_higher(self):
field = pg_forms.IntegerRangeField()
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean(['10', '2'])
self.assertEqual(cm.exception.messages[0], 'The start of the range must not exceed the end of the range.')
self.assertEqual(cm.exception.code, 'bound_ordering')
def test_open(self):
field = pg_forms.IntegerRangeField()
value = field.clean(['', '0'])
self.assertEqual(value, NumericRange(None, 0))
def test_incorrect_data_type(self):
field = pg_forms.IntegerRangeField()
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean('1')
self.assertEqual(cm.exception.messages[0], 'Enter two valid values.')
self.assertEqual(cm.exception.code, 'invalid')
def test_invalid_lower(self):
field = pg_forms.IntegerRangeField()
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean(['a', '2'])
self.assertEqual(cm.exception.messages[0], 'Enter a whole number.')
def test_invalid_upper(self):
field = pg_forms.IntegerRangeField()
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean(['1', 'b'])
self.assertEqual(cm.exception.messages[0], 'Enter a whole number.')
def test_required(self):
field = pg_forms.IntegerRangeField(required=True)
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean(['', ''])
self.assertEqual(cm.exception.messages[0], 'This field is required.')
value = field.clean([1, ''])
self.assertEqual(value, NumericRange(1, None))
def test_model_field_formfield_integer(self):
model_field = pg_fields.IntegerRangeField()
form_field = model_field.formfield()
self.assertIsInstance(form_field, pg_forms.IntegerRangeField)
def test_model_field_formfield_biginteger(self):
model_field = pg_fields.BigIntegerRangeField()
form_field = model_field.formfield()
self.assertIsInstance(form_field, pg_forms.IntegerRangeField)
def test_model_field_formfield_float(self):
model_field = pg_fields.FloatRangeField()
form_field = model_field.formfield()
self.assertIsInstance(form_field, pg_forms.FloatRangeField)
def test_model_field_formfield_date(self):
model_field = pg_fields.DateRangeField()
form_field = model_field.formfield()
self.assertIsInstance(form_field, pg_forms.DateRangeField)
def test_model_field_formfield_datetime(self):
model_field = pg_fields.DateTimeRangeField()
form_field = model_field.formfield()
self.assertIsInstance(form_field, pg_forms.DateTimeRangeField)
class TestWidget(TestCase):
def test_range_widget(self):
f = pg_forms.ranges.DateTimeRangeField()
self.assertHTMLEqual(
f.widget.render('datetimerange', ''),
'<input type="text" name="datetimerange_0" /><input type="text" name="datetimerange_1" />'
)
self.assertHTMLEqual(
f.widget.render('datetimerange', None),
'<input type="text" name="datetimerange_0" /><input type="text" name="datetimerange_1" />'
)
dt_range = DateTimeTZRange(
datetime.datetime(2006, 1, 10, 7, 30),
datetime.datetime(2006, 2, 12, 9, 50)
)
self.assertHTMLEqual(
f.widget.render('datetimerange', dt_range),
'<input type="text" name="datetimerange_0" value="2006-01-10 07:30:00" /><input type="text" name="datetimerange_1" value="2006-02-12 09:50:00" />'
)
| {
"content_hash": "73599209c861fd21089a1fb80a76fc94",
"timestamp": "",
"source": "github",
"line_count": 399,
"max_line_length": 158,
"avg_line_length": 36.19799498746867,
"alnum_prop": 0.6093609360936093,
"repo_name": "devops2014/djangosite",
"id": "c26382a15bd243ee97c5852ed206e502eb4c9245",
"size": "14443",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tests/postgres_tests/test_ranges.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "52479"
},
{
"name": "JavaScript",
"bytes": "106009"
},
{
"name": "Makefile",
"bytes": "5765"
},
{
"name": "Python",
"bytes": "10489293"
},
{
"name": "Shell",
"bytes": "10452"
}
],
"symlink_target": ""
} |
import sys
import time
import unittest2 as unittest
from pyspider.libs import counter
class TestCounter(unittest.TestCase):
def test_010_TimebaseAverageEventCounter(self):
c = counter.TimebaseAverageEventCounter(2, 1)
for i in range(100):
time.sleep(0.1)
c.event(100+i)
self.assertEqual(c.sum, float(180+199)*20/2)
self.assertEqual(c.avg, float(180+199)/2)
def test_020_delete(self):
c = counter.CounterManager()
c.event(('a', 'b'), 1)
c.event(('a', 'c'), 1)
c.event(('b', 'c'), 1)
self.assertIsNotNone(c['a'])
self.assertIsNotNone(c['b'])
del c['a']
self.assertNotIn('a', c)
self.assertIsNotNone(c['b'])
| {
"content_hash": "9a5b2d39ac8b4e9af141fe0b7c83b206",
"timestamp": "",
"source": "github",
"line_count": 28,
"max_line_length": 53,
"avg_line_length": 26.964285714285715,
"alnum_prop": 0.5774834437086093,
"repo_name": "VDuda/pyspider",
"id": "39baace3bedfd00cfbcdff528600f32905c3d89c",
"size": "942",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_counter.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "25384"
},
{
"name": "HTML",
"bytes": "24850"
},
{
"name": "JavaScript",
"bytes": "50802"
},
{
"name": "Python",
"bytes": "516744"
}
],
"symlink_target": ""
} |
from .basededatos import BaseDeDatos
class PerInstitucionMedica(BaseDeDatos):
def obtener_uno(self, id_):
"""
Obtiene y retorna un objeto según el id dado.
:param id_: int >= 0
:return: object
"""
if id_ >= 0:
id_ = (id_,)
sql = 'SELECT * FROM instituciones_medicas WHERE id=?'
return self.obtener(sql, id_)
else:
            print 'The parameter must be greater than or equal to 0.'
return None
def obtener_listado(self, **kwargs):
"""
Obtiene y retorna un listado de objetos según los filtros pasados.
:param kwargs: dict
:return: dict
"""
if 'pagina' in kwargs:
            total_filas = self.contar_filas('instituciones_medicas')
            offset = kwargs['pagina'] * 10  # results per page
            dataset = None
            if offset < total_filas:  # TODO: review the pagination handling here
                sql = 'SELECT * FROM instituciones_medicas WHERE baja=0 ' \
                    'LIMIT 10 OFFSET ?'
                data = (offset,)
dataset = self.obtener(sql, data, True)
else:
sql = 'SELECT * FROM instituciones_medicas WHERE baja=0'
dataset = self.obtener(sql, lista=True)
return dataset
else:
return []
def agregar_objeto(self, obj):
"""
Prepara los datos de un objeto para ser insertado en la base de datos.
:param obj: object
:return: object
"""
sql = 'INSERT INTO instituciones_medicas VALUES (null, ?, ?)'
id_ = self.salvar(sql, (obj.nombre, obj.baja,))
obj.id_ = id_
return obj
def actualizar_objeto(self, obj):
"""
Prepara los datos de un objeto para actualizar su registro correlativo
en la base de datos.
:param obj: object
:return: bool
"""
sql = 'UPDATE instituciones_medicas SET nombre = ?, baja = ? WHERE \
id = ?'
        return self.actualizar(sql, (obj.nombre, obj.baja, obj.id_))
def baja_objeto(self, obj):
"""
Obtiene el id del objeto para dar una baja lógica en el registro co-
rrespondiente en la base de datos.
:param obj: object
:return: bool
"""
sql = 'UPDATE instituciones_medicas SET baja = ? WHERE id = ?'
return self.actualizar(sql, (1, obj.id_))
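# Usage sketch (hypothetical; assumes BaseDeDatos can be constructed without
# arguments, which is not shown in this module):
#   per = PerInstitucionMedica()
#   institucion = per.obtener_uno(1)          # fetch one record by id
#   listado = per.obtener_listado(pagina=0)   # first page of active records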
| {
"content_hash": "d53534faf0bdf13c799b7fc703438198",
"timestamp": "",
"source": "github",
"line_count": 73,
"max_line_length": 88,
"avg_line_length": 33.63013698630137,
"alnum_prop": 0.5389002036659878,
"repo_name": "gabofer82/taller_programacion_2017",
"id": "b406f1ee63afddb885114a592e6e983dd1f4be73",
"size": "2483",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Programa/persistencia/perinstitucionmedica.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "286708"
}
],
"symlink_target": ""
} |
class DailyInfo:
def __init__(self, errorCount, tweetCount, dailyFlag, oldFollowers, newFollowers, user):
self.errorCount = errorCount
self.tweetCount = tweetCount
self.dailyFlag = dailyFlag
self.oldFollowers = oldFollowers
self.newFollowers = newFollowers
self.user = user
| {
"content_hash": "b08cc6eb2a2e6a6f3ff1cf84716f9cc3",
"timestamp": "",
"source": "github",
"line_count": 8,
"max_line_length": 92,
"avg_line_length": 40.75,
"alnum_prop": 0.6748466257668712,
"repo_name": "foxof72/botMessager",
"id": "9967a5ab496fce3a2ac3890c955251f73706202a",
"size": "359",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "botmessenger/dailyInfo.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "4097"
}
],
"symlink_target": ""
} |
import sys
sys.path.insert(1, "../../")
import h2o, tests
def score_history_test(ip,port):
air_train = h2o.import_file(path=h2o.locate("smalldata/airlines/AirlinesTrain.csv.zip"))
gbm_mult = h2o.gbm(x=air_train[["Origin", "Dest", "Distance", "UniqueCarrier", "IsDepDelayed", "fDayofMonth","fMonth"]],
y=air_train["fDayOfWeek"].asfactor(),
distribution="multinomial")
score_history = gbm_mult.score_history()
print score_history
if __name__ == "__main__":
tests.run_test(sys.argv, score_history_test)
| {
"content_hash": "4c2a7766b1c19ce2fe353183c1f87c5f",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 124,
"avg_line_length": 32.22222222222222,
"alnum_prop": 0.6155172413793103,
"repo_name": "bospetersen/h2o-3",
"id": "22f7f17880546bb938507e50d0557d35425515fa",
"size": "580",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "h2o-py/tests/testdir_misc/pyunit_score_history.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "5090"
},
{
"name": "CSS",
"bytes": "163561"
},
{
"name": "CoffeeScript",
"bytes": "262107"
},
{
"name": "Emacs Lisp",
"bytes": "8914"
},
{
"name": "Groovy",
"bytes": "78"
},
{
"name": "HTML",
"bytes": "146874"
},
{
"name": "Java",
"bytes": "5441396"
},
{
"name": "JavaScript",
"bytes": "88331"
},
{
"name": "Makefile",
"bytes": "31513"
},
{
"name": "Python",
"bytes": "2021301"
},
{
"name": "R",
"bytes": "1829960"
},
{
"name": "Rebol",
"bytes": "3997"
},
{
"name": "Ruby",
"bytes": "299"
},
{
"name": "Scala",
"bytes": "16336"
},
{
"name": "Shell",
"bytes": "44718"
},
{
"name": "TeX",
"bytes": "470617"
}
],
"symlink_target": ""
} |
import json
import time
import redis
import logging
#
# This server services a status queue defined by the graph terminal node "RABBITMQ_STATUS_QUEUE".
#
# 1. If used as a package the function queue_message is used to send a message to the cloud server
# 2. If used as a main program the program takes messages from the queue and publishes it to a redis server located in the cloud
class Status_Queue():
def __init__(self,redis_handle,status_queue ):
self.redis_handle = redis_handle
self.status_queue = status_queue
def queue_message( self, routing_key, data ):
data["routing_key"] = routing_key
self.redis_handle.rpush( self.status_queue, json.dumps(data))
self.redis_handle.ltrim( self.status_queue, 0, 100 )
def free_messages( self ):
if self.redis_handle.llen( self.status_queue ) > 0:
return_value = True
else:
return_value = False
return return_value
def dequeue_message( self ):
return self.redis_handle.lpop(self.status_queue )
def get_message( self ):
return self.redis_handle.lindex(self.status_queue, -1)
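# Usage sketch for the package case described above (host, db index and queue
# name are illustrative assumptions, not taken from a real deployment):
#   handle = redis.StrictRedis(host="localhost", port=6379, db=12, decode_responses=True)
#   queue = Status_Queue(handle, "cloud_status")
#   queue.queue_message("farm.status", {"temperature_c": 21.5})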
if __name__ == "__main__":
import time
from redis_graph_py3 import farm_template_py3
import pika
import json
import time
import os
redis_startup = redis.StrictRedis( host = "localhost", port=6379, db = 0 )
rabbit_user_name = redis_startup.hget("status_gateway", "user_name" )
rabbit_password = redis_startup.hget("status_gateway", "password" )
graph_management = farm_template_py3.Graph_Management("PI_1","main_remote","LaCima_DataStore")
status_servers = graph_management.match_terminal_relationship("RABBITMQ_STATUS_QUEUE")
#print ("status_servers",type(status_servers),len(status_servers))
status_server = status_servers[0]
vhost = status_server["vhost"]
queue = status_server[ "queue"]
port = int(status_server[ "port" ])
server = status_server["server"]
#print "user_name",rabbit_user_name
#print "password",rabbit_password
credentials = pika.PlainCredentials( rabbit_user_name, rabbit_password )
parameters = pika.ConnectionParameters( server,
port, #ssl port
vhost,
credentials,
ssl = True ,
)
connection = pika.BlockingConnection(parameters)
channel = connection.channel()
channel.exchange_declare(exchange= queue,
type='fanout')
data_store_nodes = graph_management.find_data_stores()
# find ip and port for redis data store
data_server_ip = data_store_nodes[0]["ip"]
data_server_port = data_store_nodes[0]["port"]
redis_handle = redis.StrictRedis( host = data_server_ip, port=data_server_port, db = 12 , decode_responses=True)
status_stores = graph_management.match_terminal_relationship("CLOUD_STATUS_STORE")
#print ("status_stores",status_stores)
status_store = status_stores[0]
queue_name = status_store["queue_name"]
status_queue = Status_Queue( redis_handle, queue_name )
while True:
time.sleep(1.0)
if status_queue.free_messages() :
data_json = status_queue.get_message()
data = json.loads(data_json)
routing_key = data["routing_key"]
channel.basic_publish(exchange=queue,
routing_key=routing_key,
body=data_json)
print(" [x] Sent %r" % "test message")
status_queue.dequeue_message()
connection.close()
| {
"content_hash": "749c586ef971c3fea6fa7d53e71170cf",
"timestamp": "",
"source": "github",
"line_count": 117,
"max_line_length": 128,
"avg_line_length": 32.35897435897436,
"alnum_prop": 0.6048600105652404,
"repo_name": "glenn-edgar/local_controller_3",
"id": "693e4bc0becbdfe3709678c483e6607a7a41b550",
"size": "3809",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "rabbit_cloud_status_publish_py3.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "2510"
},
{
"name": "CSS",
"bytes": "4575415"
},
{
"name": "HTML",
"bytes": "2215958"
},
{
"name": "JavaScript",
"bytes": "9981211"
},
{
"name": "Makefile",
"bytes": "5136"
},
{
"name": "PHP",
"bytes": "124476"
},
{
"name": "Python",
"bytes": "4396570"
},
{
"name": "Shell",
"bytes": "569"
},
{
"name": "Smalltalk",
"bytes": "252"
},
{
"name": "TeX",
"bytes": "3153"
},
{
"name": "TypeScript",
"bytes": "11006"
}
],
"symlink_target": ""
} |
"""Network Table."""
from collections import OrderedDict
import psycopg2.extras
from pyiem.util import get_dbconn
class Table:
"""Our class"""
def __init__(self, network, cursor=None, only_online=True):
"""A class representing a network(s) of IEM metadata
Args:
network (str or list): A network identifier used by the IEM, this can
be either a string or a list of strings.
cursor (dbcursor,optional): A database cursor to use for the query
            only_online (bool,optional): Should the listing of stations include
only those that are currently flagged as online.
"""
self.sts = OrderedDict()
if network is None:
return
if cursor is None:
dbconn = get_dbconn("mesosite")
cursor = dbconn.cursor(cursor_factory=psycopg2.extras.DictCursor)
if isinstance(network, str):
network = [network]
online_extra = " and online " if only_online else ""
cursor.execute(
f"""
WITH myattrs as (
SELECT a.iemid, array_agg(attr) as attrs,
array_agg(value) as attr_values from stations s JOIN
station_attributes a on (s.iemid = a.iemid) WHERE
s.network in %s GROUP by a.iemid
), mythreading as (
SELECT a.iemid, array_agg(source_iemid) as threading_sources,
array_agg(begin_date) as threading_begin_dates,
array_agg(coalesce(end_date, 'TOMORROW'::date))
as threading_end_dates
from stations s JOIN
station_threading a on (s.iemid = a.iemid) WHERE
s.network in %s GROUP by a.iemid
)
SELECT s.*, ST_x(geom) as lon, ST_y(geom) as lat,
a.attrs, a.attr_values, m.threading_sources,
m.threading_begin_dates, m.threading_end_dates
from stations s
LEFT JOIN myattrs a on (s.iemid = a.iemid)
LEFT JOIN mythreading m on (s.iemid = m.iemid)
WHERE network in %s {online_extra} ORDER by name ASC
""",
(tuple(network), tuple(network), tuple(network)),
)
for row in cursor:
self.sts[row["id"]] = dict(row)
self.sts[row["id"]]["attributes"] = dict(
zip(row["attrs"] or [], row["attr_values"] or [])
)
td = self.sts[row["id"]].setdefault("threading", [])
for i, s, e in zip(
row["threading_sources"] or [],
row["threading_begin_dates"] or [],
row["threading_end_dates"] or [],
):
td.append({"iemid": i, "begin_date": s, "end_date": e})
def get_threading_id(self, sid, valid) -> str:
"""Return a station identifier (not iemid) based on threading.
Lookup what the threaded station identifier is based on this given
timestamp/date.
Args:
sid (str): station identifier to check threading for.
valid (datetime.date): lookup for comparison.
"""
entry = self.sts.get(sid)
if entry is None or not entry["threading"]:
return None
for tinfo in entry["threading"]:
if valid < tinfo["begin_date"] or valid >= tinfo["end_date"]:
continue
return self.get_id_by_key("iemid", tinfo["iemid"])
return None
def get_id_by_key(self, key, value) -> str:
"""Find a station id by a given attribute = value.
Args:
key (str): attribute to lookup.
value (mixed): value to compare against
Returns:
station_id
"""
for sid in self.sts:
if self.sts[sid].get(key) == value:
return sid
return None
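# Usage sketch (network id, station name and date are illustrative assumptions):
#   nt = Table("IA_ASOS")
#   sid = nt.get_id_by_key("name", "DES MOINES")
#   threaded = nt.get_threading_id(sid, datetime.date(2020, 1, 1))  # needs `import datetime`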
| {
"content_hash": "b2ca1ed14eafb3f9e7d469dd8a97f1d4",
"timestamp": "",
"source": "github",
"line_count": 103,
"max_line_length": 79,
"avg_line_length": 37.62135922330097,
"alnum_prop": 0.5447741935483871,
"repo_name": "akrherz/pyIEM",
"id": "d8aabc6607dc7a68653610071c7d17c836197d23",
"size": "3875",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "src/pyiem/network.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Jinja",
"bytes": "16242"
},
{
"name": "Python",
"bytes": "1085104"
}
],
"symlink_target": ""
} |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# ---------------------------------------------------------------------
# Copyright (c) 2012 Michael Hull.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# - Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# - Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# ----------------------------------------------------------------------
from Cheetah.Template import Template
from morphforge.simulation.neuron.simulationdatacontainers.mhocfile import MHocFileData
from morphforge.simulation.neuron.simulationdatacontainers.mhocfile import MHOCSections
from morphforge.simulation.neuron.core.neuronsimulationenvironment import NEURONEnvironment
from morphforge.simulation.base.synaptictriggers import SynapticTriggerByVoltageThreshold, SynapticTriggerAtTimes
#preTmpl = """
#// Pre-Synapse (will drive:[$synnames])
##for syn_name_post in $synnamespost:
#objref $(synnamepre)_$(syn_name_post)
#${cellname}.internalsections[$sectionindex] $(synnamepre)_$(syn_name_post) = new NetCon(&v($sectionpos), $syn_name_post, $threshold.rescale("mV").magnitude, $delay.rescale("ms").magnitude, 1.0 )
##end for
#"""
preTmpl = """
// Pre-Synapse (will drive:[$synname])
objref $synname
${cellname}.internalsections[$sectionindex] $(synname) = new NetCon(&v($sectionpos), $synnamepost, $threshold.rescale("mV").magnitude, $delay.rescale("ms").magnitude, 1.0 )
"""
class NeuronSynapseTriggerVoltageThreshold(SynapticTriggerByVoltageThreshold):
def build_hoc_syn(self, synapse, hocfile_obj):
cell = self.cell_location.cell
section = self.cell_location.morphlocation.section
syn_name = synapse.get_name()
#
#print hocfile_obj[MHocFileData.Synapses].keys()
synnamespost = hocfile_obj[MHocFileData.Synapses][synapse.get_postsynaptic_mechanism()]['synnamepost']
hoc_data = hocfile_obj[MHocFileData.Cells][cell]
data = {
'synname': syn_name,
'synnamepost': synnamespost,
'cell': cell,
'cellname': hoc_data['cell_name'],
'sectionindex': hoc_data['section_indexer'][section],
'sectionpos': self.cell_location.morphlocation.sectionpos,
'threshold': self.voltage_threshold,
'delay': self.delay,
}
assert not (synapse,self) in hocfile_obj[MHocFileData.Synapses]
hocfile_obj[MHocFileData.Synapses][(synapse,self)] = data
text = Template(preTmpl, data).respond()
hocfile_obj.add_to_section(MHOCSections.InitSynapsesChemPre, text)
def build_mod(self, modfile_set):
pass
preTmplList = """
// Pre-Synapse, which drives the following: [$synname]
objref ${synnamepre}_NullObj
objref $synnamepre
$synnamepre = new NetCon(${synnamepre}_NullObj, $synnamepost, 0, 0, 1.0)
objref fih_${synnamepre}
fih_${synnamepre} = new FInitializeHandler("loadqueue_${synnamepre}()")
proc loadqueue_${synnamepre}() {
#for $event in $timelist:
${synnamepre}.event($event.get_time.rescale('ms').magnitude )
#end for
}
"""
class NeuronSynapseTriggerTimeList(SynapticTriggerAtTimes):
def build_hoc_syn(self, hocfile_obj, synapse):
syn_name = synapse.get_name()
syn_name_post = hocfile_obj[MHocFileData.Synapses][synapse.get_postsynaptic_mechanism()]['synnamepost']
syn_name_pre = synapse.get_name() + 'Pre'
data = {
'synname': syn_name,
'synnamepost': syn_name_post,
'synnamepre': syn_name_pre,
'timelist': self.time_list,
}
assert not (synapse,self) in hocfile_obj[MHocFileData.Synapses]
hocfile_obj[MHocFileData.Synapses][(synapse,self)] = data
text = Template(preTmplList, data).respond()
hocfile_obj.add_to_section(MHOCSections.InitSynapsesChemPre, text)
def build_mod(self, modfile_set):
pass
NEURONEnvironment.presynapticmechanisms.register_plugin(SynapticTriggerByVoltageThreshold, NeuronSynapseTriggerVoltageThreshold)
NEURONEnvironment.presynapticmechanisms.register_plugin(SynapticTriggerAtTimes, NeuronSynapseTriggerTimeList)
| {
"content_hash": "99f4a70fe20bb58f36970058f9edc99a",
"timestamp": "",
"source": "github",
"line_count": 139,
"max_line_length": 195,
"avg_line_length": 37.97841726618705,
"alnum_prop": 0.6978594430763402,
"repo_name": "mikehulluk/morphforge",
"id": "53afcd50b2b82abae1bc6cc49124bf5937f0cc39",
"size": "5279",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/morphforge/simulation/neuron/synaptictriggers/__init__.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "AMPL",
"bytes": "4818"
},
{
"name": "C",
"bytes": "1499"
},
{
"name": "Makefile",
"bytes": "4436"
},
{
"name": "Python",
"bytes": "1557833"
},
{
"name": "Shell",
"bytes": "14"
},
{
"name": "XSLT",
"bytes": "94266"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [("fluent_contents", "0001_initial")]
operations = [
migrations.CreateModel(
name="TwitterRecentEntriesItem",
fields=[
(
"contentitem_ptr",
models.OneToOneField(
parent_link=True,
auto_created=True,
primary_key=True,
serialize=False,
to="fluent_contents.ContentItem",
),
),
(
"title",
models.CharField(
help_text="You may use Twitter markup here, such as a #hashtag or @username.",
max_length=200,
verbose_name="Title",
blank=True,
),
),
(
"twitter_user",
models.CharField(max_length=75, verbose_name="Twitter user"),
),
(
"amount",
models.PositiveSmallIntegerField(
default=5, verbose_name="Number of results"
),
),
(
"footer_text",
models.CharField(
help_text="You may use Twitter markup here, such as a #hashtag or @username.",
max_length=200,
verbose_name="Footer text",
blank=True,
),
),
(
"include_retweets",
models.BooleanField(default=False, verbose_name="Include retweets"),
),
(
"include_replies",
models.BooleanField(default=False, verbose_name="Include replies"),
),
],
options={
"db_table": "contentitem_twitterfeed_twitterrecententriesitem",
"verbose_name": "Recent twitter entries",
"verbose_name_plural": "Recent twitter entries",
},
bases=("fluent_contents.contentitem",),
),
migrations.CreateModel(
name="TwitterSearchItem",
fields=[
(
"contentitem_ptr",
models.OneToOneField(
parent_link=True,
auto_created=True,
primary_key=True,
serialize=False,
to="fluent_contents.ContentItem",
),
),
(
"title",
models.CharField(
help_text="You may use Twitter markup here, such as a #hashtag or @username.",
max_length=200,
verbose_name="Title",
blank=True,
),
),
(
"query",
models.CharField(
default="",
help_text='<a href="https://support.twitter.com/articles/71577" target="_blank">Twitter search syntax</a> is allowed.',
max_length=200,
verbose_name="Search for",
),
),
(
"amount",
models.PositiveSmallIntegerField(
default=5, verbose_name="Number of results"
),
),
(
"footer_text",
models.CharField(
help_text="You may use Twitter markup here, such as a #hashtag or @username.",
max_length=200,
verbose_name="Footer text",
blank=True,
),
),
(
"include_retweets",
models.BooleanField(default=False, verbose_name="Include retweets"),
),
(
"include_replies",
models.BooleanField(default=False, verbose_name="Include replies"),
),
],
options={
"db_table": "contentitem_twitterfeed_twittersearchitem",
"verbose_name": "Twitter search feed",
"verbose_name_plural": "Twitter search feed",
},
bases=("fluent_contents.contentitem",),
),
]
| {
"content_hash": "cca95afd7328fe16167a22342ecee2d5",
"timestamp": "",
"source": "github",
"line_count": 130,
"max_line_length": 143,
"avg_line_length": 37.09230769230769,
"alnum_prop": 0.39257569473247617,
"repo_name": "edoburu/django-fluent-contents",
"id": "9358f0bfa9757553f4a8939dc533af7007fa13a9",
"size": "4846",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "fluent_contents/plugins/twitterfeed/migrations/0001_initial.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "13178"
},
{
"name": "HTML",
"bytes": "35807"
},
{
"name": "JavaScript",
"bytes": "80446"
},
{
"name": "Python",
"bytes": "494720"
}
],
"symlink_target": ""
} |
import json
import datetime
from decimal import Decimal
from django.core.management.base import BaseCommand
from game.users.models import User
from game.interactive.models import Interactive, InteractiveRound, Survey
class DecimalEncoder(json.JSONEncoder):
def default(self, o):
if isinstance(o, Decimal):
return float(o)
if isinstance(o, datetime.datetime):
return str(o)
if isinstance(o, datetime.timedelta):
return o.seconds
return super(DecimalEncoder, self).default(o)
class Command(BaseCommand):
def handle(self, *args, **options):
users = []
for game in Interactive.objects.all():
for u in game.users.all():
rounds = InteractiveRound.objects.filter(user=u)
if rounds.count() < 1:
continue
d = {'user': u.username,
'final_score': u.get_score,
'condition': 'control',
'time_created': u.date_joined,
'game_id': game.id,
'unanswered': rounds.filter(guess__lt=0).count(),
}
try:
s = Survey.objects.get(username=u.username)
survey = s.dump()
except Survey.DoesNotExist:
survey = None
d['survey'] = survey
d['rounds'] = [r.round_data() for r in rounds]
# d['completed_hit'] = c.max_rounds == len(d['rounds'])
users.append(d)
print(json.dumps(users, cls=DecimalEncoder))
| {
"content_hash": "7c91d82032f087ff38d095317967178a",
"timestamp": "",
"source": "github",
"line_count": 48,
"max_line_length": 73,
"avg_line_length": 34.166666666666664,
"alnum_prop": 0.5317073170731708,
"repo_name": "adminq80/Interactive_estimation",
"id": "62400d07bacda4febff0b6e3b0f2b0390f642a8d",
"size": "1640",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "game/interactive/management/commands/dump_interactive.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "7033"
},
{
"name": "HTML",
"bytes": "56962"
},
{
"name": "JavaScript",
"bytes": "21105"
},
{
"name": "Nginx",
"bytes": "2324"
},
{
"name": "Python",
"bytes": "274453"
},
{
"name": "Shell",
"bytes": "8085"
}
],
"symlink_target": ""
} |
"""Sensors."""
import appdirs
import pathlib
import collections
import time
import numpy as np
import toml
from PySide2 import QtCore, QtWidgets
import WrightTools as wt
import yaqc
import pycmds
import pycmds.project.classes as pc
import pycmds.project.widgets as pw
config = toml.load(pathlib.Path(appdirs.user_config_dir("pycmds", "pycmds")) / "config.toml")
class Data(QtCore.QMutex):
def __init__(self):
QtCore.QMutex.__init__(self)
self.WaitCondition = QtCore.QWaitCondition()
self.shape = (1,)
self.size = 1
self.channels = {}
self.signed = []
self.map = None
def read(self):
return self.channels
def write(self, channels):
self.lock()
self.channels = channels
self.WaitCondition.wakeAll()
self.unlock()
def write_properties(self, shape, channels, signed=False, map=None):
self.lock()
self.shape = shape
self.size = np.prod(shape)
self.channels = channels
self.signed = signed
if not signed:
self.signed = [False] * len(self.channels)
self.map = map
self.WaitCondition.wakeAll()
self.unlock()
    def wait_for_update(self, timeout=5000):
        # Block until a writer calls wakeAll() (or the timeout, in ms, elapses).
        self.lock()
        self.WaitCondition.wait(self, timeout)
        self.unlock()
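# Editor's sketch (not part of the original module): a consumer thread would
# typically block on a Data instance until a producer calls write() or
# write_properties(), then read the freshest channel dict.  Only attributes
# defined on Data above are used here.
def _example_consume_latest(data):
    data.wait_for_update()
    return data.read()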
class Sensor(pc.Hardware):
settings_updated = QtCore.Signal()
def __init__(self, *args, **kwargs):
self.freerun = pc.Bool(initial_value=False)
self.Widget = kwargs.pop("Widget")
self.data = Data()
self.active = True
# shape
if "shape" in kwargs.keys():
self.shape = kwargs.pop("shape")
else:
self.shape = (1,)
# map
if "has_map" in kwargs.keys():
self.has_map = kwargs.pop("has_map")
else:
self.has_map = False
self.has_map = False # turning this feature off for now --Blaise 2020-09-25
self.measure_time = pc.Number(initial_value=np.nan, display=True, decimals=3)
super().__init__(*args, **kwargs)
self.settings_updated.emit()
self.freerun.write(True)
self.on_freerun_updated()
@property
def channel_names(self):
return list(self.data.channels.keys())
@property
def channels(self):
return self.data.channels
def get_headers(self):
out = collections.OrderedDict()
return out
def give_widget(self, widget):
self.widget = widget
self.gui.create_frame(widget)
def initialize(self):
self.wait_until_still()
self.freerun.updated.connect(self.on_freerun_updated)
self.update_ui.emit()
self.driver.update_ui.connect(self.on_driver_update_ui)
self.settings_updated.emit()
def load_settings(self, aqn):
pass
def measure(self):
self.q.push("measure")
def on_driver_update_ui(self):
self.update_ui.emit()
def on_freerun_updated(self):
self.q.push("loop")
def set_freerun(self, state):
self.freerun.write(state)
self.on_freerun_updated()
self.settings_updated.emit() # TODO: should probably remove this
def wait_until_still(self):
while self.busy.read():
self.busy.wait_for_update()
class Driver(pc.Driver):
settings_updated = QtCore.Signal()
running = False
def __init__(self, sensor, yaqd_port):
super().__init__()
self.client = yaqc.Client(yaqd_port)
# attributes
self.name = self.client.id()["name"]
self.enqueued = sensor.enqueued
self.busy = sensor.busy
self.freerun = sensor.freerun
self.data = sensor.data
self.shape = sensor.shape
self.measure_time = sensor.measure_time
self.thread = sensor.thread
def initialize(self):
self.measure()
pycmds.sensors.signals.sensors_changed.emit()
pycmds.sensors.signals.channels_changed.emit()
def loop(self):
while self.freerun.read() and not self.enqueued.read():
self.measure()
self.busy.write(False)
def measure(self):
timer = wt.kit.Timer(verbose=False)
with timer:
self.busy.write(True)
self.client.measure(loop=False)
while self.client.busy():
time.sleep(0.1)
out = self.client.get_measured()
del out["measurement_id"]
signed = [False for _ in out]
self.data.write_properties(self.shape, out, signed)
self.busy.write(False)
self.measure_time.write(timer.interval)
self.update_ui.emit()
def shutdown(self):
pass
class SensorWidget(QtWidgets.QWidget):
def __init__(self):
QtWidgets.QWidget.__init__(self)
def load(self, aqn_path):
# TODO:
pass
def save(self, aqn_path):
# TODO:
ini = wt.kit.INI(aqn_path)
ini.add_section("Virtual")
ini.write("Virtual", "use", True)
class Widget(QtWidgets.QWidget):
def __init__(self):
QtWidgets.QWidget.__init__(self)
layout = QtWidgets.QVBoxLayout()
self.setLayout(layout)
layout.setMargin(0)
input_table = pw.InputTable()
input_table.add("Virtual", None)
self.use = pc.Bool(initial_value=True)
input_table.add("Use", self.use)
layout.addWidget(input_table)
def load(self, aqn_path):
pass
def save(self, aqn_path):
ini = wt.kit.INI(aqn_path)
ini.add_section("virtual")
ini.write("virtual", "use", self.use.read())
| {
"content_hash": "5c879f4467ce78d8d41222bf40994b2a",
"timestamp": "",
"source": "github",
"line_count": 215,
"max_line_length": 93,
"avg_line_length": 26.49767441860465,
"alnum_prop": 0.5866245392311743,
"repo_name": "wright-group/PyCMDS",
"id": "b7b471c31da8695bc25470bab6b48114eb3984b1",
"size": "5697",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pycmds/sensors/_sensors.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "212"
},
{
"name": "C",
"bytes": "15060"
},
{
"name": "C++",
"bytes": "43260"
},
{
"name": "Python",
"bytes": "516847"
}
],
"symlink_target": ""
} |
import os.path
import sys
from alembic import command, util, autogenerate as autogen
from alembic.config import Config, CommandLine
from alembic.environment import EnvironmentContext
from alembic.script import ScriptDirectory
from ggrc import settings
import ggrc.app
from ggrc.extensions import get_extension_module, get_extension_modules
# Monkey-patch Alembic classes to enable configuration-per-module
# Monkey-patch ScriptDirectory to allow config-specified `versions` directory
_old_ScriptDirectory_from_config = ScriptDirectory.from_config
@classmethod
def ScriptDirectory_from_config(cls, config):
script_directory = _old_ScriptDirectory_from_config(config)
# Override location of `versions` directory to be independent of `env.py`
versions_location = config.get_main_option('versions_location')
if versions_location:
script_directory.versions = versions_location
return script_directory
ScriptDirectory.from_config = ScriptDirectory_from_config
# Monkey-patch EnvironmentContext to override `version_table` based on
# the config-specified extension module
_old_EnvironmentContext___init__ = EnvironmentContext.__init__
def EnvironmentContext___init__(self, config, script, **kw):
extension_module_name = config.get_main_option('extension_module_name')
kw['version_table'] = extension_version_table(extension_module_name)
return _old_EnvironmentContext___init__(self, config, script, **kw)
EnvironmentContext.__init__ = EnvironmentContext___init__
# Helpers for handling migrations
def get_extension_dir(module):
return os.path.dirname(os.path.abspath(module.__file__))
def get_extension_migrations_dir(module):
return os.path.join(
get_extension_dir(module),
'migrations',
)
def get_base_migrations_dir():
import ggrc
return get_extension_migrations_dir(ggrc)
def get_base_config_file():
return os.path.join(get_base_migrations_dir(), 'alembic.ini')
def make_extension_config(extension_module_name):
config = Config(get_base_config_file())
# Record the current `extension_module_name` in the config to make it
# available to `ScriptDirectory` and `EnvironmentContext`
config.set_main_option('extension_module_name', extension_module_name)
module = get_extension_module(extension_module_name)
# If the extension module contains a `migrations/env.py`, then use that,
# otherwise use `ggrc/migrations/env.py`
module_script_location = get_extension_migrations_dir(module)
if os.path.exists(os.path.join(module_script_location, 'env.py')):
script_location = module_script_location
else:
script_location = get_base_migrations_dir()
config.set_main_option('script_location', script_location)
# Specify location of `versions` directory to be independent of `env.py`
module_versions_location = os.path.join(module_script_location, 'versions')
config.set_main_option('versions_location', module_versions_location)
return config
def extension_version_table(module):
module_name = module if type(module) is str else module.__name__
return '{0}_alembic_version'.format(module_name)
def extension_migrations_list():
ret = []
for extension_module in get_extension_modules():
migrations_dir = get_extension_migrations_dir(extension_module)
if os.path.exists(migrations_dir):
ret.append(migrations_dir)
return ret
def all_extensions():
extension_modules = ['ggrc']
extension_modules.extend(getattr(settings, 'EXTENSIONS', []))
return extension_modules
# Additional commands for `migrate.py` command
def upgradeall(config=None):
'''Upgrade all modules'''
for module_name in all_extensions():
print("Upgrading {}".format(module_name))
config = make_extension_config(module_name)
command.upgrade(config, 'head')
def downgradeall(config=None, drop_versions_table=False):
'''Downgrade all modules'''
for module_name in reversed(all_extensions()):
print("Downgrading {}".format(module_name))
config = make_extension_config(module_name)
command.downgrade(config, 'base')
if drop_versions_table:
from ggrc.app import db
extension_module_name = config.get_main_option('extension_module_name')
db.session.execute('DROP TABLE {0}'.format(
extension_version_table(extension_module_name)))
class MigrateCommandLine(CommandLine):
def _generate_args(self, prog):
super(MigrateCommandLine, self)._generate_args(prog)
# Add subparsers for `upgradeall` and `downgradeall`
#subparsers = self.parser.add_subparsers()
subparsers = self.parser._subparsers._actions[-1]
downgradeall_subparser = subparsers.add_parser(
"downgradeall", help=downgradeall.__doc__)
downgradeall_subparser.add_argument(
"--drop-versions-table",
action="store_true",
help="Drop version tables after downgrading")
downgradeall_subparser.set_defaults(
cmd=(downgradeall, [], ["drop_versions_table"]))
upgradeall_subparser = subparsers.add_parser(
"upgradeall", help=upgradeall.__doc__)
upgradeall_subparser.set_defaults(
cmd=(upgradeall, [], []))
def main(args):
if len(args) < 3:
    print('usage: migrate module_name <alembic command string>')
return -1
extension_module_name = args[1]
cmd_line = MigrateCommandLine()
options = cmd_line.parser.parse_args(args[2:])
cfg = make_extension_config(extension_module_name)
cmd_line.run_cmd(cfg, options)
if __name__ == '__main__':
main(sys.argv)
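# Editor's note -- example invocations, assuming this module is exposed as a
# ``migrate.py`` script as the usage string in main() suggests:
#
#   python migrate.py ggrc upgrade head                         # one module
#   python migrate.py ggrc upgradeall                           # every extension
#   python migrate.py ggrc downgradeall --drop-versions-table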
| {
"content_hash": "56b0083c338ffd5eb7cd1cf2556a5f6e",
"timestamp": "",
"source": "github",
"line_count": 152,
"max_line_length": 77,
"avg_line_length": 35.94078947368421,
"alnum_prop": 0.732930624199158,
"repo_name": "kr41/ggrc-core",
"id": "2072c2c01c8ac2dbf1b50ff3e73a94a36df273bc",
"size": "5576",
"binary": false,
"copies": "7",
"ref": "refs/heads/develop",
"path": "src/ggrc/migrate.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "191076"
},
{
"name": "Cucumber",
"bytes": "136322"
},
{
"name": "HTML",
"bytes": "1079513"
},
{
"name": "JavaScript",
"bytes": "1718280"
},
{
"name": "Makefile",
"bytes": "7103"
},
{
"name": "Mako",
"bytes": "4320"
},
{
"name": "Python",
"bytes": "2389878"
},
{
"name": "Shell",
"bytes": "30802"
}
],
"symlink_target": ""
} |
import subprocess
import sys
import platform
import shutil
import logging
import time
import os
from os.path import dirname, join, abspath
from optparse import OptionParser
###########################
# This variable is important
PROJECT_NAME = 'ATRR'
########################
PROJECT_VERSION = '3.0STD0'
###########################
# DEBUG mode or not
IS_PROD = True
###########################
if type('') is not type(b''):
def u(s):
return s
bytes_type = bytes
unicode_type = str
basestring_type = str
else:
def u(s):
return s.decode('unicode_escape')
bytes_type = str
unicode_type = unicode
basestring_type = basestring
_TO_UNICODE_TYPES = (unicode_type, type(None))
def to_unicode(value):
"""Converts a string argument to a unicode string.
If the argument is already a unicode string or None, it is returned
unchanged. Otherwise it must be a byte string and is decoded as utf8.
"""
if isinstance(value, _TO_UNICODE_TYPES):
return value
assert isinstance(value, bytes_type), \
"Expected bytes, unicode, or None; got %r" % type(value)
return value.decode("utf-8")
# to_unicode was previously named _unicode not because it was private,
# but to avoid conflicts with the built-in unicode() function/type
_unicode = to_unicode
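# Editor's illustration: to_unicode(b'caf\xc3\xa9') -> u'café', while unicode
# strings and None are returned unchanged.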
try:
import curses
except ImportError:
curses = None
def _stderr_supports_color():
color = False
if curses and sys.stderr.isatty():
try:
curses.setupterm()
if curses.tigetnum("colors") > 0:
color = True
except Exception:
pass
return color
class LogFormatter(logging.Formatter):
"""Log formatter used in Tornado.
Key features of this formatter are:
* Color support when logging to a terminal that supports it.
* Timestamps on every log line.
* Robust against str/bytes encoding problems.
This formatter is enabled automatically by
`tornado.options.parse_command_line` (unless ``--logging=none`` is
used).
"""
def __init__(self, color=True, *args, **kwargs):
logging.Formatter.__init__(self, *args, **kwargs)
self._color = color and _stderr_supports_color()
if self._color:
fg_color = (curses.tigetstr("setaf") or
curses.tigetstr("setf") or "")
if (3, 0) < sys.version_info < (3, 2, 3):
fg_color = unicode_type(fg_color, "ascii")
self._colors = {
logging.DEBUG: unicode_type(curses.tparm(fg_color, 4), # Blue
"ascii"),
logging.INFO: unicode_type(curses.tparm(fg_color, 2), # Green
"ascii"),
logging.WARNING: unicode_type(curses.tparm(fg_color, 3), # Yellow
"ascii"),
logging.ERROR: unicode_type(curses.tparm(fg_color, 1), # Red
"ascii"),
}
self._normal = unicode_type(curses.tigetstr("sgr0"), "ascii")
def format(self, record):
try:
record.message = record.getMessage()
except Exception as e:
record.message = "Bad message (%r): %r" % (e, record.__dict__)
assert isinstance(record.message, basestring_type) # guaranteed by logging
prefix = ''
def safe_unicode(s):
try:
return _unicode(s)
except UnicodeDecodeError:
return repr(s)
if self._color:
formatted = prefix + self._colors.get(record.levelno, self._normal) + safe_unicode(record.message)
else:
formatted = prefix + safe_unicode(record.message)
if record.exc_info:
if not record.exc_text:
record.exc_text = self.formatException(record.exc_info)
if record.exc_text:
lines = [formatted.rstrip()]
lines.extend(safe_unicode(ln) for ln in record.exc_text.split('\n'))
formatted = '\n'.join(lines)
return formatted.replace("\n", "\n ")
############################################################
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO if IS_PROD else logging.DEBUG)
channel = logging.StreamHandler()
channel.setFormatter(LogFormatter())
logger.addHandler(channel)
ROOT_DIR = dirname(os.path.abspath(__file__))
BUILDOUT_DIR = join(ROOT_DIR, 'buildout')
BUILDOUT_BIN_DIR = join(BUILDOUT_DIR, 'bin')
if sys.platform == 'win32':
BUILDOUT_CMD = join(BUILDOUT_BIN_DIR, 'buildout-script.py')
else:
BUILDOUT_CMD = join(BUILDOUT_BIN_DIR, 'buildout')
BUILDOUT_CFG = join(BUILDOUT_DIR, 'buildout.cfg')
BUILDOUT_PROD_CFG = join(BUILDOUT_DIR, 'for_release.cfg')
DOWNLOAD_DIR = join(BUILDOUT_DIR, join('downloads' if IS_PROD else 'downloads_dev', 'dist'))
_interpreter = 'python{0}'.format('_dev' if os.path.exists(join(BUILDOUT_BIN_DIR, 'python_dev')) else '')
python_interpreter = '{0}{1}'.format(_interpreter, '-script.py' if sys.platform == 'win32' else '')
buildout_python = join(BUILDOUT_BIN_DIR, python_interpreter)
def is_bitnami_env():
return sys.executable.endswith('.python.bin')
def get_executable_python():
import sys
if is_bitnami_env():
executable_python = 'python'
else:
executable_python = sys.executable
return executable_python
def splitter(split):
logger.info(split * 60)
def print_error():
logger.error(",------.,------. ,------. ,-----. ,------.")
logger.error("| .---'| .--. '| .--. '' .-. '| .--. '")
logger.error("| `--, | '--'.'| '--'.'| | | || '--'.'")
logger.error("| `---.| |\ \ | |\ \ ' '-' '| |\ \ ")
logger.error("`------'`--' '--'`--' '--' `-----' `--' '--'")
def print_project_name():
#http://patorjk.com/software/taag/#p=display&f=Soft&t=ATRR
logger.warn(" ,---. ,--------.,------. ,------.")
logger.warn(" / O \\'--. .--'| .--. '| .--. '")
logger.warn("| .-. | | | | '--'.'| '--'.'")
logger.warn("| | | | | | | |\ \ | |\ \\")
logger.warn("`--' `--' `--' `--' '--'`--' '--'")
# 1. if < 0.7, uninstall the old version
# 2. install the latest version
# python ez_setup.py --download-base=setuptools-1.1.6.tar.gz --user
def _check_setuptools_version():
splitter('-')
logger.info('[CHECKING Setuptools]')
splitter('-')
msg = 'Dependent Setuptools version for {1} installation. [{0}]'
try:
import setuptools
version = setuptools.__version__
if version < '0.7':
_install_setuptools()
else:
logger.info(msg.format('OK', PROJECT_NAME))
except ImportError:
logger.error(msg.format('failed', PROJECT_NAME))
logger.error('Setuptools is not installed')
logger.info('Prepare install setuptools...')
_install_setuptools()
logger.info(' ')
def _install_setuptools():
#import site; site.getsitepackages()
#['/usr/local/lib/python2.7/dist-packages', '/usr/lib/python2.7/dist-packages']
#setuptools_package = join(ROOT_DIR, join('utils', 'setuptools-1.1.6.tar.gz'))
util_dir = join(ROOT_DIR, 'utils')
command = r"cd utils " \
" && " \
"{0} {1} " \
"--download-base={2}".format(get_executable_python(),
join(util_dir, 'ez_setup.py'),
util_dir + os.sep)
if sys.platform in ['linux2', 'darwin']:
# if python is in VirtualEnv, ignore the user level
if not hasattr(sys, 'real_prefix'):
command += ' --user'
logger.debug('[command]{0}'.format(command))
subprocess.check_call(command, shell=True)
def _check_command(cmd):
if sys.platform in ['linux2', 'darwin']:
splitter('-')
logger.info('[CHECKING {0}]'.format(cmd))
splitter('-')
msg = 'Dependent {1} for {2} installation. [{0}]'
command = ['which', cmd]
try:
result = subprocess.call(command)
except subprocess.CalledProcessError:
result = -1
if result == 0:
logger.info(msg.format('OK', cmd, PROJECT_NAME))
else:
logger.error(msg.format('failed', cmd, PROJECT_NAME))
print_error()
logger.info('Install {0} as prerequisite first.'.format(cmd))
exit(0)
logger.info(' ')
def _check_make():
_check_command('make')
def _check_gcc():
_check_command('gcc')
def _check_g2plus():
_check_command('g++')
def _check_space_in_cur_dir():
#todo: Support the path with space in next version
cur_dir = dirname(abspath(__file__))
if cur_dir.find(' ') > 0:
print_error()
        logger.error('Please make sure {0}\'s root path does NOT contain a SPACE'.format(PROJECT_NAME))
exit(0)
def check_prerequisites():
splitter('=')
logger.info('[CHECKING PREREQUISITIONS]')
splitter('=')
_check_make()
_check_gcc()
_check_g2plus()
_check_setuptools_version()
_check_space_in_cur_dir()
#todo: check if mysql existed on Posix compatible platform
# check /usr/bin/mysql_config existed
#todo: Package the Mysql-python lib with distribute
#todo: test installing the reportlab without network connection
logger.info(' ')
def is_python_version_valid():
min_version = '2.6.5'
version = sys.version
if version < min_version:
splitter('-')
print_error()
logger.error('####### ERROR: PYTHON VERSION IS TOO LOW #########')
logger.error('Python version is too low, please use >%s version' % min_version)
logger.error('Quit.')
return False
return True
def check_admin_right():
"""
:check_Permissions
echo Administrative permissions required. Detecting permissions...
net session >nul 2>&1
if %errorLevel% == 0 (
echo Success: Administrative permissions confirmed.
) else (
echo Failure: Current permissions inadequate.
)
pause >nul
"""
if sys.platform == 'win32':
msg = 'The Admin Permission. [{0}]'
check_script = join(ROOT_DIR, join('utils', 'check_Permissions.bat'))
#logger.debug(check_script)
splitter('=')
logger.info('[CHECKING ADMIN PERMISSION]')
splitter('=')
rtn_code = subprocess.check_output(check_script)
#logger.error(rtn_code)
if rtn_code is not None and rtn_code[0] == '0':
logger.info(msg.format('OK'))
logger.info('\n')
else:
logger.error(msg.format('failed'))
print_error()
logger.error('####### ERROR: ADMINISTRATOR PRIVILEGES REQUIRED #########')
logger.error('Please open command terminal with Administrative permission!!!')
logger.error('\n')
exit(0)
def platform_validation():
info_list = platform.uname()
logger.info('[PLATFORM] {0}'.format(info_list[0]))
logger.info(' ')
def wait_for_required_bin(app_name):
#build_out_path = os.path.join(BUILDOUT_BIN_DIR, appName)
while True:
if os.path.exists(app_name):
break
else:
time.sleep(5)
def run_buildout(task=None):
splitter('=')
logger.info("[BUILDOUT RUNNING]")
splitter('=')
wait_for_required_bin(BUILDOUT_CMD)
buildout_cfg_file = BUILDOUT_PROD_CFG if IS_PROD else BUILDOUT_CFG
command = [get_executable_python(), BUILDOUT_CMD, ]
if task is None:
command.extend(['-c',
buildout_cfg_file])
else:
command.extend(['install', task,
'-c', buildout_cfg_file])
subprocess.call(command, shell=True if sys.platform == 'win32' else False)
logger.info('[command] {0}'.format(' '.join(command)))
logger.info('\n')
def setup_buildout_env():
splitter('=')
logger.info('[BOOTSTRAP RUNNING]')
splitter('=')
logger.info('Setup the BUILDOUT environment...')
logger.info(' ')
_download_dir = 'downloads' if IS_PROD else 'downloads_dev'
if not os.path.exists(DOWNLOAD_DIR):
        logger.info('{0} folder does not exist, creating a new one...'.format(_download_dir))
os.makedirs(DOWNLOAD_DIR)
logger.debug('Copying the zc.buildout package to download folder to setup env.')
zc_buildout_name = 'zc.buildout-2.2.1.tar.gz'
zc_buildout_package = join(ROOT_DIR, join('utils', zc_buildout_name))
shutil.copy(zc_buildout_package, join(DOWNLOAD_DIR, zc_buildout_name))
buildout_conf_file = BUILDOUT_PROD_CFG if IS_PROD else BUILDOUT_CFG
command = [
get_executable_python(),
join(BUILDOUT_DIR, 'bootstrap.py'),
'-c', buildout_conf_file,
'-f', DOWNLOAD_DIR,
'-v', '2.2.1',
]
logger.debug('[command]'+' '.join(command))
subprocess.call(command)
logger.debug('\n')
def gen_key_by_proj_name(key, _project_name=None):
if key is None or key == '':
raise ValueError("{0} can't be None or empty")
if _project_name is None:
project_name = os.environ['QT_PROJ_NAME']
else:
project_name = _project_name
return '{0}_{1}'.format(project_name, key.upper())
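# Editor's illustration: gen_key_by_proj_name('home', 'ATRR') returns 'ATRR_HOME'.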
def set_env_vars():
os.environ['QT_PROJ_NAME'] = PROJECT_NAME
os.environ[gen_key_by_proj_name('HOME')] = dirname(abspath(__file__))
os.environ[gen_key_by_proj_name('IS_PROD')] = str(IS_PROD)
def create_buildout_env():
if os.path.exists(buildout_python):
subprocess.call([get_executable_python(),
buildout_python,
'deploy.py',
'-k'],
shell=True if sys.platform == 'win32' else False)
if sys.platform == 'win32':
subprocess.call(['cls'], shell=True)
else:
subprocess.call('clear')
splitter('*')
splitter('*')
print_project_name()
logger.warn(' ' * 32 + '{0} [{1}] INSTALLATION {2} '.format(PROJECT_NAME,
PROJECT_VERSION,
'(dev mode)' if not IS_PROD else ''))
splitter('*')
splitter('*')
time.sleep(3)
logger.debug(' ')
platform_validation()
#check_admin_right()
if is_python_version_valid():
check_prerequisites()
setup_buildout_env()
run_buildout()
logger.info('\n')
else:
exit(0)
def run_deploy():
if os.path.exists(buildout_python):
subprocess.call([get_executable_python(), buildout_python, 'deploy.py'],
shell=True if sys.platform == 'win32' else False)
else:
logger.info('\n')
splitter('*')
print_error()
        logger.info('Failed to build the product runtime environment.')
splitter('*')
logger.info('\n')
if __name__ == '__main__':
usage = '''\
[DESIRED PYTHON FOR APPLICATION] deploy.py [options]
Bootstraps {0} django application.
Simply run this script in a directory containing a buildout.cfg, using the
Python that you want bin/buildout to use.
'''.format(PROJECT_NAME)
parser = OptionParser(usage=usage)
parser.add_option("-b", "--build_env",
dest="build_env",
action="store_true",
default=False,
help="Setup buildout environment")
parser.add_option("-i", "--init",
dest="init",
action="store_true",
default=False,
help="Init buildout settings")
parser.add_option("-s", "--start",
dest="start",
action="store_true",
default=False,
help="Start app server(s)")
parser.add_option("-e", "--restart",
dest="restart",
action="store_true",
default=False,
help="Restart app and web server(s)")
parser.add_option("-k", "--stop",
dest="stop",
action="store_true",
default=False,
help="Stop host server")
parser.add_option("-a", "--stop-all",
dest="stop_all",
action="store_true",
default=False,
help="Stop all servers")
parser.add_option("-u", "--upgrade",
dest="upgrade",
action="store_true",
default=False,
help="Upgrade")
options, args = parser.parse_args()
set_env_vars()
if options.build_env is not None and options.build_env:
create_buildout_env()
elif options.init is not None and options.init:
create_buildout_env()
run_deploy()
else:
from qt.deploy.deploy import Deploy
deploy = Deploy()
if options.upgrade is not None and options.upgrade:
deploy.upgrade()
if options.start is not None and options.start:
deploy.splitter()
deploy.start_default_app_server()
#deploy.splitter()
#deploy.start_all_user_app_servers()
deploy.splitter()
deploy.start_nginx_server()
deploy.splitter()
elif options.restart is not None and options.restart:
#deploy.splitter()
#deploy.kill_all_app_servers()
deploy.splitter()
deploy.kill_all_app_servers()
deploy.splitter()
deploy.stop_default_app_server()
deploy.start_default_app_server()
#deploy.splitter()
#deploy.start_all_user_app_servers()
deploy.splitter()
deploy.restart_nginx_server()
deploy.splitter()
host_name, port, ip = (deploy.helper.get_host_name(),
deploy.get_web_server_port(),
deploy.helper.get_host_ip())
logger.info('\n')
deploy.splitter('*')
deploy.splitter('-')
logger.info("Service is up now")
deploy.splitter('-')
logger.info(' ')
            logger.info("- Open one of the following addresses in a browser to visit the application.")
logger.info(' http://%s:%s' % (host_name, port))
logger.info(' http://%s:%s' % (deploy.helper.get_host_ip(), deploy.get_web_server_port()))
logger.info(' ')
deploy.splitter('*')
elif options.stop is not None and options.stop:
deploy.splitter()
deploy.kill_all_app_servers()
deploy.stop_default_app_server()
deploy.splitter()
deploy.stop_nginx_server()
deploy.splitter()
elif options.stop_all is not None and options.stop_all:
deploy.splitter()
deploy.kill_all_app_servers()
deploy.stop_default_app_server()
deploy.kill_all_app_servers()
deploy.splitter()
deploy.stop_nginx_server()
deploy.splitter()
else:
deploy.deploy()
#try:
#except Exception:
# raise ImportError('qt.deploy.deploy not existed')
| {
"content_hash": "52e168d3ac84f00b4f016e06fa759078",
"timestamp": "",
"source": "github",
"line_count": 663,
"max_line_length": 110,
"avg_line_length": 29.553544494720967,
"alnum_prop": 0.5453199959171174,
"repo_name": "seawaywen/memodir_skels",
"id": "9f86cdfed21c33dbfde5ea9c05b74a7532a9dade",
"size": "19596",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "memodir/skels/templates/buildout_project/deploy.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "533966"
},
{
"name": "JavaScript",
"bytes": "1295964"
},
{
"name": "Python",
"bytes": "49246"
},
{
"name": "Shell",
"bytes": "140"
}
],
"symlink_target": ""
} |
import os
from ec2 import lookup
from ec2.base.action import Action, CLMException
from ec2.error import InvalidAction, MethodNotAllowed, InvalidArgument, InvalidURI, EC2Exception
from ec2.settings import BUCKETS_PATH
"""@package src.base.s3action
S3 base action
@copyright Copyright (c) 2012 Institute of Nuclear Physics PAS <http://www.ifj.edu.pl/>
@author Łukasz Chrząszcz <[email protected]>
"""
class S3Action(object):
"""Superclass for S3 API actions."""
def __init__(self, parameters):
self.parameters = parameters
path = self.parameters['path_info']
user_name = self.parameters['authorization'].split(' ')[1].split(':')[0]
path = user_name + '/' + path
if path.find('..') != -1:
raise InvalidURI
path = os.path.normpath(path) # is it required?
if path.startswith('/'):
path = path[1:]
slash = path.find('/')
if slash != -1:
bucket_name = path[:slash + 1]
else:
bucket_name = path
normpath = os.path.join(BUCKETS_PATH, path)
self.path = normpath
self.bucket_name = bucket_name
def __new__(cls, parameters):
"""Return an object of a concrete S3 action class.
Args:
parameters <dict> of the action
"""
concrete_class = None
if cls == S3Action:
if parameters['query_string']:
raise MethodNotAllowed()
path = os.path.normpath(parameters['path_info'])
if path.startswith('/'):
path = path[1:]
if path.endswith('/'):
path = path[:-1]
path_parts = path.split('/')
if len(path_parts) == 1: # bucket
if parameters['request_method'] == 'GET' or parameters['request_method'] == 'HEAD':
concrete_class_name = 'ListBucket'
else:
if parameters['request_method'] == 'PUT':
concrete_class_name = 'PutObject'
if parameters['request_method'] == 'GET':
concrete_class_name = 'GetObject'
for concrete_class in cls.__subclasses__():
if concrete_class.__name__ == concrete_class_name:
break
else:
concrete_class = cls
action = super(S3Action, cls).__new__(concrete_class, parameters)
action.concrete_class_name = concrete_class_name
return action
def _get_template(self):
name = '%s.xml' % self.concrete_class_name
return lookup.get_template(name)
def execute(self):
context = self._execute()
# if body is dict then parse it to xml
if context['body'].__class__ is dict:
template = self._get_template()
response = template.render(**context['body'])
else:
# if it isn't dict then pass that object directly
response = context['body']
result = {'body': response,
'headers': context.get('headers')}
return result
| {
"content_hash": "295f67179dce35e3aadda1b414157672",
"timestamp": "",
"source": "github",
"line_count": 101,
"max_line_length": 99,
"avg_line_length": 31.475247524752476,
"alnum_prop": 0.5555206039635106,
"repo_name": "cc1-cloud/cc1",
"id": "1483ea8473547e9dbeadfd41a564a6a20b2963fe",
"size": "3878",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/ec2/base/s3action.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "63829"
},
{
"name": "HTML",
"bytes": "323260"
},
{
"name": "JavaScript",
"bytes": "458924"
},
{
"name": "Python",
"bytes": "1466456"
},
{
"name": "Shell",
"bytes": "14317"
}
],
"symlink_target": ""
} |
#!/opt/local/bin/python2.5
import os, re, base64
image_dir='../../Source/images'
dst='../../Source/images/MifImage.js'
dst_ie='../../Source/images/MifImage.mht'
data={}
for filename in os.listdir(image_dir):
if not re.search('\.(gif|png|jpg)$', filename):
continue
else:
file=os.path.join(image_dir, filename)
data[filename]=base64.b64encode(open(file).read())
result='/*\nContent-Type: multipart/related; boundary="SEPARATOR"\n\n*/\n'
result+="var MifImage={\n"
for filename in data:
header="""
--SEPARATOR
Content-Type:image/%s
Content-Location:%s
Content-Transfer-Encoding:base64
*/
""" % (filename[-3:], filename)
result+="\n\t'"+filename+"': /*\n"+header+"\n'"+data[filename]+"',\n"
result=result[0:-2]+"\n"
result+="\n}\n"
result+="\n/*\n--SEPARATOR--\n*/\n"
open(dst, 'w').write(result)
| {
"content_hash": "f1567b62cd904588358bba6a0a53cfb8",
"timestamp": "",
"source": "github",
"line_count": 31,
"max_line_length": 74,
"avg_line_length": 26.06451612903226,
"alnum_prop": 0.6608910891089109,
"repo_name": "creaven/mif",
"id": "b81b23471edee2ff75daab2949d6b09b9999c827",
"size": "808",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "assets/scripts/img2base64.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "458911"
},
{
"name": "PHP",
"bytes": "5334"
}
],
"symlink_target": ""
} |
from docopt_subcommands import command, main
# We need to provide a new top-level documentation template which contains
# our --verbose option.
DOC_TEMPLATE = """{program}
Usage: {program} [options] <command> [<args> ...]
Options:
-h --help Show this screen.
-v --version Show the program version.
--verbose Use verbose output
Available commands:
{available_commands}
See '{program} <command> -h' for help on specific commands.
"""
# Use the `command` decorator to add subcommand functions.
@command()
def foo_handler(precommand_args, args):
"""usage: {program} foo <name>
Apply foo to a name.
"""
if precommand_args['--version']:
print('version!')
return
if precommand_args['--verbose']:
print('[verbose mode]')
print("Foo, {}".format(args['<name>']))
@command()
def bar_handler(precommand_args, args):
"""usage: {program} bar [options] <name>
Apply bar to a name.
Options:
--fnord Insert a fnord
"""
if precommand_args['--version']:
print('version!')
return
if precommand_args['--verbose']:
print('[verbose mode]')
print("Bar, {}".format(args['<name>']))
if args['--fnord']:
print('fnord')
# Pass the program name and our top-level doc template to `main()`.
main(program='example', doc_template=DOC_TEMPLATE)
| {
"content_hash": "f3eb2d9659d1c8276dd2ccaa405cfe72",
"timestamp": "",
"source": "github",
"line_count": 57,
"max_line_length": 74,
"avg_line_length": 23.736842105263158,
"alnum_prop": 0.6245380635624538,
"repo_name": "abingham/docopt-subcommands",
"id": "d397ebe914426e3b9e166e03fdda4769bb657178",
"size": "1483",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "examples/precommand_options.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "12971"
}
],
"symlink_target": ""
} |
"""
Code containing Growler's asyncio.Protocol code for handling all
streaming (TCP) connections.
This module has a 'hard' dependency on asyncio, so if you're using a
diffent event loop (for example, curio) then you should NOT be using
this class.
Alternative Protocol classes may use this as an example.
For more information, see the :module:`growler.responder` module
for event-loop independent client handling.
"""
from typing import Callable
import asyncio
import logging
from growler.responder import GrowlerResponder, ResponderHandler
ResponderFactoryType = Callable[['GrowlerProtocol'], GrowlerResponder]
logger = logging.getLogger(__name__)
class GrowlerProtocol(asyncio.Protocol, ResponderHandler):
"""
The 'base' protocol for handling all requests made to a growler
application.
This implementation internally uses a stack of 'responder'
objects, the top of which will receive incoming client data via
the `on_data` method.
This design provides a way to temporarily (or permanently) modify
the server's behavior.
To change behavior when a client has already connected, such as
during an HTTP upgrade or to support starttls encryption, simply
add a new responder to the protocol's stack.
If all responders are removed, the :method:`handle_error` method
will be called with the IndexError exception.
This method is not implemented by default and SHOULD be
implemented in all subclasses.
Because of this delegate-style design, the user should NOT
overload the :method:`data_received` method when creating a
subclass of GrowlerProtocol.
To simplify the creation of the initial responder, a factory (or
simply the type/constructor) is passed to the GrowlerProtocol
object upon construction.
This factory is run when 'connection_made' is called on the
protocol (which should happen immediately after construction).
It is recommended that subclasses of :class:`GrowlerProtocol`
specify a particular *default responder* by setting the keyword
argument, `responder_factory`, in a call to super().__init__().
Two methods, :method:`factory` and :method:`get_factory`, are
provided to make the construction of servers 'easy', without the
need for lambdas.
If you have a subclass:
.. code:: python
class GP(GrowlerProtocol):
...
you can create a server easy using this protocol via:
.. code:: python
asyncio.get_event_loop().create_server(GP.factory, ...)
or
.. code:: python
asyncio.get_event_loop().create_server(GP.get_factory('a','b'), ...)
arguments passed to get_factory in the later example are
forwarded to the protocol constructor (called whenever a client
connects).
Note, calling GP.factory() will not work as `create_server`
expects the factory and *not an instance* of the protocol.
"""
def __init__(self, _loop, responder_factory: ResponderFactoryType):
"""
Args:
responder_factory (callable): Returns the first responder
for this protocol.
This could simply be a constructor for the type (i.e.
the type's name).
This function will only be passed the protocol object.
                The event loop should be acquired from the protocol via
the 'loop' member.
The responder returned only needs to have a method
defined called 'on_data' which gets passed the bytes
received.
Note: 'on_data' should only be a function and NOT a
coroutine.
"""
        from typing import List
self.make_responder = responder_factory
self.log = logger.getChild("id=%x" % id(self))
self.responders: List[GrowlerResponder] = []
self.transport = None
self.is_done_transmitting = False
def connection_made(self, transport: asyncio.BaseTransport):
"""
(asyncio.Protocol member)
Called upon when there is a new socket connection.
This creates a new responder (as determined by the member
'responder_type') and stores in a list.
Incoming data from this connection will always call on_data
to the last element of this list.
Args:
transport (asyncio.Transport): The Transport handling the
socket communication
"""
self.transport = transport
self.responders.append(self.make_responder(self))
try:
good_func = callable(self.responders[0].on_data)
except AttributeError:
good_func = False
if not good_func:
err_str = "Provided responder MUST implement an 'on_data' method"
raise TypeError(err_str)
self.log.info("Connection from %s:%d",
self.remote_hostname, self.remote_port)
def connection_lost(self, exc):
"""
(asyncio.Protocol member)
Called upon when a socket closes.
This class simply logs the disconnection
Args:
exc (Exception or None): Error if connection closed
unexpectedly, None if closed cleanly.
"""
if exc:
self.log.error("connection_lost %r", exc)
else:
self.log.info("connection_lost")
def data_received(self, data):
"""
(asyncio.Protocol member)
Called upon when there is new data to be passed to the
protocol.
The data is forwarded to the top of the responder stack (via
the on_data method).
        If an exception occurs while this is going on, the Exception
is forwarded to the protocol's handle_error method.
Args:
data (bytes): Bytes from the latest data transmission
"""
try:
self.responders[-1].on_data(data)
except Exception as error:
self.handle_error(error)
def eof_received(self):
"""
(asyncio.Protocol member)
Called upon when the client signals it will not be sending
any more data to the server.
Default behavior is to simply set the `is_done_transmitting`
property to True.
"""
self.is_done_transmitting = True
self.log.info("eof_received")
def handle_error(self, error):
"""
An error handling function which will be called when an error
is raised during a responder's :method:`on_data()` function.
There is no default functionality and all subclasses SHOULD
overload this.
Args:
error (Exception): The exception raised from the code
"""
raise NotImplementedError(error)
@classmethod
def factory(cls, *args, **kw):
"""
A class function which simply calls the constructor.
        Useful for explicitly stating that this is a factory.
All arguments are forwarded to the constructor.
"""
return cls(*args, **kw)
@classmethod
def get_factory(cls, *args, **kw):
"""
A class function which returns a runnable which calls the
factory function (i.e. the constructor) of the class with
the arguments provided.
        This should make it easy to bind `GrowlerProtocol`
construction explicitly.
All arguments are forwarded to the constructor.
"""
from functools import partial
return partial(cls.factory, *args, **kw)
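# Editor's illustrative sketch (not part of the original source): per the
# GrowlerProtocol docstring, a responder only needs a plain (non-coroutine)
# on_data(bytes) method, and the responder factory receives the protocol
# object.  A minimal echo responder could therefore look like this:
class _ExampleEchoResponder:
    def __init__(self, protocol):
        self.protocol = protocol
    def on_data(self, data):
        # Send the received bytes straight back over the protocol's transport.
        self.protocol.transport.write(data)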
# clean namespace
del Callable
| {
"content_hash": "568ce2bbbed5903bb93f71479b298a6f",
"timestamp": "",
"source": "github",
"line_count": 215,
"max_line_length": 77,
"avg_line_length": 35.306976744186045,
"alnum_prop": 0.647740745619813,
"repo_name": "akubera/Growler",
"id": "6ce163363a9b820ce45ec3a66ff04edc508db485",
"size": "7621",
"binary": false,
"copies": "2",
"ref": "refs/heads/dev",
"path": "growler/aio/protocol.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "249705"
}
],
"symlink_target": ""
} |
import base_solver as base
import game
from lib import helpers
import numpy as np
import redis
r = redis.StrictRedis(host='localhost', port=6379, db=0)
STATE_MISS = 0
STATE_HIT = 1
STATE_UNKNOWN = 2
SHIP_SIZES = helpers.SHIP_SIZES
class OpenCLSolver(base.BaseSolver):
def __init__(self):
super(OpenCLSolver,self).__init__()
        # Keep the default (all-unknown) board state on the instance.
        self.current_state = np.empty([10, 10], dtype=np.uint8)
        self.current_state.fill(STATE_UNKNOWN)
def mark_tile_used(self,tile):
self.remaining_tiles.remove(tile)
def get_next_target_random(self):
ret = self.tiles[self.turn]
self.turn+=1
return ret
def get_next_target(self,misses,hits):
state = self.get_state(hits,misses)
target = self.get_from_cache(state)
if target:
return self.string_coordinates_to_array(target)
shipBoards = helpers.get_ship_boards(misses,matrix=True)
s12 = helpers.shortInterpolate(shipBoards[0],shipBoards[1],9)
s123 = helpers.shortInterpolate(s12,shipBoards[2],12)
s45 = helpers.shortInterpolate(shipBoards[3],shipBoards[4],5)
print "Combinations to Compute: " + str(len(s123)*len(s45))
target = helpers.opencl_interpolate(helpers.bool2IntArray(s123),helpers.bool2IntArray(s45),hits)
self.set_to_cache(state,target)
return self.int_coordinates_to_array(target)
def get_state(self,hits,misses):
state = np.copy(hits)
for miss in misses:
state[miss[0]][miss[1]] = 2
print state
return state
def get_from_cache(self,state):
#return None
key = ""
for row in state:
key += "".join(map(str,row))
#print key
return r.get(key)
def set_to_cache(self,state,target):
key = ""
for row in state:
key += "".join(map(str,row))
r.set(key,target)
def string_coordinates_to_array(self,coord):
return [int(coord[0]),int(coord[1])]
def int_coordinates_to_array(self,coord):
return [int(coord / 10),int(coord % 10)]
def play_game(bs_game,solver):
limit = 100
misses = []
hits = np.zeros([10,10],dtype=np.uint8)
for turn in xrange(limit):
print misses
tile = solver.get_next_target(misses,hits)
print tile
ret = bs_game.play_turn(tile)
#solver.mark_tile_used(tile)
print ret
if (ret["code"] == STATE_MISS):
misses.append(tile)
if (ret["code"] == STATE_HIT):
x,y = tile
hits[x][y] = 1
if (ret["code"] == -1):
print(turn +1)
return
solver = OpenCLSolver();
rounds = 1
for x in xrange(rounds):
bs_game = game.BattleshipGame()
solver.reset()
play_game(bs_game,solver)
| {
"content_hash": "7882db69f1315e3a64ea67cd9e62c9bd",
"timestamp": "",
"source": "github",
"line_count": 110,
"max_line_length": 98,
"avg_line_length": 22.172727272727272,
"alnum_prop": 0.6859368593685937,
"repo_name": "nicofff/baas",
"id": "3857d93c16994e18b3ab24cb6d284086c1210cf7",
"size": "2439",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "solvers/opencl_solver.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "JavaScript",
"bytes": "11993"
},
{
"name": "Python",
"bytes": "19398"
}
],
"symlink_target": ""
} |
from functools import wraps
from sqlalchemy.orm.collections import InstrumentedList as _InstrumentedList
from .arrow import ArrowType # noqa
from .choice import Choice, ChoiceType # noqa
from .color import ColorType # noqa
from .country import CountryType # noqa
from .currency import CurrencyType # noqa
from .email import EmailType # noqa
from .encrypted import EncryptedType # noqa
from .ip_address import IPAddressType # noqa
from .json import JSONType # noqa
from .locale import LocaleType # noqa
from .ltree import LtreeType # noqa
from .password import Password, PasswordType # noqa
from .pg_composite import ( # noqa
CompositeArray,
CompositeType,
register_composites,
remove_composite_listeners
)
from .phone_number import ( # noqa
PhoneNumber,
PhoneNumberParseException,
PhoneNumberType
)
from .range import ( # noqa
DateRangeType,
DateTimeRangeType,
DateTimeTzRangeType,
IntRangeType,
NumericRangeType
)
from .scalar_list import ScalarListException, ScalarListType # noqa
from .timezone import TimezoneType # noqa
from .ts_vector import TSVectorType # noqa
from .url import URLType # noqa
from .uuid import UUIDType # noqa
from .weekdays import WeekDaysType # noqa
class InstrumentedList(_InstrumentedList):
"""Enhanced version of SQLAlchemy InstrumentedList. Provides some
additional functionality."""
def any(self, attr):
return any(getattr(item, attr) for item in self)
def all(self, attr):
return all(getattr(item, attr) for item in self)
def instrumented_list(f):
@wraps(f)
def wrapper(*args, **kwargs):
return InstrumentedList([item for item in f(*args, **kwargs)])
return wrapper
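# Editor's usage sketch (not part of the library itself): ``instrumented_list``
# wraps a callable that yields items so callers get the enhanced list with the
# ``any``/``all`` helpers.  The model names below are hypothetical:
#
#     @instrumented_list
#     def shipped_orders(customer):
#         return (o for o in customer.orders if o.shipped)
#
#     shipped_orders(customer).all('paid')  # True only if every order is paid
#     shipped_orders(customer).any('late')  # True if at least one order is late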
| {
"content_hash": "fab73d2563cc12f0fab583002685ab3f",
"timestamp": "",
"source": "github",
"line_count": 58,
"max_line_length": 76,
"avg_line_length": 29.775862068965516,
"alnum_prop": 0.7371163867979155,
"repo_name": "JackWink/sqlalchemy-utils",
"id": "209b3f29510ab9c38c3eab15fe54a6a07e94ec11",
"size": "1727",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "sqlalchemy_utils/types/__init__.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "531048"
}
],
"symlink_target": ""
} |
import copy
import inspect
import logging
import os
import sys
import uuid
logger = logging.getLogger(__name__)
class Fixture(object):
def __init__(self, context):
self.context = context
@property
def category(self):
return self.__module__.__name__.rsplit('.', 2)[-2].replace('test_', '')
@property
def description(self):
return '{0.uuid.hex}—{1}'.format(self, self.context.real_module)
@property
def name(self):
return 'test_' + self.__class__.__name__
def initialize(self):
pass
def setup(self):
pass
def check(self):
pass
def _execute(self):
executed = {
'setup': { id(Fixture.setup), },
'check': { id(Fixture.check), },
}
classes = list(reversed(self.__class__.mro()))[2:]
for cls in classes:
setup = getattr(cls, 'setup')
if id(setup) not in executed['setup']:
setup(self)
executed['setup'].add(id(setup))
self.run() # TODO implicit error checks
for cls in classes:
check = getattr(cls, 'check')
if id(check) not in executed['check']:
check(self)
executed['check'].add(id(check))
def register_fixture(namespace, base_classes, properties):
props = copy.deepcopy(properties)
desc = props.pop('description', None)
caller_frame = inspect.stack()[1]
caller_file = caller_frame[1]
caller_module = inspect.getmodule(caller_frame[0])
my_uuid = uuid.UUID(os.path.basename(caller_file).replace('.py', '').rsplit('_', 1)[-1])
class_name = 'f_' + my_uuid.hex
original_length = len(class_name)
count = 0
while class_name in namespace:
count += 1
class_name = class_name[:original_length] + '_' + str(count)
@property
def description(self):
_ = super(self.__class__, self).description
if desc is not None:
_ += '—' + desc
return _
def __init__(self, context):
super(self.__class__, self).__init__(context)
functions = {}
for name, value in props.items():
if name == 'error':
self.error = value['class'](*value.get('args', ()), **value.get('kwargs', {}))
continue
if inspect.isfunction(value):
functions[name] = value
continue
if inspect.isclass(value):
if issubclass(value, Fixture):
value = value(self.context)
else:
value = value()
setattr(self, name, value)
exc_info = None
function_count = float('inf')
while function_count > len(functions):
function_count = len(functions)
for name, function in copy.copy(functions).items():
try:
value = copy.deepcopy(function(self))
setattr(self, name, value)
except AttributeError:
exc_info = sys.exc_info()
continue
else:
del functions[name]
if len(functions):
logger.exception('unprocessed fixture properties: %s', ','.join(functions.keys()), exc_info = exc_info)
self.initialize()
namespace[class_name] = type(class_name, base_classes, {
'__init__': __init__,
'__module__': caller_module,
'description': description,
'uuid': my_uuid,
})
| {
"content_hash": "1f265b1fa9ce45a5a5dee3b930b5d169",
"timestamp": "",
"source": "github",
"line_count": 140,
"max_line_length": 115,
"avg_line_length": 25.52857142857143,
"alnum_prop": 0.5223838836038053,
"repo_name": "alunduil/muniments",
"id": "7ca3f37b8376baffd5b8fc2d23aa315274354487",
"size": "3787",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test_muniments/test_fixtures/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "60386"
}
],
"symlink_target": ""
} |
from libcloud.autoscale.providers import get_driver as as_get_driver
from libcloud.autoscale.types import Provider as as_provider
ACCESS_ID = 'your access id'
SECRET_KEY = 'your secret key'
as_driver = as_get_driver(as_provider.AWS_AUTOSCALE)(ACCESS_ID, SECRET_KEY)
group = as_driver.list_auto_scale_groups()[0]
policy = as_driver.list_auto_scale_policies(group)[0]
as_driver.delete_auto_scale_policy(policy)
| {
"content_hash": "55fde14265c6c9404fc0353e96c1cdcc",
"timestamp": "",
"source": "github",
"line_count": 11,
"max_line_length": 75,
"avg_line_length": 37.45454545454545,
"alnum_prop": 0.7742718446601942,
"repo_name": "Cloud-Elasticity-Services/as-libcloud",
"id": "2fb70b4e83fcaed56adec796e44d1a055d09a617",
"size": "412",
"binary": false,
"copies": "1",
"ref": "refs/heads/trunk",
"path": "docs/examples/autoscale/aws/delete_auto_scale_policy.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "2545"
},
{
"name": "Python",
"bytes": "3871921"
},
{
"name": "Shell",
"bytes": "13868"
}
],
"symlink_target": ""
} |
import base64
import hmac
import hashlib
import urllib, urllib2
import logging
import time
from datetime import datetime, tzinfo
try:
import xml.etree.ElementTree as ET
except ImportError:
import elementtree.ElementTree as ET
_log = logging.getLogger("fpys")
def _attr_name_from_tag(tag_name):
# some tag names have an XML namespace that we
# aren't really concerned with. This strips them:
tag_name = tag_name[tag_name.find("}")+1:]
# Then we lowercase the first letter
return tag_name[0].lower() + tag_name[1:]
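# Editor's illustrative sketch (not part of the original module): the helper
# above maps namespaced response tags to attribute names; the namespace below
# is a made-up placeholder and the assertions only run when this file is
# executed directly.
if __name__ == "__main__":
    assert _attr_name_from_tag("{urn:example-ns}TransactionId") == "transactionId"
    assert _attr_name_from_tag("Amount") == "amount"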
class Error(Exception):
"""Base class for exceptions in this module."""
pass
class AmazonError(Error):
"""Error returned by amazon"""
def __init__(self, response):
if isinstance(response.errors, list):
Error.__init__(self, "%s: %s" % (response.errors[0].errorCode, response.errors[0].reasonText))
else:
Error.__init__(self, "%s: %s" % (response.errors.error.code, response.errors.error.message))
self.response = response
class FPSResponse(object):
def __init__(self, element=None):
if element is not None:
if isinstance(element, str):
element = ET.fromstring(element)
self.element = element
for child in element.getchildren():
if len(child.getchildren()) ==0:
value = child.text
if child.tag.find("Date") >= 0:
# TODO this is a little less than ideal
# we truncate the milliseconds and time zone info
value = value[0:value.find(".")]
value = datetime.strptime(value,
"%Y-%m-%dT%H:%M:%S")
if child.tag == "Amount":
value = float(child.text)
if child.tag.find("Size") >= 0:
value = int(child.text)
setattr(self, _attr_name_from_tag(child.tag), value)
else:
if child.tag == "Errors" and child.getchildren()[0].tag == "Errors":
self.errors = []
for e in child.getchildren():
self.errors.append(FPSResponse(e))
elif child.tag =="Transactions":
if not hasattr(self, "transactions"):
self.transactions = []
self.transactions.append(FPSResponse(child))
else:
setattr(self, _attr_name_from_tag(child.tag), FPSResponse(child))
if hasattr(self, "status"):
self.success = (self.status == "Success")
if hasattr(self, "transactionResponse"):
setattr(self, "transaction", self.transactionResponse)
delattr(self, "transactionResponse")
if hasattr(self, "errors"):
raise AmazonError(self)
class FlexiblePaymentClient(object):
def __init__(self, aws_access_key_id, aws_secret_access_key,
fps_url="https://fps.sandbox.amazonaws.com",
pipeline_url="https://authorize.payments-sandbox.amazon.com/cobranded-ui/actions/start"):
self.access_key_id = aws_access_key_id
self.aws_secret_access_key = aws_secret_access_key
self.fps_url = fps_url
self.fps_host = fps_url.split("://")[1].split("/")[0]
self.pipeline_url = pipeline_url
self.pipeline_path = pipeline_url.split("amazon.com")[1]
self.pipeline_host = pipeline_url.split("://")[1].split("/")[0]
def sign_string(self, string, hashfunc):
"""
Strings going to and from the Amazon FPS service must be cryptographically
signed to validate the identity of the caller.
Sign the given string with the aws_secret_access_key using the SHA1 algorithm,
Base64 encode the result and strip whitespace.
"""
_log.debug("to sign: %s" % string)
sig = base64.encodestring(hmac.new(self.aws_secret_access_key,
string,
hashfunc).digest()).strip()
_log.debug(sig)
return(sig)
def get_signature(self, parameters, path=None, http_verb='GET', http_host=None, hashfunc=hashlib.sha256):
"""
Returns the signature for the Amazon FPS Pipeline request that will be
made with the given parameters. Pipeline signatures are calculated with
a different algorithm from the REST interface. Names and values are
url encoded and separated with an equal sign, unlike the REST
signature calculation.
"""
if path is None:
path = self.pipeline_path
if http_host is None:
http_host = self.pipeline_host
keys = parameters.keys()
keys.sort()
parameters_string = "&".join(["%s=%s" % (urllib.quote(k), urllib.quote(str(parameters[k])).replace("/", "%2F")) for k in keys])
signature_base_string = "\n".join([http_verb, http_host, path, parameters_string])
_log.debug(signature_base_string)
return self.sign_string(signature_base_string, hashfunc)
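    # Editor's illustrative sketch (not part of the original client): builds the
    # pipeline base string described in the docstring above from made-up
    # parameter values, so the layout (verb, host, path, sorted k=v pairs) can
    # be inspected without AWS credentials.
    @staticmethod
    def _example_pipeline_base_string():
        params = {'callerReference': 'order-1', 'transactionAmount': '9.99'}
        pairs = "&".join(["%s=%s" % (urllib.quote(k), urllib.quote(str(params[k])))
                          for k in sorted(params.keys())])
        return "\n".join(["GET",
                          "authorize.payments-sandbox.amazon.com",
                          "/cobranded-ui/actions/start",
                          pairs])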
def execute(self, parameters, sign=True):
"""
A generic call to the FPS service. The parameters dictionary
is sorted, signed, and turned into a valid FPS REST call.
The response is read via urllib2 and parsed into an FPSResponse object
"""
# Throw out parameters that == None
parameters = dict([(k,v) for k,v in parameters.items() if v != None])
parameters['Timestamp'] = time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime())
parameters['Version'] = '2008-09-17'
if sign:
parameters['AWSAccessKeyId'] = self.access_key_id
parameters['SignatureVersion'] = 2
parameters['SignatureMethod'] = 'HmacSHA256'
parameters['Signature'] = self.get_signature(parameters, path='/', http_host=self.fps_host)
query_str = urllib.urlencode(parameters)
_log.debug("request_url == %s/?%s" % (self.fps_url, query_str))
data = None
try:
response = urllib2.urlopen("%s/?%s" % (self.fps_url, query_str))
data = response.read()
response.close()
except urllib2.HTTPError, httperror:
data = httperror.read()
httperror.close()
return FPSResponse(ET.fromstring(data))
def cancel(self, transaction_id, description=None):
params = {'Action': 'Cancel',
'TransactionId': transaction_id,
'Description': description}
return self.execute(params)
def cancel_token(self, token_id, reason=None):
params = {'Action': 'CancelToken',
'TokenId': token_id,
'ReasonText': reason}
return self.execute(params)
    def get_recipient_verification_status(self, token_id):
        params = {'Action': 'GetRecipientVerificationStatus',
'RecipientTokenID': token_id}
return self.execute(params)
def get_transaction_status(self, transaction_id):
params = {'Action': 'GetTransactionStatus',
'TransactionId': transaction_id}
return self.execute(params)
def get_pipeline_url(self,
caller_reference,
payment_reason,
transaction_amount,
return_url,
pipeline_name="SingleUse",
recurring_period=None
):
"""Gets the URL for making a co-branded service request, like in this Java
code:
http://docs.amazonwebservices.com/AmazonFPS/latest/FPSGettingStartedGuide/index.html?gsMakingCoBrandedUIRequests.html#d0e1242
"""
parameters = {'callerReference': caller_reference,
'paymentReason': payment_reason,
'transactionAmount': transaction_amount,
'callerKey': self.access_key_id,
'pipelineName': pipeline_name,
'returnURL': return_url,
'signatureVersion': 2,
'signatureMethod': 'HmacSHA256'
}
if recurring_period is not None:
parameters['recurringPeriod'] = recurring_period
parameters['signature'] = self.get_signature(parameters)
query_string = urllib.urlencode(parameters)
url = "%s?%s" % (self.pipeline_url, query_string)
_log.debug(url)
return url
def get_token_by_caller(self, token_id=None, caller_reference=None):
params = {'Action': 'GetTokenByCaller',
'CallerReference': caller_reference,
'TokenId': token_id}
return self.execute(params)
def pay(self,
sender_token,
amount,
caller_reference,
recipient_token=None,
caller_description = None,
charge_fee_to='Recipient'):
params = {'Action': 'Pay',
'SenderTokenId': sender_token,
'RecipientTokenId': recipient_token,
'TransactionAmount.Value': amount,
'TransactionAmount.CurrencyCode': 'USD',
'CallerReference': caller_reference,
'CallerDescription': caller_description,
'ChargeFeeTo': charge_fee_to,
}
return self.execute(params)
def refund(self,
transaction_id,
caller_reference,
refund_amount=None,
caller_description=None,
):
params = {'Action': 'Refund',
'TransactionId': transaction_id,
'CallerReference': caller_reference,
'CallerDescription': caller_description,
'RefundAmount.Value': refund_amount,
'RefundAmount.CurrencyCode': (refund_amount and "USD")}
return self.execute(params)
def reserve(self,
sender_token,
amount,
caller_reference,
caller_description=None):
params = {'Action': 'Reserve',
'SenderTokenId': sender_token,
'TransactionAmount.Value': amount,
'TransactionAmount.CurrencyCode': 'USD',
'CallerReference': caller_reference,
'CallerDescription': caller_description
}
return self.execute(params)
def settle(self,
transaction_id,
amount=None):
params = {'Action': 'Settle',
'ReserveTransactionId': transaction_id,
'TransactionAmount.Value': amount,
'TransactionAmount.CurrencyCode': (amount and 'USD')}
return self.execute(params)
def verify_signature(self,
url_endpoint,
http_parameters):
params = {'Action': 'VerifySignature',
'UrlEndPoint': url_endpoint,
'HttpParameters': http_parameters}
        return self.execute(params, sign=False)
| {
"content_hash": "914f20dc7224cfd973381da52f6a0dd9",
"timestamp": "",
"source": "github",
"line_count": 282,
"max_line_length": 135,
"avg_line_length": 40.1063829787234,
"alnum_prop": 0.5562334217506631,
"repo_name": "wadey/fpys2",
"id": "ef697d956676fbc3b8a4c0fa100073f035f2843e",
"size": "11310",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "fpys2/client.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "77348"
}
],
"symlink_target": ""
} |
from PyQt4 import QtCore, QtGui
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
_fromUtf8 = lambda s: s
class Ui_Form(object):
def setupUi(self, Form):
Form.setObjectName(_fromUtf8("Form"))
Form.resize(337, 589)
self.verticalLayout_2 = QtGui.QVBoxLayout(Form)
self.verticalLayout_2.setObjectName(_fromUtf8("verticalLayout_2"))
self.horizontalLayout_3 = QtGui.QHBoxLayout()
self.horizontalLayout_3.setObjectName(_fromUtf8("horizontalLayout_3"))
self.pointsTableWidget = QtGui.QTableWidget(Form)
self.pointsTableWidget.setObjectName(_fromUtf8("pointsTableWidget"))
self.pointsTableWidget.setColumnCount(2)
self.pointsTableWidget.setRowCount(0)
item = QtGui.QTableWidgetItem()
self.pointsTableWidget.setHorizontalHeaderItem(0, item)
item = QtGui.QTableWidgetItem()
self.pointsTableWidget.setHorizontalHeaderItem(1, item)
self.horizontalLayout_3.addWidget(self.pointsTableWidget)
self.verticalLayout = QtGui.QVBoxLayout()
self.verticalLayout.setObjectName(_fromUtf8("verticalLayout"))
self.addPushButton = QtGui.QPushButton(Form)
font = QtGui.QFont()
font.setPointSize(20)
font.setBold(True)
font.setWeight(75)
self.addPushButton.setFont(font)
self.addPushButton.setText(_fromUtf8(""))
icon = QtGui.QIcon()
icon.addPixmap(QtGui.QPixmap(_fromUtf8("icons/1403292182_Add.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.addPushButton.setIcon(icon)
self.addPushButton.setObjectName(_fromUtf8("addPushButton"))
self.verticalLayout.addWidget(self.addPushButton)
self.delPushButton = QtGui.QPushButton(Form)
font = QtGui.QFont()
font.setPointSize(20)
font.setBold(True)
font.setWeight(75)
self.delPushButton.setFont(font)
self.delPushButton.setText(_fromUtf8(""))
icon1 = QtGui.QIcon()
icon1.addPixmap(QtGui.QPixmap(_fromUtf8("icons/1403362103_Delete.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.delPushButton.setIcon(icon1)
self.delPushButton.setObjectName(_fromUtf8("delPushButton"))
self.verticalLayout.addWidget(self.delPushButton)
spacerItem = QtGui.QSpacerItem(20, 30, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Fixed)
self.verticalLayout.addItem(spacerItem)
self.savePushButton = QtGui.QPushButton(Form)
font = QtGui.QFont()
font.setPointSize(20)
font.setBold(True)
font.setWeight(75)
self.savePushButton.setFont(font)
self.savePushButton.setText(_fromUtf8(""))
icon2 = QtGui.QIcon()
icon2.addPixmap(QtGui.QPixmap(_fromUtf8("icons/1404769242_Download.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.savePushButton.setIcon(icon2)
self.savePushButton.setObjectName(_fromUtf8("savePushButton"))
self.verticalLayout.addWidget(self.savePushButton)
self.loadPushButton = QtGui.QPushButton(Form)
font = QtGui.QFont()
font.setPointSize(20)
font.setBold(True)
font.setWeight(75)
self.loadPushButton.setFont(font)
self.loadPushButton.setText(_fromUtf8(""))
icon3 = QtGui.QIcon()
icon3.addPixmap(QtGui.QPixmap(_fromUtf8("icons/1404769249_Upload.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.loadPushButton.setIcon(icon3)
self.loadPushButton.setObjectName(_fromUtf8("loadPushButton"))
self.verticalLayout.addWidget(self.loadPushButton)
spacerItem1 = QtGui.QSpacerItem(20, 40, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding)
self.verticalLayout.addItem(spacerItem1)
self.horizontalLayout_3.addLayout(self.verticalLayout)
self.verticalLayout_2.addLayout(self.horizontalLayout_3)
self.horizontalLayout_2 = QtGui.QHBoxLayout()
self.horizontalLayout_2.setObjectName(_fromUtf8("horizontalLayout_2"))
self.label = QtGui.QLabel(Form)
self.label.setObjectName(_fromUtf8("label"))
self.horizontalLayout_2.addWidget(self.label)
self.methodsComboBox = QtGui.QComboBox(Form)
self.methodsComboBox.setObjectName(_fromUtf8("methodsComboBox"))
self.methodsComboBox.addItem(_fromUtf8(""))
self.methodsComboBox.addItem(_fromUtf8(""))
self.methodsComboBox.addItem(_fromUtf8(""))
self.methodsComboBox.addItem(_fromUtf8(""))
self.methodsComboBox.addItem(_fromUtf8(""))
self.horizontalLayout_2.addWidget(self.methodsComboBox)
self.verticalLayout_2.addLayout(self.horizontalLayout_2)
self.horizontalLayout = QtGui.QHBoxLayout()
self.horizontalLayout.setObjectName(_fromUtf8("horizontalLayout"))
spacerItem2 = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
self.horizontalLayout.addItem(spacerItem2)
self.countPushButton = QtGui.QPushButton(Form)
self.countPushButton.setObjectName(_fromUtf8("countPushButton"))
self.horizontalLayout.addWidget(self.countPushButton)
self.exitPushButton = QtGui.QPushButton(Form)
self.exitPushButton.setObjectName(_fromUtf8("exitPushButton"))
self.horizontalLayout.addWidget(self.exitPushButton)
self.verticalLayout_2.addLayout(self.horizontalLayout)
self.retranslateUi(Form)
QtCore.QObject.connect(self.exitPushButton, QtCore.SIGNAL(_fromUtf8("clicked()")), Form.close)
QtCore.QMetaObject.connectSlotsByName(Form)
def retranslateUi(self, Form):
Form.setWindowTitle(QtGui.QApplication.translate("Form", "Кластерный анализ", None, QtGui.QApplication.UnicodeUTF8))
self.pointsTableWidget.setToolTip(QtGui.QApplication.translate("Form", "<html><head/><body><p>Точки</p></body></html>", None, QtGui.QApplication.UnicodeUTF8))
item = self.pointsTableWidget.horizontalHeaderItem(0)
item.setText(QtGui.QApplication.translate("Form", "x", None, QtGui.QApplication.UnicodeUTF8))
item = self.pointsTableWidget.horizontalHeaderItem(1)
item.setText(QtGui.QApplication.translate("Form", "y", None, QtGui.QApplication.UnicodeUTF8))
self.addPushButton.setToolTip(QtGui.QApplication.translate("Form", "<html><head/><body><p>Добавить точку</p></body></html>", None, QtGui.QApplication.UnicodeUTF8))
self.delPushButton.setToolTip(QtGui.QApplication.translate("Form", "<html><head/><body><p>Удалить точку</p></body></html>", None, QtGui.QApplication.UnicodeUTF8))
self.savePushButton.setToolTip(QtGui.QApplication.translate("Form", "<html><head/><body><p>Сохранить точки в файл</p></body></html>", None, QtGui.QApplication.UnicodeUTF8))
self.savePushButton.setWhatsThis(QtGui.QApplication.translate("Form", "Save", None, QtGui.QApplication.UnicodeUTF8))
self.loadPushButton.setToolTip(QtGui.QApplication.translate("Form", "<html><head/><body><p>Загрузить точки из файла</p></body></html>", None, QtGui.QApplication.UnicodeUTF8))
self.loadPushButton.setWhatsThis(QtGui.QApplication.translate("Form", "load", None, QtGui.QApplication.UnicodeUTF8))
self.label.setText(QtGui.QApplication.translate("Form", "Метод:", None, QtGui.QApplication.UnicodeUTF8))
self.methodsComboBox.setToolTip(QtGui.QApplication.translate("Form", "<html><head/><body><p>Выберите метод кластеризации</p></body></html>", None, QtGui.QApplication.UnicodeUTF8))
self.methodsComboBox.setItemText(0, QtGui.QApplication.translate("Form", "Последовательная кластеризация", None, QtGui.QApplication.UnicodeUTF8))
self.methodsComboBox.setItemText(1, QtGui.QApplication.translate("Form", "Кинга", None, QtGui.QApplication.UnicodeUTF8))
self.methodsComboBox.setItemText(2, QtGui.QApplication.translate("Form", "К-средних", None, QtGui.QApplication.UnicodeUTF8))
self.methodsComboBox.setItemText(3, QtGui.QApplication.translate("Form", "\"Форель\"", None, QtGui.QApplication.UnicodeUTF8))
self.methodsComboBox.setItemText(4, QtGui.QApplication.translate("Form", "\"Краб\"", None, QtGui.QApplication.UnicodeUTF8))
self.countPushButton.setToolTip(QtGui.QApplication.translate("Form", "<html><head/><body><p>Выполнить кластеризацию и отобразить результаты</p></body></html>", None, QtGui.QApplication.UnicodeUTF8))
self.countPushButton.setText(QtGui.QApplication.translate("Form", "Рассчитать", None, QtGui.QApplication.UnicodeUTF8))
self.exitPushButton.setToolTip(QtGui.QApplication.translate("Form", "<html><head/><body><p>Выйти из программы</p></body></html>", None, QtGui.QApplication.UnicodeUTF8))
self.exitPushButton.setText(QtGui.QApplication.translate("Form", "Выход", None, QtGui.QApplication.UnicodeUTF8))
| {
"content_hash": "03c63323783cf524514943c1984bb7cf",
"timestamp": "",
"source": "github",
"line_count": 135,
"max_line_length": 206,
"avg_line_length": 65.29629629629629,
"alnum_prop": 0.7161656267725468,
"repo_name": "liveegor/cluster",
"id": "f682a400fb4d074b926275852f659e5b1ef51047",
"size": "9287",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "form.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "27688"
}
],
"symlink_target": ""
} |
import mock
from oslo.config import cfg
from neutron.agent.common import config as agent_config
from neutron.agent import l3_agent
from neutron.agent import l3_ha_agent
from neutron.agent.linux import interface
from neutron.common import config as base_config
from neutron.openstack.common import uuidutils
from neutron.tests import base
from neutron_vpnaas.services.vpn import agent
from neutron_vpnaas.services.vpn import device_drivers
_uuid = uuidutils.generate_uuid
NOOP_DEVICE_CLASS = 'NoopDeviceDriver'
NOOP_DEVICE = ('neutron.tests.unit.services.'
'vpn.test_vpn_agent.%s' % NOOP_DEVICE_CLASS)
class NoopDeviceDriver(device_drivers.DeviceDriver):
def sync(self, context, processes):
pass
def create_router(self, process_id):
pass
def destroy_router(self, process_id):
pass
class TestVPNAgent(base.BaseTestCase):
def setUp(self):
super(TestVPNAgent, self).setUp()
self.conf = cfg.CONF
self.conf.register_opts(base_config.core_opts)
self.conf.register_opts(l3_agent.L3NATAgent.OPTS)
self.conf.register_opts(l3_ha_agent.OPTS)
self.conf.register_opts(interface.OPTS)
agent_config.register_interface_driver_opts_helper(self.conf)
agent_config.register_use_namespaces_opts_helper(self.conf)
agent_config.register_agent_state_opts_helper(self.conf)
agent_config.register_root_helper(self.conf)
self.conf.set_override('interface_driver',
'neutron.agent.linux.interface.NullDriver')
self.conf.set_override(
'vpn_device_driver',
[NOOP_DEVICE],
'vpnagent')
for clazz in [
'neutron.agent.linux.ip_lib.device_exists',
'neutron.agent.linux.ip_lib.IPWrapper',
'neutron.agent.linux.interface.NullDriver',
'neutron.agent.linux.utils.execute'
]:
mock.patch(clazz).start()
l3pluginApi_cls = mock.patch(
'neutron.agent.l3_agent.L3PluginApi').start()
self.plugin_api = mock.MagicMock()
l3pluginApi_cls.return_value = self.plugin_api
looping_call_p = mock.patch(
'neutron.openstack.common.loopingcall.FixedIntervalLoopingCall')
looping_call_p.start()
self.fake_host = 'fake_host'
self.agent = agent.VPNAgent(self.fake_host)
def test_setup_drivers(self):
self.assertEqual(1, len(self.agent.devices))
device = self.agent.devices[0]
self.assertEqual(
NOOP_DEVICE_CLASS,
device.__class__.__name__
)
def test_get_namespace(self):
router_id = _uuid()
ns = "ns-" + router_id
ri = l3_agent.RouterInfo(router_id, self.conf.root_helper,
{}, ns_name=ns)
self.agent.router_info = {router_id: ri}
namespace = self.agent.get_namespace(router_id)
self.assertTrue(namespace.endswith(router_id))
self.assertFalse(self.agent.get_namespace('fake_id'))
def test_add_nat_rule(self):
router_id = _uuid()
ri = l3_agent.RouterInfo(router_id, self.conf.root_helper, {})
iptables = mock.Mock()
ri.iptables_manager.ipv4['nat'] = iptables
self.agent.router_info = {router_id: ri}
self.agent.add_nat_rule(router_id, 'fake_chain', 'fake_rule', True)
iptables.add_rule.assert_called_once_with(
'fake_chain', 'fake_rule', top=True)
def test_add_nat_rule_with_no_router(self):
self.agent.router_info = {}
#Should do nothing
self.agent.add_nat_rule(
'fake_router_id',
'fake_chain',
'fake_rule',
True)
def test_remove_rule(self):
router_id = _uuid()
ri = l3_agent.RouterInfo(router_id, self.conf.root_helper, {})
iptables = mock.Mock()
ri.iptables_manager.ipv4['nat'] = iptables
self.agent.router_info = {router_id: ri}
self.agent.remove_nat_rule(router_id, 'fake_chain', 'fake_rule', True)
iptables.remove_rule.assert_called_once_with(
'fake_chain', 'fake_rule', top=True)
def test_remove_rule_with_no_router(self):
self.agent.router_info = {}
#Should do nothing
self.agent.remove_nat_rule(
'fake_router_id',
'fake_chain',
'fake_rule')
def test_iptables_apply(self):
router_id = _uuid()
ri = l3_agent.RouterInfo(router_id, self.conf.root_helper, {})
iptables = mock.Mock()
ri.iptables_manager = iptables
self.agent.router_info = {router_id: ri}
self.agent.iptables_apply(router_id)
iptables.apply.assert_called_once_with()
def test_iptables_apply_with_no_router(self):
#Should do nothing
self.agent.router_info = {}
self.agent.iptables_apply('fake_router_id')
def test_router_added(self):
mock.patch(
'neutron.agent.linux.iptables_manager.IptablesManager').start()
router_id = _uuid()
router = {'id': router_id}
device = mock.Mock()
self.agent.devices = [device]
self.agent._router_added(router_id, router)
device.create_router.assert_called_once_with(router_id)
def test_router_removed(self):
self.plugin_api.get_external_network_id.return_value = None
mock.patch(
'neutron.agent.linux.iptables_manager.IptablesManager').start()
router_id = _uuid()
ri = l3_agent.RouterInfo(router_id, self.conf.root_helper, {},
ns_name="qrouter-%s" % router_id)
ri.router = {
'id': router_id,
'admin_state_up': True,
'routes': [],
'external_gateway_info': {},
'distributed': False}
device = mock.Mock()
self.agent.router_info = {router_id: ri}
self.agent.devices = [device]
self.agent._router_removed(router_id)
device.destroy_router.assert_called_once_with(router_id)
def test_process_router_if_compatible(self):
self.plugin_api.get_external_network_id.return_value = None
router = {'id': _uuid(),
'admin_state_up': True,
'routes': [],
'external_gateway_info': {}}
device = mock.Mock()
self.agent.devices = [device]
self.agent._process_router_if_compatible(router)
device.sync.assert_called_once_with(mock.ANY, [router])
| {
"content_hash": "5537b03e26f18aebf498587689f6ab1b",
"timestamp": "",
"source": "github",
"line_count": 179,
"max_line_length": 78,
"avg_line_length": 36.58659217877095,
"alnum_prop": 0.6066575049625897,
"repo_name": "citrix-openstack-build/neutron-vpnaas",
"id": "441ecdd5644779e6e126918df7e4b35c3de8ca19",
"size": "7190",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "neutron_vpnaas/tests.skip/unit/services/vpn/test_vpn_agent.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "615224"
}
],
"symlink_target": ""
} |
from sqlalchemy import ForeignKey
from sqlalchemy.schema import Column
from sqlalchemy.schema import MetaData
from sqlalchemy.schema import UniqueConstraint
from trove.db.sqlalchemy.migrate_repo.schema import Boolean
from trove.db.sqlalchemy.migrate_repo.schema import create_tables
from trove.db.sqlalchemy.migrate_repo.schema import DateTime
from trove.db.sqlalchemy.migrate_repo.schema import String
from trove.db.sqlalchemy.migrate_repo.schema import Table
meta = MetaData()
datastore_version_metadata = Table(
'datastore_version_metadata',
meta,
Column('id', String(36), primary_key=True, nullable=False),
Column(
'datastore_version_id',
String(36),
ForeignKey('datastore_versions.id', ondelete='CASCADE'),
),
Column('key', String(128), nullable=False),
Column('value', String(128)),
Column('created', DateTime(), nullable=False),
Column('deleted', Boolean(), nullable=False, default=False),
Column('deleted_at', DateTime()),
Column('updated_at', DateTime()),
UniqueConstraint(
'datastore_version_id', 'key', 'value',
name='UQ_datastore_version_metadata_datastore_version_id_key_value')
)
def upgrade(migrate_engine):
meta.bind = migrate_engine
    # Load the existing datastore_versions table into the metadata so the
    # foreign key above can be resolved, then create the new
    # datastore_version_metadata table.
Table('datastore_versions', meta, autoload=True)
create_tables([datastore_version_metadata])
| {
"content_hash": "92c4fe230eb624bc597a4b0ddcb9c8c6",
"timestamp": "",
"source": "github",
"line_count": 40,
"max_line_length": 76,
"avg_line_length": 36.325,
"alnum_prop": 0.7260839642119752,
"repo_name": "openstack/trove",
"id": "e98674a2ebf77021185267a99f1d9f809d9ad3ec",
"size": "2078",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "trove/db/sqlalchemy/migrate_repo/versions/036_add_datastore_version_metadata.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "1166"
},
{
"name": "Python",
"bytes": "3667406"
},
{
"name": "Shell",
"bytes": "136049"
}
],
"symlink_target": ""
} |
from io import StringIO
from textwrap import dedent
import pytest
from jedi._compatibility import u, is_py3
from jedi.parser.token import NAME, OP, NEWLINE, STRING, INDENT
from jedi.parser import ParserWithRecovery, load_grammar, tokenize
from ..helpers import unittest
class TokenTest(unittest.TestCase):
def test_end_pos_one_line(self):
parsed = ParserWithRecovery(load_grammar(), dedent(u('''
def testit():
a = "huhu"
''')))
tok = parsed.module.subscopes[0].statements[0].children[2]
assert tok.end_pos == (3, 14)
def test_end_pos_multi_line(self):
parsed = ParserWithRecovery(load_grammar(), dedent(u('''
def testit():
a = """huhu
asdfasdf""" + "h"
''')))
tok = parsed.module.subscopes[0].statements[0].children[2].children[0]
assert tok.end_pos == (4, 11)
def test_simple_no_whitespace(self):
# Test a simple one line string, no preceding whitespace
simple_docstring = u('"""simple one line docstring"""')
simple_docstring_io = StringIO(simple_docstring)
tokens = tokenize.generate_tokens(simple_docstring_io.readline)
token_list = list(tokens)
_, value, _, prefix = token_list[0]
assert prefix == ''
assert value == '"""simple one line docstring"""'
def test_simple_with_whitespace(self):
# Test a simple one line string with preceding whitespace and newline
simple_docstring = u(' """simple one line docstring""" \r\n')
simple_docstring_io = StringIO(simple_docstring)
tokens = tokenize.generate_tokens(simple_docstring_io.readline)
token_list = list(tokens)
assert token_list[0][0] == INDENT
typ, value, start_pos, prefix = token_list[1]
assert prefix == ' '
assert value == '"""simple one line docstring"""'
assert typ == STRING
typ, value, start_pos, prefix = token_list[2]
assert prefix == ' '
assert typ == NEWLINE
def test_function_whitespace(self):
# Test function definition whitespace identification
fundef = dedent(u('''
def test_whitespace(*args, **kwargs):
x = 1
if x > 0:
print(True)
'''))
fundef_io = StringIO(fundef)
tokens = tokenize.generate_tokens(fundef_io.readline)
token_list = list(tokens)
for _, value, _, prefix in token_list:
if value == 'test_whitespace':
assert prefix == ' '
if value == '(':
assert prefix == ''
if value == '*':
assert prefix == ''
if value == '**':
assert prefix == ' '
if value == 'print':
assert prefix == ' '
if value == 'if':
assert prefix == ' '
def test_identifier_contains_unicode(self):
fundef = dedent(u('''
def 我あφ():
pass
'''))
fundef_io = StringIO(fundef)
tokens = tokenize.generate_tokens(fundef_io.readline)
token_list = list(tokens)
unicode_token = token_list[1]
if is_py3:
assert unicode_token[0] == NAME
else:
# Unicode tokens in Python 2 seem to be identified as operators.
# They will be ignored in the parser, that's ok.
assert unicode_token[0] == OP
def test_quoted_strings(self):
string_tokens = [
'u"test"',
'u"""test"""',
'U"""test"""',
"u'''test'''",
"U'''test'''",
]
for s in string_tokens:
parsed = ParserWithRecovery(load_grammar(), u('''a = %s\n''' % s))
simple_stmt = parsed.module.children[0]
expr_stmt = simple_stmt.children[0]
assert len(expr_stmt.children) == 3
string_tok = expr_stmt.children[2]
assert string_tok.type == 'string'
assert string_tok.value == s
assert string_tok.eval() == 'test'
def test_tokenizer_with_string_literal_backslash():
import jedi
c = jedi.Script("statement = u'foo\\\n'; statement").goto_definitions()
assert c[0]._name.parent.obj == 'foo'
def test_ur_literals():
"""
Decided to parse `u''` literals regardless of Python version. This makes
probably sense:
- Python 3.2 doesn't support it and is still supported by Jedi, but might
not be. While this is incorrect, it's just incorrect for one "old" and in
the future not very important version.
- All the other Python versions work very well with it.
"""
def check(literal):
io = StringIO(u(literal))
tokens = tokenize.generate_tokens(io.readline)
token_list = list(tokens)
typ, result_literal, _, _ = token_list[0]
assert typ == STRING
assert result_literal == literal
check('u""')
check('ur""')
check('Ur""')
check('UR""')
check('bR""')
# Must be in the right order.
with pytest.raises(AssertionError):
check('Rb""')
| {
"content_hash": "2634a8ceb07830ec77081aba3c96613b",
"timestamp": "",
"source": "github",
"line_count": 150,
"max_line_length": 79,
"avg_line_length": 34.3,
"alnum_prop": 0.5615160349854227,
"repo_name": "NcLang/vimrc",
"id": "e53f85a6446ff4f77075d8fad9977f801ba202ae",
"size": "5215",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "sources_non_forked/YouCompleteMe/third_party/ycmd/third_party/JediHTTP/vendor/jedi/test/test_parser/test_tokenize.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "568"
},
{
"name": "CSS",
"bytes": "6320"
},
{
"name": "CoffeeScript",
"bytes": "1402"
},
{
"name": "Erlang",
"bytes": "3232"
},
{
"name": "GCC Machine Description",
"bytes": "525"
},
{
"name": "Go",
"bytes": "2239"
},
{
"name": "HTML",
"bytes": "134"
},
{
"name": "JavaScript",
"bytes": "1064"
},
{
"name": "Makefile",
"bytes": "8657"
},
{
"name": "Perl",
"bytes": "2705"
},
{
"name": "Python",
"bytes": "704814"
},
{
"name": "Ruby",
"bytes": "33390"
},
{
"name": "Shell",
"bytes": "9370"
},
{
"name": "TeX",
"bytes": "6193"
},
{
"name": "VimL",
"bytes": "3170590"
},
{
"name": "XSLT",
"bytes": "4217"
}
],
"symlink_target": ""
} |
from beritest_tools import BaseBERITestCase
from nose.plugins.attrib import attr
class test_raw_fpu_trunc_l_d64(BaseBERITestCase):
@attr('float64')
def test_raw_fpu_trunc_l_d64_1(self):
'''Test double precision trunc.l of -0.75'''
self.assertRegisterEqual(self.MIPS.a0 , 0, "-0.75 did not round up to 0")
@attr('float64')
def test_raw_fpu_trunc_l_d64_2(self):
'''Test double precision trunc.l of -0.5'''
self.assertRegisterEqual(self.MIPS.a1 , 0, "-0.5 did not round up to 0")
@attr('float64')
def test_raw_fpu_trunc_l_d64_3(self):
'''Test double precision trunc.l of -0.25'''
self.assertRegisterEqual(self.MIPS.a2, 0, "-0.25 did not round up to 0")
@attr('float64')
def test_raw_fpu_trunc_l_d64_4(self):
'''Test double precision trunc.l of 0.5'''
self.assertRegisterEqual(self.MIPS.a3, 0, "0.5 did not round down to 0")
@attr('float64')
def test_raw_fpu_trunc_l_d64_5(self):
'''Test double precision trunc.l of 1.5'''
self.assertRegisterEqual(self.MIPS.a4, 1, "1.5 did not round down to 1")
@attr('float64')
def test_raw_fpu_trunc_l_d64_6(self):
'''Test trunc of double precision to 64 bit int'''
self.assertRegisterEqual(self.MIPS.a5, 0x10000000000001, "2^52 + 1 was not correctly converted from double precision to a 64 bit integer")
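# Editor's illustrative sketch (not part of the original test): the expected
# register values above follow round-toward-zero (truncation) semantics, which
# Python's math.trunc() reproduces for doubles; it only runs when this file is
# executed directly, outside the BERI test harness.
if __name__ == "__main__":
    import math
    assert math.trunc(-0.75) == 0
    assert math.trunc(-0.5) == 0
    assert math.trunc(-0.25) == 0
    assert math.trunc(0.5) == 0
    assert math.trunc(1.5) == 1
    assert math.trunc(2.0 ** 52 + 1) == 0x10000000000001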
| {
"content_hash": "9c396e552ffb98329f3fdc9ab69dd9f4",
"timestamp": "",
"source": "github",
"line_count": 36,
"max_line_length": 146,
"avg_line_length": 37.638888888888886,
"alnum_prop": 0.6627306273062731,
"repo_name": "8l/beri",
"id": "e8db1e46ff2151118b83214316f3f80d6d5c92a3",
"size": "2571",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "cheritest/trunk/tests/fpu/test_raw_fpu_trunc_l_d64.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "1629022"
},
{
"name": "Bluespec",
"bytes": "2336405"
},
{
"name": "C",
"bytes": "1058899"
},
{
"name": "C++",
"bytes": "1864"
},
{
"name": "Groff",
"bytes": "14381"
},
{
"name": "Haskell",
"bytes": "11711"
},
{
"name": "Lex",
"bytes": "2894"
},
{
"name": "Makefile",
"bytes": "242450"
},
{
"name": "Mathematica",
"bytes": "291"
},
{
"name": "Objective-C",
"bytes": "2387"
},
{
"name": "OpenEdge ABL",
"bytes": "568"
},
{
"name": "Perl",
"bytes": "19159"
},
{
"name": "Python",
"bytes": "1491002"
},
{
"name": "Shell",
"bytes": "91130"
},
{
"name": "SystemVerilog",
"bytes": "12058"
},
{
"name": "Tcl",
"bytes": "132818"
},
{
"name": "TeX",
"bytes": "4996"
},
{
"name": "Verilog",
"bytes": "125674"
},
{
"name": "Yacc",
"bytes": "5871"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals, division, absolute_import
from builtins import * # pylint: disable=unused-import, redefined-builtin
import logging
from bs4 import BeautifulSoup
from flexget import plugin
from flexget.event import event
from flexget.utils import requests
log = logging.getLogger('nnm-club')
class UrlRewriteNnmClub(object):
"""Nnm-club.me urlrewriter."""
def url_rewritable(self, task, entry):
return entry['url'].startswith('http://nnm-club.me/forum/viewtopic.php?t=')
def url_rewrite(self, task, entry):
try:
r = task.requests.get(entry['url'])
except requests.RequestException as e:
log.error('Error while fetching page: %s' % e)
entry['url'] = None
return
html = r.content
soup = BeautifulSoup(html)
links = soup.findAll('a', href=True)
magnets = [x for x in links if x.get('href').startswith('magnet')]
if not magnets:
log.error('There is no magnet links on page (%s)' % entry['url'])
entry['url'] = None
return
        entry['url'] = magnets[0].get('href')
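# Editor's illustrative sketch (not part of the original plugin): the magnet
# extraction performed above, applied to a static HTML snippet so it can be
# tried without a live nnm-club page or a FlexGet task; the snippet is made up.
if __name__ == "__main__":
    sample = ('<a href="/forum/login.php">login</a>'
              '<a href="magnet:?xt=urn:btih:0000">magnet</a>')
    demo_soup = BeautifulSoup(sample, 'html.parser')
    demo_links = [a for a in demo_soup.findAll('a', href=True)
                  if a.get('href').startswith('magnet')]
    print(demo_links[0].get('href'))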
@event('plugin.register')
def register_plugin():
plugin.register(UrlRewriteNnmClub, 'nnm-club', groups=['urlrewriter'], api_ver=2)
| {
"content_hash": "3a94e4df40d2906565fb1cd3569c3512",
"timestamp": "",
"source": "github",
"line_count": 42,
"max_line_length": 85,
"avg_line_length": 30.547619047619047,
"alnum_prop": 0.6352299298519096,
"repo_name": "qvazzler/Flexget",
"id": "8f8d2ae4915318c027e59426c27cde42a758656d",
"size": "1283",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "flexget/plugins/urlrewrite/nnmclub.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "5275"
},
{
"name": "HTML",
"bytes": "33930"
},
{
"name": "JavaScript",
"bytes": "58811"
},
{
"name": "Python",
"bytes": "2428468"
}
],
"symlink_target": ""
} |
import math
from native_tags.decorators import function, filter, comparison
def func_factory(method):
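    """Wrap the named math-module function as a native_tags filter/function.
    The wrapper tries a two-argument call first and falls back to a single
    argument on TypeError; 'is*' predicates are registered as comparisons
    instead, and None is returned when math has no such attribute.
    """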
try:
func = getattr(math, method)
except AttributeError:
return
def inner(arg1, arg2=None):
try:
return func(arg1, arg2)
except TypeError:
return func(arg1)
inner.__name__ = method
doc = func.__doc__.splitlines()
if len(doc) > 1 and not doc[1]:
doc = doc[2:]
inner.__doc__ = '\n'.join(doc)
if method.startswith('is'):
return comparison(inner)
return filter(function(inner))
acos = func_factory('acos')
acosh = func_factory('acosh')
asin = func_factory('asin')
asinh = func_factory('asinh')
atan = func_factory('atan')
atan2 = func_factory('atan2')
atanh = func_factory('atanh')
ceil = func_factory('ceil')
copysign = func_factory('copysign')
cos = func_factory('cos')
cosh = func_factory('cosh')
degrees = func_factory('degrees')
exp = func_factory('exp')
fabs = func_factory('fabs')
factorial = func_factory('factorial')
floor = func_factory('floor')
fmod = func_factory('fmod')
frexp = func_factory('frexp')
fsum = func_factory('fsum')
hypot = func_factory('hypot')
isinf = func_factory('isinf')
isnan = func_factory('isnan')
ldexp = func_factory('ldexp')
log = func_factory('log')
log10 = func_factory('log10')
log1p = func_factory('log1p')
modf = func_factory('modf')
pow = func_factory('pow')
radians = func_factory('radians')
sin = func_factory('sin')
sinh = func_factory('sinh')
sqrt = func_factory('sqrt')
tan = func_factory('tan')
tanh = func_factory('tanh')
trunc = func_factory('trunc')
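# Editor's illustrative sketch (not part of the original module): a standalone
# copy of the one-or-two argument fallback used by inner() above, kept free of
# the native_tags decorators so it can be run directly.
if __name__ == "__main__":
    def _call(method, arg1, arg2=None):
        func = getattr(math, method)
        try:
            return func(arg1, arg2)
        except TypeError:
            return func(arg1)
    print(_call('pow', 2, 10))   # two-argument math function -> 1024.0
    print(_call('sqrt', 16))     # one-argument math function -> 4.0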
| {
"content_hash": "1d25cc5938a74dfdbf66fe1c7cc41f08",
"timestamp": "",
"source": "github",
"line_count": 59,
"max_line_length": 63,
"avg_line_length": 27.322033898305083,
"alnum_prop": 0.6550868486352357,
"repo_name": "justquick/django-native-tags",
"id": "19920195872980418fd4557cc564d6ef1c8e85d4",
"size": "1612",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "native_tags/contrib/math_.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "90620"
},
{
"name": "Shell",
"bytes": "3090"
}
],
"symlink_target": ""
} |
import argparse
from configparser import ConfigParser
import logging
import logging.handlers
import os
import socket
import sys
import time
from keystoneauth1 import loading
from keystoneauth1 import session
from novaclient import client
config = ConfigParser(strict=False)
debug = os.getenv('__OS_DEBUG', 'false')
if debug.lower() == 'true':
loglevel = logging.DEBUG
else:
loglevel = logging.INFO
LOG = logging.getLogger('nova_wait_for_compute_service')
LOG_FORMAT = ('%(asctime)s.%(msecs)03d %(levelname)s '
'%(name)s %(message)s')
DATE_FORMAT = '%Y-%m-%d %H:%M:%S'
LOG.handlers.clear()
LOG.setLevel(loglevel)
LOG.propagate = True
formatter = logging.Formatter(fmt=LOG_FORMAT, datefmt=DATE_FORMAT)
stream_handler = logging.StreamHandler(sys.stdout)
stream_handler.setFormatter(formatter)
LOG.addHandler(stream_handler)
iterations = 60
timeout = 10
nova_cfg = '/etc/nova/nova.conf'
if __name__ == '__main__':
parser = argparse.ArgumentParser(usage='%(prog)s [options]')
parser.add_argument('-k', '--insecure',
action="store_false",
dest='insecure',
default=True,
help='Allow insecure connection when using SSL')
args = parser.parse_args()
LOG.debug('Running with parameter insecure = %s',
args.insecure)
if os.path.isfile(nova_cfg):
try:
config.read(nova_cfg)
except Exception:
LOG.exception('Error while reading nova.conf:')
else:
LOG.error('Nova configuration file %s does not exist', nova_cfg)
sys.exit(1)
my_host = config.get('DEFAULT', 'host')
if not my_host:
# If host isn't set nova defaults to this
my_host = socket.gethostname()
loader = loading.get_plugin_loader('password')
auth = loader.load_from_options(
auth_url=config.get('neutron',
'auth_url'),
username=config.get('neutron',
'username'),
password=config.get('neutron',
'password'),
project_name=config.get('neutron',
'project_name'),
project_domain_name=config.get('neutron',
'project_domain_name'),
user_domain_name=config.get('neutron',
'user_domain_name'))
sess = session.Session(auth=auth, verify=args.insecure)
nova = client.Client('2.11', session=sess, endpoint_type='internal',
region_name=config.get('neutron', 'region_name'))
# Wait until this host is listed in the service list
for i in range(iterations):
try:
service_list = nova.services.list(binary='nova-compute')
for entry in service_list:
host = getattr(entry, 'host', '')
zone = getattr(entry, 'zone', '')
if host == my_host and zone != 'internal':
LOG.info('Nova-compute service registered')
sys.exit(0)
LOG.info('Waiting for nova-compute service to register')
except Exception:
LOG.exception(
'Error while waiting for nova-compute service to register')
time.sleep(timeout)
sys.exit(1)
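# Editor's illustrative sketch (not part of the original script): the
# registration check used in the polling loop above, factored into a standalone
# predicate so it can be exercised without a live nova client.
def _is_registered(service_list, wanted_host):
    for entry in service_list:
        if (getattr(entry, 'host', '') == wanted_host and
                getattr(entry, 'zone', '') != 'internal'):
            return True
    return False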
# vim: set et ts=4 sw=4 :
| {
"content_hash": "69dba178dd0f5bd6f5aa22dd0eb4f11a",
"timestamp": "",
"source": "github",
"line_count": 103,
"max_line_length": 75,
"avg_line_length": 32.66990291262136,
"alnum_prop": 0.5839524517087668,
"repo_name": "openstack/tripleo-heat-templates",
"id": "61e0d618a2a6cfaefdcca1ebfaae0d3a8d5e0e4e",
"size": "3966",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "container_config_scripts/nova_wait_for_compute_service.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Jinja",
"bytes": "80675"
},
{
"name": "Python",
"bytes": "391465"
},
{
"name": "Shell",
"bytes": "52827"
}
],
"symlink_target": ""
} |
from re import sub
from vyapp.plugins import ENV
from vyapp.areavi import AreaVi
from re import escape
def strip(chars=' '):
"""
Strip chars off the beginning of all selected lines.
    If chars is not given it removes leading spaces.
"""
AreaVi.ACTIVE.replace_ranges('sel',
'^[%s]+' % escape(chars), '')
def rstrip(chars=' '):
"""
    Strip chars off the end of all selected lines.
    If chars is not given it removes trailing spaces.
"""
AreaVi.ACTIVE.replace_ranges('sel',
'[%s]+$' % escape(chars), '')
ENV['strip'] = strip
ENV['rstrip'] = rstrip
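# Editor's illustrative sketch (not part of the original plugin): the same
# regexes applied to a plain string with re.sub instead of
# AreaVi.replace_ranges, to show what each pattern strips.
if __name__ == "__main__":
    line = '   spam: eggs   '
    print(sub('^[%s]+' % escape(' '), '', line))   # leading spaces removed
    print(sub('[%s]+$' % escape(' '), '', line))   # trailing spaces removed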
| {
"content_hash": "123db15299cc8ec865044ef68e9746fc",
"timestamp": "",
"source": "github",
"line_count": 28,
"max_line_length": 56,
"avg_line_length": 20.964285714285715,
"alnum_prop": 0.6303236797274276,
"repo_name": "iogf/vy",
"id": "14e8ac469a07ba3625fc90284f9c73d125d3d5a3",
"size": "587",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "vyapp/plugins/line_strips.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "232984"
},
{
"name": "Shell",
"bytes": "5671"
}
],
"symlink_target": ""
} |
from time import perf_counter as clock
import numpy as np
import tables as tb
N = 144_000
#N = 144
def timed(func, *args, **kwargs):
start = clock()
res = func(*args, **kwargs)
print(f"{clock() - start:.3f}s elapsed.")
return res
def create_table(output_path):
print("creating array...", end=' ')
dt = np.dtype([('field%d' % i, int) for i in range(320)])
a = np.zeros(N, dtype=dt)
print("done.")
output_file = tb.open_file(output_path, mode="w")
table = output_file.create_table("/", "test", dt) # , filters=blosc4)
print("appending data...", end=' ')
table.append(a)
print("flushing...", end=' ')
table.flush()
print("done.")
output_file.close()
def copy1(input_path, output_path):
print(f"copying data from {input_path} to {output_path}...")
input_file = tb.open_file(input_path, mode="r")
output_file = tb.open_file(output_path, mode="w")
# copy nodes as a batch
input_file.copy_node("/", output_file.root, recursive=True)
output_file.close()
input_file.close()
def copy2(input_path, output_path):
print(f"copying data from {input_path} to {output_path}...")
input_file = tb.open_file(input_path, mode="r")
input_file.copy_file(output_path, overwrite=True)
input_file.close()
def copy3(input_path, output_path):
print(f"copying data from {input_path} to {output_path}...")
input_file = tb.open_file(input_path, mode="r")
output_file = tb.open_file(output_path, mode="w")
table = input_file.root.test
table.copy(output_file.root)
output_file.close()
input_file.close()
def copy4(input_path, output_path, complib='zlib', complevel=0):
print(f"copying data from {input_path} to {output_path}...")
input_file = tb.open_file(input_path, mode="r")
output_file = tb.open_file(output_path, mode="w")
input_table = input_file.root.test
print("reading data...", end=' ')
data = input_file.root.test.read()
print("done.")
filter = tb.Filters(complevel=complevel, complib=complib)
output_table = output_file.create_table("/", "test", input_table.dtype,
filters=filter)
print("appending data...", end=' ')
output_table.append(data)
print("flushing...", end=' ')
output_table.flush()
print("done.")
input_file.close()
output_file.close()
def copy5(input_path, output_path, complib='zlib', complevel=0):
print(f"copying data from {input_path} to {output_path}...")
input_file = tb.open_file(input_path, mode="r")
output_file = tb.open_file(output_path, mode="w")
input_table = input_file.root.test
filter = tb.Filters(complevel=complevel, complib=complib)
output_table = output_file.create_table("/", "test", input_table.dtype,
filters=filter)
chunksize = 10_000
rowsleft = len(input_table)
start = 0
    for chunk in range(len(input_table) // chunksize + 1):
stop = start + min(chunksize, rowsleft)
data = input_table.read(start, stop)
output_table.append(data)
output_table.flush()
rowsleft -= chunksize
start = stop
input_file.close()
output_file.close()
if __name__ == '__main__':
timed(create_table, 'tmp.h5')
# timed(copy1, 'tmp.h5', 'test1.h5')
timed(copy2, 'tmp.h5', 'test2.h5')
# timed(copy3, 'tmp.h5', 'test3.h5')
timed(copy4, 'tmp.h5', 'test4.h5')
timed(copy5, 'tmp.h5', 'test5.h5')
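# Editor's illustrative sketch (not part of the original benchmark): the chunk
# boundaries copy5() steps through, written as a generator so the arithmetic is
# easy to check on its own; unlike copy5() it skips the empty trailing slice
# when nrows is an exact multiple of chunksize.
def _chunk_bounds(nrows, chunksize):
    start = 0
    while start < nrows:
        stop = min(start + chunksize, nrows)
        yield start, stop
        start = stop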
| {
"content_hash": "443f3e71004ea13136262c51997f60c7",
"timestamp": "",
"source": "github",
"line_count": 115,
"max_line_length": 75,
"avg_line_length": 30.53913043478261,
"alnum_prop": 0.6101936218678815,
"repo_name": "PyTables/PyTables",
"id": "af629114857538de860f3c4e58105ea29ada5ca7",
"size": "3512",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "bench/table-copy.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "165578"
},
{
"name": "CMake",
"bytes": "2417"
},
{
"name": "Cython",
"bytes": "283042"
},
{
"name": "Gnuplot",
"bytes": "2104"
},
{
"name": "Makefile",
"bytes": "2489"
},
{
"name": "Python",
"bytes": "3119836"
},
{
"name": "Shell",
"bytes": "19408"
}
],
"symlink_target": ""
} |
"""
Created on Thu Mar 09 18:07:03 2017
@author: scram
"""
from gensim import corpora, models, similarities
import gensim
import json
import os
import pickle
from pymongo import MongoClient
import unicodedata as uniD
import sys
import nltk
import networkx as nx
import numpy as np
import itertools as it
import pprint
import collections as coll
import re
from collections import Counter
import feature_functions as feature
import nested_dict
from functools import reduce
from scipy import spatial
from sklearn.externals import joblib
from Mongo_functions import id2edgelist
pp = pprint.PrettyPrinter(indent=0)
def translatelabel(label):
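    """Map a stance label string to an integer class id.
    support -> 0, deny -> 1, query -> 2, comment -> 3, imp-denying -> 4;
    unrecognised labels fall through and return None.
    """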
    if label == 'supporting' or label == 'support':
        return 0
    if label == 'denying' or label == 'deny':
        return 1
    if label == 'appeal-for-more-information' or label == 'query':
        return 2
    if label == 'comment' or label == '0':
        return 3
    if label == 'imp-denying':
        return 4
id_target_dic = {}
label_list =[]
event_target_dic = {}
event_ID_dic = {}
unlabeled_thread_list=[]
thread_list=[]
def files_to_mongo(root_path,label_dict,db):
""" Moves tweets from their file structure to mongo
"""
walk = os.walk(root_path)
threads_errors = []
root_errors = []
replies_errors = []
for current_dir in walk:
if 'structure.json' in current_dir[-1]:
event = current_dir[0].split("\\")[-2]
last_dir = current_dir[0].split("\\")[-1]
if last_dir == "replies":
json_list =[]
root_tweet = {}
rep_path =current_dir[0]
root_path = rep_path.split("\\")
with open("\\".join(root_path[:-1])+"\\"+'structure.json',"r")as jsonfile:
structure = json.load(jsonfile)
source_path = root_path
source_path[-1]="source-tweet"
source_path = "\\".join(source_path)
root =os.listdir(source_path)[0]
with open(source_path+"\\"+root,"r")as jsonfile:
root_tweet = json.load(jsonfile)
for json_path in current_dir[-1]:
with open(current_dir[0]+"\\"+json_path,"r")as jsonfile:
json_list.append(json.load(jsonfile))
thread_list = list(map(lambda x: x.get("id_str"),json_list))
root_id = root_tweet['id']
thread_list.append(str(root_id))
structure = nested_dict.subset_by_key(structure, thread_list)
edge_list = nested_dict.to_edge_list(nested_dict.map_keys(int,structure))
fields = ["parent","child"]
mongo_update = {"_id":root_id,
"event":event,
"edge_list":nested_dict.vecs_to_recs(edge_list,fields)}
try:
db.edge_list.insert_one(mongo_update).inserted_id
except Exception as err:
# print(err)
threads_errors.append(root_id)
for twt in json_list:
twt["_id"] = twt["id"]
twt['label'] = label_dict[int(twt['id'])]
try:
db.replies_to_trump.insert_one(twt).inserted_id
except Exception as err:
# print(err)
replies_errors.append(twt["id"])
root_tweet["_id"] = root_id
root_tweet["label"] = label_dict[int(root_id)]
try:
db.trump_tweets.insert_one(root_tweet)
except Exception as err:
# print(err)
root_errors.append(root_id)
return threads_errors, root_errors, replies_errors
def get_labeled_thread_tweets(thread,db=MongoClient('localhost', 27017).Alta_Real_New):
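    """Like get_thread_tweets(), but only keeps replies that carry a label.
    Edges whose endpoints are not among the labelled tweets are dropped
    before the id list (root id first) is rebuilt.
    """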
root_id = thread['_id']
thread_tweets = list(db.trump_tweets.find({'_id':root_id}))
thread_ids = set()
    thread_edge_list = []
#    print(root_id)
#    print( thread_tweets[0]['text'])
    for edg in thread['edge_list']:
        parent = edg['parent']
        child = edg['child']
        thread_ids.add(child)
        thread_edge_list.append((parent,child))
thread_ids = list(thread_ids)
thread_tweets += list(db.replies_to_trump.find({'_id':{'$in':thread_ids},'label':{'$exists':True}}))
thread_ids = [i['id'] for i in thread_tweets]
thread_edge_list = list(filter(lambda x: x[0] in thread_ids and x[1] in thread_ids, thread_edge_list))
thread_ids = [root_id] + thread_ids
# print( len(thread_tweets))
return thread_tweets,thread_edge_list,thread_ids
def labels_to_train(db,db_source,labels_dic):
""" checks the Alta_Real database and creates sub-trees for any labeld tweets
"""
threads_errors = []
root_errors = []
replies_errors = []
print(db_source)
roots = list(db_source.trump_tweets.find({"label":{"$exists":True}}))
print("LENGTH ROOTS",len(roots))
# put labels to DB from target label file
for labeled_id in labels_dic:
tweet = list(db_source.replies_to_trump.find({'_id':labeled_id}))
if tweet:
tweet = tweet[0]
if not tweet.get('label',None):
db_source.replies_to_trump.update_one(
{"_id":labeled_id},
{"$set":
{"label":labels_dic[labeled_id]}
}
)
for root_tweet in roots:
root_id = root_tweet['id']
thread_edges = list(db_source.edge_list.find({"_id":root_id}))[0]
json_list,edge_list,thread_list = get_labeled_thread_tweets(thread_edges,db_source)
thread_list.append(str(root_id))
fields = ["parent","child"]
event_text = root_tweet['text']
# event_text = event_text.replace('/','').replace(':','').replace('-','').replace('#','')
if len(event_text) >20:
event = event_text.replace(" ","_")[:20]
else:
event = event_text.replace(" ","_")
mongo_update = {"_id":root_id,
"id_str":root_id,
"event":event,
"edge_list":nested_dict.vecs_to_recs(edge_list,fields)}
print(mongo_update)
try:
db.edge_list.insert_one(mongo_update).inserted_id
except Exception as err:
# print(err)
threads_errors.append(root_id)
for twt in json_list:
twt["_id"] = twt["id"]
if not twt['label']:
print(twt['id'])
print(twt['label'])
print(bool(twt['label']))
try:
db.replies_to_trump.insert_one(twt).inserted_id
except Exception as err:
# print(err)
replies_errors.append(twt["id"])
root_tweet["_id"] = root_id
try:
db.trump_tweets.insert_one(root_tweet)
except Exception as err:
# print(err)
root_errors.append(root_id)
return threads_errors, root_errors, replies_errors
def process_tweet(tweet):
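    """Build the feature vector for a single tweet.
    Concatenates the tweet embedding (mean word2vec or doc2vec vector) with
    hand-crafted features: media/url/reply attribute flags, punctuation flags,
    capitalisation ratio, word and character counts, swear- and negation-word
    flags, and POS-tag counts. Also appends the event to the global
    thread_list and, when the tweet is labelled, its label to label_list.
    Returns (tweet id as int, feature vector).
    """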
feature_vector =[]
if tweet.get("full_text",None):
text =tweet['full_text']
else:
text =tweet['text']
ID = tweet['id_str']
thread_list.append(event)
if ID in id_target_dic:
label_list.append(id_target_dic[ID])
# print id_target_dic[root_id.split(".")[0]]
attribute_paths = [[u'entities',u'media'],
[u'entities',u'urls'],
[u'in_reply_to_screen_name']]
format_binary_vec = list(feature.attribute_binary_gen(tweet, attribute_paths))
feature_vector += format_binary_vec
punc_vec = list(feature.punc_binary_gen(text,punc_list))
feature_vector += punc_vec
if token_type == "zub":
cap_ratio = feature.zub_capital_ratio(text)
# print "Twit & Zub using same Capitalization Ratio\n\tIs this desirable?"
elif token_type == "twit":
cap_ratio = feature.zub_capital_ratio(text)
# print "Twit & Zub using same Capitalization Ratio\n\tIs this desirable?"
feature_vector += cap_ratio
word_char_count = feature.word_char_count(id_text_dic[ID])
feature_vector += word_char_count
swear_bool = feature.word_bool(text,swear_list)
feature_vector += swear_bool
neg_bool = feature.word_bool(text,negationwords)
feature_vector += neg_bool
if ID in id_pos_dic_full:
pos_vec = id_pos_dic_full[tweet["id"]]
else:
pos_vec = id_pos_dic[tweet["id"]]
feature_vector += feature.pos_vector(pos_vec)
d2v_text = D2V_id_text_dic[ID]
if embed_type == "word2vec":
embed_vector = feature.mean_W2V_vector(d2v_text,D2Vmodel)
elif embed_type == "doc2vec":
embed_vector = D2Vmodel.docvecs[ID]
feature_vector = np.concatenate((embed_vector,
np.array(feature_vector)))
# thread_id_list.append(ID)
# thread_dic[ID] = feature_vector
return int(ID), feature_vector
def get_thread_tweets(thread,db=MongoClient('localhost', 27017).Alta_Real_New):
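    """Fetch a thread's root tweet, its replies, the edge list and the ids.
    `thread` is an edge_list document; the root comes from trump_tweets, the
    children from replies_to_trump, and the returned id list puts the root id
    first.
    """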
root_id = thread['_id']
thread_tweets = list(db.trump_tweets.find({'_id':root_id}))
thread_ids = set()
thread_edge_list =[]
for edg in thread['edge_list']:
parent = edg['parent']
child = edg['child']
thread_ids.add(child)
thread_edge_list.append((parent,child))
thread_ids = list(thread_ids)
thread_tweets += list(db.replies_to_trump.find({'_id':{'$in':thread_ids}}))
thread_ids = [root_id] + thread_ids
return thread_tweets,thread_edge_list,thread_ids
def commeasuring(feats,struc,targs,ids):
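    """Trim the per-thread features, edges, targets and ids to a common length.
    Prints the four lengths when they disagree, bails out with
    (None, None, [None], None) when a container is empty, and drops edges
    whose endpoints fall outside the trimmed range.
    """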
targs = targs[0]
f_l = len(feats)
s_l = len(struc)
t_l = len(targs)
i_l = len(ids)
if f_l != s_l+1 or t_l != f_l or i_l != f_l or i_l != t_l:
print(f_l,s_l,t_l,i_l)
if len(feats) > len(targs):
l = len(targs)
elif any(map(lambda x:len(x) == 0,[feats,struc,targs,ids])):
return None,None,[None],None
else:
l = len(feats)
feats = feats[:l]
struc = np.array([i for i in struc if i[0]<l and i[1]<l])
targs = targs[:l]
ids = ids[:l]
# thread_edge_list = list(filter(lambda x: x[0] in thread_ids and x[1] in thread_ids, thread_edge_list))
print(len(feats),len(struc),len(targs),ids)
return feats,struc,[targs],ids
#### RUN FUNCTIONS TO CREATE FEATURES
if __name__ == '__main__':
### Import if python 2
if sys.version_info[0] < 3:
import cPickle as pickle
# Global Variables
dims =str(300)
#token_type = "zub"
embed_type = "word2vec"
token_type = "twit"
embed_type = "doc2vec"
id_text_dic = {}
text_list = []
id_list = []
POS_dir ="Data\\twitIE_pos\\"
doc2vec_dir = "Data\\doc2vec\\trump_plus"
ids_around_IDless_tweets=[136,139,1085,1087]
pos_file_path1 = POS_dir+token_type+"_semeval2017"+"_twitIE_POS"
pos_file_path2 = POS_dir+token_type+"_Alta_Real_New"+"_twitIE_POS"
pos_file_path3 = POS_dir+token_type+"_Alta_Real_New"+"_twitIE_POS_FULL_TEXT"
pos_file_path = [pos_file_path1, pos_file_path2,pos_file_path3]
id_pos_dic, index_pos_dic = feature.pos_extract(pos_file_path)
# pos_file_path1 = POS_dir+token_type+"_semeval2017"+"_twitIE_POS_FULL"
pos_file_path_full = POS_dir+token_type+"_Alta_Real_New"+"_twitIE_POS_FULL_TEXT"
pos_file_path_full = [pos_file_path_full]
id_pos_dic_full, index_pos_dic_full = feature.pos_extract(pos_file_path_full)
swear_path = "Data\\badwords.txt"
swear_list=[]
with open(swear_path,"r")as swearfile:
swear_list = swearfile.readlines()
id_text_dic ={}
with open(doc2vec_dir+token_type+"_"+"id_text_dic.json",'r')as textDicFile:
id_text_dic = json.load(textDicFile)
negationwords = ['not', 'no', 'nobody', 'nothing', 'none',
'never', 'neither', 'nor', 'nowhere', 'hardly',
'scarcely', 'barely', 'don*', 'isn*', 'wasn*',
'shouldn*', 'wouldn*', 'couldn*', 'doesn*',
'don', 'isn', 'wasn', 'nothin',
'shouldn', 'wouldn', 'couldn', 'doesn']
punc_list = [u"?",u"!",u"."]
attribute_paths = [[u'entities',u'media'],
[u'entities',u'urls'],
[u'in_reply_to_screen_name']]
err_dic = {}
graph_root_id = ""
graph_event = ""
graph_size = 0
graph_2_vis =[]
thread_dic = {}
event_target_dic = {}
root_id = 0
event_model_dic = {}
#MongoDB credentials and collections
# DBname = 'test-tree'
# DBname = 'test_recurse'
DBname = 'Alta_Real_New'
DBhost = 'localhost'
DBport = 27017
DBname_t = 'Train'
# initiate Mongo Client
client = MongoClient()
client = MongoClient(DBhost, DBport)
DB_trump = client[DBname]
DB_train = client[DBname_t]
######## Process each tweet to create a feature vector
id_target_dic = {i['_id']:i['label'] for i in list(DB_train.replies_to_trump.find({},{"label":1}))}
id_target_dic.update({i['_id']:i['label'] for i in list(DB_train.trump_tweets.find({},{"label":1}))} )
    print()
trump_ids = list(DB_trump.trump_tweets.distinct("_id"))
trump_parent_ids = list(DB_trump.edge_list.distinct("edge_list.parent",{"_id":{"$in":trump_ids}}))
    trump_child_ids = list(DB_trump.edge_list.distinct("edge_list.child",{"_id":{"$in":trump_ids}}))
trump_thread_ids = trump_parent_ids +trump_child_ids
# test_mongo = list(filter(lambda x: x['id'] in trump_thread_ids,trump_mongo))
if not DB_trump.edge_list.distinct('event'):
for thrd in list(DB_trump.edge_list.find()):
DB_trump.edge_list.update_one(
{'_id':thrd['_id']},
{'$set':{'event':thrd['_id']}})
trump_threads = list(DB_trump.edge_list.find({"_id":{"$in":trump_ids}}))
train_ids = list(DB_train.replies_to_trump.distinct('_id'))+list(DB_train.trump_tweets.distinct('_id'))
train_root_ids = list(DB_train.replies_to_trump.distinct('_id'))
trump_num = 0
train_num = 0
#Errror containers
total_tweets = 0
total_threads = 0
thread_errors = {}
tweet_errors = []
root_errors = []
root_error_count = 0
POSed_ids = [k for k,v in id_pos_dic.items()]
### make a list of threads that have been fully POS tagged
print(trump_threads[0],"before filter")
print("tweets_POSed",len(POSed_ids))
print("Total trump threads",len(trump_threads))
### temp code till all tweets are POSed
trump_threads = list(
# map(lambda y: y[-1],
filter(lambda X:all(
map(lambda x: x in POSed_ids ,get_thread_tweets(X)[-1])),
trump_threads
)
# )
)
print(trump_threads[0],"after filter")
print("trump threads fully POSed",len(trump_threads))
train_threads = list(DB_train.edge_list.find())
print("Total Train threads",len(train_threads))
train_threads = list(
# map(lambda y: y[-1],
filter(lambda X:all(
map(lambda x: x in POSed_ids ,get_thread_tweets(X)[-1])),
train_threads
)
# )
)
print("Train threads fully POSed",len(train_threads))
for thread in train_threads+trump_threads:
# for thread in train_threads+trump_threads:
total_threads +=1
root_id = thread['_id']
event = thread['event']
thread_id_list = []
tweet_list = []
thread_edge_list = []
thread_ids = []
    # Error boolean
error_free = True
# if root_id not in train_root_ids:
if not isinstance(event,str):
# if root_id in trump_ids:
# print("EVENT NOT STR")
# print("event type", type(event))
tweet_list,thread_edge_list,thread_ids = get_thread_tweets(thread,db=DB_trump)
else:
# print("EVENT STR")
# print("event type", type(event))
# print("root NOT in Trump ids")
tweet_list,thread_edge_list,thread_ids = get_thread_tweets(thread,db=DB_train)
for tweet in tweet_list:
total_tweets += 1
try:
twt_ID, feat_vector = process_tweet(tweet)
thread_id_list.append(twt_ID)
thread_dic[twt_ID] = feat_vector
except Exception as err:
err_dic[tweet['_id']] = err
print(err)
if root_id in thread_errors:
thread_errors[root_id].append(tweet['_id'])
else:
thread_errors[root_id] = [tweet['_id']]
tweet_errors.append(tweet['_id'])
root_errors.append(root_id)
root_error_count += 1
print("{}/{} Errored Threads over Total threads".format(root_error_count,total_threads))
print("{}/{} Errored Tweets over Total tweets".format(len(tweet_errors),total_tweets))
print("Process Error tweet",tweet['_id'])
print("Process Error root",root_id,"\n_________________________")
error_free = False
if error_free:
if thread in train_threads:
id_dic = feature.id_index_dic(thread_edge_list)
id_order = [i[0] for i in sorted(id_dic.items(),key=lambda x:(x[1],x[0]))]
# print("LEN ID_ORDER",len(id_order))
# create an array for each thread an append to the event_target_dic
if root_id in id_target_dic:
print(event)
thread_target_vector = [np.array(list(
map(translatelabel,
[id_target_dic[i]
for i in id_order])
),dtype=int)]
edge_vector = np.array([np.array([id_dic[Id] for Id in edge])
for edge in thread_edge_list])
n_feats = np.array([thread_dic[i] for i in id_order])
outComes = commeasuring(n_feats, edge_vector, thread_target_vector, id_order)
n_feats, edge_vector, thread_target_vector, thread_id_list = outComes
if thread_id_list and isinstance(n_feats, np.ndarray) and isinstance(edge_vector, np.ndarray) and isinstance(thread_target_vector[0], np.ndarray):
X_train = [np.array([n_feats,edge_vector])]
if event in event_model_dic:
event_model_dic[event] += X_train
else:
event_model_dic[event] = X_train
if event in event_ID_dic:
event_ID_dic[event] += [thread_id_list]
else:
event_ID_dic[event] = [thread_id_list]
if event in event_target_dic:
event_target_dic[event] += thread_target_vector
else:
event_target_dic[event] = thread_target_vector
thread_dic = {}
else:
pass
print("errors in threads",Counter(root_errors))
event_model_dic = ["_".join([token_type,embed_type,dims]),event_model_dic]
joblib.dump(event_model_dic,"event_model_dic.joblib")
with open("event_model_dic","wb")as modelfile:
pickle.dump(event_model_dic,modelfile,protocol=2,fix_imports=True)
print("MODEL WRITTEN")
joblib.dump(event_target_dic,"event_target_dic.joblib")
with open("event_target_dic","wb")as modelfile:
pickle.dump(event_target_dic,modelfile,protocol=2,fix_imports=True)
print("TARGET WRITTEN")
joblib.dump(event_ID_dic,"event_ID_dic.joblib")
with open("event_ID_dic","wb")as modelfile:
pickle.dump(event_ID_dic,modelfile,protocol=2,fix_imports=True)
print("IDs WRITTEN")
joblib.dump(thread_errors,"error_ID_dic.joblib")
with open("error_ID_dic","wb")as errorfile:
pickle.dump(thread_errors,errorfile,protocol=2,fix_imports=True)
print("ERRORS WRITTEN")
print( event_ID_dic.keys())
#print graph_size
#print graph_event, graph_root_id
print(graph_2_vis)
#DG=nx.DiGraph()
#DG.add_edges_from(graph_2_vis)
#nx.draw_random(DG, with_labels=False)
with open("error_ID_dic","wb")as errorfile:
pickle.dump(thread_errors,errorfile,protocol=2,fix_imports=True)
#from networkx.drawing.nx_agraph import graphviz_layout
#
##nx.draw_spectral(DG, with_labels=False)
##pos=nx.graphviz_layout(DG, prog='dot')
#pos=graphviz_layout(DG, prog='dot')
#nx.draw(DG, pos, with_labels=False, arrows=False)
#from networkx.drawing.nx_pydot import to_pydot
#import pydot_ng as pydot
##import graphviz
#Gp = pydot.graph_from_edges(graph_2_vis)
#Gp.write_png('example1_graph.png')
#nx.draw_graphviz(DG)
#plt.show()
#label_count = coll.Counter(label_list)
#print label_count
| {
"content_hash": "e644f53f025ecfb035cbe6a4a1163aa4",
"timestamp": "",
"source": "github",
"line_count": 560,
"max_line_length": 162,
"avg_line_length": 38.45,
"alnum_prop": 0.5499721344974922,
"repo_name": "scramblingbalam/Alta_Real",
"id": "add2c43359be2b82c3e4c55b0b8cc5be205470b5",
"size": "21556",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "feature_creation_mongo.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "178293"
}
],
"symlink_target": ""
} |
"""
Unit tests for django-registration.
These tests assume that you've completed all the prerequisites for
getting django-registration running in the default setup, to wit:
1. You have ``registration`` in your ``INSTALLED_APPS`` setting.
2. You have created all of the templates mentioned in this
application's documentation.
3. You have added the setting ``ACCOUNT_ACTIVATION_DAYS`` to your
settings file.
4. You have URL patterns pointing to the registration and activation
views, with the names ``registration_register`` and
``registration_activate``, respectively, and a URL pattern named
'registration_complete'.
"""
import datetime
import sha
from django.conf import settings
from django.contrib.auth.models import User
from django.core import mail
from django.core import management
from django.core.urlresolvers import reverse
from django.test import TestCase
from registration import forms
from registration.models import RegistrationProfile
class RegistrationTestCase(TestCase):
"""
Base class for the test cases; this sets up two users -- one
expired, one not -- which are used to exercise various parts of
the application.
"""
def setUp(self):
self.sample_user = RegistrationProfile.objects.create_inactive_user(username='alice',
password='secret',
email='[email protected]')
self.expired_user = RegistrationProfile.objects.create_inactive_user(username='bob',
password='swordfish',
email='[email protected]')
self.expired_user.date_joined -= datetime.timedelta(days=settings.ACCOUNT_ACTIVATION_DAYS + 1)
self.expired_user.save()
class RegistrationModelTests(RegistrationTestCase):
"""
Tests for the model-oriented functionality of django-registration,
including ``RegistrationProfile`` and its custom manager.
"""
def test_new_user_is_inactive(self):
"""
Test that a newly-created user is inactive.
"""
self.failIf(self.sample_user.is_active)
def test_registration_profile_created(self):
"""
Test that a ``RegistrationProfile`` is created for a new user.
"""
self.assertEqual(RegistrationProfile.objects.count(), 2)
def test_activation_email(self):
"""
Test that user signup sends an activation email.
"""
self.assertEqual(len(mail.outbox), 2)
def test_activation(self):
"""
Test that user activation actually activates the user and
properly resets the activation key, and fails for an
already-active or expired user, or an invalid key.
"""
# Activating a valid user returns the user.
self.failUnlessEqual(RegistrationProfile.objects.activate_user(RegistrationProfile.objects.get(user=self.sample_user).activation_key).pk,
self.sample_user.pk)
# The activated user must now be active.
self.failUnless(User.objects.get(pk=self.sample_user.pk).is_active)
# The activation key must now be reset to the "already activated" constant.
self.failUnlessEqual(RegistrationProfile.objects.get(user=self.sample_user).activation_key,
RegistrationProfile.ACTIVATED)
# Activating an expired user returns False.
self.failIf(RegistrationProfile.objects.activate_user(RegistrationProfile.objects.get(user=self.expired_user).activation_key))
# Activating from a key that isn't a SHA1 hash returns False.
self.failIf(RegistrationProfile.objects.activate_user('foo'))
# Activating from a key that doesn't exist returns False.
self.failIf(RegistrationProfile.objects.activate_user(sha.new('foo').hexdigest()))
def test_account_expiration_condition(self):
"""
Test that ``RegistrationProfile.activation_key_expired()``
returns ``True`` for expired users and for active users, and
``False`` otherwise.
"""
# Unexpired user returns False.
self.failIf(RegistrationProfile.objects.get(user=self.sample_user).activation_key_expired())
# Expired user returns True.
self.failUnless(RegistrationProfile.objects.get(user=self.expired_user).activation_key_expired())
# Activated user returns True.
RegistrationProfile.objects.activate_user(RegistrationProfile.objects.get(user=self.sample_user).activation_key)
self.failUnless(RegistrationProfile.objects.get(user=self.sample_user).activation_key_expired())
def test_expired_user_deletion(self):
"""
Test that
``RegistrationProfile.objects.delete_expired_users()`` deletes
only inactive users whose activation window has expired.
"""
RegistrationProfile.objects.delete_expired_users()
self.assertEqual(RegistrationProfile.objects.count(), 1)
def test_management_command(self):
"""
Test that ``manage.py cleanupregistration`` functions
correctly.
"""
management.call_command('cleanupregistration')
self.assertEqual(RegistrationProfile.objects.count(), 1)
class RegistrationFormTests(RegistrationTestCase):
"""
Tests for the forms and custom validation logic included in
django-registration.
"""
def test_registration_form(self):
"""
Test that ``RegistrationForm`` enforces username constraints
and matching passwords.
"""
invalid_data_dicts = [
# Non-alphanumeric username.
{
'data':
{ 'username': 'foo/bar',
'email': '[email protected]',
'password1': 'foo',
'password2': 'foo' },
'error':
('username', [u"Enter a valid value."])
},
# Already-existing username.
{
'data':
{ 'username': 'alice',
'email': '[email protected]',
'password1': 'secret',
'password2': 'secret' },
'error':
('username', [u"This username is already taken. Please choose another."])
},
# Mismatched passwords.
{
'data':
{ 'username': 'foo',
'email': '[email protected]',
'password1': 'foo',
'password2': 'bar' },
'error':
('__all__', [u"You must type the same password each time"])
},
]
for invalid_dict in invalid_data_dicts:
form = forms.RegistrationForm(data=invalid_dict['data'])
self.failIf(form.is_valid())
self.assertEqual(form.errors[invalid_dict['error'][0]], invalid_dict['error'][1])
form = forms.RegistrationForm(data={ 'username': 'foo',
'email': '[email protected]',
'password1': 'foo',
'password2': 'foo' })
self.failUnless(form.is_valid())
def test_registration_form_tos(self):
"""
Test that ``RegistrationFormTermsOfService`` requires
agreement to the terms of service.
"""
form = forms.RegistrationFormTermsOfService(data={ 'username': 'foo',
'email': '[email protected]',
'password1': 'foo',
'password2': 'foo' })
self.failIf(form.is_valid())
self.assertEqual(form.errors['tos'], [u"You must agree to the terms to register"])
form = forms.RegistrationFormTermsOfService(data={ 'username': 'foo',
'email': '[email protected]',
'password1': 'foo',
'password2': 'foo',
'tos': 'on' })
self.failUnless(form.is_valid())
def test_registration_form_unique_email(self):
"""
Test that ``RegistrationFormUniqueEmail`` validates uniqueness
of email addresses.
"""
form = forms.RegistrationFormUniqueEmail(data={ 'username': 'foo',
'email': '[email protected]',
'password1': 'foo',
'password2': 'foo' })
self.failIf(form.is_valid())
self.assertEqual(form.errors['email'], [u"This email address is already in use. Please supply a different email address."])
form = forms.RegistrationFormUniqueEmail(data={ 'username': 'foo',
'email': '[email protected]',
'password1': 'foo',
'password2': 'foo' })
self.failUnless(form.is_valid())
def test_registration_form_no_free_email(self):
"""
Test that ``RegistrationFormNoFreeEmail`` disallows
registration with free email addresses.
"""
base_data = { 'username': 'foo',
'password1': 'foo',
'password2': 'foo' }
for domain in ('aim.com', 'aol.com', 'email.com', 'gmail.com',
'googlemail.com', 'hotmail.com', 'hushmail.com',
'msn.com', 'mail.ru', 'mailinator.com', 'live.com'):
invalid_data = base_data.copy()
invalid_data['email'] = u"foo@%s" % domain
form = forms.RegistrationFormNoFreeEmail(data=invalid_data)
self.failIf(form.is_valid())
self.assertEqual(form.errors['email'], [u"Registration using free email addresses is prohibited. Please supply a different email address."])
base_data['email'] = '[email protected]'
form = forms.RegistrationFormNoFreeEmail(data=base_data)
self.failUnless(form.is_valid())
class RegistrationViewTests(RegistrationTestCase):
"""
Tests for the views included in django-registration.
"""
def test_registration_view(self):
"""
Test that the registration view rejects invalid submissions,
and creates a new user and redirects after a valid submission.
"""
# Invalid data fails.
response = self.client.post(reverse('registration_register'),
data={ 'username': 'alice', # Will fail on username uniqueness.
'email': '[email protected]',
'password1': 'foo',
'password2': 'foo' })
self.assertEqual(response.status_code, 200)
self.failUnless(response.context['form'])
self.failUnless(response.context['form'].errors)
response = self.client.post(reverse('registration_register'),
data={ 'username': 'foo',
'email': '[email protected]',
'password1': 'foo',
'password2': 'foo' })
self.assertEqual(response.status_code, 302)
self.assertEqual(response['Location'], 'http://testserver%s' % reverse('registration_complete'))
self.assertEqual(RegistrationProfile.objects.count(), 3)
def test_activation_view(self):
"""
Test that the activation view activates the user from a valid
key and fails if the key is invalid or has expired.
"""
# Valid user puts the user account into the context.
response = self.client.get(reverse('registration_activate',
kwargs={ 'activation_key': RegistrationProfile.objects.get(user=self.sample_user).activation_key }))
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context['account'].pk, self.sample_user.pk)
# Expired user sets the account to False.
response = self.client.get(reverse('registration_activate',
kwargs={ 'activation_key': RegistrationProfile.objects.get(user=self.expired_user).activation_key }))
self.failIf(response.context['account'])
# Invalid key gets to the view, but sets account to False.
response = self.client.get(reverse('registration_activate',
kwargs={ 'activation_key': 'foo' }))
self.failIf(response.context['account'])
# Nonexistent key sets the account to False.
response = self.client.get(reverse('registration_activate',
kwargs={ 'activation_key': sha.new('foo').hexdigest() }))
self.failIf(response.context['account'])
| {
"content_hash": "912964246cef1c98157704def892ab01",
"timestamp": "",
"source": "github",
"line_count": 319,
"max_line_length": 152,
"avg_line_length": 41.99686520376176,
"alnum_prop": 0.5617675599014704,
"repo_name": "jmptrader/dirigible-spreadsheet",
"id": "d3d083032c0d8b0d906993997c2b38dcabc4e6cf",
"size": "13397",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "dirigible/registration/tests.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "21647"
},
{
"name": "HTML",
"bytes": "280170"
},
{
"name": "JavaScript",
"bytes": "772040"
},
{
"name": "Python",
"bytes": "1148287"
}
],
"symlink_target": ""
} |
from JumpScale import j
import ujson
class AUTH():
def load(self,osis):
pass
def authenticate(self,osis,method,user,passwd):
if j.core.osis.cmds._authenticateAdmin(user=user,passwd=passwd):
return True
if user=="node" and method in ["set","get"]:
if j.core.osis.nodeguids.has_key(passwd):
return True
return False
| {
"content_hash": "69ce00b4c2f13e5b73581e3765a240e0",
"timestamp": "",
"source": "github",
"line_count": 15,
"max_line_length": 72,
"avg_line_length": 27,
"alnum_prop": 0.5925925925925926,
"repo_name": "Jumpscale/jumpscale6_core",
"id": "408914113a3fdeb8df2690d71e8dfc769d4ece98",
"size": "405",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "apps/osis/logic/test_complextype/OSIS_auth.py",
"mode": "33261",
"license": "bsd-2-clause",
"language": [
{
"name": "CSS",
"bytes": "3681"
},
{
"name": "HTML",
"bytes": "11738"
},
{
"name": "JavaScript",
"bytes": "70132"
},
{
"name": "Lua",
"bytes": "2162"
},
{
"name": "Python",
"bytes": "5848017"
},
{
"name": "Shell",
"bytes": "7692"
}
],
"symlink_target": ""
} |
from ..interface import GreatFETInterface
class LED(GreatFETInterface):
""" Simple periheral that allows control of an LED through the GreatFET HAL."""
def __init__(self, board, led_number):
"""Create a new object representing a GreatFET LED.
board -- The GreatFET board object that owns the given LED.
led_number -- The one-indexed LED number. On GreatFET boards, this
matches the number printed on the silkscreen.
"""
# Store a reference to the parent board.
self.board = board
# Store which of the four(?) LEDs we refer to.
# TODO: Validate this?
self.led_number = led_number
    # Function that toggles the relevant LED.
def toggle(self):
self.board.apis.leds.toggle(self.led_number)
    # Function that turns on the relevant LED.
def on(self):
self.board.apis.leds.on(self.led_number)
    # Function that turns off the relevant LED.
def off(self):
self.board.apis.leds.off(self.led_number)
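# Hypothetical usage sketch (how the board object is constructed may differ;
# only the LED class above is defined here):
#
#     from greatfet import GreatFET
#     board = GreatFET()
#     led = LED(board, 1)   # one-indexed, matches the silkscreen
#     led.on()
#     led.toggle()
#     led.off()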
| {
"content_hash": "fef4c2143ac226dd77ee9950552cbbcd",
"timestamp": "",
"source": "github",
"line_count": 33,
"max_line_length": 83,
"avg_line_length": 32.333333333333336,
"alnum_prop": 0.6410496719775071,
"repo_name": "dominicgs/GreatFET-experimental",
"id": "a4448e5fbe8556d874400a8a2b0c5fc347e49fd8",
"size": "1104",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "host/greatfet/interfaces/led.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "223018"
},
{
"name": "C++",
"bytes": "969"
},
{
"name": "CMake",
"bytes": "10105"
},
{
"name": "Python",
"bytes": "82219"
}
],
"symlink_target": ""
} |
__author__ = 'frank'
from flask.ext.wtf import Form
from wtforms import StringField, SubmitField, TextField, PasswordField, SelectField, TextAreaField, BooleanField, ValidationError
from wtforms.validators import DataRequired, Length, EqualTo, Email
from models import Work, User
class RegisterForm(Form):
email = StringField(
'邮箱*', description='未注册过的邮箱', validators=[DataRequired('邮箱不能为空'), Length(1, 64), Email('邮箱格式不正确')])
username = TextField(
'昵称*', description='未被使用过的昵称', validators=[DataRequired('昵称不能为空'), Length(1, 64)])
password = PasswordField('密码*', validators=[
DataRequired('密码不能为空'),
EqualTo('confirm', message='密码不一致,请重新输入密码')]
)
confirm = PasswordField(
'确认*', description='重复输入密码确认', validators=[DataRequired('密码不能为空')])
def validate_email(self, field):
if User.query.filter_by(email=field.data).first():
raise ValidationError('邮箱已经被注册过,请更换邮箱')
def validate_username(self, field):
if User.query.filter_by(username=field.data).first():
raise ValidationError('用户名已经被使用,请更换用户名')
class SigninForm(Form):
email = StringField(
'邮箱*', description='使用已注册过的邮箱', validators=[DataRequired('邮箱不能为空'), Length(1, 64), Email('邮箱格式不正确')])
password = PasswordField('密码*', validators=[DataRequired('密码不能为空')])
remember_me = BooleanField('记住我')
submit = SubmitField('登陆')
class WorkForm(Form):
cate_id = SelectField(
'类别', default=1, validators=[DataRequired('作品类别不能为空')], coerce=int)
title1 = TextField('作品1', description='准确的作品名称')
title2 = TextField('作品2', description='准确的作品名称')
title3 = TextField('作品3', description='准确的作品名称')
title4 = TextField('作品4', description='准确的作品名称')
title5 = TextField('作品5', description='准确的作品名称')
recomm_reason1 = TextField('推荐词', description='选填')
recomm_reason2 = TextField('推荐词', description='选填')
recomm_reason3 = TextField('推荐词', description='选填')
recomm_reason4 = TextField('推荐词', description='选填')
recomm_reason5 = TextField('推荐词', description='选填')
class CommentForm(Form):
content = TextAreaField(
'内容', description='知道啥说啥,支持吐槽,鼠标移到右下角点击移动拖大此框,支持markdown', validators=[DataRequired('评论内容不能为空')])
class SingleRecommendationForm(Form):
name = TextField('作品名称')
remarks = TextField('审核备注')
recomm_reason = TextField('推荐原因')
cate_id = SelectField('作品类别', coerce=int)
status_id = SelectField('推荐状态', coerce=int)
user_id = SelectField('推荐人', coerce=int)
class SignupForm(Form):
"""Form for send email"""
email = TextField('邮箱', [DataRequired(message="邮箱不能为空"), Email(message="无效的邮箱")],
description='你常用的邮箱')
def validate_email(self, field):
if User.query.filter_by(email=field.data).first():
raise ValidationError('邮箱已经被注册过,请更换邮箱')
| {
"content_hash": "cc24cc553f3c4cdc21cf9816213dc368",
"timestamp": "",
"source": "github",
"line_count": 83,
"max_line_length": 129,
"avg_line_length": 34.602409638554214,
"alnum_prop": 0.6702646239554317,
"repo_name": "Fansion/sharefun",
"id": "7123a5825ad27d3409d60fc33965a464a03c4340",
"size": "3523",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "sharefun/forms.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "7794"
},
{
"name": "JavaScript",
"bytes": "10163"
},
{
"name": "Python",
"bytes": "109217"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import
import logging
from ..conf import settings
logger = logging.getLogger(__name__)
class TelegramBot:
def __init__(self, chat_id=None, split_on=None):
from telegram.bot import Bot
telegram_creds = settings().creds['telegram']
token = telegram_creds['token']
if chat_id is not None:
self._chat_id = chat_id
else:
self._chat_id = telegram_creds.get('chat')
self.split_on = split_on
self.bot = Bot(token=token)
@property
def chat_id(self):
if self._chat_id is None:
chat = self.bot.getUpdates(limit=1)[0].message.chat
logger.debug("Imprinted chat id %d of type %s",
chat.id, chat.type)
self._chat_id = chat.id
return self._chat_id
def post(self, report, **kwargs):
if self.split_on:
report = report.split(self.split_on)
else:
report = [report]
for r in report:
# Telegram max message length is 4096 chars
messages = [r[i:i + 4096] for i in range(0, len(r), 4096)]
for m in messages:
self.send_message(m)
def send_message(self, message):
message = self.bot.send_message(
self.chat_id,
message,
parse_mode='Markdown',
)
return message
__call__ = post
def notify_factory(conf, value):
try:
chat_id = value['chat']
except (TypeError, KeyError):
chat_id = value
try:
split_on = value['split-on']
except (TypeError, KeyError):
split_on = None
return TelegramBot(chat_id=chat_id, split_on=split_on).post
def chat_id():
bot = TelegramBot()
print(bot.chat_id)
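# Illustrative credentials layout consumed above via settings().creds
# (key names taken from this module; the storage format is an assumption):
#
#     telegram:
#         token: "123456:ABC..."
#         chat: -1001234567890    # optional; imprinted from getUpdates() if absent
#
# notify_factory() additionally accepts either a bare chat id or a mapping
# with 'chat' and 'split-on' keys.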
| {
"content_hash": "7edddc57937acd78597f03ab568b4596",
"timestamp": "",
"source": "github",
"line_count": 69,
"max_line_length": 70,
"avg_line_length": 26.08695652173913,
"alnum_prop": 0.5538888888888889,
"repo_name": "kibitzr/kibitzr",
"id": "49f8edb0a10ebe9cde21346017ad66134b0fbb5f",
"size": "1800",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "kibitzr/notifier/telegram.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Dockerfile",
"bytes": "1024"
},
{
"name": "HTML",
"bytes": "1183"
},
{
"name": "Makefile",
"bytes": "2228"
},
{
"name": "Python",
"bytes": "125386"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import
import json
import logging
from pipes import quote as pquote
import requests
LOG = logging.getLogger(__name__)
def add_ssl_verify_to_kwargs(func):
def decorate(*args, **kwargs):
if isinstance(args[0], HTTPClient) and 'https' in getattr(args[0], 'root', ''):
cacert = getattr(args[0], 'cacert', None)
kwargs['verify'] = cacert if cacert is not None else False
return func(*args, **kwargs)
return decorate
def add_auth_token_to_headers(func):
def decorate(*args, **kwargs):
headers = kwargs.get('headers', dict())
token = kwargs.pop('token', None)
if token:
headers['X-Auth-Token'] = str(token)
kwargs['headers'] = headers
api_key = kwargs.pop('api_key', None)
if api_key:
headers['St2-Api-Key'] = str(api_key)
kwargs['headers'] = headers
return func(*args, **kwargs)
return decorate
def add_json_content_type_to_headers(func):
def decorate(*args, **kwargs):
headers = kwargs.get('headers', dict())
content_type = headers.get('content-type', 'application/json')
headers['content-type'] = content_type
kwargs['headers'] = headers
return func(*args, **kwargs)
return decorate
class HTTPClient(object):
def __init__(self, root, cacert=None, debug=False):
self.root = self._get_url_without_trailing_slash(root)
self.cacert = cacert
self.debug = debug
@add_ssl_verify_to_kwargs
@add_auth_token_to_headers
def get(self, url, **kwargs):
response = requests.get(self.root + url, **kwargs)
response = self._response_hook(response=response)
return response
@add_ssl_verify_to_kwargs
@add_auth_token_to_headers
@add_json_content_type_to_headers
def post(self, url, data, **kwargs):
response = requests.post(self.root + url, json.dumps(data), **kwargs)
response = self._response_hook(response=response)
return response
@add_ssl_verify_to_kwargs
@add_auth_token_to_headers
def post_raw(self, url, data, **kwargs):
response = requests.post(self.root + url, data, **kwargs)
response = self._response_hook(response=response)
return response
@add_ssl_verify_to_kwargs
@add_auth_token_to_headers
@add_json_content_type_to_headers
def put(self, url, data, **kwargs):
response = requests.put(self.root + url, json.dumps(data), **kwargs)
response = self._response_hook(response=response)
return response
@add_ssl_verify_to_kwargs
@add_auth_token_to_headers
@add_json_content_type_to_headers
def patch(self, url, data, **kwargs):
response = requests.patch(self.root + url, data, **kwargs)
response = self._response_hook(response=response)
return response
@add_ssl_verify_to_kwargs
@add_auth_token_to_headers
def delete(self, url, **kwargs):
response = requests.delete(self.root + url, **kwargs)
response = self._response_hook(response=response)
return response
def _response_hook(self, response):
if self.debug:
# Log cURL request line
curl_line = self._get_curl_line_for_request(request=response.request)
print("# -------- begin %d request ----------" % id(self))
print(curl_line)
print("# -------- begin %d response ----------" % (id(self)))
print(response.text)
print("# -------- end %d response ------------" % (id(self)))
print('')
return response
def _get_curl_line_for_request(self, request):
parts = ['curl']
# method
method = request.method.upper()
if method in ['HEAD']:
parts.extend(['--head'])
else:
parts.extend(['-X', pquote(method)])
# headers
for key, value in request.headers.items():
parts.extend(['-H ', pquote('%s: %s' % (key, value))])
# body
if request.body:
parts.extend(['--data-binary', pquote(request.body)])
# URL
parts.extend([pquote(request.url)])
curl_line = ' '.join(parts)
return curl_line
def _get_url_without_trailing_slash(self, value):
"""
Function which strips a trailing slash from the provided url if one is present.
:param value: URL to format.
:type value: ``str``
:rtype: ``str``
"""
result = value[:-1] if value.endswith('/') else value
return result
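# Minimal usage sketch (URL, CA bundle, path and token are placeholders):
#
#     client = HTTPClient('https://localhost:9101/', cacert='/path/ca.pem',
#                         debug=True)
#     response = client.get('/v1/actions', token='<auth token>')
#     response.raise_for_status()
#
# With debug=True, each request also prints the equivalent cURL command built
# by _get_curl_line_for_request().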
| {
"content_hash": "61bfb62411b07b77d1b39aa99a717f48",
"timestamp": "",
"source": "github",
"line_count": 149,
"max_line_length": 87,
"avg_line_length": 31.12751677852349,
"alnum_prop": 0.5851660198361363,
"repo_name": "pixelrebel/st2",
"id": "877ed173c4a8193315129f36a29dc7cad3a1ef19",
"size": "5418",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "st2client/st2client/utils/httpclient.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "198"
},
{
"name": "Makefile",
"bytes": "41838"
},
{
"name": "PowerShell",
"bytes": "299"
},
{
"name": "Python",
"bytes": "3734646"
},
{
"name": "Shell",
"bytes": "40304"
},
{
"name": "Slash",
"bytes": "677"
}
],
"symlink_target": ""
} |
from requests.auth import HTTPBasicAuth
def apply_updates(doc, update_dict):
# updates the doc with items from the dict
# returns whether or not any updates were made
should_save = False
for key, value in update_dict.items():
if getattr(doc, key, None) != value:
setattr(doc, key, value)
should_save = True
return should_save
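# Example: apply_updates(doc, {'name': 'foo'}) sets doc.name only when it
# differs from the current value and returns True if anything changed, so the
# caller knows whether a save is needed.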
class EndpointMixin(object):
@classmethod
def from_config(cls, config):
return cls(config.url, config.username, config.password)
def _auth(self):
return HTTPBasicAuth(self.username, self.password)
def _urlcombine(self, base, target):
return '{base}{target}'.format(base=base, target=target)
| {
"content_hash": "b4198ba3c2e7c535db474507cd15ee68",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 64,
"avg_line_length": 28.64,
"alnum_prop": 0.6620111731843575,
"repo_name": "qedsoftware/commcare-hq",
"id": "96e734dc92c9bfe30c09882b5278257f306984ef",
"size": "716",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "custom/api/utils.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "ActionScript",
"bytes": "15950"
},
{
"name": "CSS",
"bytes": "508392"
},
{
"name": "HTML",
"bytes": "2869325"
},
{
"name": "JavaScript",
"bytes": "2395360"
},
{
"name": "PHP",
"bytes": "2232"
},
{
"name": "PLpgSQL",
"bytes": "125298"
},
{
"name": "Python",
"bytes": "14670713"
},
{
"name": "Shell",
"bytes": "37514"
}
],
"symlink_target": ""
} |
import datetime
import functools
import hashlib
import importlib
import os
import os.path
import socket
import struct
import tempfile
import eventlet
import mock
import netaddr
from oslo_concurrency import processutils
from oslo_config import cfg
from oslo_context import context as common_context
from oslo_context import fixture as context_fixture
from oslo_utils import encodeutils
from oslo_utils import timeutils
from oslo_utils import units
import six
import nova
from nova import context
from nova import exception
from nova import test
from nova import utils
CONF = cfg.CONF
class GenericUtilsTestCase(test.NoDBTestCase):
def test_parse_server_string(self):
result = utils.parse_server_string('::1')
self.assertEqual(('::1', ''), result)
result = utils.parse_server_string('[::1]:8773')
self.assertEqual(('::1', '8773'), result)
result = utils.parse_server_string('2001:db8::192.168.1.1')
self.assertEqual(('2001:db8::192.168.1.1', ''), result)
result = utils.parse_server_string('[2001:db8::192.168.1.1]:8773')
self.assertEqual(('2001:db8::192.168.1.1', '8773'), result)
result = utils.parse_server_string('192.168.1.1')
self.assertEqual(('192.168.1.1', ''), result)
result = utils.parse_server_string('192.168.1.2:8773')
self.assertEqual(('192.168.1.2', '8773'), result)
result = utils.parse_server_string('192.168.1.3')
self.assertEqual(('192.168.1.3', ''), result)
result = utils.parse_server_string('www.example.com:8443')
self.assertEqual(('www.example.com', '8443'), result)
result = utils.parse_server_string('www.example.com')
self.assertEqual(('www.example.com', ''), result)
# error case
result = utils.parse_server_string('www.exa:mple.com:8443')
self.assertEqual(('', ''), result)
result = utils.parse_server_string('')
self.assertEqual(('', ''), result)
def test_hostname_unicode_sanitization(self):
hostname = u"\u7684.test.example.com"
self.assertEqual("test.example.com",
utils.sanitize_hostname(hostname))
def test_hostname_sanitize_periods(self):
hostname = "....test.example.com..."
self.assertEqual("test.example.com",
utils.sanitize_hostname(hostname))
def test_hostname_sanitize_dashes(self):
hostname = "----test.example.com---"
self.assertEqual("test.example.com",
utils.sanitize_hostname(hostname))
def test_hostname_sanitize_characters(self):
hostname = "(#@&$!(@*--#&91)(__=+--test-host.example!!.com-0+"
self.assertEqual("91----test-host.example.com-0",
utils.sanitize_hostname(hostname))
def test_hostname_translate(self):
hostname = "<}\x1fh\x10e\x08l\x02l\x05o\x12!{>"
self.assertEqual("hello", utils.sanitize_hostname(hostname))
def test_generate_password(self):
password = utils.generate_password()
self.assertTrue([c for c in password if c in '0123456789'])
self.assertTrue([c for c in password
if c in 'abcdefghijklmnopqrstuvwxyz'])
self.assertTrue([c for c in password
if c in 'ABCDEFGHIJKLMNOPQRSTUVWXYZ'])
def test_read_file_as_root(self):
def fake_execute(*args, **kwargs):
if args[1] == 'bad':
raise processutils.ProcessExecutionError()
return 'fakecontents', None
self.stubs.Set(utils, 'execute', fake_execute)
contents = utils.read_file_as_root('good')
self.assertEqual(contents, 'fakecontents')
self.assertRaises(exception.FileNotFound,
utils.read_file_as_root, 'bad')
def test_temporary_chown(self):
def fake_execute(*args, **kwargs):
if args[0] == 'chown':
fake_execute.uid = args[1]
self.stubs.Set(utils, 'execute', fake_execute)
with tempfile.NamedTemporaryFile() as f:
with utils.temporary_chown(f.name, owner_uid=2):
self.assertEqual(fake_execute.uid, 2)
self.assertEqual(fake_execute.uid, os.getuid())
def test_xhtml_escape(self):
self.assertEqual('"foo"', utils.xhtml_escape('"foo"'))
self.assertEqual(''foo'', utils.xhtml_escape("'foo'"))
self.assertEqual('&', utils.xhtml_escape('&'))
self.assertEqual('>', utils.xhtml_escape('>'))
self.assertEqual('<', utils.xhtml_escape('<'))
self.assertEqual('<foo>', utils.xhtml_escape('<foo>'))
def test_is_valid_ipv6_cidr(self):
self.assertTrue(utils.is_valid_ipv6_cidr("2600::/64"))
self.assertTrue(utils.is_valid_ipv6_cidr(
"abcd:ef01:2345:6789:abcd:ef01:192.168.254.254/48"))
self.assertTrue(utils.is_valid_ipv6_cidr(
"0000:0000:0000:0000:0000:0000:0000:0001/32"))
self.assertTrue(utils.is_valid_ipv6_cidr(
"0000:0000:0000:0000:0000:0000:0000:0001"))
self.assertFalse(utils.is_valid_ipv6_cidr("foo"))
self.assertFalse(utils.is_valid_ipv6_cidr("127.0.0.1"))
def test_get_shortened_ipv6(self):
self.assertEqual("abcd:ef01:2345:6789:abcd:ef01:c0a8:fefe",
utils.get_shortened_ipv6(
"abcd:ef01:2345:6789:abcd:ef01:192.168.254.254"))
self.assertEqual("::1", utils.get_shortened_ipv6(
"0000:0000:0000:0000:0000:0000:0000:0001"))
self.assertEqual("caca::caca:0:babe:201:102",
utils.get_shortened_ipv6(
"caca:0000:0000:caca:0000:babe:0201:0102"))
self.assertRaises(netaddr.AddrFormatError, utils.get_shortened_ipv6,
"127.0.0.1")
self.assertRaises(netaddr.AddrFormatError, utils.get_shortened_ipv6,
"failure")
def test_get_shortened_ipv6_cidr(self):
self.assertEqual("2600::/64", utils.get_shortened_ipv6_cidr(
"2600:0000:0000:0000:0000:0000:0000:0000/64"))
self.assertEqual("2600::/64", utils.get_shortened_ipv6_cidr(
"2600::1/64"))
self.assertRaises(netaddr.AddrFormatError,
utils.get_shortened_ipv6_cidr,
"127.0.0.1")
self.assertRaises(netaddr.AddrFormatError,
utils.get_shortened_ipv6_cidr,
"failure")
def test_safe_ip_format(self):
self.assertEqual("[::1]", utils.safe_ip_format("::1"))
self.assertEqual("127.0.0.1", utils.safe_ip_format("127.0.0.1"))
self.assertEqual("[::ffff:127.0.0.1]", utils.safe_ip_format(
"::ffff:127.0.0.1"))
self.assertEqual("localhost", utils.safe_ip_format("localhost"))
def test_get_hash_str(self):
base_str = b"foo"
base_unicode = u"foo"
value = hashlib.md5(base_str).hexdigest()
self.assertEqual(
value, utils.get_hash_str(base_str))
self.assertEqual(
value, utils.get_hash_str(base_unicode))
def test_use_rootwrap(self):
self.flags(disable_rootwrap=False, group='workarounds')
self.flags(rootwrap_config='foo')
cmd = utils._get_root_helper()
self.assertEqual('sudo nova-rootwrap foo', cmd)
def test_use_sudo(self):
self.flags(disable_rootwrap=True, group='workarounds')
cmd = utils._get_root_helper()
self.assertEqual('sudo', cmd)
def test_ssh_execute(self):
expected_args = ('ssh', '-o', 'BatchMode=yes',
'remotehost', 'ls', '-l')
with mock.patch('nova.utils.execute') as mock_method:
utils.ssh_execute('remotehost', 'ls', '-l')
mock_method.assert_called_once_with(*expected_args)
class TestCachedFile(test.NoDBTestCase):
@mock.patch('os.path.getmtime', return_value=1)
def test_read_cached_file(self, getmtime):
utils._FILE_CACHE = {
'/this/is/a/fake': {"data": 1123, "mtime": 1}
}
fresh, data = utils.read_cached_file("/this/is/a/fake")
fdata = utils._FILE_CACHE['/this/is/a/fake']["data"]
self.assertEqual(fdata, data)
@mock.patch('os.path.getmtime', return_value=2)
def test_read_modified_cached_file(self, getmtime):
utils._FILE_CACHE = {
'/this/is/a/fake': {"data": 1123, "mtime": 1}
}
fake_contents = "lorem ipsum"
with mock.patch('six.moves.builtins.open',
mock.mock_open(read_data=fake_contents)):
fresh, data = utils.read_cached_file("/this/is/a/fake")
self.assertEqual(data, fake_contents)
self.assertTrue(fresh)
def test_delete_cached_file(self):
filename = '/this/is/a/fake/deletion/of/cached/file'
utils._FILE_CACHE = {
filename: {"data": 1123, "mtime": 1}
}
self.assertIn(filename, utils._FILE_CACHE)
utils.delete_cached_file(filename)
self.assertNotIn(filename, utils._FILE_CACHE)
def test_delete_cached_file_not_exist(self):
# We expect that if cached file does not exist no Exception raised.
filename = '/this/is/a/fake/deletion/attempt/of/not/cached/file'
self.assertNotIn(filename, utils._FILE_CACHE)
utils.delete_cached_file(filename)
self.assertNotIn(filename, utils._FILE_CACHE)
class VPNPingTestCase(test.NoDBTestCase):
"""Unit tests for utils.vpn_ping()."""
def setUp(self):
super(VPNPingTestCase, self).setUp()
self.port = 'fake'
self.address = 'fake'
self.session_id = 0x1234
self.fmt = '!BQxxxxxQxxxx'
def fake_reply_packet(self, pkt_id=0x40):
return struct.pack(self.fmt, pkt_id, 0x0, self.session_id)
def setup_socket(self, mock_socket, return_value, side_effect=None):
socket_obj = mock.MagicMock()
if side_effect is not None:
socket_obj.recv.side_effect = side_effect
else:
socket_obj.recv.return_value = return_value
mock_socket.return_value = socket_obj
@mock.patch.object(socket, 'socket')
def test_vpn_ping_timeout(self, mock_socket):
"""Server doesn't reply within timeout."""
self.setup_socket(mock_socket, None, socket.timeout)
rc = utils.vpn_ping(self.address, self.port,
session_id=self.session_id)
self.assertFalse(rc)
@mock.patch.object(socket, 'socket')
def test_vpn_ping_bad_len(self, mock_socket):
"""Test a short/invalid server reply."""
self.setup_socket(mock_socket, 'fake_reply')
rc = utils.vpn_ping(self.address, self.port,
session_id=self.session_id)
self.assertFalse(rc)
@mock.patch.object(socket, 'socket')
def test_vpn_ping_bad_id(self, mock_socket):
"""Server sends an unknown packet ID."""
self.setup_socket(mock_socket, self.fake_reply_packet(pkt_id=0x41))
rc = utils.vpn_ping(self.address, self.port,
session_id=self.session_id)
self.assertFalse(rc)
@mock.patch.object(socket, 'socket')
def test_vpn_ping_ok(self, mock_socket):
self.setup_socket(mock_socket, self.fake_reply_packet())
rc = utils.vpn_ping(self.address, self.port,
session_id=self.session_id)
self.assertTrue(rc)
class MonkeyPatchTestCase(test.NoDBTestCase):
"""Unit test for utils.monkey_patch()."""
def setUp(self):
super(MonkeyPatchTestCase, self).setUp()
self.example_package = 'nova.tests.unit.monkey_patch_example.'
self.flags(
monkey_patch=True,
monkey_patch_modules=[self.example_package + 'example_a' + ':'
+ self.example_package + 'example_decorator'])
def test_monkey_patch(self):
utils.monkey_patch()
nova.tests.unit.monkey_patch_example.CALLED_FUNCTION = []
from nova.tests.unit.monkey_patch_example import example_a
from nova.tests.unit.monkey_patch_example import example_b
self.assertEqual('Example function', example_a.example_function_a())
exampleA = example_a.ExampleClassA()
exampleA.example_method()
ret_a = exampleA.example_method_add(3, 5)
self.assertEqual(ret_a, 8)
self.assertEqual('Example function', example_b.example_function_b())
exampleB = example_b.ExampleClassB()
exampleB.example_method()
ret_b = exampleB.example_method_add(3, 5)
self.assertEqual(ret_b, 8)
package_a = self.example_package + 'example_a.'
self.assertIn(package_a + 'example_function_a',
nova.tests.unit.monkey_patch_example.CALLED_FUNCTION)
self.assertIn(package_a + 'ExampleClassA.example_method',
nova.tests.unit.monkey_patch_example.CALLED_FUNCTION)
self.assertIn(package_a + 'ExampleClassA.example_method_add',
nova.tests.unit.monkey_patch_example.CALLED_FUNCTION)
package_b = self.example_package + 'example_b.'
self.assertNotIn(package_b + 'example_function_b',
nova.tests.unit.monkey_patch_example.CALLED_FUNCTION)
self.assertNotIn(package_b + 'ExampleClassB.example_method',
nova.tests.unit.monkey_patch_example.CALLED_FUNCTION)
self.assertNotIn(package_b + 'ExampleClassB.example_method_add',
nova.tests.unit.monkey_patch_example.CALLED_FUNCTION)
class MonkeyPatchDefaultTestCase(test.NoDBTestCase):
"""Unit test for default monkey_patch_modules value."""
def setUp(self):
super(MonkeyPatchDefaultTestCase, self).setUp()
self.flags(
monkey_patch=True)
def test_monkey_patch_default_mod(self):
# monkey_patch_modules is defined to be
# <module_to_patch>:<decorator_to_patch_with>
# Here we check that both parts of the default values are
# valid
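        # e.g. an entry of the form
        # "nova.compute.api:nova.notifications.notify_decorator"
        # (format illustration only; the actual defaults come from CONF)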
for module in CONF.monkey_patch_modules:
m = module.split(':', 1)
# Check we can import the module to be patched
importlib.import_module(m[0])
# check the decorator is valid
decorator_name = m[1].rsplit('.', 1)
decorator_module = importlib.import_module(decorator_name[0])
getattr(decorator_module, decorator_name[1])
class AuditPeriodTest(test.NoDBTestCase):
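    # last_completed_audit_period() accepts units such as 'hour', 'day',
    # 'month' and 'year', optionally followed by '@<offset>' to shift the
    # period boundary (e.g. 'day@6' starts each day-long period at 06:00),
    # as exercised by the tests below.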
def setUp(self):
super(AuditPeriodTest, self).setUp()
# a fairly random time to test with
self.test_time = datetime.datetime(second=23,
minute=12,
hour=8,
day=5,
month=3,
year=2012)
timeutils.set_time_override(override_time=self.test_time)
def tearDown(self):
timeutils.clear_time_override()
super(AuditPeriodTest, self).tearDown()
def test_hour(self):
begin, end = utils.last_completed_audit_period(unit='hour')
self.assertEqual(begin, datetime.datetime(
hour=7,
day=5,
month=3,
year=2012))
self.assertEqual(end, datetime.datetime(
hour=8,
day=5,
month=3,
year=2012))
def test_hour_with_offset_before_current(self):
begin, end = utils.last_completed_audit_period(unit='hour@10')
self.assertEqual(begin, datetime.datetime(
minute=10,
hour=7,
day=5,
month=3,
year=2012))
self.assertEqual(end, datetime.datetime(
minute=10,
hour=8,
day=5,
month=3,
year=2012))
def test_hour_with_offset_after_current(self):
begin, end = utils.last_completed_audit_period(unit='hour@30')
self.assertEqual(begin, datetime.datetime(
minute=30,
hour=6,
day=5,
month=3,
year=2012))
self.assertEqual(end, datetime.datetime(
minute=30,
hour=7,
day=5,
month=3,
year=2012))
def test_day(self):
begin, end = utils.last_completed_audit_period(unit='day')
self.assertEqual(begin, datetime.datetime(
day=4,
month=3,
year=2012))
self.assertEqual(end, datetime.datetime(
day=5,
month=3,
year=2012))
def test_day_with_offset_before_current(self):
begin, end = utils.last_completed_audit_period(unit='day@6')
self.assertEqual(begin, datetime.datetime(
hour=6,
day=4,
month=3,
year=2012))
self.assertEqual(end, datetime.datetime(
hour=6,
day=5,
month=3,
year=2012))
def test_day_with_offset_after_current(self):
begin, end = utils.last_completed_audit_period(unit='day@10')
self.assertEqual(begin, datetime.datetime(
hour=10,
day=3,
month=3,
year=2012))
self.assertEqual(end, datetime.datetime(
hour=10,
day=4,
month=3,
year=2012))
def test_month(self):
begin, end = utils.last_completed_audit_period(unit='month')
self.assertEqual(begin, datetime.datetime(
day=1,
month=2,
year=2012))
self.assertEqual(end, datetime.datetime(
day=1,
month=3,
year=2012))
def test_month_with_offset_before_current(self):
begin, end = utils.last_completed_audit_period(unit='month@2')
self.assertEqual(begin, datetime.datetime(
day=2,
month=2,
year=2012))
self.assertEqual(end, datetime.datetime(
day=2,
month=3,
year=2012))
def test_month_with_offset_after_current(self):
begin, end = utils.last_completed_audit_period(unit='month@15')
self.assertEqual(begin, datetime.datetime(
day=15,
month=1,
year=2012))
self.assertEqual(end, datetime.datetime(
day=15,
month=2,
year=2012))
def test_year(self):
begin, end = utils.last_completed_audit_period(unit='year')
self.assertEqual(begin, datetime.datetime(
day=1,
month=1,
year=2011))
self.assertEqual(end, datetime.datetime(
day=1,
month=1,
year=2012))
def test_year_with_offset_before_current(self):
begin, end = utils.last_completed_audit_period(unit='year@2')
self.assertEqual(begin, datetime.datetime(
day=1,
month=2,
year=2011))
self.assertEqual(end, datetime.datetime(
day=1,
month=2,
year=2012))
def test_year_with_offset_after_current(self):
begin, end = utils.last_completed_audit_period(unit='year@6')
self.assertEqual(begin, datetime.datetime(
day=1,
month=6,
year=2010))
self.assertEqual(end, datetime.datetime(
day=1,
month=6,
year=2011))
class MkfsTestCase(test.NoDBTestCase):
def test_mkfs(self):
self.mox.StubOutWithMock(utils, 'execute')
utils.execute('mkfs', '-t', 'ext4', '-F', '/my/block/dev',
run_as_root=False)
utils.execute('mkfs', '-t', 'msdos', '/my/msdos/block/dev',
run_as_root=False)
utils.execute('mkswap', '/my/swap/block/dev',
run_as_root=False)
self.mox.ReplayAll()
utils.mkfs('ext4', '/my/block/dev')
utils.mkfs('msdos', '/my/msdos/block/dev')
utils.mkfs('swap', '/my/swap/block/dev')
def test_mkfs_with_label(self):
self.mox.StubOutWithMock(utils, 'execute')
utils.execute('mkfs', '-t', 'ext4', '-F',
'-L', 'ext4-vol', '/my/block/dev', run_as_root=False)
utils.execute('mkfs', '-t', 'msdos',
'-n', 'msdos-vol', '/my/msdos/block/dev',
run_as_root=False)
utils.execute('mkswap', '-L', 'swap-vol', '/my/swap/block/dev',
run_as_root=False)
self.mox.ReplayAll()
utils.mkfs('ext4', '/my/block/dev', 'ext4-vol')
utils.mkfs('msdos', '/my/msdos/block/dev', 'msdos-vol')
utils.mkfs('swap', '/my/swap/block/dev', 'swap-vol')
class LastBytesTestCase(test.NoDBTestCase):
"""Test the last_bytes() utility method."""
def setUp(self):
super(LastBytesTestCase, self).setUp()
self.f = six.BytesIO(b'1234567890')
def test_truncated(self):
self.f.seek(0, os.SEEK_SET)
out, remaining = utils.last_bytes(self.f, 5)
self.assertEqual(out, b'67890')
self.assertTrue(remaining > 0)
def test_read_all(self):
self.f.seek(0, os.SEEK_SET)
out, remaining = utils.last_bytes(self.f, 1000)
self.assertEqual(out, b'1234567890')
self.assertFalse(remaining > 0)
def test_seek_too_far_real_file(self):
        # StringIO doesn't raise IOError if you seek past the start of the file.
with tempfile.TemporaryFile() as flo:
content = b'1234567890'
flo.write(content)
self.assertEqual((content, 0), utils.last_bytes(flo, 1000))
class MetadataToDictTestCase(test.NoDBTestCase):
def test_metadata_to_dict(self):
self.assertEqual(utils.metadata_to_dict(
[{'key': 'foo1', 'value': 'bar'},
{'key': 'foo2', 'value': 'baz'}]),
{'foo1': 'bar', 'foo2': 'baz'})
def test_metadata_to_dict_empty(self):
self.assertEqual(utils.metadata_to_dict([]), {})
def test_dict_to_metadata(self):
def sort_key(adict):
return sorted(adict.items())
metadata = utils.dict_to_metadata(dict(foo1='bar1', foo2='bar2'))
expected = [{'key': 'foo1', 'value': 'bar1'},
{'key': 'foo2', 'value': 'bar2'}]
self.assertEqual(sorted(metadata, key=sort_key),
sorted(expected, key=sort_key))
def test_dict_to_metadata_empty(self):
self.assertEqual(utils.dict_to_metadata({}), [])
class WrappedCodeTestCase(test.NoDBTestCase):
"""Test the get_wrapped_function utility method."""
def _wrapper(self, function):
@functools.wraps(function)
def decorated_function(self, *args, **kwargs):
function(self, *args, **kwargs)
return decorated_function
def test_single_wrapped(self):
@self._wrapper
def wrapped(self, instance, red=None, blue=None):
pass
func = utils.get_wrapped_function(wrapped)
func_code = func.__code__
self.assertEqual(4, len(func_code.co_varnames))
self.assertIn('self', func_code.co_varnames)
self.assertIn('instance', func_code.co_varnames)
self.assertIn('red', func_code.co_varnames)
self.assertIn('blue', func_code.co_varnames)
def test_double_wrapped(self):
@self._wrapper
@self._wrapper
def wrapped(self, instance, red=None, blue=None):
pass
func = utils.get_wrapped_function(wrapped)
func_code = func.__code__
self.assertEqual(4, len(func_code.co_varnames))
self.assertIn('self', func_code.co_varnames)
self.assertIn('instance', func_code.co_varnames)
self.assertIn('red', func_code.co_varnames)
self.assertIn('blue', func_code.co_varnames)
def test_triple_wrapped(self):
@self._wrapper
@self._wrapper
@self._wrapper
def wrapped(self, instance, red=None, blue=None):
pass
func = utils.get_wrapped_function(wrapped)
func_code = func.__code__
self.assertEqual(4, len(func_code.co_varnames))
self.assertIn('self', func_code.co_varnames)
self.assertIn('instance', func_code.co_varnames)
self.assertIn('red', func_code.co_varnames)
self.assertIn('blue', func_code.co_varnames)
class ExpectedArgsTestCase(test.NoDBTestCase):
def test_passes(self):
@utils.expects_func_args('foo', 'baz')
def dec(f):
return f
@dec
def func(foo, bar, baz="lol"):
pass
def test_raises(self):
@utils.expects_func_args('foo', 'baz')
def dec(f):
return f
def func(bar, baz):
pass
self.assertRaises(TypeError, dec, func)
def test_var_no_of_args(self):
@utils.expects_func_args('foo')
def dec(f):
return f
@dec
def func(bar, *args, **kwargs):
pass
def test_more_layers(self):
@utils.expects_func_args('foo', 'baz')
def dec(f):
return f
def dec_2(f):
def inner_f(*a, **k):
return f()
return inner_f
@dec_2
def func(bar, baz):
pass
self.assertRaises(TypeError, dec, func)
class StringLengthTestCase(test.NoDBTestCase):
def test_check_string_length(self):
self.assertIsNone(utils.check_string_length(
'test', 'name', max_length=255))
self.assertRaises(exception.InvalidInput,
utils.check_string_length,
11, 'name', max_length=255)
self.assertRaises(exception.InvalidInput,
utils.check_string_length,
'', 'name', min_length=1)
self.assertRaises(exception.InvalidInput,
utils.check_string_length,
'a' * 256, 'name', max_length=255)
def test_check_string_length_noname(self):
self.assertIsNone(utils.check_string_length(
'test', max_length=255))
self.assertRaises(exception.InvalidInput,
utils.check_string_length,
11, max_length=255)
self.assertRaises(exception.InvalidInput,
utils.check_string_length,
'', min_length=1)
self.assertRaises(exception.InvalidInput,
utils.check_string_length,
'a' * 256, max_length=255)
class ValidateIntegerTestCase(test.NoDBTestCase):
def test_valid_inputs(self):
self.assertEqual(
utils.validate_integer(42, "answer"), 42)
self.assertEqual(
utils.validate_integer("42", "answer"), 42)
self.assertEqual(
utils.validate_integer(
"7", "lucky", min_value=7, max_value=8), 7)
self.assertEqual(
utils.validate_integer(
7, "lucky", min_value=6, max_value=7), 7)
self.assertEqual(
utils.validate_integer(
300, "Spartaaa!!!", min_value=300), 300)
self.assertEqual(
utils.validate_integer(
"300", "Spartaaa!!!", max_value=300), 300)
def test_invalid_inputs(self):
self.assertRaises(exception.InvalidInput,
utils.validate_integer,
"im-not-an-int", "not-an-int")
self.assertRaises(exception.InvalidInput,
utils.validate_integer,
3.14, "Pie")
self.assertRaises(exception.InvalidInput,
utils.validate_integer,
"299", "Sparta no-show",
min_value=300, max_value=300)
self.assertRaises(exception.InvalidInput,
utils.validate_integer,
55, "doing 55 in a 54",
max_value=54)
self.assertRaises(exception.InvalidInput,
utils.validate_integer,
six.unichr(129), "UnicodeError",
max_value=1000)
class ValidateNeutronConfiguration(test.NoDBTestCase):
def test_nova_network(self):
self.assertFalse(utils.is_neutron())
def test_neutron(self):
self.flags(network_api_class='nova.network.neutronv2.api.API')
self.assertTrue(utils.is_neutron())
def test_quantum(self):
self.flags(network_api_class='nova.network.quantumv2.api.API')
self.assertTrue(utils.is_neutron())
class AutoDiskConfigUtilTestCase(test.NoDBTestCase):
def test_is_auto_disk_config_disabled(self):
self.assertTrue(utils.is_auto_disk_config_disabled("Disabled "))
def test_is_auto_disk_config_disabled_none(self):
self.assertFalse(utils.is_auto_disk_config_disabled(None))
def test_is_auto_disk_config_disabled_false(self):
self.assertFalse(utils.is_auto_disk_config_disabled("false"))
class GetSystemMetadataFromImageTestCase(test.NoDBTestCase):
def get_image(self):
image_meta = {
"id": "fake-image",
"name": "fake-name",
"min_ram": 1,
"min_disk": 1,
"disk_format": "raw",
"container_format": "bare",
}
return image_meta
def get_flavor(self):
flavor = {
"id": "fake.flavor",
"root_gb": 10,
}
return flavor
def test_base_image_properties(self):
image = self.get_image()
# Verify that we inherit all the needed keys
sys_meta = utils.get_system_metadata_from_image(image)
for key in utils.SM_INHERITABLE_KEYS:
sys_key = "%s%s" % (utils.SM_IMAGE_PROP_PREFIX, key)
self.assertEqual(image[key], sys_meta.get(sys_key))
# Verify that everything else is ignored
self.assertEqual(len(sys_meta), len(utils.SM_INHERITABLE_KEYS))
def test_inherit_image_properties(self):
image = self.get_image()
image["properties"] = {"foo1": "bar", "foo2": "baz"}
sys_meta = utils.get_system_metadata_from_image(image)
# Verify that we inherit all the image properties
for key, expected in six.iteritems(image["properties"]):
sys_key = "%s%s" % (utils.SM_IMAGE_PROP_PREFIX, key)
self.assertEqual(sys_meta[sys_key], expected)
def test_skip_image_properties(self):
image = self.get_image()
image["properties"] = {
"foo1": "bar", "foo2": "baz",
"mappings": "wizz", "img_block_device_mapping": "eek",
}
sys_meta = utils.get_system_metadata_from_image(image)
# Verify that we inherit all the image properties
for key, expected in six.iteritems(image["properties"]):
sys_key = "%s%s" % (utils.SM_IMAGE_PROP_PREFIX, key)
if key in utils.SM_SKIP_KEYS:
self.assertNotIn(sys_key, sys_meta)
else:
self.assertEqual(sys_meta[sys_key], expected)
def test_vhd_min_disk_image(self):
image = self.get_image()
flavor = self.get_flavor()
image["disk_format"] = "vhd"
sys_meta = utils.get_system_metadata_from_image(image, flavor)
# Verify that the min_disk property is taken from
# flavor's root_gb when using vhd disk format
sys_key = "%s%s" % (utils.SM_IMAGE_PROP_PREFIX, "min_disk")
self.assertEqual(sys_meta[sys_key], flavor["root_gb"])
def test_dont_inherit_empty_values(self):
image = self.get_image()
for key in utils.SM_INHERITABLE_KEYS:
image[key] = None
sys_meta = utils.get_system_metadata_from_image(image)
# Verify that the empty properties have not been inherited
for key in utils.SM_INHERITABLE_KEYS:
sys_key = "%s%s" % (utils.SM_IMAGE_PROP_PREFIX, key)
self.assertNotIn(sys_key, sys_meta)
class GetImageFromSystemMetadataTestCase(test.NoDBTestCase):
def get_system_metadata(self):
sys_meta = {
"image_min_ram": 1,
"image_min_disk": 1,
"image_disk_format": "raw",
"image_container_format": "bare",
}
return sys_meta
def test_image_from_system_metadata(self):
sys_meta = self.get_system_metadata()
sys_meta["%soo1" % utils.SM_IMAGE_PROP_PREFIX] = "bar"
sys_meta["%soo2" % utils.SM_IMAGE_PROP_PREFIX] = "baz"
sys_meta["%simg_block_device_mapping" %
utils.SM_IMAGE_PROP_PREFIX] = "eek"
image = utils.get_image_from_system_metadata(sys_meta)
# Verify that we inherit all the needed keys
for key in utils.SM_INHERITABLE_KEYS:
sys_key = "%s%s" % (utils.SM_IMAGE_PROP_PREFIX, key)
self.assertEqual(image[key], sys_meta.get(sys_key))
# Verify that we inherit the rest of metadata as properties
self.assertIn("properties", image)
for key, value in six.iteritems(image["properties"]):
sys_key = "%s%s" % (utils.SM_IMAGE_PROP_PREFIX, key)
self.assertEqual(image["properties"][key], sys_meta[sys_key])
self.assertNotIn("img_block_device_mapping", image["properties"])
def test_dont_inherit_empty_values(self):
sys_meta = self.get_system_metadata()
for key in utils.SM_INHERITABLE_KEYS:
sys_key = "%s%s" % (utils.SM_IMAGE_PROP_PREFIX, key)
sys_meta[sys_key] = None
image = utils.get_image_from_system_metadata(sys_meta)
# Verify that the empty properties have not been inherited
for key in utils.SM_INHERITABLE_KEYS:
self.assertNotIn(key, image)
class GetImageMetadataFromVolumeTestCase(test.NoDBTestCase):
def test_inherit_image_properties(self):
properties = {"fake_prop": "fake_value"}
volume = {"volume_image_metadata": properties}
image_meta = utils.get_image_metadata_from_volume(volume)
self.assertEqual(properties, image_meta["properties"])
def test_image_size(self):
volume = {"size": 10}
image_meta = utils.get_image_metadata_from_volume(volume)
self.assertEqual(10 * units.Gi, image_meta["size"])
def test_image_status(self):
volume = {}
image_meta = utils.get_image_metadata_from_volume(volume)
self.assertEqual("active", image_meta["status"])
def test_values_conversion(self):
properties = {"min_ram": "5", "min_disk": "7"}
volume = {"volume_image_metadata": properties}
image_meta = utils.get_image_metadata_from_volume(volume)
self.assertEqual(5, image_meta["min_ram"])
self.assertEqual(7, image_meta["min_disk"])
class VersionTestCase(test.NoDBTestCase):
def test_convert_version_to_int(self):
self.assertEqual(utils.convert_version_to_int('6.2.0'), 6002000)
self.assertEqual(utils.convert_version_to_int((6, 4, 3)), 6004003)
self.assertEqual(utils.convert_version_to_int((5, )), 5)
self.assertRaises(exception.NovaException,
utils.convert_version_to_int, '5a.6b')
def test_convert_version_to_string(self):
self.assertEqual(utils.convert_version_to_str(6007000), '6.7.0')
self.assertEqual(utils.convert_version_to_str(4), '4')
def test_convert_version_to_tuple(self):
self.assertEqual(utils.convert_version_to_tuple('6.7.0'), (6, 7, 0))
class ConstantTimeCompareTestCase(test.NoDBTestCase):
def test_constant_time_compare(self):
self.assertTrue(utils.constant_time_compare("abcd1234", "abcd1234"))
self.assertFalse(utils.constant_time_compare("abcd1234", "a"))
self.assertFalse(utils.constant_time_compare("abcd1234", "ABCD234"))
class ResourceFilterTestCase(test.NoDBTestCase):
def _assert_filtering(self, res_list, filts, expected_tags):
actual_tags = utils.filter_and_format_resource_metadata('instance',
res_list, filts, 'metadata')
self.assertJsonEqual(expected_tags, actual_tags)
def test_filter_and_format_resource_metadata(self):
# Create some tags
# One overlapping pair, and one different key value pair
# i1 : foo=bar, bax=wibble
# i2 : foo=bar, baz=quux
# resources
i1 = {
'uuid': '1',
'metadata': {'foo': 'bar', 'bax': 'wibble'},
}
i2 = {
'uuid': '2',
'metadata': {'foo': 'bar', 'baz': 'quux'},
}
# Resources list
rl = [i1, i2]
# tags
i11 = {'instance_id': '1', 'key': 'foo', 'value': 'bar'}
i12 = {'instance_id': '1', 'key': 'bax', 'value': 'wibble'}
i21 = {'instance_id': '2', 'key': 'foo', 'value': 'bar'}
i22 = {'instance_id': '2', 'key': 'baz', 'value': 'quux'}
# No filter
self._assert_filtering(rl, [], [i11, i12, i21, i22])
self._assert_filtering(rl, {}, [i11, i12, i21, i22])
# Key search
# Both should have tags with key 'foo' and value 'bar'
self._assert_filtering(rl, {'key': 'foo', 'value': 'bar'}, [i11, i21])
# Both should have tags with key 'foo'
self._assert_filtering(rl, {'key': 'foo'}, [i11, i21])
# Only i2 should have tags with key 'baz' and value 'quux'
self._assert_filtering(rl, {'key': 'baz', 'value': 'quux'}, [i22])
# Only i2 should have tags with value 'quux'
self._assert_filtering(rl, {'value': 'quux'}, [i22])
# Empty list should be returned when no tags match
self._assert_filtering(rl, {'key': 'split', 'value': 'banana'}, [])
# Multiple values
# Only i2 should have tags with key 'baz' and values in the set
# ['quux', 'wibble']
self._assert_filtering(rl, {'key': 'baz', 'value': ['quux', 'wibble']},
[i22])
# But when specified as two different filters, no tags should be
# returned. This is because the filter means "return tags which
# have (key=baz AND value=quux) AND (key=baz AND value=wibble)".
self._assert_filtering(rl, [{'key': 'baz', 'value': 'quux'},
{'key': 'baz', 'value': 'wibble'}], [])
# Test for regex
self._assert_filtering(rl, {'value': '\\Aqu..*\\Z(?s)'}, [i22])
# Make sure bug #1365887 is fixed
i1['metadata']['key3'] = 'a'
self._assert_filtering(rl, {'value': 'banana'}, [])
class SafeTruncateTestCase(test.NoDBTestCase):
def test_exception_to_dict_with_long_message_3_bytes(self):
# Generate Chinese byte string whose length is 300. This Chinese UTF-8
# character occupies 3 bytes. After truncating, the byte string length
# should be 255.
msg = u'\u8d75' * 100
truncated_msg = utils.safe_truncate(msg, 255)
byte_message = encodeutils.safe_encode(truncated_msg)
self.assertEqual(255, len(byte_message))
def test_exception_to_dict_with_long_message_2_bytes(self):
# Generate Russian byte string whose length is 300. This Russian UTF-8
# character occupies 2 bytes. After truncating, the byte string length
# should be 254.
msg = encodeutils.safe_decode('\xd0\x92' * 150)
truncated_msg = utils.safe_truncate(msg, 255)
byte_message = encodeutils.safe_encode(truncated_msg)
self.assertEqual(254, len(byte_message))
class SpawnNTestCase(test.NoDBTestCase):
def setUp(self):
super(SpawnNTestCase, self).setUp()
self.useFixture(context_fixture.ClearRequestContext())
self.spawn_name = 'spawn_n'
def test_spawn_n_no_context(self):
self.assertIsNone(common_context.get_current())
def _fake_spawn(func, *args, **kwargs):
# call the method to ensure no error is raised
func(*args, **kwargs)
self.assertEqual('test', args[0])
def fake(arg):
pass
with mock.patch.object(eventlet, self.spawn_name, _fake_spawn):
getattr(utils, self.spawn_name)(fake, 'test')
self.assertIsNone(common_context.get_current())
def test_spawn_n_context(self):
self.assertIsNone(common_context.get_current())
ctxt = context.RequestContext('user', 'project')
def _fake_spawn(func, *args, **kwargs):
# call the method to ensure no error is raised
func(*args, **kwargs)
self.assertEqual(ctxt, args[0])
self.assertEqual('test', kwargs['kwarg1'])
def fake(context, kwarg1=None):
pass
with mock.patch.object(eventlet, self.spawn_name, _fake_spawn):
getattr(utils, self.spawn_name)(fake, ctxt, kwarg1='test')
self.assertEqual(ctxt, common_context.get_current())
def test_spawn_n_context_different_from_passed(self):
self.assertIsNone(common_context.get_current())
ctxt = context.RequestContext('user', 'project')
ctxt_passed = context.RequestContext('user', 'project',
overwrite=False)
self.assertEqual(ctxt, common_context.get_current())
def _fake_spawn(func, *args, **kwargs):
# call the method to ensure no error is raised
func(*args, **kwargs)
self.assertEqual(ctxt_passed, args[0])
self.assertEqual('test', kwargs['kwarg1'])
def fake(context, kwarg1=None):
pass
with mock.patch.object(eventlet, self.spawn_name, _fake_spawn):
getattr(utils, self.spawn_name)(fake, ctxt_passed, kwarg1='test')
self.assertEqual(ctxt, common_context.get_current())
class SpawnTestCase(SpawnNTestCase):
def setUp(self):
super(SpawnTestCase, self).setUp()
self.spawn_name = 'spawn'
| {
"content_hash": "94a03854d70fde9ebe856daa5005ab54",
"timestamp": "",
"source": "github",
"line_count": 1134,
"max_line_length": 79,
"avg_line_length": 39.63139329805996,
"alnum_prop": 0.5460816163054604,
"repo_name": "JioCloud/nova_test_latest",
"id": "48c19b10b51d83f3bfc170035d519d40125034b4",
"size": "45558",
"binary": false,
"copies": "7",
"ref": "refs/heads/master",
"path": "nova/tests/unit/test_utils.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "16277164"
},
{
"name": "Shell",
"bytes": "20716"
},
{
"name": "Smarty",
"bytes": "283675"
}
],
"symlink_target": ""
} |
import sublime, sublime_plugin
import os
from .. import util
from ..popup_manager import popup_manager
from ..global_vars import *
class FolderExplorer:
view = None
start_path = ""
current_path = ""
selected_dir = ""
selected_file = ""
callback_choose = None
only_dir = False
only_file = False
closed = False
point = 5
def __init__(self, view, point=5, start_path="", callback_choose=None, only_dir=False, only_file=False):
self.view = view
self.start_path = start_path.strip()
self.callback_choose = callback_choose
self.only_dir = only_dir
self.only_file = only_file
self.point = point
if self.start_path:
pass
elif self.view and self.view.file_name():
self.start_path = self.view.file_name()
elif self.view and self.view.window().folders():
self.start_path = self.view.window().folders()[0]
else:
raise Exception('JavaScript Enhancements: No place to open Folder Explorer to.')
if not os.path.isdir(self.start_path):
self.start_path = os.path.dirname(self.start_path)
self.current_path = self.start_path
self.style_css = ""
with open(os.path.join(os.path.dirname(os.path.abspath(__file__)), "folder_explorer.css"), encoding="utf-8") as css_file:
self.style_css = "<style>"+css_file.read()+"</style>"
def open(self, path=""):
dirs = []
files = []
self.current_path = path if path.strip() != "" else self.current_path
if not os.path.isdir(self.current_path):
prev_path = ""
while not os.path.isdir(self.current_path) and prev_path != self.current_path:
prev_path = self.current_path
self.current_path = os.path.dirname(self.current_path)
try:
for item in os.listdir(self.current_path):
abspath = os.path.join(self.current_path, item)
is_dir = os.path.isdir(abspath)
if is_dir:
dirs.append(abspath)
else:
files.append(abspath)
except PermissionError as e:
sublime.error_message("Permission denied: " + self.current_path)
if os.path.dirname(self.current_path) != self.current_path:
try:
os.listdir(os.path.dirname(self.current_path))
self.open(os.path.dirname(self.current_path))
except Exception as e2:
if self.start_path != self.current_path:
self.open(self.start_path)
return
dirs = sorted(dirs)
files = sorted(files)
html = """
<html>
<head></head>
<body>""" + self.style_css + """
<div class="content">
<p>Folder Explorer """ + (" - Only Directories" if self.only_dir else (" - Only Files" if self.only_file else "")) + """</p>
<p class="current-directory">""" + self.current_path + """</p>
"""
html += """
<div class="item-list">
"""
img_directory_src = "file://" + IMG_FOLDER_PATH + "/folder.png"
if self.current_path != os.path.dirname(self.current_path):
action = "navigate_dir|" + os.path.dirname(self.current_path)
html += "<div class=\"item directory\"><a href=\"" + action + "\"><img class=\"item-image directory-image\" src=\"" + img_directory_src + "\">..</a></div>"
if not self.only_file:
for d in dirs:
action = "select_dir|" + d
html += "<div class=\"item directory\"><a href=\"" + action + "\"><img class=\"item-image directory-image\" src=\"" + img_directory_src + "\">" + os.path.basename(d) + "</a></div>"
if not self.only_dir:
for f in files:
action = "select_file|" + f
html += "<div class=\"item file\"><a href=\"" + action + "\">" + os.path.basename(f) + "</a></div>"
html += """
</div>
<a class="button reset-path-button" href=\"navigate_dir|""" + self.start_path + """\">reset path</a>
<a class="button choose-button" href=\"choose\">choose</a>
<a class="button close-button" href=\"close\">close</a>
</div>
</body>
</html>
"""
if not popup_manager.is_visible("javascript_enhancements_folder_explorer"):
self.closed = False
popup_manager.set_visible("javascript_enhancements_folder_explorer", True)
sublime.set_timeout(lambda:
self.view.show_popup(
html,
sublime.COOPERATE_WITH_AUTO_COMPLETE,
self.point, 700, 500,
self.action,
lambda: popup_manager.set_visible("javascript_enhancements_folder_explorer", False) or ( self.open() if not self.closed else False ))
, 50)
else:
self.view.update_popup(html)
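# Popup links encode their payload as "action|parameter" strings; they are dispatched below.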
def action(self, action, parameters=[]):
if not parameters:
action = action.split("|")
parameters = action[1:]
action = action[0]
if action == "select_dir":
if self.selected_dir == parameters[0]:
self.action("navigate_dir", parameters)
else:
self.selected_dir = parameters[0]
self.selected_file = ""
elif action == "select_file":
if self.selected_file == parameters[0]:
self.action("choose")
else:
self.selected_file = parameters[0]
self.selected_dir = ""
elif action == "navigate_dir":
self.selected_dir = ""
self.selected_file = ""
self.open(parameters[0])
elif action == "choose":
if ( self.selected_dir or self.selected_file or self.current_path ) and self.callback_choose:
self.callback_choose( self.selected_dir or self.selected_file or self.current_path )
self.action("close")
return
elif action == "close":
self.closed = True
self.selected_dir = ""
self.selected_file = ""
self.view.hide_popup()
if self.selected_dir or self.selected_file:
panel = util.create_and_show_panel("javascript_enhancements_folder_explorer_selection", window=self.view.window(), return_if_exists=True, unlisted=True)
panel.set_read_only(False)
panel.run_command("javascript_enhancements_erase_text_view")
panel.run_command("javascript_enhancements_insert_text_view", args={"text": "Selected: " + ( self.selected_dir or self.selected_file ), "point": 0 })
panel.set_read_only(True)
else:
self.view.window().destroy_output_panel("javascript_enhancements_folder_explorer_selection")
| {
"content_hash": "bd19560877d1c4fddfe7c2c619e4baaa",
"timestamp": "",
"source": "github",
"line_count": 182,
"max_line_length": 188,
"avg_line_length": 34.57142857142857,
"alnum_prop": 0.6061665607120152,
"repo_name": "pichillilorenzo/JavaScriptEnhancements",
"id": "f96d5520d7e0515f7899083ee1a868d41521d61e",
"size": "6292",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/libs/folder_explorer/folder_explorer.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "119"
},
{
"name": "CSS",
"bytes": "4035"
},
{
"name": "JavaScript",
"bytes": "2118"
},
{
"name": "Python",
"bytes": "374510"
},
{
"name": "Shell",
"bytes": "4984"
}
],
"symlink_target": ""
} |
import pathlib
import sys
from typing import Callable, List, Optional
import math
import subprocess
import tempfile
import networkx as nx
import numpy as np
import pymatching
import stim
def sample_decode_count_correct(*,
circuit: stim.Circuit,
model_circuit: Optional[stim.Circuit] = None,
num_shots: int,
decoder: str) -> int:
"""Counts how many times a decoder correctly predicts the logical frame of simulated runs.
Args:
circuit: The circuit to sample from and decode results for.
model_circuit: The circuit to use to generate the error model. Defaults to be the same thing as
the circuit being sampled from.
num_shots: The number of sample shots to take from the cirucit.
decoder: The name of the decoder to use. Allowed values are:
"pymatching": Use pymatching.
"internal": Use an internal decoder at `src/internal_decoder.binary` (not publically available).
"internal_correlated": Use the internal decoder and tell it to do correlated decoding.
"""
if decoder == "pymatching":
use_internal_decoder = False
use_correlated_decoding = False
elif decoder == "internal":
use_internal_decoder = True
use_correlated_decoding = False
elif decoder == "internal_correlated":
use_internal_decoder = True
use_correlated_decoding = True
else:
raise NotImplementedError(f"{decoder=!r}")
num_dets = circuit.num_detectors
num_obs = circuit.num_observables
if model_circuit is None:
model_circuit = circuit
else:
assert model_circuit.num_detectors == num_dets
assert model_circuit.num_observables == num_obs
# Sample some runs with known solutions.
det_obs_samples = circuit.compile_detector_sampler().sample(num_shots, append_observables=True)
if num_obs == 0:
det_samples = det_obs_samples[:, :]
obs_samples = det_obs_samples[:, :0]
else:
det_samples = det_obs_samples[:, :-num_obs]
obs_samples = det_obs_samples[:, -num_obs:]
assert obs_samples.shape[0] == det_samples.shape[0]
assert obs_samples.shape[1] == num_obs
assert det_samples.shape[1] == num_dets
# Have the decoder produce the solution from the symptoms.
decode_method = decode_using_internal_decoder if use_internal_decoder else decode_using_pymatching
predictions = decode_method(
det_samples=det_samples,
circuit=model_circuit,
use_correlated_decoding=use_correlated_decoding,
)
# Count how many solutions were completely correct.
assert predictions.shape == obs_samples.shape
all_corrects = np.all(predictions == obs_samples, axis=1)
return np.count_nonzero(all_corrects)
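# Example usage (sketch; assumes `circuit` is a stim.Circuit with detectors and observables defined):
#     n_correct = sample_decode_count_correct(circuit=circuit, num_shots=1000, decoder="pymatching")
#     logical_error_rate = 1 - n_correct / 1000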
def decode_using_pymatching(circuit: stim.Circuit,
det_samples: np.ndarray,
use_correlated_decoding: bool,
) -> np.ndarray:
"""Collect statistics on how often logical errors occur when correcting using detections."""
if use_correlated_decoding:
raise NotImplementedError("pymatching doesn't support correlated decoding")
error_model = circuit.detector_error_model(decompose_errors=True)
matching_graph = detector_error_model_to_pymatching_graph(error_model)
num_shots = det_samples.shape[0]
num_obs = circuit.num_observables
num_dets = circuit.num_detectors
assert det_samples.shape[1] == num_dets
predictions = np.zeros(shape=(num_shots, num_obs), dtype=np.bool8)
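# The matching graph carries an extra boundary node, so pad each shot's detection events with one unfired entry.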
for k in range(num_shots):
expanded_det = np.resize(det_samples[k], num_dets + 1)
expanded_det[-1] = 0
predictions[k] = matching_graph.decode(expanded_det)
return predictions
def internal_decoder_path() -> Optional[str]:
for possible_dirs in ["./", "src/", "../"]:
path = possible_dirs + "internal_decoder.binary"
if pathlib.Path(path).exists():
return path
return None
def decode_using_internal_decoder(circuit: stim.Circuit,
det_samples: np.ndarray,
use_correlated_decoding: bool,
) -> np.ndarray:
num_shots = det_samples.shape[0]
num_obs = circuit.num_observables
assert det_samples.shape[1] == circuit.num_detectors
error_model = circuit.detector_error_model(decompose_errors=True)
with tempfile.TemporaryDirectory() as d:
dem_file = f"{d}/model.dem"
dets_file = f"{d}/shots.dets"
out_file = f"{d}/out.predictions"
with open(dem_file, "w") as f:
print(error_model, file=f)
with open(dets_file, "w") as f:
for det_sample in det_samples:
print("shot", file=f, end="")
for k in np.nonzero(det_sample)[0]:
print(f" D{k}", file=f, end="")
print(file=f)
path = internal_decoder_path()
if path is None:
raise RuntimeError(
"You need an `internal_decoder.binary` file in the working directory to "
"use `decoder=internal` or `decoder=internal_correlated`.")
command = (f"{path} "
f"-mode fi_match_from_dem "
f"-dem_fname '{dem_file}' "
f"-dets_fname '{dets_file}' "
f"-ignore_distance_1_errors "
f"-out '{out_file}'")
if use_correlated_decoding:
command += " -cheap_corr -edge_corr -node_corr"
try:
subprocess.check_output(command, shell=True)
except:
with open(dem_file) as f:
with open("repro.dem", "w") as f2:
print(f.read(), file=f2)
with open(dets_file) as f:
with open("repro.dets", "w") as f2:
print(f.read(), file=f2)
with open("repro.stim", "w") as f2:
print(circuit, file=f2)
print(f"Wrote case to `repro.dem`, `repro.dets`, and `repro.stim`.\nCommand line is: {command}", file=sys.stderr)
raise
predictions = np.zeros(shape=(num_shots, num_obs), dtype=np.bool8)
with open(out_file, "r") as f:
for shot in range(num_shots):
for obs_index in range(num_obs):
c = f.read(1)
assert c in '01'
predictions[shot, obs_index] = c == '1'
assert f.read(1) == '\n'
return predictions
def iter_flatten_model(model: stim.DetectorErrorModel,
handle_error: Callable[[float, List[int], List[int]], None],
handle_detector_coords: Callable[[int, np.ndarray], None]):
det_offset = 0
coords_offset = np.zeros(100, dtype=np.float64)
def _helper(m: stim.DetectorErrorModel, reps: int):
nonlocal det_offset
nonlocal coords_offset
for _ in range(reps):
for instruction in m:
if isinstance(instruction, stim.DemRepeatBlock):
_helper(instruction.body_copy(), instruction.repeat_count)
elif isinstance(instruction, stim.DemInstruction):
if instruction.type == "error":
dets: List[int] = []
frames: List[int] = []
t: stim.DemTarget
p = instruction.args_copy()[0]
for t in instruction.targets_copy():
if t.is_relative_detector_id():
dets.append(t.val + det_offset)
elif t.is_logical_observable_id():
frames.append(t.val)
elif t.is_separator():
# Treat each component of a decomposed error as an independent error.
# (Ideally we could configure some sort of correlated analysis; oh well.)
handle_error(p, dets, frames)
frames = []
dets = []
# Handle last component.
handle_error(p, dets, frames)
elif instruction.type == "shift_detectors":
det_offset += instruction.targets_copy()[0]
a = np.array(instruction.args_copy())
coords_offset[:len(a)] += a
elif instruction.type == "detector":
a = np.array(instruction.args_copy())
for t in instruction.targets_copy():
handle_detector_coords(t.val + det_offset, a + coords_offset[:len(a)])
elif instruction.type == "logical_observable":
pass
else:
raise NotImplementedError()
else:
raise NotImplementedError()
_helper(model, 1)
def detector_error_model_to_nx_graph(model: stim.DetectorErrorModel) -> nx.Graph:
"""Convert a stim error model into a NetworkX graph."""
g = nx.Graph()
boundary_node = model.num_detectors
g.add_node(boundary_node, is_boundary=True, coords=[-1, -1, -1])
def handle_error(p: float, dets: List[int], frame_changes: List[int]):
if p == 0:
return
if len(dets) == 0:
# No symptoms for this error.
# Code probably has distance 1.
# Accept it and keep going, though of course decoding will probably perform terribly.
return
if len(dets) == 1:
dets = [dets[0], boundary_node]
if len(dets) > 2:
raise NotImplementedError(
f"Error with more than 2 symptoms can't become an edge or boundary edge: {dets!r}.")
if g.has_edge(*dets):
edge_data = g.get_edge_data(*dets)
old_p = edge_data["error_probability"]
old_frame_changes = edge_data["qubit_id"]
# If frame changes differ, the code has distance 2; just keep whichever was first.
if set(old_frame_changes) == set(frame_changes):
p = p * (1 - old_p) + old_p * (1 - p)
g.remove_edge(*dets)
g.add_edge(*dets, weight=math.log((1 - p) / p), qubit_id=frame_changes, error_probability=p)
def handle_detector_coords(detector: int, coords: np.ndarray):
g.add_node(detector, coords=coords)
iter_flatten_model(model, handle_error=handle_error, handle_detector_coords=handle_detector_coords)
return g
def detector_error_model_to_pymatching_graph(model: stim.DetectorErrorModel) -> pymatching.Matching:
"""Convert a stim error model into a pymatching graph."""
g = detector_error_model_to_nx_graph(model)
num_detectors = model.num_detectors
num_observables = model.num_observables
# Add spandrels to the graph to ensure pymatching will accept it.
# - Make sure there's only one connected component.
# - Make sure no detector nodes are skipped.
# - Make sure no observable nodes are skipped.
for k in range(num_detectors):
g.add_node(k)
g.add_node(num_detectors + 1)
for k in range(num_detectors + 1):
g.add_edge(k, num_detectors + 1, weight=9999999999)
g.add_edge(num_detectors, num_detectors + 1, weight=9999999999, qubit_id=list(range(num_observables)))
return pymatching.Matching(g)
| {
"content_hash": "a9c478d6a24aabf29b385ac3b39fc5f6",
"timestamp": "",
"source": "github",
"line_count": 277,
"max_line_length": 125,
"avg_line_length": 42.17689530685921,
"alnum_prop": 0.5692887100915861,
"repo_name": "Strilanc/honeycomb_threshold",
"id": "e80611fe4b9070b9b31bc557b12743297f4a2c37",
"size": "11683",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "src/decoding.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "258401"
}
],
"symlink_target": ""
} |
try:
from IBMQuantumExperience import IBMQuantumExperience
from pprint import pprint
def quant_exp(args):
with open('etc/IBMtoken', 'r') as infile:
token = infile.read().replace('\n', '')
config = {
"url": 'https://quantumexperience.ng.bluemix.net/api'
}
QuantExp = IBMQuantumExperience.IBMQuantumExperience
api = QuantExp(token, config)
for code in api.get_last_codes():
pprint(code)
'''
name = code['name']
if 'grover' in name.lower():
print('IBM Results for{}'.format(name))
pprint(code['executions'][-1]['result'])
#pprint(code.keys())
#pprint(api.get_execution(name))
'''
except ModuleNotFoundError:
print('IBM suite not installed')
#1/0
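# Fallback stub: calling quant_exp without the IBM suite installed deliberately raises ZeroDivisionError.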
def quant_exp(args):
1/0
| {
"content_hash": "d704bf7057a6e62951ff25a354c894d4",
"timestamp": "",
"source": "github",
"line_count": 32,
"max_line_length": 64,
"avg_line_length": 28.15625,
"alnum_prop": 0.5382907880133185,
"repo_name": "LSaldyt/qnp",
"id": "6c3ba9615682f7369e5e7bd840e8feea5b0470f4",
"size": "901",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "scripts/quant_exp.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Coq",
"bytes": "29785"
},
{
"name": "Makefile",
"bytes": "116"
},
{
"name": "Prolog",
"bytes": "1062"
},
{
"name": "Python",
"bytes": "25676"
},
{
"name": "TeX",
"bytes": "22586"
},
{
"name": "Verilog",
"bytes": "507264"
}
],
"symlink_target": ""
} |
from sklearn2sql_heroku.tests.regression import generic as reg_gen
reg_gen.test_model("RandomForestRegressor" , "freidman3" , "db2")
| {
"content_hash": "fbc2413219fbeef9d9d8492d41c3db30",
"timestamp": "",
"source": "github",
"line_count": 4,
"max_line_length": 66,
"avg_line_length": 33.75,
"alnum_prop": 0.7777777777777778,
"repo_name": "antoinecarme/sklearn2sql_heroku",
"id": "e771da7831681b88e9f867e0be87dbdda081c314",
"size": "135",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/regression/freidman3/ws_freidman3_RandomForestRegressor_db2_code_gen.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Makefile",
"bytes": "507043"
},
{
"name": "Procfile",
"bytes": "37"
},
{
"name": "Python",
"bytes": "1021137"
},
{
"name": "R",
"bytes": "2521"
}
],
"symlink_target": ""
} |
from datetime import datetime
import dateutil.tz
import pytest
import pytz
from pandas._libs.tslibs import conversion, timezones
from pandas import Timestamp
@pytest.mark.parametrize("tz_name", list(pytz.common_timezones))
def test_cache_keys_are_distinct_for_pytz_vs_dateutil(tz_name):
if tz_name == "UTC":
pytest.skip("UTC: special case in dateutil")
tz_p = timezones.maybe_get_tz(tz_name)
tz_d = timezones.maybe_get_tz("dateutil/" + tz_name)
if tz_d is None:
pytest.skip(tz_name + ": dateutil does not know about this one")
assert timezones._p_tz_cache_key(tz_p) != timezones._p_tz_cache_key(tz_d)
def test_tzlocal_repr():
# see gh-13583
ts = Timestamp("2011-01-01", tz=dateutil.tz.tzlocal())
assert ts.tz == dateutil.tz.tzlocal()
assert "tz='tzlocal()')" in repr(ts)
def test_tzlocal_maybe_get_tz():
# see gh-13583
tz = timezones.maybe_get_tz('tzlocal()')
assert tz == dateutil.tz.tzlocal()
def test_tzlocal_offset():
# see gh-13583
#
# Get offset using normal datetime for test.
ts = Timestamp("2011-01-01", tz=dateutil.tz.tzlocal())
offset = dateutil.tz.tzlocal().utcoffset(datetime(2011, 1, 1))
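# Timestamp.value is in nanoseconds since the epoch, so convert the offset to nanoseconds before comparing.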
offset = offset.total_seconds() * 1000000000
assert ts.value + offset == Timestamp("2011-01-01").value
@pytest.fixture(params=[
(pytz.timezone("US/Eastern"), lambda tz, x: tz.localize(x)),
(dateutil.tz.gettz("US/Eastern"), lambda tz, x: x.replace(tzinfo=tz))
])
def infer_setup(request):
eastern, localize = request.param
start_naive = datetime(2001, 1, 1)
end_naive = datetime(2009, 1, 1)
start = localize(eastern, start_naive)
end = localize(eastern, end_naive)
return eastern, localize, start, end, start_naive, end_naive
def test_infer_tz_compat(infer_setup):
eastern, _, start, end, start_naive, end_naive = infer_setup
assert (timezones.infer_tzinfo(start, end) is
conversion.localize_pydatetime(start_naive, eastern).tzinfo)
assert (timezones.infer_tzinfo(start, None) is
conversion.localize_pydatetime(start_naive, eastern).tzinfo)
assert (timezones.infer_tzinfo(None, end) is
conversion.localize_pydatetime(end_naive, eastern).tzinfo)
def test_infer_tz_utc_localize(infer_setup):
_, _, start, end, start_naive, end_naive = infer_setup
utc = pytz.utc
start = utc.localize(start_naive)
end = utc.localize(end_naive)
assert timezones.infer_tzinfo(start, end) is utc
@pytest.mark.parametrize("ordered", [True, False])
def test_infer_tz_mismatch(infer_setup, ordered):
eastern, _, _, _, start_naive, end_naive = infer_setup
msg = "Inputs must both have the same timezone"
utc = pytz.utc
start = utc.localize(start_naive)
end = conversion.localize_pydatetime(end_naive, eastern)
args = (start, end) if ordered else (end, start)
with pytest.raises(AssertionError, match=msg):
timezones.infer_tzinfo(*args)
| {
"content_hash": "73d49c8a1be9c5ba52e2f6ecbf24565e",
"timestamp": "",
"source": "github",
"line_count": 100,
"max_line_length": 77,
"avg_line_length": 29.75,
"alnum_prop": 0.6752941176470588,
"repo_name": "MJuddBooth/pandas",
"id": "0255865dbdf71846f43ff818cb8753ebc69d67fc",
"size": "2999",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "pandas/tests/tslibs/test_timezones.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "4879"
},
{
"name": "C",
"bytes": "406766"
},
{
"name": "C++",
"bytes": "17248"
},
{
"name": "HTML",
"bytes": "606963"
},
{
"name": "Makefile",
"bytes": "529"
},
{
"name": "Python",
"bytes": "14858932"
},
{
"name": "Shell",
"bytes": "29575"
},
{
"name": "Smarty",
"bytes": "2040"
}
],
"symlink_target": ""
} |
import numpy as np
import sys
sys.path.append('../')
from interpolate import get_gradient, _bin_and_index
three_colors = ['#ffffff', '#000000', '#ff0000']
two_colors = ['#ffffff', '#000000']
equal = np.testing.assert_array_equal
close_enough = np.testing.assert_allclose
def test_bin_lower():
value = 0.3
size = 2
params = (value, size)
expected_answer = 0
equal(expected_answer, _bin_and_index(*params))
def test_bin_higher():
value = 0.9
size = 2
params = (value, size)
expected_answer = 1
equal(expected_answer, _bin_and_index(*params))
## test_<number of colors>_<value intensity between 0 and 1>
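# The gradient interpolates linearly between adjacent colors, e.g. intensity 0.5 over [white, black, red] lands exactly on black.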
def test_3_half():
value = 0.5
params = (three_colors, value)
expected_answer = np.array([0, 0, 0])
close_enough( expected_answer, get_gradient(*params),atol = 1 )
def test_3_quarter():
value = 0.25
params = (three_colors, value)
expected_answer = np.array([127.5, 127.5, 127.5])
close_enough( expected_answer, get_gradient(*params),atol = 1 )
def test_3_3quarter():
value = 0.75
params = (three_colors, value)
expected_answer = np.array([127.5, 0, 0])
close_enough( expected_answer, get_gradient(*params),atol = 1 )
def test_2_half():
value = 0.5
params = (two_colors, value)
expected_answer = np.array([127.5, 127.5, 127.5])
close_enough( expected_answer, get_gradient(*params),atol = 1 )
def test_2_quarter():
value = 0.25
params = (two_colors, value)
expected_answer = np.array([191.25,191.25,191.25])
close_enough( expected_answer, get_gradient(*params),atol = 1 )
def test_2_3quarter():
value = 0.75
params = (two_colors, value)
expected_answer = np.array([63.75,63.75,63.75])
close_enough( expected_answer, get_gradient(*params),atol = 1 )
| {
"content_hash": "36b909d57d25185b008956a94a25864a",
"timestamp": "",
"source": "github",
"line_count": 69,
"max_line_length": 72,
"avg_line_length": 27.057971014492754,
"alnum_prop": 0.6164970540974826,
"repo_name": "ceos-seo/data_cube_utilities",
"id": "a72dc32086a6f16aebacb1def745198d090ee3e1",
"size": "1867",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "data_cube_utilities/transect/tests/test_interpolate.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "5234"
},
{
"name": "Python",
"bytes": "416675"
}
],
"symlink_target": ""
} |
import logging
import argparse
import sys
parser = argparse.ArgumentParser()
parser.add_argument("--temp")
parser.add_argument("--current")
def main():
FORMAT = '%(asctime)s %(levelname)s:%(message)s'
logging.basicConfig(filename='launcher.log', level=logging.INFO, filemode='w', format=FORMAT)
logging.info("Started logging...")
args = parser.parse_args()
if args.temp and args.current:
TEMP_DIR = args.temp
CURR_DIR = args.current
logging.info("Step 5. Delete files in current")
import os
import shutil
for file_name in os.listdir(CURR_DIR):
file_path = os.path.join(CURR_DIR, file_name)
try:
if os.path.isfile(file_path):
os.remove(file_path)
elif os.path.isdir(file_path):
shutil.rmtree(file_path)
except Exception as e:
logging.error("Failed to delete: {} with error: {}".format(
file_path, e))
logging.info("Step 6. Move Temp Path files to current")
for file_name in os.listdir(TEMP_DIR):
src_path = os.path.join(TEMP_DIR, file_name)
dst_path = os.path.join(CURR_DIR, file_name)
shutil.move(src_path, dst_path)
executable = os.path.join(CURR_DIR, sys.argv[0])
#os.execl(executable, executable, os.path.basename(executable))
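# Best-effort cleanup: close inherited file handles and connections so the replacement process starts cleanly.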
import psutil
try:
p = psutil.Process(os.getpid())
for handler in p.get_open_files() + p.connections():
os.close(handler.fd)
except Exception as e:
logging.error(e)
os.execl(executable, executable)
# 7. Launch current/launcher and resume normal operations
import gui
gui.start()
if __name__ == '__main__':
main()
| {
"content_hash": "21f526fe9ee2984cd6abb8527bd00830",
"timestamp": "",
"source": "github",
"line_count": 55,
"max_line_length": 97,
"avg_line_length": 33.61818181818182,
"alnum_prop": 0.5705786911844241,
"repo_name": "Spring-Chobby/ChobbyLauncher",
"id": "bb040b3ddf1898a9f27a9c81974ac1c54ae518e7",
"size": "1893",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "spring_launcher/launcher.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C++",
"bytes": "41327"
},
{
"name": "Python",
"bytes": "42840"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
import sys
if sys.version_info[0] > 2:
unicode = str
from xml.dom import XHTML_NAMESPACE
from xml.dom.minidom import getDOMImplementation
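# Builds an XHTML test document: title, heading and paragraph plus _data_count nested <div>/<p> blocks of _data_text.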
def create_testdoc(_title, _content, _data_count, _data_text):
dom_impl = getDOMImplementation()
html_doctype = dom_impl.createDocumentType('html', None, None)
html_doc = dom_impl.createDocument(XHTML_NAMESPACE, 'html', html_doctype)
element = lambda name: html_doc.createElementNS(XHTML_NAMESPACE, name)
text = lambda value: html_doc.createTextNode(value)
html_element = html_doc.documentElement
html_element.setAttribute('xmlns', XHTML_NAMESPACE)
head_element = element('head')
html_element.appendChild(head_element)
title_element = element('title')
head_element.appendChild(title_element)
title_text = text(_title)
title_element.appendChild(title_text)
body_element = element('body')
html_element.appendChild(body_element)
h1_element = element('h1')
body_element.appendChild(h1_element)
h1_text = text(_title)
h1_element.appendChild(h1_text)
p_element = element('p')
body_element.appendChild(p_element)
p_text = text(_content)
p_element.appendChild(p_text)
for i in range(_data_count):
div_element = element('div')
body_element.appendChild(div_element)
div_element.setAttribute('data-i', unicode(i))
for j in range(_data_count):
p_element = element('p')
p_element.setAttribute('data-j', unicode(j))
div_element.appendChild(p_element)
p_text = text(_data_text)
p_element.appendChild(p_text)
return html_doc
def create_testdoc_string(*args):
html_doc = create_testdoc(*args)
try:
return html_doc.toxml('UTF-8')
finally:
html_doc.unlink()
| {
"content_hash": "9ec6fa277e2435b9622d99a79f60e33c",
"timestamp": "",
"source": "github",
"line_count": 52,
"max_line_length": 77,
"avg_line_length": 35.5,
"alnum_prop": 0.671180931744312,
"repo_name": "IvIePhisto/ECoXiPy",
"id": "5d45a07bba614d23c4352697b2b50804f46de19d",
"size": "1846",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/performance/xml_dom_minidom.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "214034"
},
{
"name": "Shell",
"bytes": "6074"
}
],
"symlink_target": ""
} |
"""
Regular-expression matching by the Thompson construction.
Explained in C at http://swtch.com/~rsc/regexp/regexp1.html
"""
def match(re, s): return run(prepare(re), s)
def run(states, s):
for c in s:
states = set.union(*[state(c) for state in states])
return accepting_state in states
def accepting_state(c): return set()
def expecting_state(char, k): return lambda c: k(set()) if c == char else set()
def state_node(state): return lambda seen: set([state])
def alt_node(k1, k2): return lambda seen: k1(seen) | k2(seen)
def loop_node(k, make_k):
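# The 'seen' set stops re-entering the same loop state within a single step, preventing infinite regress on empty-matching cycles.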
def loop(seen):
if loop in seen: return set()
seen.add(loop)
return k(seen) | looping(seen)
looping = make_k(loop)
return loop
def prepare(re): return re(state_node(accepting_state))(set())
def lit(char): return lambda k: state_node(expecting_state(char, k))
def alt(re1, re2): return lambda k: alt_node(re1(k), re2(k))
def many(re): return lambda k: loop_node(k, re)
def empty(k): return k
def seq(re1, re2): return lambda k: re1(re2(k))
## match(empty, '')
#. True
## match(empty, 'A')
#. False
## match(lit('x'), '')
#. False
## match(lit('x'), 'y')
#. False
## match(lit('x'), 'x')
#. True
## match(lit('x'), 'xx')
#. False
## match(seq(lit('a'), lit('b')), '')
#. False
## match(seq(lit('a'), lit('b')), 'ab')
#. True
## match(alt(lit('a'), lit('b')), 'b')
#. True
## match(alt(lit('a'), lit('b')), 'a')
#. True
## match(alt(lit('a'), lit('b')), 'x')
#. False
## match(many(lit('a')), '')
#. True
## match(many(lit('a')), 'a')
#. True
## match(many(lit('a')), 'x')
#. False
## match(many(lit('a')), 'aa')
#. True
## match(many(lit('a')), 'ax')
#. False
## complicated = seq(many(alt(seq(lit('a'), lit('b')), seq(lit('a'), seq(lit('x'), lit('y'))))), lit('z'))
## match(complicated, '')
#. False
## match(complicated, 'z')
#. True
## match(complicated, 'abz')
#. True
## match(complicated, 'ababaxyab')
#. False
## match(complicated, 'ababaxyabz')
#. True
## match(complicated, 'ababaxyaxz')
#. False
# N.B. many(many(x)) caused infinite recursion in Thompson's original code; the loop guard here stifles it:
## match(many(many(lit('x'))), 'xxxx')
#. True
## match(many(many(lit('x'))), 'xxxxy')
#. False
# Had a bug: empty forced a match regardless of the continuation.
## match(seq(empty, lit('x')), '')
#. False
## match(seq(empty, lit('x')), 'x')
#. True
| {
"content_hash": "5a2f636c08aa181c0fd47d90b5eeded8",
"timestamp": "",
"source": "github",
"line_count": 92,
"max_line_length": 106,
"avg_line_length": 25.434782608695652,
"alnum_prop": 0.5935897435897436,
"repo_name": "JaDogg/__py_playground",
"id": "0ad84480066551c05983fddc3634acb33cd54a9a",
"size": "2340",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "reference/sketchbook/regex/nfa_stifleloops.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ANTLR",
"bytes": "951976"
},
{
"name": "Assembly",
"bytes": "24809"
},
{
"name": "C",
"bytes": "205198"
},
{
"name": "C#",
"bytes": "48314"
},
{
"name": "C++",
"bytes": "168261"
},
{
"name": "CSS",
"bytes": "2582"
},
{
"name": "Emacs Lisp",
"bytes": "1041"
},
{
"name": "Erlang",
"bytes": "2303"
},
{
"name": "GAP",
"bytes": "718192"
},
{
"name": "HTML",
"bytes": "6799330"
},
{
"name": "Haskell",
"bytes": "2061"
},
{
"name": "Java",
"bytes": "1063759"
},
{
"name": "JavaScript",
"bytes": "12710"
},
{
"name": "Lua",
"bytes": "278"
},
{
"name": "M",
"bytes": "5739"
},
{
"name": "Makefile",
"bytes": "5903"
},
{
"name": "Matlab",
"bytes": "23"
},
{
"name": "Objective-C",
"bytes": "134542"
},
{
"name": "PHP",
"bytes": "5958"
},
{
"name": "Pascal",
"bytes": "40255"
},
{
"name": "Python",
"bytes": "2197399"
},
{
"name": "Ruby",
"bytes": "2367"
},
{
"name": "Scheme",
"bytes": "52618"
},
{
"name": "Shell",
"bytes": "5743"
},
{
"name": "Swift",
"bytes": "11374"
},
{
"name": "TeX",
"bytes": "62560"
},
{
"name": "VHDL",
"bytes": "401678"
},
{
"name": "Visual Basic",
"bytes": "4107"
},
{
"name": "Yacc",
"bytes": "99342"
}
],
"symlink_target": ""
} |
from geoportailv3_geoportal.models import LuxPredefinedWms
from functools import partial
from pyramid.view import view_defaults
from pyramid.view import view_config
from c2cgeoform.schema import GeoFormSchemaNode
from c2cgeoform.views.abstract_views import AbstractViews
from c2cgeoform.views.abstract_views import ListField
base_schema = GeoFormSchemaNode(LuxPredefinedWms)
# override standard class chosen by geoform to avoid clash with css style .label
# class="label" becomes class="_lux_label"
class ListFieldLux(ListField):
def id(self):
return '_lux_' + self._key
_list_field = partial(ListFieldLux, LuxPredefinedWms)
@view_defaults(match_param='table=lux_predefined_wms')
class LuxPredefinedWmsViews(AbstractViews):
_list_fields = [
_list_field('id'),
_list_field('label'),
_list_field('url'),
]
_id_field = 'id'
_model = LuxPredefinedWms
_base_schema = base_schema
@view_config(route_name='c2cgeoform_index',
renderer='./templates/index.jinja2')
def index(self):
return super().index()
@view_config(route_name='c2cgeoform_grid',
renderer='fast_json')
def grid(self):
return super().grid()
@view_config(route_name='c2cgeoform_item',
request_method='GET',
renderer='./templates/edit.jinja2')
def view(self):
return super().edit()
@view_config(route_name='c2cgeoform_item',
request_method='POST',
renderer='./templates/edit.jinja2')
def save(self):
return super().save()
@view_config(route_name='c2cgeoform_item',
request_method='DELETE',
renderer='fast_json')
def delete(self):
return super().delete()
@view_config(route_name='c2cgeoform_item_duplicate',
request_method='GET',
renderer='./templates/edit.jinja2')
def duplicate(self):
return super().duplicate()
| {
"content_hash": "d17a2b5494fee8ecd6408b9ab521fef6",
"timestamp": "",
"source": "github",
"line_count": 66,
"max_line_length": 80,
"avg_line_length": 30.363636363636363,
"alnum_prop": 0.6412175648702595,
"repo_name": "Geoportail-Luxembourg/geoportailv3",
"id": "e16e3f41bf5232fff46229b3116a25433e6536fe",
"size": "2004",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "geoportal/geoportailv3_geoportal/admin/view/lux_predefined_wms.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "432229"
},
{
"name": "Dockerfile",
"bytes": "16989"
},
{
"name": "EJS",
"bytes": "158409"
},
{
"name": "HTML",
"bytes": "441209"
},
{
"name": "JavaScript",
"bytes": "3500634"
},
{
"name": "Less",
"bytes": "165289"
},
{
"name": "Makefile",
"bytes": "26467"
},
{
"name": "Mako",
"bytes": "696"
},
{
"name": "PLpgSQL",
"bytes": "1588593"
},
{
"name": "Python",
"bytes": "619684"
},
{
"name": "SCSS",
"bytes": "1878"
},
{
"name": "Shell",
"bytes": "11608"
},
{
"name": "TypeScript",
"bytes": "7440"
}
],
"symlink_target": ""
} |
import _plotly_utils.basevalidators
class ColorValidator(_plotly_utils.basevalidators.ColorValidator):
def __init__(
self, plotly_name="color", parent_name="barpolar.unselected.textfont", **kwargs
):
super(ColorValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "style"),
**kwargs,
)
| {
"content_hash": "9ac2ac8714e34d8e348355a998f92600",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 87,
"avg_line_length": 32.76923076923077,
"alnum_prop": 0.6103286384976526,
"repo_name": "plotly/plotly.py",
"id": "2bd20d6506b808b18957e324f3d0c0a736d68ff9",
"size": "426",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "packages/python/plotly/plotly/validators/barpolar/unselected/textfont/_color.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "545"
},
{
"name": "JavaScript",
"bytes": "2074"
},
{
"name": "PostScript",
"bytes": "565328"
},
{
"name": "Python",
"bytes": "31506317"
},
{
"name": "TypeScript",
"bytes": "71337"
}
],
"symlink_target": ""
} |
from collections import defaultdict
from .ahdl import AHDL_STM
class AHDLVisitor(object):
def __init__(self):
self.current_fsm = None
self.current_stg = None
self.current_state = None
def process(self, hdlmodule):
for fsm in hdlmodule.fsms.values():
self.process_fsm(fsm)
def process_fsm(self, fsm):
self.current_fsm = fsm
for stg in fsm.stgs:
self.process_stg(stg)
def process_stg(self, stg):
self.current_stg = stg
for state in stg.states:
self.process_state(state)
def process_state(self, state):
self.current_state = state
self.visit(state)
def visit_AHDL_CONST(self, ahdl):
pass
def visit_AHDL_VAR(self, ahdl):
pass
def visit_AHDL_MEMVAR(self, ahdl):
pass
def visit_AHDL_SUBSCRIPT(self, ahdl):
self.visit(ahdl.memvar)
self.visit(ahdl.offset)
def visit_AHDL_OP(self, ahdl):
for a in ahdl.args:
self.visit(a)
def visit_AHDL_SYMBOL(self, ahdl):
pass
def visit_AHDL_CONCAT(self, ahdl):
for var in ahdl.varlist:
self.visit(var)
def visit_AHDL_SLICE(self, ahdl):
self.visit(ahdl.var)
self.visit(ahdl.hi)
self.visit(ahdl.lo)
def visit_AHDL_NOP(self, ahdl):
pass
def visit_AHDL_INLINE(self, ahdl):
pass
def visit_AHDL_MOVE(self, ahdl):
self.visit(ahdl.src)
self.visit(ahdl.dst)
def visit_AHDL_STORE(self, ahdl):
self.visit(ahdl.src)
self.visit(ahdl.mem)
self.visit(ahdl.offset)
def visit_AHDL_LOAD(self, ahdl):
self.visit(ahdl.mem)
self.visit(ahdl.dst)
self.visit(ahdl.offset)
def visit_AHDL_IO_READ(self, ahdl):
self.visit(ahdl.io)
if ahdl.dst:
self.visit(ahdl.dst)
def visit_AHDL_IO_WRITE(self, ahdl):
self.visit(ahdl.io)
self.visit(ahdl.src)
def visit_AHDL_SEQ(self, ahdl):
method = 'visit_{}'.format(ahdl.factor.__class__.__name__)
visitor = getattr(self, method, None)
return visitor(ahdl.factor)
def visit_AHDL_IF(self, ahdl):
for cond in ahdl.conds:
if cond:
self.visit(cond)
for ahdlblk in ahdl.blocks:
self.visit(ahdlblk)
def visit_AHDL_IF_EXP(self, ahdl):
self.visit(ahdl.cond)
self.visit(ahdl.lexp)
self.visit(ahdl.rexp)
def visit_AHDL_CASE(self, ahdl):
self.visit(ahdl.sel)
for item in ahdl.items:
self.visit(item)
def visit_AHDL_CASE_ITEM(self, ahdl):
#self.visit(ahdl.val)
self.visit(ahdl.block)
def visit_AHDL_MODULECALL(self, ahdl):
for arg in ahdl.args:
self.visit(arg)
def visit_AHDL_CALLEE_PROLOG(self, ahdl):
pass
def visit_AHDL_CALLEE_EPILOG(self, ahdl):
pass
def visit_AHDL_FUNCALL(self, ahdl):
self.visit(ahdl.name)
for arg in ahdl.args:
self.visit(arg)
def visit_AHDL_PROCCALL(self, ahdl):
for arg in ahdl.args:
self.visit(arg)
def visit_AHDL_META(self, ahdl):
method = 'visit_' + ahdl.metaid
visitor = getattr(self, method, None)
if visitor:
return visitor(ahdl)
def visit_MEM_MUX(self, ahdl):
prefix = ahdl.args[0]
dst = ahdl.args[1]
srcs = ahdl.args[2]
conds = ahdl.args[3]
self.visit(dst)
for s in srcs:
self.visit(s)
for c in conds:
self.visit(c)
def visit_WAIT_EDGE(self, ahdl):
for var in ahdl.args[2:]:
self.visit(var)
if ahdl.codes:
for code in ahdl.codes:
self.visit(code)
if ahdl.transition:
self.visit(ahdl.transition)
def visit_WAIT_VALUE(self, ahdl):
for value, var in ahdl.args:
self.visit(value)
self.visit(var)
if ahdl.codes:
for code in ahdl.codes:
self.visit(code)
if ahdl.transition:
self.visit(ahdl.transition)
def visit_AHDL_META_WAIT(self, ahdl):
method = 'visit_' + ahdl.metaid
visitor = getattr(self, method, None)
if visitor:
return visitor(ahdl)
def visit_AHDL_META_MULTI_WAIT(self, ahdl):
for w in ahdl.waits:
self.visit(w)
if ahdl.transition:
self.visit(ahdl.transition)
def visit_AHDL_TRANSITION(self, ahdl):
pass
def visit_AHDL_TRANSITION_IF(self, ahdl):
self.visit_AHDL_IF(ahdl)
def visit_AHDL_PIPELINE_GUARD(self, ahdl):
self.visit_AHDL_IF(ahdl)
def visit_AHDL_BLOCK(self, ahdl):
for c in ahdl.codes:
self.visit(c)
def find_visitor(self, cls):
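# Walk the class hierarchy so an AHDL subclass without its own visit_* method falls back to a base-class visitor.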
method = 'visit_' + cls.__name__
visitor = getattr(self, method, None)
if not visitor:
for base in cls.__bases__:
visitor = self.find_visitor(base)
if visitor:
break
return visitor
def visit(self, ahdl):
if ahdl.is_a(AHDL_STM):
self.current_stm = ahdl
visitor = self.find_visitor(ahdl.__class__)
return visitor(ahdl)
class AHDLCollector(AHDLVisitor):
def __init__(self, ahdl_cls):
super().__init__()
self.ahdl_cls = ahdl_cls
self.results = defaultdict(list)
def visit(self, ahdl):
if ahdl.__class__ is self.ahdl_cls:
self.results[self.current_state].append(ahdl)
super().visit(ahdl)
| {
"content_hash": "4ce4a8952d435e90262166af0281aa42",
"timestamp": "",
"source": "github",
"line_count": 219,
"max_line_length": 66,
"avg_line_length": 25.894977168949772,
"alnum_prop": 0.5595133133486158,
"repo_name": "ktok07b6/polyphony",
"id": "b9b14b6690eb2d2c362a027d97935f278336715f",
"size": "5671",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "polyphony/compiler/ahdlvisitor.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "617717"
},
{
"name": "C++",
"bytes": "1948"
},
{
"name": "Objective-C",
"bytes": "21991"
},
{
"name": "Python",
"bytes": "1331469"
}
],
"symlink_target": ""
} |
import kivy
from kivy.app import App
from kivy.lang import Builder
from kivy.utils import platform
from kivy.uix.widget import Widget
from kivy.clock import Clock
from jnius import autoclass
from android.runnable import run_on_ui_thread
from kivy.uix.label import Label
from kivy.uix.boxlayout import BoxLayout
from kivy.properties import StringProperty
WebView = autoclass('android.webkit.WebView')
WebViewClient = autoclass('android.webkit.WebViewClient')
activity = autoclass('org.renpy.android.PythonActivity').mActivity
Builder.load_string('''
<RootWidget>:
Button:
text: wv.url
on_release: wv.create_webview()
Wv:
id: wv
<Wv>:
''')
class RootWidget(BoxLayout):
pass
class Wv(Widget):
url = StringProperty('url')
def __init__(self, **kwargs):
super(Wv, self).__init__(**kwargs)
#Clock.schedule_once(self.create_webview, 5)
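# Android WebView objects must be created and driven from the UI thread, hence run_on_ui_thread.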
@run_on_ui_thread
def create_webview(self, *args):
webview = WebView(activity)
webview.getSettings().setJavaScriptEnabled(True)
wvc = WebViewClient();
webview.setWebViewClient(wvc);
activity.setContentView(webview)
webview.loadUrl(
'https://www.facebook.com/v2.8/dialog/oauth?'
'client_id=303848833322071&'
'redirect_uri=https://mytestapp-1146e.firebaseapp.com/__/auth/handler'
)
self.url = webview.getOriginalUrl()
class ServiceApp(App):
def build(self):
return RootWidget()
if __name__ == '__main__':
ServiceApp().run()
| {
"content_hash": "f5ab629c9b8238339796354dee604346",
"timestamp": "",
"source": "github",
"line_count": 63,
"max_line_length": 82,
"avg_line_length": 24.58730158730159,
"alnum_prop": 0.6701097482246611,
"repo_name": "gugabfigueiredo/p4a-social",
"id": "534e9f66ff095a50eb268c4ab072ab4d0a63bd8b",
"size": "1573",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "p4a-social/examples/facebook-example/main.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "1574"
}
],
"symlink_target": ""
} |
"""Qubole operator"""
import re
from datetime import datetime
from typing import Iterable, Optional
from airflow.hooks.base import BaseHook
from airflow.models import BaseOperator, BaseOperatorLink
from airflow.models.taskinstance import TaskInstance
from airflow.providers.qubole.hooks.qubole import (
COMMAND_ARGS,
HYPHEN_ARGS,
POSITIONAL_ARGS,
QuboleHook,
flatten_list,
)
from airflow.utils.decorators import apply_defaults
class QDSLink(BaseOperatorLink):
"""Link to QDS"""
name = 'Go to QDS'
def get_link(self, operator: BaseOperator, dttm: datetime) -> str:
"""
Get link to qubole command result page.
:param operator: operator
:param dttm: datetime
:return: url link
"""
ti = TaskInstance(task=operator, execution_date=dttm)
conn = BaseHook.get_connection(
getattr(operator, "qubole_conn_id", None)
or operator.kwargs['qubole_conn_id'] # type: ignore[attr-defined]
)
if conn and conn.host:
host = re.sub(r'api$', 'v2/analyze?command_id=', conn.host)
else:
host = 'https://api.qubole.com/v2/analyze?command_id='
qds_command_id = ti.xcom_pull(task_ids=operator.task_id, key='qbol_cmd_id')
url = host + str(qds_command_id) if qds_command_id else ''
return url
class QuboleOperator(BaseOperator):
"""
Execute tasks (commands) on QDS (https://qubole.com).
:param qubole_conn_id: Connection id which consists of qds auth_token
:type qubole_conn_id: str
kwargs:
:command_type: type of command to be executed, e.g. hivecmd, shellcmd, hadoopcmd
:tags: array of tags to be assigned with the command
:cluster_label: cluster label on which the command will be executed
:name: name to be given to command
:notify: whether to send email on command completion or not (default is False)
**Arguments specific to command types**
hivecmd:
:query: inline query statement
:script_location: s3 location containing query statement
:sample_size: size of sample in bytes on which to run query
:macros: macro values which were used in query
:hive-version: Specifies the hive version to be used. eg: 0.13,1.2,etc.
prestocmd:
:query: inline query statement
:script_location: s3 location containing query statement
:macros: macro values which were used in query
hadoopcmd:
:sub_command: must be one of these ["jar", "s3distcp", "streaming"] followed by
1 or more args
shellcmd:
:script: inline command with args
:script_location: s3 location containing query statement
:files: list of files in s3 bucket as file1,file2 format. These files will be
copied into the working directory where the qubole command is being
executed.
:archives: list of archives in s3 bucket as archive1,archive2 format. These
will be unarchived into the working directory where the qubole command is
being executed
:parameters: any extra args which need to be passed to script (only when
script_location is supplied)
pigcmd:
:script: inline query statement (latin_statements)
:script_location: s3 location containing pig query
:parameters: any extra args which need to be passed to script (only when
script_location is supplied
sparkcmd:
:program: the complete Spark Program in Scala, R, or Python
:cmdline: spark-submit command line, all required arguments must be specify
in cmdline itself.
:sql: inline sql query
:script_location: s3 location containing query statement
:language: language of the program, Scala, R, or Python
:app_id: ID of an Spark job server app
:arguments: spark-submit command line arguments.
If `cmdline` is selected, this should not be used because all
required arguments and configurations are to be passed in the `cmdline` itself.
:user_program_arguments: arguments that the user program takes in
:macros: macro values which were used in query
:note_id: Id of the Notebook to run
dbtapquerycmd:
:db_tap_id: data store ID of the target database, in Qubole.
:query: inline query statement
:macros: macro values which were used in query
dbexportcmd:
:mode: Can be 1 for Hive export or 2 for HDFS/S3 export
:schema: Db schema name assumed accordingly by database if not specified
:hive_table: Name of the hive table
:partition_spec: partition specification for Hive table.
:dbtap_id: data store ID of the target database, in Qubole.
:db_table: name of the db table
:db_update_mode: allowinsert or updateonly
:db_update_keys: columns used to determine the uniqueness of rows
:export_dir: HDFS/S3 location from which data will be exported.
:fields_terminated_by: hex of the char used as column separator in the dataset
:use_customer_cluster: To use cluster to run command
:customer_cluster_label: the label of the cluster to run the command on
:additional_options: Additional Sqoop options, if needed. Enclose options in
double or single quotes, e.g. '--map-column-hive id=int,data=string'
dbimportcmd:
:mode: 1 (simple), 2 (advance)
:hive_table: Name of the hive table
:schema: Db schema name assumed accordingly by database if not specified
:hive_serde: Output format of the Hive Table
:dbtap_id: data store ID of the target database, in Qubole.
:db_table: name of the db table
:where_clause: where clause, if any
:parallelism: number of parallel db connections to use for extracting data
:extract_query: SQL query to extract data from db. $CONDITIONS must be part
of the where clause.
:boundary_query: Query to be used get range of row IDs to be extracted
:split_column: Column used as row ID to split data into ranges (mode 2)
:use_customer_cluster: To use cluster to run command
:customer_cluster_label: the label of the cluster to run the command on
:additional_options: Additional Sqoop options, if needed. Enclose options in
double or single quotes
jupytercmd:
:path: Path including name of the Jupyter notebook to be run with extension.
:arguments: Valid JSON to be sent to the notebook. Specify the parameters in notebooks and pass
the parameter value using the JSON format. key is the parameter’s name and value is
the parameter’s value. Supported types in parameters are string, integer, float and boolean.
.. note:
Following fields are template-supported : ``query``, ``script_location``,
``sub_command``, ``script``, ``files``, ``archives``, ``program``, ``cmdline``,
``sql``, ``where_clause``, ``extract_query``, ``boundary_query``, ``macros``,
``tags``, ``name``, ``parameters``, ``dbtap_id``, ``hive_table``, ``db_table``,
``split_column``, ``note_id``, ``db_update_keys``, ``export_dir``,
``partition_spec``, ``qubole_conn_id``, ``arguments``, ``user_program_arguments``.
You can also use ``.txt`` files for template driven use cases.
.. note:
In QuboleOperator there is a default handler for task failures and retries,
which generally kills the command running at QDS for the corresponding task
instance. You can override this behavior by providing your own failure and retry
handler in task definition.
"""
template_fields: Iterable[str] = (
'query',
'script_location',
'sub_command',
'script',
'files',
'archives',
'program',
'cmdline',
'sql',
'where_clause',
'tags',
'extract_query',
'boundary_query',
'macros',
'name',
'parameters',
'dbtap_id',
'hive_table',
'db_table',
'split_column',
'note_id',
'db_update_keys',
'export_dir',
'partition_spec',
'qubole_conn_id',
'arguments',
'user_program_arguments',
'cluster_label',
)
template_ext: Iterable[str] = ('.txt',)
ui_color = '#3064A1'
ui_fgcolor = '#fff'
qubole_hook_allowed_args_list = ['command_type', 'qubole_conn_id', 'fetch_logs']
operator_extra_links = (QDSLink(),)
@apply_defaults
def __init__(self, *, qubole_conn_id: str = "qubole_default", **kwargs) -> None:
self.kwargs = kwargs
self.kwargs['qubole_conn_id'] = qubole_conn_id
self.hook: Optional[QuboleHook] = None
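# Qubole-specific arguments stay in self.kwargs (consumed by QuboleHook); only BaseOperator's own kwargs are passed up.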
filtered_base_kwargs = self._get_filtered_args(kwargs)
super().__init__(**filtered_base_kwargs)
if self.on_failure_callback is None:
self.on_failure_callback = QuboleHook.handle_failure_retry
if self.on_retry_callback is None:
self.on_retry_callback = QuboleHook.handle_failure_retry
def _get_filtered_args(self, all_kwargs) -> dict:
qubole_args = (
flatten_list(COMMAND_ARGS.values())
+ HYPHEN_ARGS
+ flatten_list(POSITIONAL_ARGS.values())
+ self.qubole_hook_allowed_args_list
)
return {key: value for key, value in all_kwargs.items() if key not in qubole_args}
def execute(self, context) -> None:
return self.get_hook().execute(context)
def on_kill(self, ti=None) -> None:
if self.hook:
self.hook.kill(ti)
else:
self.get_hook().kill(ti)
def get_results(self, ti=None, fp=None, inline: bool = True, delim=None, fetch: bool = True) -> str:
"""get_results from Qubole"""
return self.get_hook().get_results(ti, fp, inline, delim, fetch)
def get_log(self, ti) -> None:
"""get_log from Qubole"""
return self.get_hook().get_log(ti)
def get_jobs_id(self, ti) -> None:
"""Get jobs_id from Qubole"""
return self.get_hook().get_jobs_id(ti)
def get_hook(self) -> QuboleHook:
"""Reinitialising the hook, as some template fields might have changed"""
return QuboleHook(**self.kwargs)
def __getattribute__(self, name: str) -> str:
if name in QuboleOperator.template_fields:
if name in self.kwargs:
return self.kwargs[name]
else:
return ''
else:
return object.__getattribute__(self, name)
def __setattr__(self, name: str, value: str) -> None:
if name in QuboleOperator.template_fields:
self.kwargs[name] = value
else:
object.__setattr__(self, name, value)
| {
"content_hash": "33060614725b7876b68e44101b55564d",
"timestamp": "",
"source": "github",
"line_count": 265,
"max_line_length": 108,
"avg_line_length": 42.822641509433964,
"alnum_prop": 0.612354599929503,
"repo_name": "DinoCow/airflow",
"id": "8725dd0fb53ab60e6800f6eaba239c9189c4b080",
"size": "12139",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "airflow/providers/qubole/operators/qubole.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "56963"
},
{
"name": "HTML",
"bytes": "140781"
},
{
"name": "JavaScript",
"bytes": "1370838"
},
{
"name": "Mako",
"bytes": "1037"
},
{
"name": "Python",
"bytes": "1473771"
},
{
"name": "Shell",
"bytes": "18638"
}
],
"symlink_target": ""
} |
import subprocess
from backends import Backend
class KdeBackend(Backend):
def lock_screen(self):
subprocess.Popen(['qdbus', 'org.freedesktop.ScreenSaver', '/ScreenSaver', 'Lock']) | {
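# Illustrative note (not part of the original source): KdeBackend().lock_screen()
# simply spawns the equivalent of the shell command
#     qdbus org.freedesktop.ScreenSaver /ScreenSaver Lock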
"content_hash": "b3d202951ba51f6032b1e1122ccf7ed6",
"timestamp": "",
"source": "github",
"line_count": 7,
"max_line_length": 90,
"avg_line_length": 27.571428571428573,
"alnum_prop": 0.7253886010362695,
"repo_name": "pedrospdc/kde-leapmotion-control",
"id": "951a0c7c2b271d78dc0a8f66555de7b21167d508",
"size": "193",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "backends/kde.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "73505"
}
],
"symlink_target": ""
} |
import bn, conv, dense, fft, init, norm, pool
from .bn import ComplexBatchNormalization as ComplexBN
from .conv import (ComplexConv,
ComplexConv1D,
ComplexConv2D,
ComplexConv3D,
WeightNorm_Conv)
from .dense import ComplexDense
from .fft import fft, ifft, fft2, ifft2, FFT, IFFT, FFT2, IFFT2
from .init import (ComplexIndependentFilters, IndependentFilters,
ComplexInit, SqrtInit)
from .norm import LayerNormalization, ComplexLayerNorm
from .pool import SpectralPooling1D, SpectralPooling2D
from .utils import (get_realpart, get_imagpart, getpart_output_shape,
GetImag, GetReal, GetAbs)
| {
"content_hash": "bae185540ab666ec4bd9fb417f297381",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 71,
"avg_line_length": 46.5,
"alnum_prop": 0.6411290322580645,
"repo_name": "ChihebTrabelsi/deep_complex_networks",
"id": "bf2e7cb40c1538125add2094847509393dc35d72",
"size": "861",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "complexnn/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "90053"
},
{
"name": "Python",
"bytes": "168686"
},
{
"name": "Shell",
"bytes": "155"
}
],
"symlink_target": ""
} |
import itertools
import platform
import subprocess
import sys
import math
import pickle
def normalize(x):
denom = sum(x)
return [e/denom for e in x]
def scale(x, a):
return [e * a for e in x]
def argcmp(x, comp, predicate):
idx = None
val = None
for i in xrange(len(x)):
if not predicate(x[i]):
continue
if idx is None or comp(x[i], val):
idx = i
val = x[i]
if idx is None:
# couldn't find it
raise Exception("no argmin satisfiying predicate")
return idx
def argmin(x, predicate):
return argcmp(x, lambda a, b: a < b, predicate)
def argmax(x, predicate):
return argcmp(x, lambda a, b: a > b, predicate)
def allocate(nworkers, weights):
approx = map(int, map(math.ceil, scale(weights, nworkers)))
diff = sum(approx) - nworkers
if diff > 0:
while diff > 0:
i = argmin(approx, predicate=lambda x: x > 0)
approx[i] -= 1
diff -= 1
elif diff < 0:
i = argmax(approx, lambda x: True)
approx[i] += -diff
acc = 0
ret = []
for x in approx:
ret.append(range(acc, acc + x))
acc += x
return ret
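# Illustrative trace (not part of the original source): with loggers weighted
# [1., 1., 2.], normalize() yields [0.25, 0.25, 0.5]; allocate(4, ...) scales and
# ceils that to [1, 1, 2] worker slots and hands out contiguous worker-index ranges:
#
#     allocate(4, normalize([1., 1., 2.]))
#     # -> [[0], [1], [2, 3]]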
def run(cmd):
print >>sys.stderr, '[INFO] running command %s' % str(cmd)
p = subprocess.Popen(cmd, stdin=open('/dev/null', 'r'), stdout=subprocess.PIPE)
r = p.stdout.read()
p.wait()
return r
if __name__ == '__main__':
(_, outfile) = sys.argv
STRATEGIES = ['epoch', 'epoch-compress']
NCORES = [1, 2, 4, 8, 16, 24, 32]
WSET = [18]
#STRATEGIES = ['epoch']
#NCORES = [1]
#WSET = [18]
node = platform.node()
if node == 'modis2':
LOGGERS = [
('data.log', 1.),
('/data/scidb/001/2/stephentu/data.log', 1.),
('/data/scidb/001/3/stephentu/data.log', 1.),
]
elif node == 'istc3':
LOGGERS = [
('data.log', 1./3.),
('/f0/stephentu/data.log', 2./3.),
]
else:
print "unknown node", node
assert False, "Unknown node!"
weights = normalize([x[1] for x in LOGGERS])
logfile_cmds = list(itertools.chain.from_iterable([['--logfile', f] for f, _ in LOGGERS]))
results = []
for strat, ncores, ws in itertools.product(STRATEGIES, NCORES, WSET):
allocations = allocate(ncores, weights)
alloc_cmds = list(
itertools.chain.from_iterable([['--assignment', ','.join(map(str, alloc))] for alloc in allocations]))
cmd = ['./persist_test'] + \
logfile_cmds + \
alloc_cmds + \
['--num-threads', str(ncores),
'--strategy', strat,
'--writeset', str(ws),
'--valuesize', '32']
output = run(cmd)
res = float(output.strip())
results.append(((strat, ncores, ws), res))
with open(outfile, 'w') as fp:
pickle.dump(results, fp)
| {
"content_hash": "2d79a8b8b8a1eab4970fc95ca75568c2",
"timestamp": "",
"source": "github",
"line_count": 108,
"max_line_length": 110,
"avg_line_length": 24.84259259259259,
"alnum_prop": 0.5825568393589265,
"repo_name": "nathanielherman/silo",
"id": "f632fa0edc73001d9a6911b46425d890d7c81c34",
"size": "2706",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "scripts/tester.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "759"
},
{
"name": "C++",
"bytes": "887477"
},
{
"name": "Makefile",
"bytes": "7439"
},
{
"name": "Python",
"bytes": "720601"
},
{
"name": "Shell",
"bytes": "1063"
}
],
"symlink_target": ""
} |
import ConfigParser
import argparse
import fnmatch
import logging
import json
import os
import sys
import time
import unittest
from collections import OrderedDict
from autothreadharness.harness_case import HarnessCase
from autothreadharness.open_thread_controller import OpenThreadController
from autothreadharness import settings
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger()
"""Logger: The global logger"""
logger.setLevel(logging.INFO)
RESUME_SCRIPT_PATH = "%appdata%\\Microsoft\\Windows\\Start Menu\\Programs\\" "Startup\\continue_harness.bat"
class SimpleTestResult(unittest.TestResult):
executions = 0
def __init__(self, path, auto_reboot_args=None, keep_explorer=False, add_all_devices=False):
"""Record test results in json file
Args:
path (str): File path to record the results
            auto_reboot_args (list): Command arguments used to build the startup script
                that resumes the run automatically after a reboot; None disables auto reboot
"""
super(SimpleTestResult, self).__init__()
self.path = path
self.auto_reboot_args = auto_reboot_args
self.result = json.load(open(self.path, 'r'))
self.log_handler = None
self.started = None
self.keep_explorer = keep_explorer
self.add_all_devices = add_all_devices
SimpleTestResult.executions += 1
logger.info('Initial state is %s', json.dumps(self.result, indent=2))
def startTest(self, test):
logger.info(
'\n========================================\n%s\n========================================',
test.__class__.__name__,
)
test.add_all_devices = self.add_all_devices
# create start up script if auto reboot enabled
if self.auto_reboot_args:
test.auto_reboot = True
os.system('echo %s > "%s"' %
(' '.join(self.auto_reboot_args + ['-c', test.__class__.__name__]), RESUME_SCRIPT_PATH))
# record start timestamp
self.started = time.strftime('%Y-%m-%dT%H:%M:%S')
os.system('mkdir %s' % test.result_dir)
self.log_handler = logging.FileHandler('%s\\auto-%s.log' % (test.result_dir, time.strftime('%Y%m%d%H%M%S')))
self.log_handler.setLevel(logging.DEBUG)
self.log_handler.setFormatter(logging.Formatter('%(asctime)s %(levelname)s %(message)s'))
logger.addHandler(self.log_handler)
def add_result(self, test, passed, error=None):
"""Record test result into json file
Args:
test (TestCase): The test just run
passed (bool): Whether the case is passed
"""
fails = self.result.get(test.__class__.__name__, {}).get('fails', 0)
if passed is False:
fails += 1
self.result[str(test.__class__.__name__)] = {
'started': self.started,
'stopped': time.strftime('%Y-%m-%dT%H:%M:%S'),
'passed': passed,
'fails': fails,
'error': error,
'executions': SimpleTestResult.executions,
}
if self.auto_reboot_args:
os.system('del "%s"' % RESUME_SCRIPT_PATH)
json.dump(OrderedDict(sorted(self.result.items(), key=lambda t: t[0])), open(self.path, 'w'), indent=2)
# save logs
logger.removeHandler(self.log_handler)
self.log_handler.close()
self.log_handler = None
time.sleep(2)
# close explorers
if not self.keep_explorer:
os.system('taskkill /f /im explorer.exe && start explorer.exe')
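    # Illustrative shape of the recorded result JSON (not part of the original source);
    # the top-level key is the test case class name ("SampleCase" is hypothetical):
    #
    #     {
    #       "SampleCase": {
    #         "started": "2020-01-01T00:00:00",
    #         "stopped": "2020-01-01T00:05:00",
    #         "passed": true,
    #         "fails": 0,
    #         "error": null,
    #         "executions": 1
    #       }
    #     }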
def addSuccess(self, test):
logger.info('case[%s] pass', test.__class__.__name__)
super(SimpleTestResult, self).addSuccess(test)
self.add_result(test, True)
def addFailure(self, test, err):
logger.warning('case[%s] fail', test.__class__.__name__)
super(SimpleTestResult, self).addFailure(test, err)
self.add_result(test, False)
def addError(self, test, err):
logger.error('case[%s] error', test.__class__.__name__, exc_info=err)
if err and err[0] is SystemExit:
if self.auto_reboot_args:
logger.warning('rebooting..')
os.system('shutdown /r /t 1')
else:
logger.warning('exiting..')
sys.exit(1)
super(SimpleTestResult, self).addError(test, err)
self.add_result(test, None, str(err[1]))
def list_devices(names=None, continue_from=None, **kwargs):
"""List devices in settings file and print versions"""
if not names:
names = [device for device, _type in settings.GOLDEN_DEVICES if _type == 'OpenThread']
if continue_from:
continue_from = names.index(continue_from)
else:
continue_from = 0
for port in names[continue_from:]:
try:
with OpenThreadController(port) as otc:
print('%s: %s' % (port, otc.version))
except BaseException:
logger.exception('failed to get version of %s' % port)
def discover(
names=None,
pattern=['*.py'],
skip='efp',
dry_run=False,
denylist=None,
name_greps=None,
manual_reset=False,
delete_history=False,
max_devices=0,
continue_from=None,
result_file='./result.json',
auto_reboot=False,
keep_explorer=False,
add_all_devices=False,
):
"""Discover all test cases and skip those passed
Args:
        pattern (str): Pattern to match case modules; refer to Python's unittest
            documentation for more details
        skip (str): types of cases to skip
"""
if not os.path.exists(settings.OUTPUT_PATH):
os.mkdir(settings.OUTPUT_PATH)
if delete_history:
os.system('del history.json')
if denylist:
try:
excludes = [line.strip('\n') for line in open(denylist, 'r').readlines() if not line.startswith('#')]
except BaseException:
logger.exception('Failed to open test case denylist file')
raise
else:
excludes = []
log = None
if os.path.isfile(result_file):
try:
log = json.load(open(result_file, 'r'))
except BaseException:
logger.exception('Failed to open result file')
if not log:
log = {}
json.dump(log, open(result_file, 'w'), indent=2)
new_th = False
harness_info = ConfigParser.ConfigParser()
harness_info.read('%s\\info.ini' % settings.HARNESS_HOME)
if harness_info.has_option('Thread_Harness_Info', 'Version') and harness_info.has_option(
'Thread_Harness_Info', 'Mode'):
harness_version = harness_info.get('Thread_Harness_Info', 'Version').rsplit(' ', 1)[1]
harness_mode = harness_info.get('Thread_Harness_Info', 'Mode')
if harness_mode == 'External' and harness_version > '1.4.0':
new_th = True
if harness_mode == 'Internal' and harness_version > '49.4':
new_th = True
suite = unittest.TestSuite()
if new_th:
discovered = unittest.defaultTestLoader.discover('cases', pattern)
else:
discovered = unittest.defaultTestLoader.discover('cases_R140', pattern)
if names and continue_from:
names = names[names.index(continue_from):]
for s1 in discovered:
for s2 in s1:
for case in s2:
if case.__class__ is HarnessCase:
continue
case_name = str(case.__class__.__name__)
# grep name
if name_greps and not any(fnmatch.fnmatch(case_name, name_grep) for name_grep in name_greps):
logger.info('case[%s] skipped by name greps', case_name)
continue
# allowlist
if len(names) and case_name not in names:
logger.info('case[%s] skipped', case_name)
continue
# skip cases
if case_name in log:
if ((log[case_name]['passed'] and ('p' in skip)) or
(log[case_name]['passed'] is False and ('f' in skip)) or (log[case_name]['passed'] is None and
('e' in skip))):
logger.warning('case[%s] skipped for its status[%s]', case_name, log[case_name]['passed'])
continue
# continue from
if continue_from:
if continue_from != case_name:
logger.warning('case[%s] skipped for continue from[%s]', case_name, continue_from)
continue
else:
continue_from = None
# denylist
if case_name in excludes:
logger.warning('case[%s] skipped for denylist', case_name)
continue
# max devices
if max_devices and case.golden_devices_required > max_devices:
logger.warning('case[%s] skipped for exceeding max golden devices allowed[%d]', case_name,
max_devices)
continue
suite.addTest(case)
logger.info('case[%s] added', case_name)
if auto_reboot:
argv = []
argv.append('"%s"' % os.sep.join([os.getcwd(), 'start.bat']))
argv.extend(['-p', pattern])
argv.extend(['-k', skip])
argv.extend(['-o', result_file])
argv.append('-a')
if manual_reset:
argv.append('-m')
if delete_history:
argv.append('-d')
auto_reboot_args = argv + names
else:
auto_reboot_args = None
if os.path.isfile(RESUME_SCRIPT_PATH):
os.system('del "%s"' % RESUME_SCRIPT_PATH)
# manual reset
if manual_reset:
settings.PDU_CONTROLLER_TYPE = 'MANUAL_PDU_CONTROLLER'
settings.PDU_CONTROLLER_OPEN_PARAMS = {}
settings.PDU_CONTROLLER_REBOOT_PARAMS = {}
result = SimpleTestResult(result_file, auto_reboot_args, keep_explorer, add_all_devices)
for case in suite:
logger.info(case.__class__.__name__)
if dry_run:
return
suite.run(result)
return result
def main():
parser = argparse.ArgumentParser(description='Thread harness test case runner')
parser.add_argument('--auto-reboot',
'-a',
action='store_true',
default=False,
help='restart system when harness service die')
parser.add_argument('names',
metavar='NAME',
type=str,
nargs='*',
default=None,
help='test case name, omit to test all')
parser.add_argument('--denylist',
'-b',
metavar='DENYLIST_FILE',
type=str,
help='file to list test cases to skip',
default=None)
parser.add_argument('--continue-from', '-c', type=str, default=None, help='first case to test')
parser.add_argument('--delete-history', '-d', action='store_true', default=False, help='clear history on startup')
parser.add_argument('--keep-explorer',
'-e',
action='store_true',
default=False,
help='do not restart explorer.exe at the end')
parser.add_argument('--name-greps', '-g', action='append', default=None, help='grep case by names')
parser.add_argument('--list-file', '-i', type=str, default=None, help='file to list cases names to test')
parser.add_argument(
'--skip',
'-k',
metavar='SKIP',
type=str,
help='type of results to skip. e for error, f for fail, p for pass.',
default='',
)
parser.add_argument('--list-devices', '-l', action='store_true', default=False, help='list devices')
parser.add_argument('--manual-reset', '-m', action='store_true', default=False, help='reset devices manually')
parser.add_argument('--dry-run', '-n', action='store_true', default=False, help='just show what to run')
parser.add_argument(
'--result-file',
'-o',
type=str,
default=settings.OUTPUT_PATH + '\\result.json',
help='file to store and read current status',
)
parser.add_argument('--pattern',
'-p',
metavar='PATTERN',
type=str,
help='file name pattern, default to "*.py"',
default='*.py')
parser.add_argument('--rerun-fails', '-r', type=int, default=0, help='number of times to rerun failed test cases')
parser.add_argument('--add-all-devices',
'-t',
action='store_true',
default=False,
help='add all devices to the test bed')
parser.add_argument('--max-devices', '-u', type=int, default=0, help='max golden devices allowed')
args = vars(parser.parse_args())
if args['list_file']:
try:
names = [line.strip('\n') for line in open(args['list_file'], 'r').readlines() if not line.startswith('#')]
except BaseException:
logger.exception('Failed to open test case list file')
raise
else:
args['names'] = args['names'] + names
args.pop('list_file')
if args.pop('list_devices', False):
list_devices(**args)
return
rerun_fails = args.pop('rerun_fails')
result = discover(**args)
if rerun_fails > 0:
for i in range(rerun_fails):
failed_names = {name for name in result.result if result.result[name]['passed'] is False}
if not failed_names:
break
logger.info('Rerunning failed test cases')
logger.info('Rerun #{}:'.format(i + 1))
result = discover(
names=failed_names,
pattern=args['pattern'],
skip='',
result_file=args['result_file'],
auto_reboot=args['auto_reboot'],
keep_explorer=args['keep_explorer'],
add_all_devices=args['add_all_devices'],
)
if __name__ == '__main__':
main()
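# Illustrative invocations (not part of the original source); adjust paths and flags to taste:
#
#     python runner.py -l                # list golden devices and print their versions
#     python runner.py -k p              # run everything except cases that already passed
#     python runner.py -d -g "Router*"   # clear history, then run only cases matching the grep
#     python runner.py -u 16             # skip cases needing more than 16 golden devices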
| {
"content_hash": "fb8ec968632f705a5999384a9d7db7a6",
"timestamp": "",
"source": "github",
"line_count": 402,
"max_line_length": 119,
"avg_line_length": 35.92537313432836,
"alnum_prop": 0.5459077690070627,
"repo_name": "chshu/openthread",
"id": "a6010253d8e4c5813928d22a0a46451d19c669ee",
"size": "16023",
"binary": false,
"copies": "7",
"ref": "refs/heads/master",
"path": "tools/harness-automation/autothreadharness/runner.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "15850"
},
{
"name": "C",
"bytes": "959546"
},
{
"name": "C#",
"bytes": "18077"
},
{
"name": "C++",
"bytes": "4339561"
},
{
"name": "Dockerfile",
"bytes": "6256"
},
{
"name": "M4",
"bytes": "64583"
},
{
"name": "Makefile",
"bytes": "134582"
},
{
"name": "Python",
"bytes": "2121036"
},
{
"name": "Ruby",
"bytes": "3397"
},
{
"name": "Shell",
"bytes": "64482"
}
],
"symlink_target": ""
} |
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "openinghours.tests.test_settings")
from django.core.management import execute_from_command_line
args = sys.argv + ["makemigrations", "openinghours"]
execute_from_command_line(args)
| {
"content_hash": "2d42df8699da43ba1f163ae9e8ff60b3",
"timestamp": "",
"source": "github",
"line_count": 8,
"max_line_length": 87,
"avg_line_length": 36.875,
"alnum_prop": 0.711864406779661,
"repo_name": "arteria/django-openinghours",
"id": "8ed03b7bda0939ac3a90c9633d84881124a7bbe9",
"size": "295",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "make_migrations.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "4873"
},
{
"name": "Makefile",
"bytes": "279"
},
{
"name": "Python",
"bytes": "36705"
}
],
"symlink_target": ""
} |
from datetime import (
datetime,
timedelta,
)
import numpy as np
import pytest
import pandas._testing as tm
from pandas.core.indexes.api import (
Index,
RangeIndex,
)
@pytest.fixture
def index_large():
# large values used in TestUInt64Index where no compat needed with int64/float64
large = [2**63, 2**63 + 10, 2**63 + 15, 2**63 + 20, 2**63 + 25]
return Index(large, dtype=np.uint64)
class TestSetOps:
@pytest.mark.parametrize("dtype", ["f8", "u8", "i8"])
def test_union_non_numeric(self, dtype):
# corner case, non-numeric
index = Index(np.arange(5, dtype=dtype), dtype=dtype)
assert index.dtype == dtype
other = Index([datetime.now() + timedelta(i) for i in range(4)], dtype=object)
result = index.union(other)
expected = Index(np.concatenate((index, other)))
tm.assert_index_equal(result, expected)
result = other.union(index)
expected = Index(np.concatenate((other, index)))
tm.assert_index_equal(result, expected)
def test_intersection(self):
index = Index(range(5), dtype=np.int64)
other = Index([1, 2, 3, 4, 5])
result = index.intersection(other)
expected = Index(np.sort(np.intersect1d(index.values, other.values)))
tm.assert_index_equal(result, expected)
result = other.intersection(index)
expected = Index(
np.sort(np.asarray(np.intersect1d(index.values, other.values)))
)
tm.assert_index_equal(result, expected)
@pytest.mark.parametrize("dtype", ["int64", "uint64"])
def test_int_float_union_dtype(self, dtype):
# https://github.com/pandas-dev/pandas/issues/26778
# [u]int | float -> float
index = Index([0, 2, 3], dtype=dtype)
other = Index([0.5, 1.5], dtype=np.float64)
expected = Index([0.0, 0.5, 1.5, 2.0, 3.0], dtype=np.float64)
result = index.union(other)
tm.assert_index_equal(result, expected)
result = other.union(index)
tm.assert_index_equal(result, expected)
def test_range_float_union_dtype(self):
# https://github.com/pandas-dev/pandas/issues/26778
index = RangeIndex(start=0, stop=3)
other = Index([0.5, 1.5], dtype=np.float64)
result = index.union(other)
expected = Index([0.0, 0.5, 1, 1.5, 2.0], dtype=np.float64)
tm.assert_index_equal(result, expected)
result = other.union(index)
tm.assert_index_equal(result, expected)
def test_float64_index_difference(self):
# https://github.com/pandas-dev/pandas/issues/35217
float_index = Index([1.0, 2, 3])
string_index = Index(["1", "2", "3"])
result = float_index.difference(string_index)
tm.assert_index_equal(result, float_index)
result = string_index.difference(float_index)
tm.assert_index_equal(result, string_index)
def test_intersection_uint64_outside_int64_range(self, index_large):
other = Index([2**63, 2**63 + 5, 2**63 + 10, 2**63 + 15, 2**63 + 20])
result = index_large.intersection(other)
expected = Index(np.sort(np.intersect1d(index_large.values, other.values)))
tm.assert_index_equal(result, expected)
result = other.intersection(index_large)
expected = Index(
np.sort(np.asarray(np.intersect1d(index_large.values, other.values)))
)
tm.assert_index_equal(result, expected)
@pytest.mark.parametrize(
"index2,keeps_name",
[
(Index([4, 7, 6, 5, 3], name="index"), True),
(Index([4, 7, 6, 5, 3], name="other"), False),
],
)
def test_intersection_monotonic(self, index2, keeps_name, sort):
index1 = Index([5, 3, 2, 4, 1], name="index")
expected = Index([5, 3, 4])
if keeps_name:
expected.name = "index"
result = index1.intersection(index2, sort=sort)
if sort is None:
expected = expected.sort_values()
tm.assert_index_equal(result, expected)
def test_symmetric_difference(self, sort):
# smoke
index1 = Index([5, 2, 3, 4], name="index1")
index2 = Index([2, 3, 4, 1])
result = index1.symmetric_difference(index2, sort=sort)
expected = Index([5, 1])
assert tm.equalContents(result, expected)
assert result.name is None
if sort is None:
expected = expected.sort_values()
tm.assert_index_equal(result, expected)
class TestSetOpsSort:
@pytest.mark.parametrize("slice_", [slice(None), slice(0)])
def test_union_sort_other_special(self, slice_):
# https://github.com/pandas-dev/pandas/issues/24959
idx = Index([1, 0, 2])
# default, sort=None
other = idx[slice_]
tm.assert_index_equal(idx.union(other), idx)
tm.assert_index_equal(other.union(idx), idx)
# sort=False
tm.assert_index_equal(idx.union(other, sort=False), idx)
@pytest.mark.xfail(reason="Not implemented")
@pytest.mark.parametrize("slice_", [slice(None), slice(0)])
def test_union_sort_special_true(self, slice_):
# TODO(GH#25151): decide on True behaviour
# sort=True
idx = Index([1, 0, 2])
# default, sort=None
other = idx[slice_]
result = idx.union(other, sort=True)
expected = Index([0, 1, 2])
tm.assert_index_equal(result, expected)
| {
"content_hash": "a23f55d6b7d9ea4d4abd37ec142e238a",
"timestamp": "",
"source": "github",
"line_count": 157,
"max_line_length": 86,
"avg_line_length": 34.789808917197455,
"alnum_prop": 0.6036250457707799,
"repo_name": "pandas-dev/pandas",
"id": "3e3de14960f4ea3cb3f7652e6d9fb9c8a2b6fd3e",
"size": "5462",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "pandas/tests/indexes/numeric/test_setops.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "512"
},
{
"name": "C",
"bytes": "366145"
},
{
"name": "CSS",
"bytes": "1800"
},
{
"name": "Cython",
"bytes": "1186787"
},
{
"name": "Dockerfile",
"bytes": "1411"
},
{
"name": "HTML",
"bytes": "456531"
},
{
"name": "Python",
"bytes": "18778786"
},
{
"name": "Shell",
"bytes": "10369"
},
{
"name": "Smarty",
"bytes": "8486"
},
{
"name": "XSLT",
"bytes": "1196"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
import collections
import copy
import datetime
import decimal
import math
import uuid
import warnings
from base64 import b64decode, b64encode
from functools import total_ordering
from django import forms
from django.apps import apps
from django.conf import settings
from django.core import checks, exceptions, validators
# When the _meta object was formalized, this exception was moved to
# django.core.exceptions. It is retained here for backwards compatibility
# purposes.
from django.core.exceptions import FieldDoesNotExist # NOQA
from django.db import connection, connections, router
from django.db.models.query_utils import QueryWrapper, RegisterLookupMixin
from django.utils import six, timezone
from django.utils.datastructures import DictWrapper
from django.utils.dateparse import (
parse_date, parse_datetime, parse_duration, parse_time,
)
from django.utils.deprecation import (
RemovedInDjango20Warning, warn_about_renamed_method,
)
from django.utils.duration import duration_string
from django.utils.encoding import (
force_bytes, force_text, python_2_unicode_compatible, smart_text,
)
from django.utils.functional import Promise, cached_property, curry
from django.utils.ipv6 import clean_ipv6_address
from django.utils.itercompat import is_iterable
from django.utils.text import capfirst
from django.utils.translation import ugettext_lazy as _
# Avoid "TypeError: Item in ``from list'' not a string" -- unicode_literals
# makes these strings unicode
__all__ = [str(x) for x in (
'AutoField', 'BLANK_CHOICE_DASH', 'BigAutoField', 'BigIntegerField',
'BinaryField', 'BooleanField', 'CharField', 'CommaSeparatedIntegerField',
'DateField', 'DateTimeField', 'DecimalField', 'DurationField',
'EmailField', 'Empty', 'Field', 'FieldDoesNotExist', 'FilePathField',
'FloatField', 'GenericIPAddressField', 'IPAddressField', 'IntegerField',
'NOT_PROVIDED', 'NullBooleanField', 'PositiveIntegerField',
'PositiveSmallIntegerField', 'SlugField', 'SmallIntegerField', 'TextField',
'TimeField', 'URLField', 'UUIDField',
)]
class Empty(object):
pass
class NOT_PROVIDED:
pass
# The values to use for "blank" in SelectFields. Will be appended to the start
# of most "choices" lists.
BLANK_CHOICE_DASH = [("", "---------")]
def _load_field(app_label, model_name, field_name):
return apps.get_model(app_label, model_name)._meta.get_field(field_name)
# A guide to Field parameters:
#
# * name: The name of the field specified in the model.
# * attname: The attribute to use on the model object. This is the same as
# "name", except in the case of ForeignKeys, where "_id" is
# appended.
# * db_column: The db_column specified in the model (or None).
# * column: The database column for this field. This is the same as
# "attname", except if db_column is specified.
#
# Code that introspects values, or does other dynamic things, should use
# attname. For example, this gets the primary key value of object "obj":
#
# getattr(obj, opts.pk.attname)
def _empty(of_cls):
new = Empty()
new.__class__ = of_cls
return new
@total_ordering
@python_2_unicode_compatible
class Field(RegisterLookupMixin):
"""Base class for all field types"""
# Designates whether empty strings fundamentally are allowed at the
# database level.
empty_strings_allowed = True
empty_values = list(validators.EMPTY_VALUES)
# These track each time a Field instance is created. Used to retain order.
# The auto_creation_counter is used for fields that Django implicitly
# creates, creation_counter is used for all user-specified fields.
creation_counter = 0
auto_creation_counter = -1
default_validators = [] # Default set of validators
default_error_messages = {
'invalid_choice': _('Value %(value)r is not a valid choice.'),
'null': _('This field cannot be null.'),
'blank': _('This field cannot be blank.'),
'unique': _('%(model_name)s with this %(field_label)s '
'already exists.'),
# Translators: The 'lookup_type' is one of 'date', 'year' or 'month'.
# Eg: "Title must be unique for pub_date year"
'unique_for_date': _("%(field_label)s must be unique for "
"%(date_field_label)s %(lookup_type)s."),
}
system_check_deprecated_details = None
system_check_removed_details = None
# Field flags
hidden = False
many_to_many = None
many_to_one = None
one_to_many = None
one_to_one = None
related_model = None
# Generic field type description, usually overridden by subclasses
def _description(self):
return _('Field of type: %(field_type)s') % {
'field_type': self.__class__.__name__
}
description = property(_description)
def __init__(self, verbose_name=None, name=None, primary_key=False,
max_length=None, unique=False, blank=False, null=False,
db_index=False, rel=None, default=NOT_PROVIDED, editable=True,
serialize=True, unique_for_date=None, unique_for_month=None,
unique_for_year=None, choices=None, help_text='', db_column=None,
db_tablespace=None, auto_created=False, validators=[],
error_messages=None):
self.name = name
self.verbose_name = verbose_name # May be set by set_attributes_from_name
self._verbose_name = verbose_name # Store original for deconstruction
self.primary_key = primary_key
self.max_length, self._unique = max_length, unique
self.blank, self.null = blank, null
self.remote_field = rel
self.is_relation = self.remote_field is not None
self.default = default
self.editable = editable
self.serialize = serialize
self.unique_for_date = unique_for_date
self.unique_for_month = unique_for_month
self.unique_for_year = unique_for_year
if isinstance(choices, collections.Iterator):
choices = list(choices)
self.choices = choices or []
self.help_text = help_text
self.db_index = db_index
self.db_column = db_column
self.db_tablespace = db_tablespace or settings.DEFAULT_INDEX_TABLESPACE
self.auto_created = auto_created
# Adjust the appropriate creation counter, and save our local copy.
if auto_created:
self.creation_counter = Field.auto_creation_counter
Field.auto_creation_counter -= 1
else:
self.creation_counter = Field.creation_counter
Field.creation_counter += 1
self._validators = validators # Store for deconstruction later
messages = {}
for c in reversed(self.__class__.__mro__):
messages.update(getattr(c, 'default_error_messages', {}))
messages.update(error_messages or {})
self._error_messages = error_messages # Store for deconstruction later
self.error_messages = messages
def __str__(self):
""" Return "app_label.model_label.field_name". """
model = self.model
app = model._meta.app_label
return '%s.%s.%s' % (app, model._meta.object_name, self.name)
def __repr__(self):
"""
Displays the module, class and name of the field.
"""
path = '%s.%s' % (self.__class__.__module__, self.__class__.__name__)
name = getattr(self, 'name', None)
if name is not None:
return '<%s: %s>' % (path, name)
return '<%s>' % path
def check(self, **kwargs):
errors = []
errors.extend(self._check_field_name())
errors.extend(self._check_choices())
errors.extend(self._check_db_index())
errors.extend(self._check_null_allowed_for_primary_keys())
errors.extend(self._check_backend_specific_checks(**kwargs))
errors.extend(self._check_deprecation_details())
return errors
def _check_field_name(self):
""" Check if field name is valid, i.e. 1) does not end with an
underscore, 2) does not contain "__" and 3) is not "pk". """
if self.name.endswith('_'):
return [
checks.Error(
'Field names must not end with an underscore.',
obj=self,
id='fields.E001',
)
]
elif '__' in self.name:
return [
checks.Error(
'Field names must not contain "__".',
obj=self,
id='fields.E002',
)
]
elif self.name == 'pk':
return [
checks.Error(
"'pk' is a reserved word that cannot be used as a field name.",
obj=self,
id='fields.E003',
)
]
else:
return []
@property
def rel(self):
warnings.warn(
"Usage of field.rel has been deprecated. Use field.remote_field instead.",
RemovedInDjango20Warning, 2)
return self.remote_field
def _check_choices(self):
if self.choices:
if (isinstance(self.choices, six.string_types) or
not is_iterable(self.choices)):
return [
checks.Error(
"'choices' must be an iterable (e.g., a list or tuple).",
obj=self,
id='fields.E004',
)
]
elif any(isinstance(choice, six.string_types) or
not is_iterable(choice) or len(choice) != 2
for choice in self.choices):
return [
checks.Error(
"'choices' must be an iterable containing "
"(actual value, human readable name) tuples.",
obj=self,
id='fields.E005',
)
]
else:
return []
else:
return []
def _check_db_index(self):
if self.db_index not in (None, True, False):
return [
checks.Error(
"'db_index' must be None, True or False.",
obj=self,
id='fields.E006',
)
]
else:
return []
def _check_null_allowed_for_primary_keys(self):
if (self.primary_key and self.null and
not connection.features.interprets_empty_strings_as_nulls):
# We cannot reliably check this for backends like Oracle which
# consider NULL and '' to be equal (and thus set up
# character-based fields a little differently).
return [
checks.Error(
'Primary keys must not have null=True.',
hint=('Set null=False on the field, or '
'remove primary_key=True argument.'),
obj=self,
id='fields.E007',
)
]
else:
return []
def _check_backend_specific_checks(self, **kwargs):
app_label = self.model._meta.app_label
for db in connections:
if router.allow_migrate(db, app_label, model_name=self.model._meta.model_name):
return connections[db].validation.check_field(self, **kwargs)
return []
def _check_deprecation_details(self):
if self.system_check_removed_details is not None:
return [
checks.Error(
self.system_check_removed_details.get(
'msg',
'%s has been removed except for support in historical '
'migrations.' % self.__class__.__name__
),
hint=self.system_check_removed_details.get('hint'),
obj=self,
id=self.system_check_removed_details.get('id', 'fields.EXXX'),
)
]
elif self.system_check_deprecated_details is not None:
return [
checks.Warning(
self.system_check_deprecated_details.get(
'msg',
'%s has been deprecated.' % self.__class__.__name__
),
hint=self.system_check_deprecated_details.get('hint'),
obj=self,
id=self.system_check_deprecated_details.get('id', 'fields.WXXX'),
)
]
return []
def get_col(self, alias, output_field=None):
if output_field is None:
output_field = self
if alias != self.model._meta.db_table or output_field != self:
from django.db.models.expressions import Col
return Col(alias, self, output_field)
else:
return self.cached_col
@cached_property
def cached_col(self):
from django.db.models.expressions import Col
return Col(self.model._meta.db_table, self)
def select_format(self, compiler, sql, params):
"""
Custom format for select clauses. For example, GIS columns need to be
selected as AsText(table.col) on MySQL as the table.col data can't be used
by Django.
"""
return sql, params
def deconstruct(self):
"""
Returns enough information to recreate the field as a 4-tuple:
* The name of the field on the model, if contribute_to_class has been run
* The import path of the field, including the class: django.db.models.IntegerField
This should be the most portable version, so less specific may be better.
* A list of positional arguments
* A dict of keyword arguments
Note that the positional or keyword arguments must contain values of the
following types (including inner values of collection types):
* None, bool, str, unicode, int, long, float, complex, set, frozenset, list, tuple, dict
* UUID
* datetime.datetime (naive), datetime.date
* top-level classes, top-level functions - will be referenced by their full import path
* Storage instances - these have their own deconstruct() method
This is because the values here must be serialized into a text format
(possibly new Python code, possibly JSON) and these are the only types
with encoding handlers defined.
There's no need to return the exact way the field was instantiated this time,
just ensure that the resulting field is the same - prefer keyword arguments
over positional ones, and omit parameters with their default values.
"""
# Short-form way of fetching all the default parameters
keywords = {}
possibles = {
"verbose_name": None,
"primary_key": False,
"max_length": None,
"unique": False,
"blank": False,
"null": False,
"db_index": False,
"default": NOT_PROVIDED,
"editable": True,
"serialize": True,
"unique_for_date": None,
"unique_for_month": None,
"unique_for_year": None,
"choices": [],
"help_text": '',
"db_column": None,
"db_tablespace": settings.DEFAULT_INDEX_TABLESPACE,
"auto_created": False,
"validators": [],
"error_messages": None,
}
attr_overrides = {
"unique": "_unique",
"error_messages": "_error_messages",
"validators": "_validators",
"verbose_name": "_verbose_name",
}
equals_comparison = {"choices", "validators", "db_tablespace"}
for name, default in possibles.items():
value = getattr(self, attr_overrides.get(name, name))
# Unroll anything iterable for choices into a concrete list
if name == "choices" and isinstance(value, collections.Iterable):
value = list(value)
# Do correct kind of comparison
if name in equals_comparison:
if value != default:
keywords[name] = value
else:
if value is not default:
keywords[name] = value
# Work out path - we shorten it for known Django core fields
path = "%s.%s" % (self.__class__.__module__, self.__class__.__name__)
if path.startswith("django.db.models.fields.related"):
path = path.replace("django.db.models.fields.related", "django.db.models")
if path.startswith("django.db.models.fields.files"):
path = path.replace("django.db.models.fields.files", "django.db.models")
if path.startswith("django.db.models.fields.proxy"):
path = path.replace("django.db.models.fields.proxy", "django.db.models")
if path.startswith("django.db.models.fields"):
path = path.replace("django.db.models.fields", "django.db.models")
# Return basic info - other fields should override this.
return (
force_text(self.name, strings_only=True),
path,
[],
keywords,
)
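    # Illustrative result (not part of the original source): an unattached
    # CharField(max_length=100) deconstructs to
    #     (None, 'django.db.models.CharField', [], {'max_length': 100})
    # since the import path is shortened from django.db.models.fields and only
    # non-default keyword arguments are kept.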
def clone(self):
"""
Uses deconstruct() to clone a new copy of this Field.
Will not preserve any class attachments/attribute names.
"""
name, path, args, kwargs = self.deconstruct()
return self.__class__(*args, **kwargs)
def __eq__(self, other):
# Needed for @total_ordering
if isinstance(other, Field):
return self.creation_counter == other.creation_counter
return NotImplemented
def __lt__(self, other):
# This is needed because bisect does not take a comparison function.
if isinstance(other, Field):
return self.creation_counter < other.creation_counter
return NotImplemented
def __hash__(self):
return hash(self.creation_counter)
def __deepcopy__(self, memodict):
# We don't have to deepcopy very much here, since most things are not
# intended to be altered after initial creation.
obj = copy.copy(self)
if self.remote_field:
obj.remote_field = copy.copy(self.remote_field)
if hasattr(self.remote_field, 'field') and self.remote_field.field is self:
obj.remote_field.field = obj
memodict[id(self)] = obj
return obj
def __copy__(self):
# We need to avoid hitting __reduce__, so define this
# slightly weird copy construct.
obj = Empty()
obj.__class__ = self.__class__
obj.__dict__ = self.__dict__.copy()
return obj
def __reduce__(self):
"""
Pickling should return the model._meta.fields instance of the field,
not a new copy of that field. So, we use the app registry to load the
model and then the field back.
"""
if not hasattr(self, 'model'):
# Fields are sometimes used without attaching them to models (for
# example in aggregation). In this case give back a plain field
# instance. The code below will create a new empty instance of
# class self.__class__, then update its dict with self.__dict__
# values - so, this is very close to normal pickle.
return _empty, (self.__class__,), self.__dict__
if self.model._deferred:
# Deferred model will not be found from the app registry. This
# could be fixed by reconstructing the deferred model on unpickle.
raise RuntimeError("Fields of deferred models can't be reduced")
return _load_field, (self.model._meta.app_label, self.model._meta.object_name,
self.name)
def get_pk_value_on_save(self, instance):
"""
Hook to generate new PK values on save. This method is called when
saving instances with no primary key value set. If this method returns
        something other than None, then the returned value is used when saving
the new instance.
"""
if self.default:
return self.get_default()
return None
def to_python(self, value):
"""
Converts the input value into the expected Python data type, raising
django.core.exceptions.ValidationError if the data can't be converted.
Returns the converted value. Subclasses should override this.
"""
return value
@cached_property
def validators(self):
# Some validators can't be created at field initialization time.
# This method provides a way to delay their creation until required.
return self.default_validators + self._validators
def run_validators(self, value):
if value in self.empty_values:
return
errors = []
for v in self.validators:
try:
v(value)
except exceptions.ValidationError as e:
if hasattr(e, 'code') and e.code in self.error_messages:
e.message = self.error_messages[e.code]
errors.extend(e.error_list)
if errors:
raise exceptions.ValidationError(errors)
def validate(self, value, model_instance):
"""
Validates value and throws ValidationError. Subclasses should override
this to provide validation logic.
"""
if not self.editable:
# Skip validation for non-editable fields.
return
if self.choices and value not in self.empty_values:
for option_key, option_value in self.choices:
if isinstance(option_value, (list, tuple)):
# This is an optgroup, so look inside the group for
# options.
for optgroup_key, optgroup_value in option_value:
if value == optgroup_key:
return
elif value == option_key:
return
raise exceptions.ValidationError(
self.error_messages['invalid_choice'],
code='invalid_choice',
params={'value': value},
)
if value is None and not self.null:
raise exceptions.ValidationError(self.error_messages['null'], code='null')
if not self.blank and value in self.empty_values:
raise exceptions.ValidationError(self.error_messages['blank'], code='blank')
def clean(self, value, model_instance):
"""
Convert the value's type and run validation. Validation errors
from to_python and validate are propagated. The correct value is
returned if no error is raised.
"""
value = self.to_python(value)
self.validate(value, model_instance)
self.run_validators(value)
return value
def db_type(self, connection):
"""
Returns the database column data type for this field, for the provided
connection.
"""
# The default implementation of this method looks at the
# backend-specific data_types dictionary, looking up the field by its
# "internal type".
#
# A Field class can implement the get_internal_type() method to specify
# which *preexisting* Django Field class it's most similar to -- i.e.,
# a custom field might be represented by a TEXT column type, which is
# the same as the TextField Django field type, which means the custom
# field's get_internal_type() returns 'TextField'.
#
# But the limitation of the get_internal_type() / data_types approach
# is that it cannot handle database column types that aren't already
# mapped to one of the built-in Django field types. In this case, you
# can implement db_type() instead of get_internal_type() to specify
# exactly which wacky database column type you want to use.
data = DictWrapper(self.__dict__, connection.ops.quote_name, "qn_")
try:
return connection.data_types[self.get_internal_type()] % data
except KeyError:
return None
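    # Illustrative override (not part of the original source), in the spirit of the
    # custom model field docs: a field may skip get_internal_type() entirely and
    # return a raw column type from db_type(), e.g.
    #
    #     class HandField(models.Field):
    #         def db_type(self, connection):
    #             return 'char(104)'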
def rel_db_type(self, connection):
"""
Return the data type that a related field pointing to this field should
use. For example, this method is called by ForeignKey and OneToOneField
to determine its data type.
"""
return self.db_type(connection)
def db_parameters(self, connection):
"""
Extension of db_type(), providing a range of different return
values (type, checks).
This will look at db_type(), allowing custom model fields to override it.
"""
data = DictWrapper(self.__dict__, connection.ops.quote_name, "qn_")
type_string = self.db_type(connection)
try:
check_string = connection.data_type_check_constraints[self.get_internal_type()] % data
except KeyError:
check_string = None
return {
"type": type_string,
"check": check_string,
}
def db_type_suffix(self, connection):
return connection.data_types_suffix.get(self.get_internal_type())
def get_db_converters(self, connection):
if hasattr(self, 'from_db_value'):
return [self.from_db_value]
return []
@property
def unique(self):
return self._unique or self.primary_key
def set_attributes_from_name(self, name):
if not self.name:
self.name = name
self.attname, self.column = self.get_attname_column()
self.concrete = self.column is not None
if self.verbose_name is None and self.name:
self.verbose_name = self.name.replace('_', ' ')
def contribute_to_class(self, cls, name, virtual_only=False):
self.set_attributes_from_name(name)
self.model = cls
if virtual_only:
cls._meta.add_field(self, virtual=True)
else:
cls._meta.add_field(self)
if self.choices:
setattr(cls, 'get_%s_display' % self.name,
curry(cls._get_FIELD_display, field=self))
def get_filter_kwargs_for_object(self, obj):
"""
Return a dict that when passed as kwargs to self.model.filter(), would
yield all instances having the same value for this field as obj has.
"""
return {self.name: getattr(obj, self.attname)}
def get_attname(self):
return self.name
def get_attname_column(self):
attname = self.get_attname()
column = self.db_column or attname
return attname, column
def get_cache_name(self):
return '_%s_cache' % self.name
def get_internal_type(self):
return self.__class__.__name__
def pre_save(self, model_instance, add):
"""
Returns field's value just before saving.
"""
return getattr(model_instance, self.attname)
def get_prep_value(self, value):
"""
Perform preliminary non-db specific value checks and conversions.
"""
if isinstance(value, Promise):
value = value._proxy____cast()
return value
def get_db_prep_value(self, value, connection, prepared=False):
"""Returns field's value prepared for interacting with the database
backend.
        Used by the default implementations of ``get_db_prep_save`` and
        ``get_db_prep_lookup``.
"""
if not prepared:
value = self.get_prep_value(value)
return value
def get_db_prep_save(self, value, connection):
"""
Returns field's value prepared for saving into a database.
"""
return self.get_db_prep_value(value, connection=connection,
prepared=False)
def get_prep_lookup(self, lookup_type, value):
"""
Perform preliminary non-db specific lookup checks and conversions
"""
if hasattr(value, '_prepare'):
return value._prepare(self)
if lookup_type in {
'iexact', 'contains', 'icontains',
'startswith', 'istartswith', 'endswith', 'iendswith',
'isnull', 'search', 'regex', 'iregex',
}:
return value
elif lookup_type in ('exact', 'gt', 'gte', 'lt', 'lte'):
return self.get_prep_value(value)
elif lookup_type in ('range', 'in'):
return [self.get_prep_value(v) for v in value]
return self.get_prep_value(value)
def get_db_prep_lookup(self, lookup_type, value, connection,
prepared=False):
"""
Returns field's value prepared for database lookup.
"""
if not prepared:
value = self.get_prep_lookup(lookup_type, value)
prepared = True
if hasattr(value, 'get_compiler'):
value = value.get_compiler(connection=connection)
if hasattr(value, 'as_sql') or hasattr(value, '_as_sql'):
# If the value has a relabeled_clone method it means the
# value will be handled later on.
if hasattr(value, 'relabeled_clone'):
return value
if hasattr(value, 'as_sql'):
sql, params = value.as_sql()
else:
sql, params = value._as_sql(connection=connection)
return QueryWrapper(('(%s)' % sql), params)
if lookup_type in ('search', 'regex', 'iregex', 'contains',
'icontains', 'iexact', 'startswith', 'endswith',
'istartswith', 'iendswith'):
return [value]
elif lookup_type in ('exact', 'gt', 'gte', 'lt', 'lte'):
return [self.get_db_prep_value(value, connection=connection,
prepared=prepared)]
elif lookup_type in ('range', 'in'):
return [self.get_db_prep_value(v, connection=connection,
prepared=prepared) for v in value]
elif lookup_type == 'isnull':
return []
else:
return [value]
def has_default(self):
"""
Returns a boolean of whether this field has a default value.
"""
return self.default is not NOT_PROVIDED
def get_default(self):
"""
Returns the default value for this field.
"""
if self.has_default():
if callable(self.default):
return self.default()
return self.default
if (not self.empty_strings_allowed or (self.null and
not connection.features.interprets_empty_strings_as_nulls)):
return None
return ""
def get_choices(self, include_blank=True, blank_choice=BLANK_CHOICE_DASH, limit_choices_to=None):
"""Returns choices with a default blank choices included, for use
as SelectField choices for this field."""
blank_defined = False
choices = list(self.choices) if self.choices else []
named_groups = choices and isinstance(choices[0][1], (list, tuple))
if not named_groups:
for choice, __ in choices:
if choice in ('', None):
blank_defined = True
break
first_choice = (blank_choice if include_blank and
not blank_defined else [])
if self.choices:
return first_choice + choices
rel_model = self.remote_field.model
limit_choices_to = limit_choices_to or self.get_limit_choices_to()
if hasattr(self.remote_field, 'get_related_field'):
lst = [(getattr(x, self.remote_field.get_related_field().attname),
smart_text(x))
for x in rel_model._default_manager.complex_filter(
limit_choices_to)]
else:
lst = [(x._get_pk_val(), smart_text(x))
for x in rel_model._default_manager.complex_filter(
limit_choices_to)]
return first_choice + lst
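    # Illustrative output (not part of the original source): for a field declared with
    # choices=[('S', 'Small'), ('L', 'Large')], get_choices() with the default
    # include_blank=True returns
    #     [('', '---------'), ('S', 'Small'), ('L', 'Large')]
    # because BLANK_CHOICE_DASH is prepended unless a blank choice is already defined.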
def get_choices_default(self):
return self.get_choices()
@warn_about_renamed_method(
'Field', '_get_val_from_obj', 'value_from_object',
RemovedInDjango20Warning
)
def _get_val_from_obj(self, obj):
if obj is not None:
return getattr(obj, self.attname)
else:
return self.get_default()
def value_to_string(self, obj):
"""
Returns a string value of this field from the passed obj.
This is used by the serialization framework.
"""
return smart_text(self.value_from_object(obj))
def _get_flatchoices(self):
"""Flattened version of choices tuple."""
flat = []
for choice, value in self.choices:
if isinstance(value, (list, tuple)):
flat.extend(value)
else:
flat.append((choice, value))
return flat
flatchoices = property(_get_flatchoices)
def save_form_data(self, instance, data):
setattr(instance, self.name, data)
def formfield(self, form_class=None, choices_form_class=None, **kwargs):
"""
Returns a django.forms.Field instance for this database Field.
"""
defaults = {'required': not self.blank,
'label': capfirst(self.verbose_name),
'help_text': self.help_text}
if self.has_default():
if callable(self.default):
defaults['initial'] = self.default
defaults['show_hidden_initial'] = True
else:
defaults['initial'] = self.get_default()
if self.choices:
# Fields with choices get special treatment.
include_blank = (self.blank or
not (self.has_default() or 'initial' in kwargs))
defaults['choices'] = self.get_choices(include_blank=include_blank)
defaults['coerce'] = self.to_python
if self.null:
defaults['empty_value'] = None
if choices_form_class is not None:
form_class = choices_form_class
else:
form_class = forms.TypedChoiceField
# Many of the subclass-specific formfield arguments (min_value,
# max_value) don't apply for choice fields, so be sure to only pass
# the values that TypedChoiceField will understand.
for k in list(kwargs):
if k not in ('coerce', 'empty_value', 'choices', 'required',
'widget', 'label', 'initial', 'help_text',
'error_messages', 'show_hidden_initial'):
del kwargs[k]
defaults.update(kwargs)
if form_class is None:
form_class = forms.CharField
return form_class(**defaults)
def value_from_object(self, obj):
"""
Returns the value of this field in the given model instance.
"""
return getattr(obj, self.attname)
class AutoField(Field):
description = _("Integer")
empty_strings_allowed = False
default_error_messages = {
'invalid': _("'%(value)s' value must be an integer."),
}
def __init__(self, *args, **kwargs):
kwargs['blank'] = True
super(AutoField, self).__init__(*args, **kwargs)
def check(self, **kwargs):
errors = super(AutoField, self).check(**kwargs)
errors.extend(self._check_primary_key())
return errors
def _check_primary_key(self):
if not self.primary_key:
return [
checks.Error(
'AutoFields must set primary_key=True.',
obj=self,
id='fields.E100',
),
]
else:
return []
def deconstruct(self):
name, path, args, kwargs = super(AutoField, self).deconstruct()
del kwargs['blank']
kwargs['primary_key'] = True
return name, path, args, kwargs
def get_internal_type(self):
return "AutoField"
def to_python(self, value):
if value is None:
return value
try:
return int(value)
except (TypeError, ValueError):
raise exceptions.ValidationError(
self.error_messages['invalid'],
code='invalid',
params={'value': value},
)
def rel_db_type(self, connection):
return IntegerField().db_type(connection=connection)
def validate(self, value, model_instance):
pass
def get_db_prep_value(self, value, connection, prepared=False):
if not prepared:
value = self.get_prep_value(value)
value = connection.ops.validate_autopk_value(value)
return value
def get_prep_value(self, value):
value = super(AutoField, self).get_prep_value(value)
if value is None:
return None
return int(value)
def contribute_to_class(self, cls, name, **kwargs):
assert not cls._meta.has_auto_field, \
"A model can't have more than one AutoField."
super(AutoField, self).contribute_to_class(cls, name, **kwargs)
cls._meta.has_auto_field = True
cls._meta.auto_field = self
def formfield(self, **kwargs):
return None
class BigAutoField(AutoField):
description = _("Big (8 byte) integer")
def get_internal_type(self):
return "BigAutoField"
def rel_db_type(self, connection):
return BigIntegerField().db_type(connection=connection)
class BooleanField(Field):
empty_strings_allowed = False
default_error_messages = {
'invalid': _("'%(value)s' value must be either True or False."),
}
description = _("Boolean (Either True or False)")
def __init__(self, *args, **kwargs):
kwargs['blank'] = True
super(BooleanField, self).__init__(*args, **kwargs)
def check(self, **kwargs):
errors = super(BooleanField, self).check(**kwargs)
errors.extend(self._check_null(**kwargs))
return errors
def _check_null(self, **kwargs):
if getattr(self, 'null', False):
return [
checks.Error(
'BooleanFields do not accept null values.',
hint='Use a NullBooleanField instead.',
obj=self,
id='fields.E110',
)
]
else:
return []
def deconstruct(self):
name, path, args, kwargs = super(BooleanField, self).deconstruct()
del kwargs['blank']
return name, path, args, kwargs
def get_internal_type(self):
return "BooleanField"
def to_python(self, value):
if value in (True, False):
            # if value is 1 or 0 then it's equal to True or False, but we want
# to return a true bool for semantic reasons.
return bool(value)
if value in ('t', 'True', '1'):
return True
if value in ('f', 'False', '0'):
return False
raise exceptions.ValidationError(
self.error_messages['invalid'],
code='invalid',
params={'value': value},
)
def get_prep_lookup(self, lookup_type, value):
# Special-case handling for filters coming from a Web request (e.g. the
# admin interface). Only works for scalar values (not lists). If you're
# passing in a list, you might as well make things the right type when
# constructing the list.
if value in ('1', '0'):
value = bool(int(value))
return super(BooleanField, self).get_prep_lookup(lookup_type, value)
def get_prep_value(self, value):
value = super(BooleanField, self).get_prep_value(value)
if value is None:
return None
return bool(value)
def formfield(self, **kwargs):
# Unlike most fields, BooleanField figures out include_blank from
# self.null instead of self.blank.
if self.choices:
include_blank = not (self.has_default() or 'initial' in kwargs)
defaults = {'choices': self.get_choices(include_blank=include_blank)}
else:
defaults = {'form_class': forms.BooleanField}
defaults.update(kwargs)
return super(BooleanField, self).formfield(**defaults)
class CharField(Field):
description = _("String (up to %(max_length)s)")
def __init__(self, *args, **kwargs):
super(CharField, self).__init__(*args, **kwargs)
self.validators.append(validators.MaxLengthValidator(self.max_length))
def check(self, **kwargs):
errors = super(CharField, self).check(**kwargs)
errors.extend(self._check_max_length_attribute(**kwargs))
return errors
def _check_max_length_attribute(self, **kwargs):
if self.max_length is None:
return [
checks.Error(
"CharFields must define a 'max_length' attribute.",
obj=self,
id='fields.E120',
)
]
elif not isinstance(self.max_length, six.integer_types) or self.max_length <= 0:
return [
checks.Error(
"'max_length' must be a positive integer.",
obj=self,
id='fields.E121',
)
]
else:
return []
def get_internal_type(self):
return "CharField"
def to_python(self, value):
if isinstance(value, six.string_types) or value is None:
return value
return smart_text(value)
def get_prep_value(self, value):
value = super(CharField, self).get_prep_value(value)
return self.to_python(value)
def formfield(self, **kwargs):
        # Passing max_length to forms.CharField means that the value's length
        # will be validated twice. This is considered acceptable since we want
        # the value in the form field (to pass into the widget, for example).
defaults = {'max_length': self.max_length}
defaults.update(kwargs)
return super(CharField, self).formfield(**defaults)
class CommaSeparatedIntegerField(CharField):
default_validators = [validators.validate_comma_separated_integer_list]
description = _("Comma-separated integers")
system_check_deprecated_details = {
'msg': (
'CommaSeparatedIntegerField has been deprecated. Support '
'for it (except in historical migrations) will be removed '
'in Django 2.0.'
),
'hint': (
'Use CharField(validators=[validate_comma_separated_integer_list]) instead.'
),
'id': 'fields.W901',
}
def formfield(self, **kwargs):
defaults = {
'error_messages': {
'invalid': _('Enter only digits separated by commas.'),
}
}
defaults.update(kwargs)
return super(CommaSeparatedIntegerField, self).formfield(**defaults)
class DateTimeCheckMixin(object):
def check(self, **kwargs):
errors = super(DateTimeCheckMixin, self).check(**kwargs)
errors.extend(self._check_mutually_exclusive_options())
errors.extend(self._check_fix_default_value())
return errors
def _check_mutually_exclusive_options(self):
# auto_now, auto_now_add, and default are mutually exclusive
# options. The use of more than one of these options together
# will trigger an Error
mutually_exclusive_options = [self.auto_now_add, self.auto_now,
self.has_default()]
enabled_options = [option not in (None, False)
for option in mutually_exclusive_options].count(True)
if enabled_options > 1:
return [
checks.Error(
"The options auto_now, auto_now_add, and default "
"are mutually exclusive. Only one of these options "
"may be present.",
obj=self,
id='fields.E160',
)
]
else:
return []
def _check_fix_default_value(self):
return []
class DateField(DateTimeCheckMixin, Field):
empty_strings_allowed = False
default_error_messages = {
'invalid': _("'%(value)s' value has an invalid date format. It must be "
"in YYYY-MM-DD format."),
'invalid_date': _("'%(value)s' value has the correct format (YYYY-MM-DD) "
"but it is an invalid date."),
}
description = _("Date (without time)")
def __init__(self, verbose_name=None, name=None, auto_now=False,
auto_now_add=False, **kwargs):
self.auto_now, self.auto_now_add = auto_now, auto_now_add
if auto_now or auto_now_add:
kwargs['editable'] = False
kwargs['blank'] = True
super(DateField, self).__init__(verbose_name, name, **kwargs)
def _check_fix_default_value(self):
"""
        Adds a warning to the checks framework stating that using an actual
date or datetime value is probably wrong; it's only being evaluated on
server start-up.
For details see ticket #21905
"""
if not self.has_default():
return []
now = timezone.now()
if not timezone.is_naive(now):
now = timezone.make_naive(now, timezone.utc)
value = self.default
if isinstance(value, datetime.datetime):
if not timezone.is_naive(value):
value = timezone.make_naive(value, timezone.utc)
value = value.date()
elif isinstance(value, datetime.date):
# Nothing to do, as dates don't have tz information
pass
else:
# No explicit date / datetime value -- no checks necessary
return []
offset = datetime.timedelta(days=1)
lower = (now - offset).date()
upper = (now + offset).date()
if lower <= value <= upper:
return [
checks.Warning(
'Fixed default value provided.',
hint='It seems you set a fixed date / time / datetime '
'value as default for this field. This may not be '
'what you want. If you want to have the current date '
'as default, use `django.utils.timezone.now`',
obj=self,
id='fields.W161',
)
]
return []
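    # Illustrative consequence of the check above (model name hypothetical):
    #     class Event(models.Model):
    #         start = models.DateField(default=datetime.date.today())  # fixed value, triggers fields.W161
    #         end = models.DateField(default=datetime.date.today)      # callable, evaluated when each instance is created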
def deconstruct(self):
name, path, args, kwargs = super(DateField, self).deconstruct()
if self.auto_now:
kwargs['auto_now'] = True
if self.auto_now_add:
kwargs['auto_now_add'] = True
if self.auto_now or self.auto_now_add:
del kwargs['editable']
del kwargs['blank']
return name, path, args, kwargs
def get_internal_type(self):
return "DateField"
def to_python(self, value):
if value is None:
return value
if isinstance(value, datetime.datetime):
if settings.USE_TZ and timezone.is_aware(value):
# Convert aware datetimes to the default time zone
# before casting them to dates (#17742).
default_timezone = timezone.get_default_timezone()
value = timezone.make_naive(value, default_timezone)
return value.date()
if isinstance(value, datetime.date):
return value
try:
parsed = parse_date(value)
if parsed is not None:
return parsed
except ValueError:
raise exceptions.ValidationError(
self.error_messages['invalid_date'],
code='invalid_date',
params={'value': value},
)
raise exceptions.ValidationError(
self.error_messages['invalid'],
code='invalid',
params={'value': value},
)
def pre_save(self, model_instance, add):
if self.auto_now or (self.auto_now_add and add):
value = datetime.date.today()
setattr(model_instance, self.attname, value)
return value
else:
return super(DateField, self).pre_save(model_instance, add)
def contribute_to_class(self, cls, name, **kwargs):
super(DateField, self).contribute_to_class(cls, name, **kwargs)
if not self.null:
setattr(cls, 'get_next_by_%s' % self.name,
curry(cls._get_next_or_previous_by_FIELD, field=self,
is_next=True))
setattr(cls, 'get_previous_by_%s' % self.name,
curry(cls._get_next_or_previous_by_FIELD, field=self,
is_next=False))
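    # For example (model and field names hypothetical), a non-null field
    # `pub_date = models.DateField()` on an `Article` model gives instances
    # article.get_next_by_pub_date() and article.get_previous_by_pub_date().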
def get_prep_value(self, value):
value = super(DateField, self).get_prep_value(value)
return self.to_python(value)
def get_db_prep_value(self, value, connection, prepared=False):
# Casts dates into the format expected by the backend
if not prepared:
value = self.get_prep_value(value)
return connection.ops.adapt_datefield_value(value)
def value_to_string(self, obj):
val = self.value_from_object(obj)
return '' if val is None else val.isoformat()
def formfield(self, **kwargs):
defaults = {'form_class': forms.DateField}
defaults.update(kwargs)
return super(DateField, self).formfield(**defaults)
class DateTimeField(DateField):
empty_strings_allowed = False
default_error_messages = {
'invalid': _("'%(value)s' value has an invalid format. It must be in "
"YYYY-MM-DD HH:MM[:ss[.uuuuuu]][TZ] format."),
'invalid_date': _("'%(value)s' value has the correct format "
"(YYYY-MM-DD) but it is an invalid date."),
'invalid_datetime': _("'%(value)s' value has the correct format "
"(YYYY-MM-DD HH:MM[:ss[.uuuuuu]][TZ]) "
"but it is an invalid date/time."),
}
description = _("Date (with time)")
# __init__ is inherited from DateField
def _check_fix_default_value(self):
"""
        Adds a warning to the checks framework stating that using an actual
date or datetime value is probably wrong; it's only being evaluated on
server start-up.
For details see ticket #21905
"""
if not self.has_default():
return []
now = timezone.now()
if not timezone.is_naive(now):
now = timezone.make_naive(now, timezone.utc)
value = self.default
if isinstance(value, datetime.datetime):
second_offset = datetime.timedelta(seconds=10)
lower = now - second_offset
upper = now + second_offset
if timezone.is_aware(value):
value = timezone.make_naive(value, timezone.utc)
elif isinstance(value, datetime.date):
second_offset = datetime.timedelta(seconds=10)
lower = now - second_offset
lower = datetime.datetime(lower.year, lower.month, lower.day)
upper = now + second_offset
upper = datetime.datetime(upper.year, upper.month, upper.day)
value = datetime.datetime(value.year, value.month, value.day)
else:
# No explicit date / datetime value -- no checks necessary
return []
if lower <= value <= upper:
return [
checks.Warning(
'Fixed default value provided.',
hint='It seems you set a fixed date / time / datetime '
'value as default for this field. This may not be '
'what you want. If you want to have the current date '
'as default, use `django.utils.timezone.now`',
obj=self,
id='fields.W161',
)
]
return []
def get_internal_type(self):
return "DateTimeField"
def to_python(self, value):
if value is None:
return value
if isinstance(value, datetime.datetime):
return value
if isinstance(value, datetime.date):
value = datetime.datetime(value.year, value.month, value.day)
if settings.USE_TZ:
# For backwards compatibility, interpret naive datetimes in
# local time. This won't work during DST change, but we can't
# do much about it, so we let the exceptions percolate up the
# call stack.
warnings.warn("DateTimeField %s.%s received a naive datetime "
"(%s) while time zone support is active." %
(self.model.__name__, self.name, value),
RuntimeWarning)
default_timezone = timezone.get_default_timezone()
value = timezone.make_aware(value, default_timezone)
return value
try:
parsed = parse_datetime(value)
if parsed is not None:
return parsed
except ValueError:
raise exceptions.ValidationError(
self.error_messages['invalid_datetime'],
code='invalid_datetime',
params={'value': value},
)
try:
parsed = parse_date(value)
if parsed is not None:
return datetime.datetime(parsed.year, parsed.month, parsed.day)
except ValueError:
raise exceptions.ValidationError(
self.error_messages['invalid_date'],
code='invalid_date',
params={'value': value},
)
raise exceptions.ValidationError(
self.error_messages['invalid'],
code='invalid',
params={'value': value},
)
def pre_save(self, model_instance, add):
if self.auto_now or (self.auto_now_add and add):
value = timezone.now()
setattr(model_instance, self.attname, value)
return value
else:
return super(DateTimeField, self).pre_save(model_instance, add)
    # contribute_to_class is inherited from DateField; it registers
    # get_next_by_FOO and get_previous_by_FOO
# get_prep_lookup is inherited from DateField
def get_prep_value(self, value):
value = super(DateTimeField, self).get_prep_value(value)
value = self.to_python(value)
if value is not None and settings.USE_TZ and timezone.is_naive(value):
# For backwards compatibility, interpret naive datetimes in local
# time. This won't work during DST change, but we can't do much
# about it, so we let the exceptions percolate up the call stack.
try:
name = '%s.%s' % (self.model.__name__, self.name)
except AttributeError:
name = '(unbound)'
warnings.warn("DateTimeField %s received a naive datetime (%s)"
" while time zone support is active." %
(name, value),
RuntimeWarning)
default_timezone = timezone.get_default_timezone()
value = timezone.make_aware(value, default_timezone)
return value
def get_db_prep_value(self, value, connection, prepared=False):
# Casts datetimes into the format expected by the backend
if not prepared:
value = self.get_prep_value(value)
return connection.ops.adapt_datetimefield_value(value)
def value_to_string(self, obj):
val = self.value_from_object(obj)
return '' if val is None else val.isoformat()
def formfield(self, **kwargs):
defaults = {'form_class': forms.DateTimeField}
defaults.update(kwargs)
return super(DateTimeField, self).formfield(**defaults)
class DecimalField(Field):
empty_strings_allowed = False
default_error_messages = {
'invalid': _("'%(value)s' value must be a decimal number."),
}
description = _("Decimal number")
def __init__(self, verbose_name=None, name=None, max_digits=None,
decimal_places=None, **kwargs):
self.max_digits, self.decimal_places = max_digits, decimal_places
super(DecimalField, self).__init__(verbose_name, name, **kwargs)
def check(self, **kwargs):
errors = super(DecimalField, self).check(**kwargs)
digits_errors = self._check_decimal_places()
digits_errors.extend(self._check_max_digits())
if not digits_errors:
errors.extend(self._check_decimal_places_and_max_digits(**kwargs))
else:
errors.extend(digits_errors)
return errors
def _check_decimal_places(self):
try:
decimal_places = int(self.decimal_places)
if decimal_places < 0:
raise ValueError()
except TypeError:
return [
checks.Error(
"DecimalFields must define a 'decimal_places' attribute.",
obj=self,
id='fields.E130',
)
]
except ValueError:
return [
checks.Error(
"'decimal_places' must be a non-negative integer.",
obj=self,
id='fields.E131',
)
]
else:
return []
def _check_max_digits(self):
try:
max_digits = int(self.max_digits)
if max_digits <= 0:
raise ValueError()
except TypeError:
return [
checks.Error(
"DecimalFields must define a 'max_digits' attribute.",
obj=self,
id='fields.E132',
)
]
except ValueError:
return [
checks.Error(
"'max_digits' must be a positive integer.",
obj=self,
id='fields.E133',
)
]
else:
return []
def _check_decimal_places_and_max_digits(self, **kwargs):
if int(self.decimal_places) > int(self.max_digits):
return [
checks.Error(
"'max_digits' must be greater or equal to 'decimal_places'.",
obj=self,
id='fields.E134',
)
]
return []
@cached_property
def validators(self):
return super(DecimalField, self).validators + [
validators.DecimalValidator(self.max_digits, self.decimal_places)
]
def deconstruct(self):
name, path, args, kwargs = super(DecimalField, self).deconstruct()
if self.max_digits is not None:
kwargs['max_digits'] = self.max_digits
if self.decimal_places is not None:
kwargs['decimal_places'] = self.decimal_places
return name, path, args, kwargs
def get_internal_type(self):
return "DecimalField"
def to_python(self, value):
if value is None:
return value
try:
return decimal.Decimal(value)
except decimal.InvalidOperation:
raise exceptions.ValidationError(
self.error_messages['invalid'],
code='invalid',
params={'value': value},
)
def _format(self, value):
if isinstance(value, six.string_types):
return value
else:
return self.format_number(value)
def format_number(self, value):
"""
Formats a number into a string with the requisite number of digits and
decimal places.
"""
# Method moved to django.db.backends.utils.
#
    # It is preserved because it is used by the Oracle backend
# (django.db.backends.oracle.query), and also for
# backwards-compatibility with any external code which may have used
# this method.
from django.db.backends import utils
return utils.format_number(value, self.max_digits, self.decimal_places)
def get_db_prep_save(self, value, connection):
return connection.ops.adapt_decimalfield_value(self.to_python(value),
self.max_digits, self.decimal_places)
def get_prep_value(self, value):
value = super(DecimalField, self).get_prep_value(value)
return self.to_python(value)
def formfield(self, **kwargs):
defaults = {
'max_digits': self.max_digits,
'decimal_places': self.decimal_places,
'form_class': forms.DecimalField,
}
defaults.update(kwargs)
return super(DecimalField, self).formfield(**defaults)
class DurationField(Field):
"""Stores timedelta objects.
    Uses interval on postgres, INTERVAL DAY TO SECOND on Oracle, and bigint of
microseconds on other databases.
"""
empty_strings_allowed = False
default_error_messages = {
'invalid': _("'%(value)s' value has an invalid format. It must be in "
"[DD] [HH:[MM:]]ss[.uuuuuu] format.")
}
description = _("Duration")
def get_internal_type(self):
return "DurationField"
def to_python(self, value):
if value is None:
return value
if isinstance(value, datetime.timedelta):
return value
try:
parsed = parse_duration(value)
except ValueError:
pass
else:
if parsed is not None:
return parsed
raise exceptions.ValidationError(
self.error_messages['invalid'],
code='invalid',
params={'value': value},
)
def get_db_prep_value(self, value, connection, prepared=False):
if connection.features.has_native_duration_field:
return value
if value is None:
return None
return value.total_seconds() * 1000000
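    # Sketch of the fallback storage above: on backends without a native
    # interval type, e.g. datetime.timedelta(days=1, seconds=1) is stored as
    # 86401000000 (microseconds); note total_seconds() makes this a float here.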
def get_db_converters(self, connection):
converters = []
if not connection.features.has_native_duration_field:
converters.append(connection.ops.convert_durationfield_value)
return converters + super(DurationField, self).get_db_converters(connection)
def value_to_string(self, obj):
val = self.value_from_object(obj)
return '' if val is None else duration_string(val)
def formfield(self, **kwargs):
defaults = {
'form_class': forms.DurationField,
}
defaults.update(kwargs)
return super(DurationField, self).formfield(**defaults)
class EmailField(CharField):
default_validators = [validators.validate_email]
description = _("Email address")
def __init__(self, *args, **kwargs):
# max_length=254 to be compliant with RFCs 3696 and 5321
kwargs['max_length'] = kwargs.get('max_length', 254)
super(EmailField, self).__init__(*args, **kwargs)
def deconstruct(self):
name, path, args, kwargs = super(EmailField, self).deconstruct()
        # We do not exclude max_length if it matches default as we want to change
        # the default in the future.
return name, path, args, kwargs
def formfield(self, **kwargs):
# As with CharField, this will cause email validation to be performed
# twice.
defaults = {
'form_class': forms.EmailField,
}
defaults.update(kwargs)
return super(EmailField, self).formfield(**defaults)
class FilePathField(Field):
description = _("File path")
def __init__(self, verbose_name=None, name=None, path='', match=None,
recursive=False, allow_files=True, allow_folders=False, **kwargs):
self.path, self.match, self.recursive = path, match, recursive
self.allow_files, self.allow_folders = allow_files, allow_folders
kwargs['max_length'] = kwargs.get('max_length', 100)
super(FilePathField, self).__init__(verbose_name, name, **kwargs)
def check(self, **kwargs):
errors = super(FilePathField, self).check(**kwargs)
errors.extend(self._check_allowing_files_or_folders(**kwargs))
return errors
def _check_allowing_files_or_folders(self, **kwargs):
if not self.allow_files and not self.allow_folders:
return [
checks.Error(
"FilePathFields must have either 'allow_files' or 'allow_folders' set to True.",
obj=self,
id='fields.E140',
)
]
return []
def deconstruct(self):
name, path, args, kwargs = super(FilePathField, self).deconstruct()
if self.path != '':
kwargs['path'] = self.path
if self.match is not None:
kwargs['match'] = self.match
if self.recursive is not False:
kwargs['recursive'] = self.recursive
if self.allow_files is not True:
kwargs['allow_files'] = self.allow_files
if self.allow_folders is not False:
kwargs['allow_folders'] = self.allow_folders
if kwargs.get("max_length") == 100:
del kwargs["max_length"]
return name, path, args, kwargs
def get_prep_value(self, value):
value = super(FilePathField, self).get_prep_value(value)
if value is None:
return None
return six.text_type(value)
def formfield(self, **kwargs):
defaults = {
'path': self.path,
'match': self.match,
'recursive': self.recursive,
'form_class': forms.FilePathField,
'allow_files': self.allow_files,
'allow_folders': self.allow_folders,
}
defaults.update(kwargs)
return super(FilePathField, self).formfield(**defaults)
def get_internal_type(self):
return "FilePathField"
class FloatField(Field):
empty_strings_allowed = False
default_error_messages = {
'invalid': _("'%(value)s' value must be a float."),
}
description = _("Floating point number")
def get_prep_value(self, value):
value = super(FloatField, self).get_prep_value(value)
if value is None:
return None
return float(value)
def get_internal_type(self):
return "FloatField"
def to_python(self, value):
if value is None:
return value
try:
return float(value)
except (TypeError, ValueError):
raise exceptions.ValidationError(
self.error_messages['invalid'],
code='invalid',
params={'value': value},
)
def formfield(self, **kwargs):
defaults = {'form_class': forms.FloatField}
defaults.update(kwargs)
return super(FloatField, self).formfield(**defaults)
class IntegerField(Field):
empty_strings_allowed = False
default_error_messages = {
'invalid': _("'%(value)s' value must be an integer."),
}
description = _("Integer")
def check(self, **kwargs):
errors = super(IntegerField, self).check(**kwargs)
errors.extend(self._check_max_length_warning())
return errors
def _check_max_length_warning(self):
if self.max_length is not None:
return [
checks.Warning(
"'max_length' is ignored when used with IntegerField",
hint="Remove 'max_length' from field",
obj=self,
id='fields.W122',
)
]
return []
@cached_property
def validators(self):
# These validators can't be added at field initialization time since
# they're based on values retrieved from `connection`.
range_validators = []
internal_type = self.get_internal_type()
min_value, max_value = connection.ops.integer_field_range(internal_type)
if min_value is not None:
range_validators.append(validators.MinValueValidator(min_value))
if max_value is not None:
range_validators.append(validators.MaxValueValidator(max_value))
return super(IntegerField, self).validators + range_validators
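    # For instance, most backends report integer_field_range('IntegerField') as
    # (-2147483648, 2147483647), so a MinValueValidator and MaxValueValidator
    # for that range are appended here (exact bounds depend on the backend).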
def get_prep_value(self, value):
value = super(IntegerField, self).get_prep_value(value)
if value is None:
return None
return int(value)
def get_prep_lookup(self, lookup_type, value):
if ((lookup_type == 'gte' or lookup_type == 'lt')
and isinstance(value, float)):
value = math.ceil(value)
return super(IntegerField, self).get_prep_lookup(lookup_type, value)
def get_internal_type(self):
return "IntegerField"
def to_python(self, value):
if value is None:
return value
try:
return int(value)
except (TypeError, ValueError):
raise exceptions.ValidationError(
self.error_messages['invalid'],
code='invalid',
params={'value': value},
)
def formfield(self, **kwargs):
defaults = {'form_class': forms.IntegerField}
defaults.update(kwargs)
return super(IntegerField, self).formfield(**defaults)
class BigIntegerField(IntegerField):
empty_strings_allowed = False
description = _("Big (8 byte) integer")
MAX_BIGINT = 9223372036854775807
def get_internal_type(self):
return "BigIntegerField"
def formfield(self, **kwargs):
defaults = {'min_value': -BigIntegerField.MAX_BIGINT - 1,
'max_value': BigIntegerField.MAX_BIGINT}
defaults.update(kwargs)
return super(BigIntegerField, self).formfield(**defaults)
class IPAddressField(Field):
empty_strings_allowed = False
description = _("IPv4 address")
system_check_removed_details = {
'msg': (
'IPAddressField has been removed except for support in '
'historical migrations.'
),
'hint': 'Use GenericIPAddressField instead.',
'id': 'fields.E900',
}
def __init__(self, *args, **kwargs):
kwargs['max_length'] = 15
super(IPAddressField, self).__init__(*args, **kwargs)
def deconstruct(self):
name, path, args, kwargs = super(IPAddressField, self).deconstruct()
del kwargs['max_length']
return name, path, args, kwargs
def get_prep_value(self, value):
value = super(IPAddressField, self).get_prep_value(value)
if value is None:
return None
return six.text_type(value)
def get_internal_type(self):
return "IPAddressField"
class GenericIPAddressField(Field):
empty_strings_allowed = False
description = _("IP address")
default_error_messages = {}
def __init__(self, verbose_name=None, name=None, protocol='both',
unpack_ipv4=False, *args, **kwargs):
self.unpack_ipv4 = unpack_ipv4
self.protocol = protocol
self.default_validators, invalid_error_message = \
validators.ip_address_validators(protocol, unpack_ipv4)
self.default_error_messages['invalid'] = invalid_error_message
kwargs['max_length'] = 39
super(GenericIPAddressField, self).__init__(verbose_name, name, *args,
**kwargs)
def check(self, **kwargs):
errors = super(GenericIPAddressField, self).check(**kwargs)
errors.extend(self._check_blank_and_null_values(**kwargs))
return errors
def _check_blank_and_null_values(self, **kwargs):
if not getattr(self, 'null', False) and getattr(self, 'blank', False):
return [
checks.Error(
'GenericIPAddressFields cannot have blank=True if null=False, '
'as blank values are stored as nulls.',
obj=self,
id='fields.E150',
)
]
return []
def deconstruct(self):
name, path, args, kwargs = super(GenericIPAddressField, self).deconstruct()
if self.unpack_ipv4 is not False:
kwargs['unpack_ipv4'] = self.unpack_ipv4
if self.protocol != "both":
kwargs['protocol'] = self.protocol
if kwargs.get("max_length") == 39:
del kwargs['max_length']
return name, path, args, kwargs
def get_internal_type(self):
return "GenericIPAddressField"
def to_python(self, value):
if value is None:
return None
if not isinstance(value, six.string_types):
value = force_text(value)
value = value.strip()
if ':' in value:
return clean_ipv6_address(value,
self.unpack_ipv4, self.error_messages['invalid'])
return value
def get_db_prep_value(self, value, connection, prepared=False):
if not prepared:
value = self.get_prep_value(value)
return connection.ops.adapt_ipaddressfield_value(value)
def get_prep_value(self, value):
value = super(GenericIPAddressField, self).get_prep_value(value)
if value is None:
return None
if value and ':' in value:
try:
return clean_ipv6_address(value, self.unpack_ipv4)
except exceptions.ValidationError:
pass
return six.text_type(value)
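    # Rough examples of the IPv6 handling above (addresses are illustrative):
    # '2001:0DB8::0001' is normalized to '2001:db8::1', and with
    # unpack_ipv4=True an IPv4-mapped value such as '::ffff:192.0.2.1' is
    # unpacked to '192.0.2.1'.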
def formfield(self, **kwargs):
defaults = {
'protocol': self.protocol,
'form_class': forms.GenericIPAddressField,
}
defaults.update(kwargs)
return super(GenericIPAddressField, self).formfield(**defaults)
class NullBooleanField(Field):
empty_strings_allowed = False
default_error_messages = {
'invalid': _("'%(value)s' value must be either None, True or False."),
}
description = _("Boolean (Either True, False or None)")
def __init__(self, *args, **kwargs):
kwargs['null'] = True
kwargs['blank'] = True
super(NullBooleanField, self).__init__(*args, **kwargs)
def deconstruct(self):
name, path, args, kwargs = super(NullBooleanField, self).deconstruct()
del kwargs['null']
del kwargs['blank']
return name, path, args, kwargs
def get_internal_type(self):
return "NullBooleanField"
def to_python(self, value):
if value is None:
return None
if value in (True, False):
return bool(value)
if value in ('None',):
return None
if value in ('t', 'True', '1'):
return True
if value in ('f', 'False', '0'):
return False
raise exceptions.ValidationError(
self.error_messages['invalid'],
code='invalid',
params={'value': value},
)
def get_prep_lookup(self, lookup_type, value):
# Special-case handling for filters coming from a Web request (e.g. the
# admin interface). Only works for scalar values (not lists). If you're
# passing in a list, you might as well make things the right type when
# constructing the list.
if value in ('1', '0'):
value = bool(int(value))
return super(NullBooleanField, self).get_prep_lookup(lookup_type,
value)
def get_prep_value(self, value):
value = super(NullBooleanField, self).get_prep_value(value)
if value is None:
return None
return bool(value)
def formfield(self, **kwargs):
defaults = {
'form_class': forms.NullBooleanField,
'required': not self.blank,
'label': capfirst(self.verbose_name),
'help_text': self.help_text}
defaults.update(kwargs)
return super(NullBooleanField, self).formfield(**defaults)
class PositiveIntegerRelDbTypeMixin(object):
def rel_db_type(self, connection):
"""
Return the data type that a related field pointing to this field should
use. In most cases, a foreign key pointing to a positive integer
primary key will have an integer column data type but some databases
(e.g. MySQL) have an unsigned integer type. In that case
(related_fields_match_type=True), the primary key should return its
db_type.
"""
if connection.features.related_fields_match_type:
return self.db_type(connection)
else:
return IntegerField().db_type(connection=connection)
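    # Illustrative outcome (backend-specific, so treat as a sketch): on MySQL,
    # where related_fields_match_type is True, a ForeignKey to a
    # PositiveIntegerField column uses the same unsigned integer column type;
    # on most other backends it falls back to a plain IntegerField column type.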
class PositiveIntegerField(PositiveIntegerRelDbTypeMixin, IntegerField):
description = _("Positive integer")
def get_internal_type(self):
return "PositiveIntegerField"
def formfield(self, **kwargs):
defaults = {'min_value': 0}
defaults.update(kwargs)
return super(PositiveIntegerField, self).formfield(**defaults)
class PositiveSmallIntegerField(PositiveIntegerRelDbTypeMixin, IntegerField):
description = _("Positive small integer")
def get_internal_type(self):
return "PositiveSmallIntegerField"
def formfield(self, **kwargs):
defaults = {'min_value': 0}
defaults.update(kwargs)
return super(PositiveSmallIntegerField, self).formfield(**defaults)
class SlugField(CharField):
default_validators = [validators.validate_slug]
description = _("Slug (up to %(max_length)s)")
def __init__(self, *args, **kwargs):
kwargs['max_length'] = kwargs.get('max_length', 50)
# Set db_index=True unless it's been set manually.
if 'db_index' not in kwargs:
kwargs['db_index'] = True
self.allow_unicode = kwargs.pop('allow_unicode', False)
if self.allow_unicode:
self.default_validators = [validators.validate_unicode_slug]
super(SlugField, self).__init__(*args, **kwargs)
def deconstruct(self):
name, path, args, kwargs = super(SlugField, self).deconstruct()
if kwargs.get("max_length") == 50:
del kwargs['max_length']
if self.db_index is False:
kwargs['db_index'] = False
else:
del kwargs['db_index']
if self.allow_unicode is not False:
kwargs['allow_unicode'] = self.allow_unicode
return name, path, args, kwargs
def get_internal_type(self):
return "SlugField"
def formfield(self, **kwargs):
defaults = {'form_class': forms.SlugField, 'allow_unicode': self.allow_unicode}
defaults.update(kwargs)
return super(SlugField, self).formfield(**defaults)
class SmallIntegerField(IntegerField):
description = _("Small integer")
def get_internal_type(self):
return "SmallIntegerField"
class TextField(Field):
description = _("Text")
def get_internal_type(self):
return "TextField"
def to_python(self, value):
if isinstance(value, six.string_types) or value is None:
return value
return smart_text(value)
def get_prep_value(self, value):
value = super(TextField, self).get_prep_value(value)
return self.to_python(value)
def formfield(self, **kwargs):
        # Passing max_length to forms.CharField means that the value's length
        # will be validated twice. This is considered acceptable since we want
        # the value in the form field (to pass into the widget, for example).
defaults = {'max_length': self.max_length, 'widget': forms.Textarea}
defaults.update(kwargs)
return super(TextField, self).formfield(**defaults)
class TimeField(DateTimeCheckMixin, Field):
empty_strings_allowed = False
default_error_messages = {
'invalid': _("'%(value)s' value has an invalid format. It must be in "
"HH:MM[:ss[.uuuuuu]] format."),
'invalid_time': _("'%(value)s' value has the correct format "
"(HH:MM[:ss[.uuuuuu]]) but it is an invalid time."),
}
description = _("Time")
def __init__(self, verbose_name=None, name=None, auto_now=False,
auto_now_add=False, **kwargs):
self.auto_now, self.auto_now_add = auto_now, auto_now_add
if auto_now or auto_now_add:
kwargs['editable'] = False
kwargs['blank'] = True
super(TimeField, self).__init__(verbose_name, name, **kwargs)
def _check_fix_default_value(self):
"""
        Adds a warning to the checks framework stating that using an actual
time or datetime value is probably wrong; it's only being evaluated on
server start-up.
For details see ticket #21905
"""
if not self.has_default():
return []
now = timezone.now()
if not timezone.is_naive(now):
now = timezone.make_naive(now, timezone.utc)
value = self.default
if isinstance(value, datetime.datetime):
second_offset = datetime.timedelta(seconds=10)
lower = now - second_offset
upper = now + second_offset
if timezone.is_aware(value):
value = timezone.make_naive(value, timezone.utc)
elif isinstance(value, datetime.time):
second_offset = datetime.timedelta(seconds=10)
lower = now - second_offset
upper = now + second_offset
value = datetime.datetime.combine(now.date(), value)
if timezone.is_aware(value):
value = timezone.make_naive(value, timezone.utc).time()
else:
# No explicit time / datetime value -- no checks necessary
return []
if lower <= value <= upper:
return [
checks.Warning(
'Fixed default value provided.',
hint='It seems you set a fixed date / time / datetime '
'value as default for this field. This may not be '
'what you want. If you want to have the current date '
'as default, use `django.utils.timezone.now`',
obj=self,
id='fields.W161',
)
]
return []
def deconstruct(self):
name, path, args, kwargs = super(TimeField, self).deconstruct()
if self.auto_now is not False:
kwargs["auto_now"] = self.auto_now
if self.auto_now_add is not False:
kwargs["auto_now_add"] = self.auto_now_add
if self.auto_now or self.auto_now_add:
del kwargs['blank']
del kwargs['editable']
return name, path, args, kwargs
def get_internal_type(self):
return "TimeField"
def to_python(self, value):
if value is None:
return None
if isinstance(value, datetime.time):
return value
if isinstance(value, datetime.datetime):
# Not usually a good idea to pass in a datetime here (it loses
# information), but this can be a side-effect of interacting with a
# database backend (e.g. Oracle), so we'll be accommodating.
return value.time()
try:
parsed = parse_time(value)
if parsed is not None:
return parsed
except ValueError:
raise exceptions.ValidationError(
self.error_messages['invalid_time'],
code='invalid_time',
params={'value': value},
)
raise exceptions.ValidationError(
self.error_messages['invalid'],
code='invalid',
params={'value': value},
)
def pre_save(self, model_instance, add):
if self.auto_now or (self.auto_now_add and add):
value = datetime.datetime.now().time()
setattr(model_instance, self.attname, value)
return value
else:
return super(TimeField, self).pre_save(model_instance, add)
def get_prep_value(self, value):
value = super(TimeField, self).get_prep_value(value)
return self.to_python(value)
def get_db_prep_value(self, value, connection, prepared=False):
# Casts times into the format expected by the backend
if not prepared:
value = self.get_prep_value(value)
return connection.ops.adapt_timefield_value(value)
def value_to_string(self, obj):
val = self.value_from_object(obj)
return '' if val is None else val.isoformat()
def formfield(self, **kwargs):
defaults = {'form_class': forms.TimeField}
defaults.update(kwargs)
return super(TimeField, self).formfield(**defaults)
class URLField(CharField):
default_validators = [validators.URLValidator()]
description = _("URL")
def __init__(self, verbose_name=None, name=None, **kwargs):
kwargs['max_length'] = kwargs.get('max_length', 200)
super(URLField, self).__init__(verbose_name, name, **kwargs)
def deconstruct(self):
name, path, args, kwargs = super(URLField, self).deconstruct()
if kwargs.get("max_length") == 200:
del kwargs['max_length']
return name, path, args, kwargs
def formfield(self, **kwargs):
# As with CharField, this will cause URL validation to be performed
# twice.
defaults = {
'form_class': forms.URLField,
}
defaults.update(kwargs)
return super(URLField, self).formfield(**defaults)
class BinaryField(Field):
description = _("Raw binary data")
empty_values = [None, b'']
def __init__(self, *args, **kwargs):
kwargs['editable'] = False
super(BinaryField, self).__init__(*args, **kwargs)
if self.max_length is not None:
self.validators.append(validators.MaxLengthValidator(self.max_length))
def deconstruct(self):
name, path, args, kwargs = super(BinaryField, self).deconstruct()
del kwargs['editable']
return name, path, args, kwargs
def get_internal_type(self):
return "BinaryField"
def get_default(self):
if self.has_default() and not callable(self.default):
return self.default
default = super(BinaryField, self).get_default()
if default == '':
return b''
return default
def get_db_prep_value(self, value, connection, prepared=False):
value = super(BinaryField, self).get_db_prep_value(value, connection, prepared)
if value is not None:
return connection.Database.Binary(value)
return value
def value_to_string(self, obj):
"""Binary data is serialized as base64"""
return b64encode(force_bytes(self.value_from_object(obj))).decode('ascii')
def to_python(self, value):
# If it's a string, it should be base64-encoded data
if isinstance(value, six.text_type):
return six.memoryview(b64decode(force_bytes(value)))
return value
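    # Round-trip sketch: a stored value of b'\x00\x01' is serialized by
    # value_to_string() as the ASCII base64 string 'AAE=', and
    # to_python('AAE=') yields a memoryview over b'\x00\x01' again.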
class UUIDField(Field):
default_error_messages = {
'invalid': _("'%(value)s' is not a valid UUID."),
}
description = 'Universally unique identifier'
empty_strings_allowed = False
def __init__(self, verbose_name=None, **kwargs):
kwargs['max_length'] = 32
super(UUIDField, self).__init__(verbose_name, **kwargs)
def deconstruct(self):
name, path, args, kwargs = super(UUIDField, self).deconstruct()
del kwargs['max_length']
return name, path, args, kwargs
def get_internal_type(self):
return "UUIDField"
def get_db_prep_value(self, value, connection, prepared=False):
if value is None:
return None
if not isinstance(value, uuid.UUID):
try:
value = uuid.UUID(value)
except AttributeError:
raise TypeError(self.error_messages['invalid'] % {'value': value})
if connection.features.has_native_uuid_field:
return value
return value.hex
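    # For example, on a backend without a native UUID column,
    # uuid.UUID('12345678-1234-5678-1234-567812345678') is stored as the
    # 32-character hex string '12345678123456781234567812345678'.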
def to_python(self, value):
if value and not isinstance(value, uuid.UUID):
try:
return uuid.UUID(value)
except ValueError:
raise exceptions.ValidationError(
self.error_messages['invalid'],
code='invalid',
params={'value': value},
)
return value
def formfield(self, **kwargs):
defaults = {
'form_class': forms.UUIDField,
}
defaults.update(kwargs)
return super(UUIDField, self).formfield(**defaults)
| {
"content_hash": "6ca908ec7691ef9a7a6b9e486cf99bf3",
"timestamp": "",
"source": "github",
"line_count": 2446,
"max_line_length": 101,
"avg_line_length": 36.782910874897794,
"alnum_prop": 0.5733402985406408,
"repo_name": "dgladkov/django",
"id": "6f8ca8980f91bb5f8464ae2d376235ac4371b949",
"size": "89995",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "django/db/models/fields/__init__.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "52372"
},
{
"name": "HTML",
"bytes": "170531"
},
{
"name": "JavaScript",
"bytes": "256023"
},
{
"name": "Makefile",
"bytes": "125"
},
{
"name": "Python",
"bytes": "11485293"
},
{
"name": "Shell",
"bytes": "809"
},
{
"name": "Smarty",
"bytes": "130"
}
],
"symlink_target": ""
} |
"""Support for mounting images with the loop device"""
from nova.openstack.common import log as logging
from nova import utils
from nova.virt.disk.mount import api
LOG = logging.getLogger(__name__)
class LoopMount(api.Mount):
"""loop back support for raw images."""
mode = 'loop'
def _inner_get_dev(self):
out, err = utils.trycmd('losetup', '--find', '--show', self.image,
run_as_root=True)
if err:
self.error = _('Could not attach image to loopback: %s') % err
LOG.info(_('Loop mount error: %s'), self.error)
self.linked = False
self.device = None
return False
self.device = out.strip()
LOG.debug(_("Got loop device %s"), self.device)
self.linked = True
return True
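    # Roughly what the command above does (paths and device are illustrative):
    #     losetup --find --show /var/lib/nova/instances/<uuid>/disk
    #     /dev/loop0
    # i.e. the first free loop device is attached to the image and printed.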
def get_dev(self):
# NOTE(mikal): the retry is required here in case we are low on loop
# devices. Note however that modern kernels will use more loop devices
# if they exist. If you're seeing lots of retries, consider adding
# more devices.
return self._get_dev_retry_helper()
def unget_dev(self):
if not self.linked:
return
# NOTE(mikal): On some kernels, losetup -d will intermittently fail,
# thus leaking a loop device unless the losetup --detach is retried:
# https://lkml.org/lkml/2012/9/28/62
LOG.debug(_("Release loop device %s"), self.device)
utils.execute('losetup', '--detach', self.device, run_as_root=True,
attempts=3)
self.linked = False
self.device = None
| {
"content_hash": "d8bf133eb6f2aeb39ce5bf3826051e0c",
"timestamp": "",
"source": "github",
"line_count": 47,
"max_line_length": 78,
"avg_line_length": 35.02127659574468,
"alnum_prop": 0.5911300121506683,
"repo_name": "fajoy/nova",
"id": "667ecee1432750a2790db82bfef7a7886a6a7dda",
"size": "2264",
"binary": false,
"copies": "2",
"ref": "refs/heads/grizzly-2",
"path": "nova/virt/disk/mount/loop.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "16002"
},
{
"name": "JavaScript",
"bytes": "7403"
},
{
"name": "Python",
"bytes": "7567423"
},
{
"name": "Shell",
"bytes": "15428"
}
],
"symlink_target": ""
} |
import numpy as np
from scipy import linalg
from numpy.testing import assert_almost_equal, assert_array_almost_equal
from nose.tools import assert_true
from mne.time_frequency.stft import stft, istft, stftfreq, stft_norm2
def test_stft():
"Test stft and istft tight frame property"
sfreq = 1000. # Hz
f = 7. # Hz
for T in [253, 256]: # try with even and odd numbers
t = np.arange(T).astype(np.float)
x = np.sin(2 * np.pi * f * t / sfreq)
x = np.array([x, x + 1.])
wsize = 128
tstep = 4
X = stft(x, wsize, tstep)
xp = istft(X, tstep, Tx=T)
freqs = stftfreq(wsize, sfreq=1000)
max_freq = freqs[np.argmax(np.sum(np.abs(X[0]) ** 2, axis=1))]
assert_true(X.shape[1] == len(freqs))
assert_true(np.all(freqs >= 0.))
assert_true(np.abs(max_freq - f) < 1.)
assert_array_almost_equal(x, xp, decimal=6)
# norm conservation thanks to tight frame property
assert_almost_equal(np.sqrt(stft_norm2(X)),
[linalg.norm(xx) for xx in x], decimal=2)
# Try with empty array
x = np.zeros((0, T))
X = stft(x, wsize, tstep)
xp = istft(X, tstep, T)
assert_true(xp.shape == x.shape)
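# A rough usage sketch, using the same names as the test above: for x of shape
# (n_signals, T), stft(x, wsize, tstep) returns complex coefficients whose
# frequency axis matches stftfreq(wsize) (non-negative frequencies only), and
# istft(X, tstep, Tx=T) reconstructs x thanks to the tight-frame property,
# with stft_norm2(X) matching the squared L2 norm of each signal.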
| {
"content_hash": "d32f8674c48fcf13495d718ae278fcf4",
"timestamp": "",
"source": "github",
"line_count": 40,
"max_line_length": 72,
"avg_line_length": 31.8,
"alnum_prop": 0.5715408805031447,
"repo_name": "jaeilepp/eggie",
"id": "e7eca49980df06453573ba15266ec29ed42a8713",
"size": "1272",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "mne/time_frequency/tests/test_stft.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "3357472"
}
],
"symlink_target": ""
} |