Unnamed: 0
int64 0
10k
| function
stringlengths 79
138k
| label
stringclasses 20
values | info
stringlengths 42
261
|
---|---|---|---|
def get_window(window, Nx, fftbins=True):
    """
    Return a window.

    Parameters
    ----------
    window : string, float, or tuple
        The type of window to create. See below for more details.
    Nx : int
        The number of samples in the window.
    fftbins : bool, optional
        If True (default), create a "periodic" window, ready to use with
        `ifftshift` and be multiplied by the result of an FFT (see also
        `fftpack.fftfreq`).
        If False, create a "symmetric" window, for use in filter design.

    Returns
    -------
    get_window : ndarray
        Returns a window of length `Nx` and type `window`

    Notes
    -----
    Window types:
        `boxcar`, `triang`, `blackman`, `hamming`, `hann`, `bartlett`,
        `flattop`, `parzen`, `bohman`, `blackmanharris`, `nuttall`,
        `barthann`, `kaiser` (needs beta), `gaussian` (needs standard
        deviation), `general_gaussian` (needs power, width), `slepian`
        (needs width), `chebwin` (needs attenuation), `exponential`
        (needs decay scale), `tukey` (needs taper fraction)

    If the window requires no parameters, then `window` can be a string.
    If the window requires parameters, then `window` must be a tuple
    with the first argument the string name of the window, and the next
    arguments the needed parameters.
    If `window` is a floating point number, it is interpreted as the beta
    parameter of the `kaiser` window.

    Each of the window types listed above is also the name of
    a function that can be called directly to create a window of
    that type.

    Examples
    --------
    >>> from scipy import signal
    >>> signal.get_window('triang', 7)
    array([ 0.25,  0.5 ,  0.75,  1.  ,  0.75,  0.5 ,  0.25])
    >>> signal.get_window(('kaiser', 4.0), 9)
    array([ 0.08848053,  0.32578323,  0.63343178,  0.89640418,  1.        ,
            0.89640418,  0.63343178,  0.32578323,  0.08848053])
    >>> signal.get_window(4.0, 9)
    array([ 0.08848053,  0.32578323,  0.63343178,  0.89640418,  1.        ,
            0.89640418,  0.63343178,  0.32578323,  0.08848053])
    """
    sym = not fftbins
    try:
        # A bare float is shorthand for a kaiser window with that beta.
        beta = float(window)
    except (TypeError, ValueError):
        args = ()
        if isinstance(window, tuple):
            winstr = window[0]
            if len(window) > 1:
                args = window[1:]
        elif isinstance(window, string_types):
            if window in _needs_param:
                raise ValueError("The '" + window + "' window needs one or "
                                 "more parameters -- pass a tuple.")
            else:
                winstr = window
        else:
            raise ValueError("%s as window type is not supported." %
                             str(type(window)))
        try:
            winfunc = _win_equiv[winstr]
        except KeyError:
            # unknown window name -> report as a value problem to the caller
            raise ValueError("Unknown window type.")
        params = (Nx,) + args + (sym,)
    else:
        winfunc = kaiser
        params = (Nx, beta, sym)
    return winfunc(*params)
def setRGB(self, r, g, b):
    """Set the LCD backlight color over I2C.

    Retries (recursively, up to 5 attempts via ``self.errorCount``) after
    clearing the display when the I2C write fails.
    """
    try:
        self.bus.write_byte_data(DISPLAY_RGB_ADDR, 0, 0)
        self.bus.write_byte_data(DISPLAY_RGB_ADDR, 1, 0)
        self.bus.write_byte_data(DISPLAY_RGB_ADDR, 0x08, 0xaa)
        self.bus.write_byte_data(DISPLAY_RGB_ADDR, 4, r)
        self.bus.write_byte_data(DISPLAY_RGB_ADDR, 3, g)
        self.bus.write_byte_data(DISPLAY_RGB_ADDR, 2, b)
        self.errorCount = 0
    except IOError:
        # I2C bus write failed -- log, clear and retry a bounded number of times.
        Tools.log("Error writing to RGB Screen", 1)
        self.errorCount += 1
        if (self.errorCount < 5):
            self.clear()
            self.setRGB(r, g, b)

# send command to display (no need for external use)
def _check_duration(option, opt, value):
    """optparse type-checker: parse '10' (seconds) or '5m'-style durations.

    A trailing non-digit is looked up in ``_time_units`` as a multiplier.
    Raises OptionValueError on malformed or empty input.
    """
    try:
        if value[-1].isdigit():
            return int(value)
        else:
            return int(value[:-1]) * _time_units[value[-1]]
    except (ValueError, IndexError):
        # ValueError: non-numeric body; IndexError: empty string.
        raise OptionValueError(
            'option %s: invalid duration: %r' % (opt, value))
def testIter(self):
    """Iterate a tuple manually via the (Python 2) iterator protocol."""
    t = (0, 1, 2, 3)
    i = 0
    it = t.__iter__()
    while True:
        try:
            item = it.next()
        except StopIteration:
            # iterator exhausted -- end of tuple
            break
        self.assertEqual(item, t[i])
        i += 1

# XXX: This does not even compile
#def test_tuple_unpacking(self):
#    self.fail('Bug #527 Tuple unpacking not supported for more than one level')
#    (a, b), c, (d, e) = x
#    self.assertEqual((a, b, c, d, e), (1, 2, 3, 4, 5))
# XXX: This does not even compile
#def test_tuple_unpacking_in_loop(self):
#    self.fail('Tuple unpacking in for-loops not supported for more than one level')
#    x = ((1, 2), 3, (4, 5))
#    for (a, b), c, (d, e) in [x, x, x]:
#        self.assertEqual((a, b, c, d, e), (1, 2, 3, 4, 5))
@staticmethod
def append(category, key, obj, timeout=None):
    '''Add a new object to the cache.

    :Parameters:
        `category` : str
            Identifier of the category.
        `key` : str
            Unique identifier of the object to store.
        `obj` : object
            Object to store in cache.
        `timeout` : double (optional)
            Time after which to delete the object if it has not been used.
            If None, no timeout is applied.
    '''
    # check whether obj should not be cached first
    if getattr(obj, '_no_cache', False):
        return
    try:
        cat = Cache._categories[category]
    except KeyError:
        # unknown category: warn and silently drop the object
        Logger.warning('Cache: category <%s> not exist' % category)
        return
    timeout = timeout or cat['timeout']
    # FIXME: activate purge when limit is hit
    #limit = cat['limit']
    #if limit is not None and len(Cache._objects[category]) >= limit:
    #    Cache._purge_oldest(category)
    Cache._objects[category][key] = {
        'object': obj,
        'timeout': timeout,
        'lastaccess': Clock.get_time(),
        'timestamp': Clock.get_time()}
def izip_longest(*args, **kwds):
    """Backport of itertools.izip_longest (Python 2).

    izip_longest('ABCD', 'xy', fillvalue='-') --> Ax By C- D-
    """
    fillvalue = kwds.get('fillvalue')

    def sentinel(counter=([fillvalue] * (len(args) - 1)).pop):
        # yields the fillvalue, or raises IndexError once the shared
        # counter list is exhausted (i.e. every input has ended)
        yield counter()

    fillers = itertools.repeat(fillvalue)
    iters = [itertools.chain(it, sentinel(), fillers) for it in args]
    try:
        for tup in itertools.izip(*iters):
            yield tup
    except IndexError:
        # sentinel exhausted: all inputs consumed, stop cleanly
        pass
def get_db_prep_lookup(self, lookup_type, value, connection=None, prepared=False):
    """Limit lookups on pickled fields to exact/in/isnull.

    The Field model already calls get_db_prep_value before doing the
    actual lookup, so all we need to do is limit the lookup types.
    """
    if lookup_type not in ['exact', 'in', 'isnull']:
        raise TypeError('Lookup type %s is not supported.' % lookup_type)
    try:
        return super(PickledObjectField, self).get_db_prep_lookup(
            lookup_type, value, connection=connection, prepared=prepared)
    except TypeError:
        # Older Django: the `connection` and `prepared` parameters are
        # not available, so retry with the legacy signature.
        return super(PickledObjectField, self).get_db_prep_lookup(
            lookup_type, value)

# South support; see http://south.aeracode.org/docs/tutorial/part4.html#simple-inheritance
def get(self, key, default):
    """Return the cached value for ``key``, marking it most-recently-used.

    Falls back to ``default`` when the key is absent.  The pop/re-insert
    dance moves the entry to the end of the (ordered) cache dict.
    """
    try:
        value = self.cache.pop(key)
        self.cache[key] = value
        return value
    except KeyError:
        return default
def set(self, key, value):
    """Insert/update ``key`` in the LRU cache.

    Evicts the least-recently-used entry (front of the ordered dict)
    when inserting a brand-new key at capacity.
    """
    try:
        self.cache.pop(key)
    except KeyError:
        # new key: make room if the cache is full
        if len(self.cache) >= self.capacity:
            self.cache.popitem(last=False)
    self.cache[key] = value
def scrapeshell():
    """Interactive IPython shell pre-loaded with a scraped page."""
    # clear argv for IPython
    import sys
    orig_argv = sys.argv[1:]
    sys.argv = sys.argv[:1]

    try:
        from IPython import embed
    except ImportError:
        print('scrapeshell requires ipython >= 0.11')
        return
    try:
        import lxml.html
        USE_LXML = True
    except ImportError:
        # lxml is optional -- fall back to the raw response only
        USE_LXML = False

    parser = argparse.ArgumentParser(prog='scrapeshell',
                                     description='interactive python shell for'
                                     ' scraping')
    parser.add_argument('url', help="url to scrape")
    parser.add_argument('--ua', dest='user_agent', default=_user_agent,
                        help='user agent to make requests with')
    parser.add_argument('-p', '--postdata', dest='postdata',
                        default=None,
                        help="POST data (will make a POST instead of GET)")
    args = parser.parse_args(orig_argv)

    scraper = Scraper()
    scraper.user_agent = args.user_agent
    url = args.url
    if args.postdata:
        html = scraper.post(args.url, args.postdata)
    else:
        html = scraper.get(args.url)

    if USE_LXML:
        doc = lxml.html.fromstring(html.content)  # noqa

    print('local variables')
    print('---------------')
    print('url: %s' % url)
    print('html: requests Response instance')
    if USE_LXML:
        print('doc: `lxml HTML element`')
    else:
        print('doc not available: lxml not installed')
    embed()
@staticmethod
def log_http_response(resp):
    """Log an HTTP response (status line, headers, decodable body) at DEBUG."""
    status = (resp.raw.version / 10.0, resp.status_code, resp.reason)
    dump = ['\nHTTP/%.1f %s %s' % status]
    dump.extend(['%s: %s' % (k, v) for k, v in resp.headers.items()])
    dump.append('')
    if resp.content:
        content = resp.content
        if isinstance(content, six.binary_type):
            try:
                content = encodeutils.safe_decode(resp.content)
            except UnicodeDecodeError:
                # undecodable binary body: skip it rather than crash logging
                pass
            else:
                dump.extend([content, ''])
    LOG.debug('\n'.join(dump))
def json_request(self, method, url, content_type='application/json',
                 **kwargs):
    """Issue an HTTP request with a JSON-serialized body.

    Returns a ``(response, body)`` tuple where ``body`` is the decoded
    JSON (or None when the response is not JSON).
    """
    kwargs.setdefault('headers', {})
    kwargs['headers'].setdefault('Content-Type', content_type)
    # Don't set Accept because we aren't always dealing in JSON

    if 'body' in kwargs:
        if 'data' in kwargs:
            raise ValueError("Can't provide both 'data' and "
                             "'body' to a request")
        LOG.warning("Use of 'body' is deprecated; use 'data' instead")
        kwargs['data'] = kwargs.pop('body')
    if 'data' in kwargs:
        kwargs['data'] = jsonutils.dumps(kwargs['data'])

    resp = self._http_request(url, method, **kwargs)
    body = resp.content

    if 'application/json' in resp.headers.get('content-type', []):
        try:
            body = resp.json()
        except ValueError:
            # server claimed JSON but sent something else; keep raw content
            LOG.error('Could not decode response body as JSON')
    else:
        body = None
    return resp, body
def handle(self, *args, **options):
    """Analyze a timelog file and print a summary table.

    TODO (from original author):
    --date-from YY-MM-DD : specify a date filter start (default: first date in file)
    --date-to YY-MM-DD   : specify a date filter end (default: now)
    """
    LOGFILE = options.get('file')
    try:
        data = analyze_log_file(LOGFILE, PATTERN, reverse_paths=options.get('reverse'))
    except IOError:
        # log file missing/unreadable
        print("File not found")
        exit(2)
    print(generate_table_from(data))
def createSomeComments(self):
    """Post two comments through the comment views and return them.

    Tests for the moderation signals must actually post data through the
    comment views, because only the comment views emit the custom signals
    moderation listens for.  Either comment may come back as None if
    moderation disallowed it.
    """
    e = Entry.objects.get(pk=1)
    data = self.getValidData(e)
    self.client.post("/post/", data, REMOTE_ADDR="1.2.3.4")
    # We explicitly do a try/except to get the comment we've just
    # posted because moderation may have disallowed it, in which
    # case we can just return it as None.
    try:
        c1 = Comment.objects.all()[0]
    except IndexError:
        c1 = None
    self.client.post("/post/", data, REMOTE_ADDR="1.2.3.4")
    try:
        c2 = Comment.objects.all()[0]
    except IndexError:
        c2 = None
    return c1, c2
def upload_fixture(fileobj, loader=None):
    """Load a fixture file into the database.

    Objects whose foreign keys cannot be resolved yet are re-queued once
    (via ``pending_objects``) so that dependencies inserted later in the
    fixture can satisfy them.
    """
    fixture = load_fixture(fileobj, loader)
    queue = Queue.Queue()
    keys = {}
    for obj in fixture:
        pk = obj['pk']
        model_name = obj["model"].split(".")[1]
        try:
            # find a model attribute matching either Capitalized or CamelCase
            itertools.dropwhile(
                lambda m: not hasattr(models, m),
                [model_name.capitalize(),
                 "".join(map(lambda n: n.capitalize(), model_name.split("_")))]
            ).next()
        except StopIteration:
            # dropwhile consumed everything -> no matching model attribute
            raise Exception("Couldn't find model {0}".format(model_name))
        obj['model'] = getattr(models, capitalize_model_name(model_name))
        keys[obj['model'].__tablename__] = {}
        # Check if it's already uploaded
        obj_from_db = db().query(obj['model']).get(pk)
        if obj_from_db:
            logger.info("Fixture model '%s' with pk='%s' already"
                        " uploaded. Skipping", model_name, pk)
            continue
        queue.put(obj)
    pending_objects = []
    while True:
        try:
            obj = queue.get_nowait()
        except Exception:
            break
        new_obj = obj['model']()
        fk_fields = {}
        for field, value in six.iteritems(obj["fields"]):
            f = getattr(obj['model'], field)
            impl = getattr(f, 'impl', None)
            fk_model = None
            try:
                if hasattr(f.comparator.prop, "argument"):
                    if hasattr(f.comparator.prop.argument, "__call__"):
                        fk_model = f.comparator.prop.argument()
                    else:
                        fk_model = f.comparator.prop.argument.class_
            except AttributeError:
                # not a relationship attribute
                pass
            if fk_model:
                if value not in keys[fk_model.__tablename__]:
                    if obj not in pending_objects:
                        # dependency not uploaded yet: retry this object later
                        queue.put(obj)
                        pending_objects.append(obj)
                        continue
                    else:
                        logger.error(
                            u"Can't resolve foreign key "
                            "'{0}' for object '{1}'".format(
                                field,
                                obj["model"]
                            )
                        )
                        break
                else:
                    value = keys[fk_model.__tablename__][value].id
            if isinstance(impl, orm.attributes.ScalarObjectAttributeImpl):
                if value:
                    fk_fields[field] = (value, fk_model)
            elif isinstance(impl, orm.attributes.CollectionAttributeImpl):
                if value:
                    fk_fields[field] = (value, fk_model)
            elif hasattr(f, 'property') and isinstance(
                f.property.columns[0].type, sqlalchemy.types.DateTime
            ):
                if value:
                    setattr(
                        new_obj,
                        field,
                        datetime.strptime(value, "%d-%m-%Y %H:%M:%S")
                    )
                else:
                    setattr(
                        new_obj,
                        field,
                        datetime.now()
                    )
            else:
                setattr(new_obj, field, value)
        for field, data in six.iteritems(fk_fields):
            if isinstance(data[0], int):
                setattr(new_obj, field, db().query(data[1]).get(data[0]))
            elif isinstance(data[0], list):
                for v in data[0]:
                    getattr(new_obj, field).append(
                        db().query(data[1]).get(v)
                    )
        db().add(new_obj)
        db().commit()
        keys[obj['model'].__tablename__][obj["pk"]] = new_obj
        # UGLY HACK for testing
        if new_obj.__class__.__name__ == 'Node':
            objects.Node.update_interfaces(new_obj)
            fire_callback_on_node_create(new_obj)
    db().commit()
def _get_stacktrace():
    """Return a tidied stack trace, or [] when tracing is disabled."""
    if WANT_STACK_TRACE:
        try:
            stack = inspect.stack()
        except IndexError:
            # this is a work around because python's inspect.stack() sometimes fail
            # when jinja templates are on the stack
            return [(
                "",
                0,
                "Error retrieving stack",
                "Could not retrieve stack. IndexError exception occured in inspect.stack(). "
                "This error might occur when jinja2 templates is on the stack.",
            )]
        return _tidy_stacktrace(reversed(stack))
    else:
        return []

# Wrap Cursor._refresh for getting queries
def _resolve_name(name, package, level):
    """Returns the absolute name of the module to be imported.

    ``level`` is the number of leading dots in a relative import; each
    level above 1 strips one trailing component from ``package``.
    """
    if not hasattr(package, 'rindex'):
        raise ValueError("'package' not set to a string")
    dot = len(package)
    # range() behaves identically to the original xrange() here and is
    # Python 3 compatible.
    for x in range(level, 1, -1):
        try:
            dot = package.rindex('.', 0, dot)
        except ValueError:
            # ran out of package components to strip
            raise ValueError("attempted relative import beyond top-level "
                             "package")
    return "%s.%s" % (package[:dot], name)
def resolve_name(import_name, silent=False):
    """Imports an object based on a string. This is useful if you want to
    use import paths as endpoints or something similar. An import path can
    be specified either in dotted notation (``xml.sax.saxutils.escape``)
    or with a colon as object delimiter (``xml.sax.saxutils:escape``).

    If `silent` is True the return value will be `None` if the import fails.
    For better debugging we recommend the new :func:`import_module`
    function to be used instead.

    :param import_name: the dotted name for the object to import.
    :param silent: if set to `True` import errors are ignored and
                   `None` is returned instead.
    :return: imported object
    """
    # force the import name to automatically convert to strings
    # NOTE: a local alias replaces the original ``global unicode`` juggling,
    # which was an assignment-before-global-declaration SyntaxError.
    if PY3:
        text_type = str
    else:
        text_type = unicode  # noqa: F821 -- builtin on Python 2
    if isinstance(import_name, text_type):
        import_name = str(import_name)
    try:
        if ':' in import_name:
            module, obj = import_name.split(':', 1)
        elif '.' in import_name:
            module, obj = import_name.rsplit('.', 1)
        else:
            return __import__(import_name)
        # __import__ is not able to handle unicode strings in the fromlist
        # if the module is a package
        if isinstance(obj, text_type):
            obj = obj.encode('utf-8')
        if isinstance(obj, bytes):
            obj = obj.decode('utf-8')
        try:
            return getattr(__import__(module, None, None, [obj]), obj)
        except (ImportError, AttributeError):
            # support importing modules not yet set up by the parent module
            # (or package for that matter)
            modname = module + '.' + obj
            __import__(modname)
            return sys.modules[modname]
    except ImportError as e:
        if not silent:
            raise ImportStringError(import_name, e)
def split_line(line, point):
    """Split a partial command line at the cursor ``point``.

    Returns ``(prequote, prefix, suffix, words, first_colon_pos)`` for the
    word under the cursor, using shlex to honour quoting.
    """
    lexer = shlex.shlex(line, posix=True, punctuation_chars=True)
    words = []

    def split_word(word):
        # TODO: make this less ugly
        point_in_word = len(word) + point - lexer.instream.tell()
        if isinstance(lexer.state, basestring) and lexer.state in lexer.whitespace:
            point_in_word += 1
        if point_in_word > len(word):
            debug("In trailing whitespace")
            words.append(word)
            word = ''
        prefix, suffix = word[:point_in_word], word[point_in_word:]
        prequote = ''
        # posix
        if lexer.state is not None and lexer.state in lexer.quotes:
            prequote = lexer.state
        # non-posix
        #if len(prefix) > 0 and prefix[0] in lexer.quotes:
        #    prequote, prefix = prefix[0], prefix[1:]
        first_colon_pos = lexer.first_colon_pos if ':' in word else None
        return prequote, prefix, suffix, words, first_colon_pos

    while True:
        try:
            word = lexer.get_token()
            if word == lexer.eof:
                # TODO: check if this is ever unsafe
                # raise ArgcompleteException("Unexpected end of input")
                return "", "", "", words, None
            if lexer.instream.tell() >= point:
                debug("word", word, "split, lexer state: '{s}'".format(s=lexer.state))
                return split_word(word)
            words.append(word)
        except ValueError:
            # shlex raises ValueError on unbalanced quoting at end of input
            debug("word", lexer.token, "split (lexer stopped, state: '{s}')".format(s=lexer.state))
            if lexer.instream.tell() >= point:
                return split_word(lexer.token)
            else:
                raise ArgcompleteException("Unexpected internal state. Please report this bug at https://github.com/kislyuk/argcomplete/issues.")
def __exit__(self, exc_type, exc_value, tb):
    """assertRaises context-manager exit: verify the expected exception."""
    if exc_type is None:
        try:
            exc_name = self.expected.__name__
        except AttributeError:
            # self.expected may be a tuple of exception classes
            exc_name = str(self.expected)
        raise self.failureException(
            "{0} not raised".format(exc_name))
    if not issubclass(exc_type, self.expected):
        # let unexpected exceptions pass through
        return False
    self.exception = exc_value  # store for later retrieval
    if self.expected_regexp is None:
        return True

    expected_regexp = self.expected_regexp
    if isinstance(expected_regexp, basestring):
        expected_regexp = re.compile(expected_regexp)
    if not expected_regexp.search(str(exc_value)):
        raise self.failureException('"%s" does not match "%s"' %
                                    (expected_regexp.pattern, str(exc_value)))
    return True
def init():
    """
    Initialize the Git repository.
    """
    dirname = repo()
    try:
        os.makedirs(dirname)
        if util.via_sudo():
            # hand ownership back to the invoking user, not root
            uid = int(os.environ['SUDO_UID'])
            gid = int(os.environ['SUDO_GID'])
            os.chown(dirname, uid, gid)
    except OSError:
        # directory already exists (or is otherwise unmakeable) -- continue
        pass
    try:
        p = subprocess.Popen(['git',
                              '--git-dir', dirname,
                              'init',
                              '--bare',
                              '-q'],
                             close_fds=True,
                             preexec_fn=unroot,
                             stdout=sys.stderr,
                             stderr=sys.stderr)
    except OSError:
        # Popen raises OSError when the executable is missing
        logging.error('git not found on PATH - exiting')
        sys.exit(1)
    p.communicate()
    if 0 != p.returncode:
        #sys.exit(p.returncode)
        raise GitError(p.returncode)
def git(*args, **kwargs):
    """
    Execute a Git command.  Raises GitError on non-zero exits unless the
    raise_exc keyword argument is falsey.
    """
    try:
        p = subprocess.Popen(git_args() + list(args),
                             close_fds=True,
                             preexec_fn=unroot,
                             stdin=subprocess.PIPE,
                             stdout=subprocess.PIPE)
    except OSError:
        # Popen raises OSError when the executable is missing
        logging.error('git not found on PATH - exiting')
        sys.exit(1)
    stdout, stderr = p.communicate(kwargs.get('stdin'))
    if 0 != p.returncode and kwargs.get('raise_exc', True):
        raise GitError(p.returncode)
    return p.returncode, stdout
@classmethod
def new_from_tuple(cls, tuple_):
    """Build a Query from a ``(queryset, serializer[, label])`` tuple.

    ``label`` defaults to None when the 2-tuple form is given.
    """
    try:
        queryset, serializer, label = tuple_
    except ValueError:
        # 2-tuple form: no label supplied
        queryset, serializer = tuple_
        label = None
    query = Query(queryset, serializer, label)
    return query
def destroy_datastore(paths):
    """Destroys the appengine datastore at the specified paths.

    Empty/None paths are skipped; a missing path (ENOENT) is silently
    ignored, any other OSError is logged.
    """
    for path in paths.values():
        if not path:
            continue
        try:
            if os.path.isdir(path):
                shutil.rmtree(path)
            else:
                os.remove(path)
        except OSError as error:
            # errno 2 == ENOENT: already gone, nothing to clear
            if error.errno != 2:
                logging.error("Failed to clear datastore: %s" % error)
def run_cgi(self):
    """Execute a CGI script."""
    path = self.path
    dir, rest = self.cgi_info

    # Resolve as many leading path components as possible into real
    # directories so PATH_INFO keeps only the trailing part.
    i = path.find('/', len(dir) + 1)
    while i >= 0:
        nextdir = path[:i]
        nextrest = path[i+1:]
        scriptdir = self.translate_path(nextdir)
        if os.path.isdir(scriptdir):
            dir, rest = nextdir, nextrest
            i = path.find('/', len(dir) + 1)
        else:
            break

    # find an explicit query string, if present.
    i = rest.rfind('?')
    if i >= 0:
        rest, query = rest[:i], rest[i+1:]
    else:
        query = ''

    # dissect the part after the directory name into a script name &
    # a possible additional path, to be stored in PATH_INFO.
    i = rest.find('/')
    if i >= 0:
        script, rest = rest[:i], rest[i:]
    else:
        script, rest = rest, ''

    scriptname = dir + '/' + script
    scriptfile = self.translate_path(scriptname)
    if not os.path.exists(scriptfile):
        self.send_error(404, "No such CGI script (%r)" % scriptname)
        return
    if not os.path.isfile(scriptfile):
        self.send_error(403, "CGI script is not a plain file (%r)" %
                        scriptname)
        return
    ispy = self.is_python(scriptname)
    if not ispy:
        if not (self.have_fork or self.have_popen2 or self.have_popen3):
            self.send_error(403, "CGI script is not a Python script (%r)" %
                            scriptname)
            return
        if not self.is_executable(scriptfile):
            self.send_error(403, "CGI script is not executable (%r)" %
                            scriptname)
            return

    # Reference: http://hoohoo.ncsa.uiuc.edu/cgi/env.html
    # XXX Much of the following could be prepared ahead of time!
    env = {}
    env['SERVER_SOFTWARE'] = self.version_string()
    env['SERVER_NAME'] = self.server.server_name
    env['GATEWAY_INTERFACE'] = 'CGI/1.1'
    env['SERVER_PROTOCOL'] = self.protocol_version
    env['SERVER_PORT'] = str(self.server.server_port)
    env['REQUEST_METHOD'] = self.command
    uqrest = urllib.unquote(rest)
    env['PATH_INFO'] = uqrest
    env['PATH_TRANSLATED'] = self.translate_path(uqrest)
    env['SCRIPT_NAME'] = scriptname
    if query:
        env['QUERY_STRING'] = query
    host = self.address_string()
    if host != self.client_address[0]:
        env['REMOTE_HOST'] = host
    env['REMOTE_ADDR'] = self.client_address[0]
    authorization = self.headers.getheader("authorization")
    if authorization:
        authorization = authorization.split()
        if len(authorization) == 2:
            import base64, binascii
            env['AUTH_TYPE'] = authorization[0]
            if authorization[0].lower() == "basic":
                try:
                    authorization = base64.decodestring(authorization[1])
                except binascii.Error:
                    pass
                else:
                    authorization = authorization.split(':')
                    if len(authorization) == 2:
                        env['REMOTE_USER'] = authorization[0]
    # XXX REMOTE_IDENT
    if self.headers.typeheader is None:
        env['CONTENT_TYPE'] = self.headers.type
    else:
        env['CONTENT_TYPE'] = self.headers.typeheader
    length = self.headers.getheader('content-length')
    if length:
        env['CONTENT_LENGTH'] = length
    referer = self.headers.getheader('referer')
    if referer:
        env['HTTP_REFERER'] = referer
    accept = []
    for line in self.headers.getallmatchingheaders('accept'):
        if line[:1] in "\t\n\r ":
            # continuation line of a folded Accept header
            accept.append(line.strip())
        else:
            accept = accept + line[7:].split(',')
    env['HTTP_ACCEPT'] = ','.join(accept)
    ua = self.headers.getheader('user-agent')
    if ua:
        env['HTTP_USER_AGENT'] = ua
    co = filter(None, self.headers.getheaders('cookie'))
    if co:
        env['HTTP_COOKIE'] = ', '.join(co)
    # XXX Other HTTP_* headers
    # Since we're setting the env in the parent, provide empty
    # values to override previously set values
    for k in ('QUERY_STRING', 'REMOTE_HOST', 'CONTENT_LENGTH',
              'HTTP_USER_AGENT', 'HTTP_COOKIE', 'HTTP_REFERER'):
        env.setdefault(k, "")
    os.environ.update(env)

    self.send_response(200, "Script output follows")

    decoded_query = query.replace('+', ' ')

    if self.have_fork:
        # Unix -- fork as we should
        args = [script]
        if '=' not in decoded_query:
            args.append(decoded_query)
        nobody = nobody_uid()
        self.wfile.flush()  # Always flush before forking
        pid = os.fork()
        if pid != 0:
            # Parent
            pid, sts = os.waitpid(pid, 0)
            # throw away additional data [see bug #427345]
            while select.select([self.rfile], [], [], 0)[0]:
                if not self.rfile.read(1):
                    break
            if sts:
                self.log_error("CGI script exit status %#x", sts)
            return
        # Child
        try:
            try:
                os.setuid(nobody)
            except os.error:
                pass
            os.dup2(self.rfile.fileno(), 0)
            os.dup2(self.wfile.fileno(), 1)
            os.execve(scriptfile, args, os.environ)
        except:
            self.server.handle_error(self.request, self.client_address)
            os._exit(127)

    elif self.have_popen2 or self.have_popen3:
        # Windows -- use popen2 or popen3 to create a subprocess
        import shutil
        if self.have_popen3:
            popenx = os.popen3
        else:
            popenx = os.popen2
        cmdline = scriptfile
        if self.is_python(scriptfile):
            interp = sys.executable
            if interp.lower().endswith("w.exe"):
                # On Windows, use python.exe, not pythonw.exe
                interp = interp[:-5] + interp[-4:]
            cmdline = "%s -u %s" % (interp, cmdline)
        if '=' not in query and '"' not in query:
            cmdline = '%s "%s"' % (cmdline, query)
        self.log_message("command: %s", cmdline)
        try:
            nbytes = int(length)
        except (TypeError, ValueError):
            # length may be None (header absent) or non-numeric
            nbytes = 0
        files = popenx(cmdline, 'b')
        fi = files[0]
        fo = files[1]
        if self.have_popen3:
            fe = files[2]
        if self.command.lower() == "post" and nbytes > 0:
            data = self.rfile.read(nbytes)
            fi.write(data)
        # throw away additional data [see bug #427345]
        while select.select([self.rfile._sock], [], [], 0)[0]:
            if not self.rfile._sock.recv(1):
                break
        fi.close()
        shutil.copyfileobj(fo, self.wfile)
        if self.have_popen3:
            errors = fe.read()
            fe.close()
            if errors:
                self.log_error('%s', errors)
        sts = fo.close()
        if sts:
            self.log_error("CGI script exit status %#x", sts)
        else:
            self.log_message("CGI script exited OK")

    else:
        # Other O.S. -- execute script in this process
        save_argv = sys.argv
        save_stdin = sys.stdin
        save_stdout = sys.stdout
        save_stderr = sys.stderr
        try:
            save_cwd = os.getcwd()
            try:
                sys.argv = [scriptfile]
                if '=' not in decoded_query:
                    sys.argv.append(decoded_query)
                sys.stdout = self.wfile
                sys.stdin = self.rfile
                execfile(scriptfile, {"__name__": "__main__"})
            finally:
                sys.argv = save_argv
                sys.stdin = save_stdin
                sys.stdout = save_stdout
                sys.stderr = save_stderr
                os.chdir(save_cwd)
        except SystemExit as sts:
            self.log_error("CGI script exit status %s", str(sts))
        else:
            self.log_message("CGI script exited OK")
def nobody_uid():
    """Internal routine to get nobody's uid (cached in the module global)."""
    global nobody
    if nobody:
        return nobody
    try:
        import pwd
    except ImportError:
        # non-Unix platform: no pwd database available
        return -1
    try:
        nobody = pwd.getpwnam('nobody')[2]
    except KeyError:
        # no 'nobody' account: pick an uid above every existing one
        nobody = 1 + max(map(lambda x: x[2], pwd.getpwall()))
    return nobody
def find_loader(name, path=None):
    """Find the loader for the specified module.

    First, sys.modules is checked to see if the module was already imported. If
    so, then sys.modules[name].__loader__ is returned. If that happens to be
    set to None, then ValueError is raised. If the module is not in
    sys.modules, then sys.meta_path is searched for a suitable loader with the
    value of 'path' given to the finders. None is returned if no loader could
    be found.

    Dotted names do not have their parent packages implicitly imported. You will
    most likely need to explicitly import all parent packages in the proper
    order for a submodule to get the correct loader.
    """
    try:
        loader = sys.modules[name].__loader__
        if loader is None:
            raise ValueError('{}.__loader__ is None'.format(name))
        else:
            return loader
    except KeyError:
        # not imported yet: fall through to the meta-path search
        pass
    return _bootstrap._find_module(name, path)
def read_history_file(self, filename=None):
    '''Load a readline history file.

    Defaults to ``self.history_filename``; on any read error the history
    is reset to empty.
    '''
    if filename is None:
        filename = self.history_filename
    try:
        for line in open(filename, 'r'):
            self.add_history(lineobj.ReadLineTextBuffer(ensure_unicode(line.rstrip())))
    except IOError:
        # missing/unreadable file: start with an empty history
        self.history = []
        self.history_cursor = 0
def _search(self, direction, partial):
    """Incremental history search; direction is -1 (back) or +1 (forward)."""
    try:
        if (self.lastcommand != self.history_search_forward and
                self.lastcommand != self.history_search_backward):
            # new search: take the query from the text left of the cursor
            self.query = ''.join(partial[0:partial.point].get_line_text())
        hcstart = max(self.history_cursor, 0)
        hc = self.history_cursor + direction
        while (direction < 0 and hc >= 0) or (direction > 0 and hc < len(self.history)):
            h = self.history[hc]
            if not self.query:
                self.history_cursor = hc
                result = lineobj.ReadLineTextBuffer(h, point=len(h.get_line_text()))
                return result
            elif (h.get_line_text().startswith(self.query) and (h != partial.get_line_text())):
                self.history_cursor = hc
                result = lineobj.ReadLineTextBuffer(h, point=partial.point)
                return result
            hc += direction
        else:
            # loop ran off either end of the history without a match
            if len(self.history) == 0:
                pass
            elif hc >= len(self.history) and not self.query:
                self.history_cursor = len(self.history)
                return lineobj.ReadLineTextBuffer("", point=0)
            elif self.history[max(min(hcstart, len(self.history) - 1), 0)].get_line_text().startswith(self.query) and self.query:
                return lineobj.ReadLineTextBuffer(self.history[max(min(hcstart, len(self.history) - 1), 0)], point=partial.point)
            else:
                return lineobj.ReadLineTextBuffer(partial, point=partial.point)
        return lineobj.ReadLineTextBuffer(self.query, point=min(len(self.query), partial.point))
    except IndexError:
        raise
@permission_required("core.manage_shop")
def manage_customer_taxes(request):
    """Dispatches to the first customer_tax or to the add customer_tax form.
    """
    try:
        customer_tax = CustomerTax.objects.all()[0]
        url = reverse("lfs_manage_customer_tax", kwargs={"id": customer_tax.id})
    except IndexError:
        # no customer taxes exist yet
        url = reverse("lfs_manage_no_customer_taxes")
    return HttpResponseRedirect(url)
def get_entitlement(self, user, virtualorg):
    """Return the entitlements of ``user`` for ``virtualorg``.

    Returns an empty list when the user or virtual organisation is unknown.
    """
    try:
        return self._store[user]["entitlement"][virtualorg]
    except KeyError:
        return []
def add_metadata(self, environ, identity):
    """Attach entitlement metadata for the authenticated user to ``identity``.

    When the environ carries a virtual organisation ("myapp.vo"), only that
    VO's entitlements are added; otherwise all VOs are flattened into a list.
    """
    #logger = environ.get('repoze.who.logger','')
    try:
        user = self._store[identity.get('repoze.who.userid')]
    except KeyError:
        # unknown user: nothing to add
        return
    try:
        vorg = environ["myapp.vo"]
        try:
            ents = user["entitlement"][vorg]
            identity["user"] = {
                "entitlement": ["%s:%s" % (vorg, e) for e in ents]}
        except KeyError:
            pass
    except KeyError:
        # no VO selected in the environ: expose every entitlement
        res = []
        for vorg, ents in user["entitlement"].items():
            res.extend(["%s:%s" % (vorg, e) for e in ents])
        identity["user"] = res
def _runAsUser(self, f, *args, **kw):
    """Run callable(s) ``f`` with the avatar's uid/gid, restoring root after.

    ``f`` may be a single callable (called with *args/**kw) or an iterable
    of ``(func[, args[, kwargs]])`` tuples.  Returns the last call's result.
    """
    euid = os.geteuid()
    egid = os.getegid()
    groups = os.getgroups()
    uid, gid = self.getUserGroupId()
    os.setegid(0)
    os.seteuid(0)
    os.setgroups(self.getOtherGroups())
    os.setegid(gid)
    os.seteuid(uid)
    try:
        f = iter(f)
    except TypeError:
        # single callable: normalize to the iterable-of-tuples form
        f = [(f, args, kw)]
    try:
        for i in f:
            func = i[0]
            args = len(i) > 1 and i[1] or ()
            kw = len(i) > 2 and i[2] or {}
            r = func(*args, **kw)
    finally:
        # always restore the original effective uid/gid/groups
        os.setegid(0)
        os.seteuid(0)
        os.setgroups(groups)
        os.setegid(egid)
        os.seteuid(euid)
    return r
1,433 | def closed(self):
if self.ptyTuple and os.path.exists(self.ptyTuple[2]):
ttyGID = os.stat(self.ptyTuple[2])[5]
os.chown(self.ptyTuple[2], 0, ttyGID)
if self.pty:
try:
self.pty.signalProcess('HUP')
except (__HOLE__,ProcessExitedAlready):
pass
self.pty.loseConnection()
self.addUTMPEntry(0)
log.msg('shell closed') | OSError | dataset/ETHPy150Open nlloyd/SubliminalCollaborator/libs/twisted/conch/unix.py/SSHSessionForUnixConchUser.closed |
1,434 | def next(self):
try:
f = self.files.pop(0)
except __HOLE__:
raise StopIteration
else:
s = self.server.avatar._runAsUser(os.lstat, os.path.join(self.dir, f))
longname = lsLine(f, s)
attrs = self.server._getAttrs(s)
return (f, longname, attrs) | IndexError | dataset/ETHPy150Open nlloyd/SubliminalCollaborator/libs/twisted/conch/unix.py/UnixSFTPDirectory.next |
1,435 | def main(name, desc, config={}):
program['name'] = name
program['desc'] = desc
if len(sys.argv) < 2:
command, args = '', []
else:
command, args = sys.argv[1], sys.argv[2:]
options = config
for i, arg in enumerate(args):
if arg.startswith('--'):
opt = arg.lstrip('--')
try:
key, val = opt.split('=')
except __HOLE__:
key = opt.split('=')
val = True
options[key] = val
del args[i]
if command in handlers:
handler = handlers[command]
else:
handler = help
try:
handler(*args, **options)
except Exception as e:
print "error: %s" % str(e) | ValueError | dataset/ETHPy150Open adeel/timed/timed/cmdapp.py/main |
1,436 | def condition(etag_func=None, last_modified_func=None):
"""
Decorator to support conditional retrieval (or change) for a view
function.
The parameters are callables to compute the ETag and last modified time for
the requested resource, respectively. The callables are passed the same
parameters as the view itself. The Etag function should return a string (or
None if the resource doesn't exist), whilst the last_modified function
should return a datetime object (or None if the resource doesn't exist).
If both parameters are provided, all the preconditions must be met before
the view is processed.
This decorator will either pass control to the wrapped view function or
return an HTTP 304 response (unmodified) or 412 response (preconditions
failed), depending upon the request method.
Any behavior marked as "undefined" in the HTTP spec (e.g. If-none-match
plus If-modified-since headers) will result in the view function being
called.
"""
def decorator(func):
def inner(request, *args, **kwargs):
# Get HTTP request headers
if_modified_since = request.META.get("HTTP_IF_MODIFIED_SINCE")
if_none_match = request.META.get("HTTP_IF_NONE_MATCH")
if_match = request.META.get("HTTP_IF_MATCH")
if if_none_match or if_match:
# There can be more than one ETag in the request, so we
# consider the list of values.
try:
etags = parse_etags(if_none_match or if_match)
except __HOLE__:
# In case of invalid etag ignore all ETag headers.
# Apparently Opera sends invalidly quoted headers at times
# (we should be returning a 400 response, but that's a
# little extreme) -- this is Django bug #10681.
if_none_match = None
if_match = None
# Compute values (if any) for the requested resource.
if etag_func:
res_etag = etag_func(request, *args, **kwargs)
else:
res_etag = None
if last_modified_func:
dt = last_modified_func(request, *args, **kwargs)
if dt:
res_last_modified = formatdate(timegm(dt.utctimetuple()))[:26] + 'GMT'
else:
res_last_modified = None
else:
res_last_modified = None
response = None
if not ((if_match and (if_modified_since or if_none_match)) or
(if_match and if_none_match)):
# We only get here if no undefined combinations of headers are
# specified.
if ((if_none_match and (res_etag in etags or
"*" in etags and res_etag)) and
(not if_modified_since or
res_last_modified == if_modified_since)):
if request.method in ("GET", "HEAD"):
response = HttpResponseNotModified()
else:
response = HttpResponse(status=412)
elif if_match and ((not res_etag and "*" in etags) or
(res_etag and res_etag not in etags)):
response = HttpResponse(status=412)
elif (not if_none_match and if_modified_since and
request.method == "GET" and
res_last_modified == if_modified_since):
response = HttpResponseNotModified()
if response is None:
response = func(request, *args, **kwargs)
# Set relevant headers on the response if they don't already exist.
if res_last_modified and not response.has_header('Last-Modified'):
response['Last-Modified'] = res_last_modified
if res_etag and not response.has_header('ETag'):
response['ETag'] = quote_etag(res_etag)
return response
return inner
return decorator
# Shortcut decorators for common cases based on ETag or Last-Modified only | ValueError | dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-1.2/django/views/decorators/http.py/condition |
1,437 | def getHTTPNtlmAuthHandler(*args, **kwargs):
try:
from . import HTTPNtlmAuthHandler
return HTTPNtlmAuthHandler
except __HOLE__:
return None | ImportError | dataset/ETHPy150Open Arelle/Arelle/arelle/plugin/internet/proxyNTLM/__init__.py/getHTTPNtlmAuthHandler |
1,438 | def textplot(expr, a, b, W=55, H=18):
"""
Print a crude ASCII art plot of the SymPy expression 'expr' (which
should contain a single symbol, e.g. x or something else) over the
interval [a, b].
Examples
========
textplot(sin(t)*t, 0, 15)
"""
free = expr.free_symbols
if len(free) > 1:
raise ValueError("length can not be greater than 1")
x = free.pop() if free else Dummy()
f = lambdify([x], expr)
a = float(a)
b = float(b)
# Calculate function values
y = [0] * W
for x in range(W):
try:
y[x] = f(a + (b - a)/float(W)*x)
except (__HOLE__, ValueError):
y[x] = 0
# Normalize height to screen space
ma = max(y)
mi = min(y)
if ma == mi:
if ma:
mi, ma = sorted([0, 2*ma])
else:
mi, ma = -1, 1
for x in range(W):
y[x] = int(float(H)*(y[x] - mi)/(ma - mi))
margin = 7
print
for h in range(H - 1, -1, -1):
s = [' '] * W
for x in range(W):
if y[x] == h:
if (x == 0 or y[x - 1] == h - 1) and (x == W - 1 or y[x + 1] == h + 1):
s[x] = '/'
elif (x == 0 or y[x - 1] == h + 1) and (x == W - 1 or y[x + 1] == h - 1):
s[x] = '\\'
else:
s[x] = '.'
# Print y values
if h == H - 1:
prefix = ("%g" % ma).rjust(margin)[:margin]
elif h == H//2:
prefix = ("%g" % ((mi + ma)/2)).rjust(margin)[:margin]
elif h == 0:
prefix = ("%g" % mi).rjust(margin)[:margin]
else:
prefix = " "*margin
s = "".join(s)
if h == H//2:
s = s.replace(" ", "-")
print(prefix + " | " + s)
# Print x values
bottom = " " * (margin + 3)
bottom += ("%g" % a).ljust(W//2 - 4)
bottom += ("%g" % ((a + b)/2)).ljust(W//2)
bottom += "%g" % b
print(bottom) | TypeError | dataset/ETHPy150Open sympy/sympy/sympy/plotting/textplot.py/textplot |
1,439 | def _show_warning(message, category, filename, lineno, file=None, line=None):
"""Hook to write a warning to a file; replace if you like."""
if file is None:
file = sys.stderr
try:
file.write(formatwarning(message, category, filename, lineno, line))
except __HOLE__:
pass # the file (probably stderr) is invalid - this warning gets lost.
# Keep a working version around in case the deprecation of the old API is
# triggered. | IOError | dataset/ETHPy150Open azoft-dev-team/imagrium/env/Lib/warnings.py/_show_warning |
1,440 | def _setoption(arg):
import re
parts = arg.split(':')
if len(parts) > 5:
raise _OptionError("too many fields (max 5): %r" % (arg,))
while len(parts) < 5:
parts.append('')
action, message, category, module, lineno = [s.strip()
for s in parts]
action = _getaction(action)
message = re.escape(message)
category = _getcategory(category)
module = re.escape(module)
if module:
module = module + '$'
if lineno:
try:
lineno = int(lineno)
if lineno < 0:
raise ValueError
except (__HOLE__, OverflowError):
raise _OptionError("invalid lineno %r" % (lineno,))
else:
lineno = 0
filterwarnings(action, message, category, module, lineno)
# Helper for _setoption() | ValueError | dataset/ETHPy150Open azoft-dev-team/imagrium/env/Lib/warnings.py/_setoption |
1,441 | def _getcategory(category):
import re
if not category:
return Warning
if re.match("^[a-zA-Z0-9_]+$", category):
try:
cat = eval(category)
except NameError:
raise _OptionError("unknown warning category: %r" % (category,))
else:
i = category.rfind(".")
module = category[:i]
klass = category[i+1:]
try:
m = __import__(module, None, None, [klass])
except __HOLE__:
raise _OptionError("invalid module name: %r" % (module,))
try:
cat = getattr(m, klass)
except AttributeError:
raise _OptionError("unknown warning category: %r" % (category,))
if not issubclass(cat, Warning):
raise _OptionError("invalid warning category: %r" % (category,))
return cat | ImportError | dataset/ETHPy150Open azoft-dev-team/imagrium/env/Lib/warnings.py/_getcategory |
1,442 | def __getitem__(self, key):
try:
return getattr(sys, key)
except __HOLE__:
raise KeyError(key) | AttributeError | dataset/ETHPy150Open azoft-dev-team/imagrium/env/Lib/warnings.py/SysGlobals.__getitem__ |
1,443 | def warn(message, category=None, stacklevel=1):
"""Issue a warning, or maybe ignore it or raise an exception."""
# Check if message is already a Warning object
if isinstance(message, Warning):
category = message.__class__
# Check category argument
if category is None:
category = UserWarning
assert issubclass(category, Warning)
# Get context information
try:
caller = sys._getframe(stacklevel)
except ValueError:
globals = SysGlobals()
lineno = 1
else:
globals = caller.f_globals
lineno = caller.f_lineno
if '__name__' in globals:
module = globals['__name__']
else:
module = "<string>"
filename = globals.get('__file__')
if filename:
fnl = filename.lower()
if fnl.endswith((".pyc", ".pyo")):
filename = filename[:-1]
elif fnl.endswith("$py.class"):
filename = filename[:-9] + '.py'
else:
if module == "__main__":
try:
filename = sys.argv[0]
except (__HOLE__, TypeError):
# embedded interpreters don't have sys.argv, see bug #839151
filename = '__main__'
if not filename:
filename = module
registry = globals.setdefault("__warningregistry__", {})
warn_explicit(message, category, filename, lineno, module, registry,
globals) | AttributeError | dataset/ETHPy150Open azoft-dev-team/imagrium/env/Lib/warnings.py/warn |
1,444 | def __test__():
from testing import __ok__, __report__
print 'Testing basic math...'
__ok__(equal(1.0, 1.0), True)
__ok__(equal(1.0, 1.01), False)
__ok__(equal(1.0, 1.0001), False)
__ok__(equal(1.0, 0.9999), False)
__ok__(equal(1.0, 1.0000001), False)
__ok__(equal(1.0, 0.9999999), False)
__ok__(equal(1.0, 1.0000000001), True)
__ok__(equal(1.0, 0.9999999999), True)
__ok__(equal(degrees(0), 0.0))
__ok__(equal(degrees(math.pi/2), 90.0))
__ok__(equal(degrees(math.pi), 180.0))
__ok__(equal(radians(0.0), 0.0))
__ok__(equal(radians(90.0), math.pi/2))
__ok__(equal(radians(180.0), math.pi))
print 'Testing V vector class...'
# structural construction
__ok__(V.O is not None, True)
__ok__(V.O._v is not None, True)
__ok__(V.O._v, (0., 0., 0.)) ; __ok__(V.O._l, 0.)
__ok__(V.X._v, (1., 0., 0.)) ; __ok__(V.X._l, 1.)
__ok__(V.Y._v, (0., 1., 0.)) ; __ok__(V.Y._l, 1.)
__ok__(V.Z._v, (0., 0., 1.)) ; __ok__(V.Z._l, 1.)
a = V(3., 2., 1.) ; __ok__(a._v, [3., 2., 1.])
a = V((1., 2., 3.)) ; __ok__(a._v, [1., 2., 3.])
a = V([1., 1., 1.]) ; __ok__(a._v, [1., 1., 1.])
a = V(0.) ; __ok__(a._v, [0.]) ; __ok__(a._l, 0.)
a = V(3.) ; __ok__(a._v, [3.]) ; __ok__(a._l, 3.)
# constants and direct comparisons
__ok__(V.O, V(0.,0.,0.))
__ok__(V.X, V(1.,0.,0.))
__ok__(V.Y, V(0.,1.,0.))
__ok__(V.Z, V(0.,0.,1.))
# formatting and elements
__ok__(repr(V.X), 'V(1.0, 0.0, 0.0)')
__ok__(V.X[0], 1.)
__ok__(V.X[1], 0.)
__ok__(V.X[2], 0.)
# simple addition
__ok__(V.X + V.Y, V(1.,1.,0.))
__ok__(V.Y + V.Z, V(0.,1.,1.))
__ok__(V.X + V.Z, V(1.,0.,1.))
# didn't overwrite our constants, did we?
__ok__(V.X, V(1.,0.,0.))
__ok__(V.Y, V(0.,1.,0.))
__ok__(V.Z, V(0.,0.,1.))
a = V(3.,2.,1.)
b = a.normalize()
__ok__(a != b)
__ok__(a == V(3.,2.,1.))
__ok__(b.magnitude(), 1)
b = a.magnitude(5)
__ok__(a == V(3.,2.,1.))
__ok__(b.magnitude(), 5)
__ok__(equal(b.dsquared(V.O), 25))
a = V(3.,2.,1.).normalize()
__ok__(equal(a[0], 0.80178372573727319))
b = V(1.,3.,2.).normalize()
__ok__(equal(b[2], 0.53452248382484879))
d = a.dot(b)
__ok__(equal(d, 0.785714285714), True)
__ok__(V(2., 2., 1.) * 3, V(6, 6, 3))
__ok__(3 * V(2., 2., 1.), V(6, 6, 3))
__ok__(V(2., 2., 1.) / 2, V(1, 1, 0.5))
v = V(1,2,3)
w = V(4,5,6)
__ok__(v.cross(w), V(-3,6,-3))
__ok__(v.cross(w), v*w)
__ok__(v*w, -(w*v))
__ok__(v.dot(w), 32)
__ok__(v.dot(w), w.dot(v))
__ok__(zero(angle(V(1,1,1), V(2,2,2))), True)
__ok__(equal(90.0, degrees(angle(V(1,0,0), V(0,1,0)))), True)
__ok__(equal(180.0, degrees(angle(V(1,0,0), V(-1,0,0)))), True)
__ok__(equal( 0.0, degrees(track(V( 1, 0)))), True)
__ok__(equal( 90.0, degrees(track(V( 0, 1)))), True)
__ok__(equal(180.0, degrees(track(V(-1, 0)))), True)
__ok__(equal(270.0, degrees(track(V( 0,-1)))), True)
__ok__(equal( 45.0, degrees(track(V( 1, 1)))), True)
__ok__(equal(135.0, degrees(track(V(-1, 1)))), True)
__ok__(equal(225.0, degrees(track(V(-1,-1)))), True)
__ok__(equal(315.0, degrees(track(V( 1,-1)))), True)
print 'Testing C complex number class...'
__ok__(C(1,2) is not None, True)
__ok__(C(1,2)[0], 1.0)
__ok__(C(1+2j)[0], 1.0)
__ok__(C((1,2))[1], 2.0)
__ok__(C(V([1,2]))[1], 2.0)
__ok__(C(3+2j) * C(1+4j), C(-5+14j))
try:
__ok__(C(1,2,3) is not None, True)
except __HOLE__: # takes exactly 2 elements
__ok__(True, True)
try:
__ok__(C([1,2,3]) is not None, True)
except TypeError: # takes exactly 2 elements
__ok__(True, True)
except TypeError: # takes exactly 2 elements
__ok__(True, True)
print 'Testing Q quaternion class...'
__ok__(Q(1,2,3,4) is not None, True)
__ok__(Q(1,2,3,4)[1], 2.0)
__ok__(Q((1,2,3,4))[2], 3.0)
__ok__(Q(V(1,2,3,4))[3], 4.0)
__ok__(Q(), Q(0,0,0,1))
__ok__(Q(1,2,3,4).conjugate(), Q(-1,-2,-3,4))
print 'Testing M matrix class...'
m = M()
__ok__(V(1,0,0,0,0,1,0,0,0,0,1,0,0,0,0,1), m)
__ok__(m.row(0), V(1,0,0,0))
__ok__(m.row(2), V(0,0,1,0))
__ok__(m.col(1), V(0,1,0,0))
__ok__(m.col(3), V(0,0,0,1))
__ok__(m[5], 1.0)
__ok__(m[1,1], 1.0)
__ok__(m[6], 0.0)
__ok__(m[1,2], 0.0)
__ok__(m * V(1,2,3,4), V(1,2,3,4))
__ok__(V(1,2,3,4) * m, V(1,2,3,4))
mm = m * m
__ok__(mm.__class__, M)
__ok__(mm, M.I)
mm = m * 2
__ok__(mm.__class__, M)
__ok__(mm, 2.0 * m)
__ok__(mm[3,3], 2)
__ok__(mm[3,2], 0)
__ok__(M.rotate('X',radians(90)),
M.twist(Q.rotate('X',radians(90))))
__ok__(M.twist(Q(0,0,0,1)), M.I)
__ok__(M.twist(Q(.5,0,0,1)),
M.twist(Q(.5,0,0,1).normalize()))
__ok__(V.O * M.translate(V(1,2,3)),
V(1,2,3,1))
__ok__((V.X+V.Y+V.Z) * M.translate(V(1,2,3)),
V(2,3,4,1))
# need some tests on m.determinant()
m = M()
m = m.translate(V(1,2,3))
__ok__(m.inverse(), M().translate(-V(1,2,3)))
m = m.rotate('Y', radians(30))
__ok__(m * m.inverse(), M.I)
__report__() | TypeError | dataset/ETHPy150Open hhatto/autopep8/test/vectors_example.py/__test__ |
1,445 | def test_event(self):
self.asm.run()
self.assertEqual(self.asm.comp1.exec_count, 3)
self.assertEqual(self.asm.comp1.doit_count, 0)
self.assertEqual(self.asm.comp1.doit2_count, 0)
self.asm.comp1.exec_count = 0
self.asm.driver.add_event('comp1.doit')
self.asm.run()
self.assertEqual(self.asm.comp1.exec_count, 3)
self.assertEqual(self.asm.comp1.doit_count, 3)
self.assertEqual(self.asm.comp1.doit2_count, 0)
self.asm.driver.set_events()
self.assertEqual(self.asm.comp1.doit_count, 4)
self.assertEqual(self.asm.comp1.doit2_count, 0)
self.asm.driver.add_event('comp1.doit2')
self.asm.driver.set_events()
self.assertEqual(self.asm.comp1.doit_count, 5)
self.assertEqual(self.asm.comp1.doit2_count, 1)
try:
self.asm.driver.add_event('comp1.bogus')
except AttributeError as err:
self.assertEqual(str(err), "driver: Can't add event 'comp1.bogus' because it doesn't exist")
else:
self.fail('Exception expected')
try:
self.asm.driver.add_event('comp1.some_int')
except TypeError as err:
self.assertEqual(str(err), "driver: 'comp1.some_int' is not an event")
else:
self.fail('TypeError expected')
try:
self.asm.driver.remove_event('comp1.bogus')
except __HOLE__ as err:
self.assertEqual(str(err), "driver: Trying to remove event 'comp1.bogus' that is not in the driver.")
else:
self.fail('AttributeError expected')
events = self.asm.driver.get_events()
self.assertEqual(events, ['comp1.doit', 'comp1.doit2'])
self.asm.driver.remove_event('comp1.doit')
events = self.asm.driver.get_events()
self.assertEqual(events, ['comp1.doit2'])
self.asm.driver.set_events()
self.assertEqual(self.asm.comp1.doit_count, 5)
self.assertEqual(self.asm.comp1.doit2_count, 2)
self.asm.driver.clear_events()
events = self.asm.driver.get_events()
self.assertEqual(events, []) | AttributeError | dataset/ETHPy150Open OpenMDAO/OpenMDAO-Framework/openmdao.main/src/openmdao/main/test/test_hasevents.py/HasEventsTestCase.test_event |
1,446 | def to_python(self, value):
if value is None:
return value
try:
return int(value)
except (TypeError, __HOLE__):
raise exceptions.ValidationError(
_("This value must be an integer.")) | ValueError | dataset/ETHPy150Open Suor/handy/handy/models/fields.py/AdditionalAutoField.to_python |
1,447 | def to_python(self, value):
value = super(TypedMultipleField, self).to_python(value)
if value not in validators.EMPTY_VALUES:
try:
value = [self.coerce(v) for v in value]
except (__HOLE__, TypeError):
raise exceptions.ValidationError(self.error_messages['invalid'])
return value | ValueError | dataset/ETHPy150Open Suor/handy/handy/models/fields.py/TypedMultipleField.to_python |
1,448 | def to_python(self, value):
"""Convert our string value to JSON after we load it from the DB"""
if value == "":
return None
try:
if isinstance(value, basestring):
return json.loads(value, object_hook=decode_object)
except __HOLE__:
pass
return value | ValueError | dataset/ETHPy150Open Suor/handy/handy/models/fields.py/JSONField.to_python |
1,449 | def to_python(self, value):
"""Convert our string value to JSON after we load it from the DB"""
if value == "":
return None
try:
if isinstance(value, basestring):
return pickle.loads(str(value))
except __HOLE__:
pass
return value | ValueError | dataset/ETHPy150Open Suor/handy/handy/models/fields.py/PickleField.to_python |
1,450 | def to_python(self, value):
if value is None:
return value
try:
return long(value)
except (TypeError, __HOLE__):
raise exceptions.ValidationError(
_("This value must be a long integer.")) | ValueError | dataset/ETHPy150Open Suor/handy/handy/models/fields.py/BigIntegerField.to_python |
1,451 | def read_file(self):
try:
f = open(self._fileloc, 'rb')
except __HOLE__:
print "Couldn't get server list at {0}".format(fileloc)
return []
srv = json.load(f)
f.close()
if len(srv) > 0:
srv = [s for s in srv if utils.test_connection(s['port'])]
return srv | IOError | dataset/ETHPy150Open medek/elixir_complete.vim/python/ecclient/server_list.py/ServerList.read_file |
1,452 | def write_file(self):
try:
f = tempfile.NamedTemporaryFile(dir=os.path.dirname(self._fileloc),
delete=False)
json.dump(self._servers, f)
f.close()
utils.atomic_rename(f.name, self._fileloc)
except __HOLE__:
print "Couldn't open {0} for writing".format(fileloc)
return False
return True | IOError | dataset/ETHPy150Open medek/elixir_complete.vim/python/ecclient/server_list.py/ServerList.write_file |
1,453 | def fuse_selections(dsk, head1, head2, merge):
"""Fuse selections with lower operation.
Handles graphs of the form:
``{key1: (head1, key2, ...), key2: (head2, ...)}``
Parameters
----------
dsk : dict
dask graph
head1 : function
The first element of task1
head2 : function
The first element of task2
merge : function
Takes ``task1`` and ``task2`` and returns a merged task to
replace ``task1``.
>>> def load(store, partition, columns):
... pass
>>> dsk = {'x': (load, 'store', 'part', ['a', 'b']),
... 'y': (getitem, 'x', 'a')}
>>> merge = lambda t1, t2: (load, t2[1], t2[2], t1[2])
>>> dsk2 = fuse_selections(dsk, getitem, load, merge)
>>> cull(dsk2, 'y')
{'y': (<function load at ...>, 'store', 'part', 'a')}
"""
dsk2 = dict()
for k, v in dsk.items():
try:
if (istask(v) and v[0] == head1 and v[1] in dsk and
istask(dsk[v[1]]) and dsk[v[1]][0] == head2):
dsk2[k] = merge(v, dsk[v[1]])
else:
dsk2[k] = v
except __HOLE__:
dsk2[k] = v
return dsk2 | TypeError | dataset/ETHPy150Open dask/dask/dask/optimize.py/fuse_selections |
1,454 | def main():
""" Repository maintenance. """
usage = """\
%prog OP [options] repository, where OP may be:
check -- check for lock
lock -- lock repository
unlock -- unlock repository
set -- set this as current repository
(sets OPENMDAO_REPO environment variable and starts a new shell)
fix -- fix permissions and remove generated directories
rmpyc -- remove 'orphan' .pyc files"""
parser = optparse.OptionParser(usage)
parser.add_option('-f', '--force', action='store_true',
default=False, help='forced unlock')
parser.add_option('-v', '--verbose', action='store_true',
default=False, help='print info messages')
try:
operation = sys.argv.pop(1)
except __HOLE__:
parser.print_help()
sys.exit(1)
options, arguments = parser.parse_args()
repository = ''
if len(arguments) > 0:
if len(arguments) == 1:
repository = arguments[0]
else:
parser.print_help()
sys.exit(1)
if not repository:
try:
repository = os.environ['OPENMDAO_REPO']
except KeyError:
pass
this_user = get_username()
path = find_repository(repository, this_user)
if not path:
print 'Cannot find repository!'
sys.exit(2)
if options.verbose:
print 'Repository root:', path
if operation == 'check':
do_check(path)
elif operation == 'lock':
do_lock(path)
elif operation == 'unlock':
do_unlock(path, options)
elif operation == 'set':
do_set(path, this_user)
elif operation == 'fix':
do_fix(path, options)
do_rmpyc(path)
elif operation == 'rmpyc':
do_rmpyc(path)
else:
parser.print_help()
sys.exit(1) | IndexError | dataset/ETHPy150Open OpenMDAO/OpenMDAO-Framework/scripts/repo.py/main |
1,455 | def do_fix(repo_path, options):
""" Check/fix permissions and remove generated directories. """
directories = (
'buildout/bin',
'buildout/develop-eggs',
'buildout/eggs',
'buildout/html',
'buildout/parts',
'docs/_build',
)
files = (
'examples/openmdao.examples.bar3simulation/openmdao/examples/bar3simulation/bar3-f2pywrappers.f',
'examples/openmdao.examples.bar3simulation/openmdao/examples/bar3simulation/bar3module.c'
)
for relpath in directories:
if sys.platform == 'win32':
relpath.replace('/', '\\')
directory = os.path.join(repo_path, relpath)
if os.path.exists(directory):
shutil.rmtree(directory)
for relpath in files:
if sys.platform == 'win32':
relpath.replace('/', '\\')
filename = os.path.join(repo_path, relpath)
if os.path.exists(filename):
os.remove(filename)
for dirpath, dirnames, filenames in os.walk(repo_path):
if options.verbose:
print dirpath[len(repo_path):]
names = dirnames
names.extend(filenames)
for name in names:
path = os.path.join(dirpath, name)
info = os.stat(path)
mode = info.st_mode
fixup = mode
if (mode & stat.S_IRUSR) and not (mode & stat.S_IRGRP):
fixup |= stat.S_IRGRP
if (mode & stat.S_IWUSR) and not (mode & stat.S_IWGRP):
fixup |= stat.S_IWGRP
if (mode & stat.S_IXUSR) and not (mode & stat.S_IXGRP):
fixup |= stat.S_IXGRP
if options.verbose:
if fixup != mode:
print ' fixing %s %s' % (permission_bits(mode), name)
else:
print ' %s %s' % (permission_bits(mode), name)
elif fixup != mode:
print 'fixing %s %s' % (permission_bits(mode), path)
if fixup != mode:
try:
os.chmod(path, fixup)
except __HOLE__, exc:
print ' %s' % exc
print ' (owner %s)' % get_username(info.st_uid) | OSError | dataset/ETHPy150Open OpenMDAO/OpenMDAO-Framework/scripts/repo.py/do_fix |
1,456 | def check_lockfile(path):
""" Return (user, modification time) of lockfile, or (None, None). """
path = os.path.join(path, LOCKFILE)
if os.path.exists(path):
try:
info = os.stat(path)
except __HOLE__, exc:
print 'Cannot access lockfile:', exc
sys.exit(1)
else:
user = get_username(info.st_uid)
mtime = time.asctime(time.localtime(info.st_mtime))
return (user, mtime)
else:
return (None, None) | OSError | dataset/ETHPy150Open OpenMDAO/OpenMDAO-Framework/scripts/repo.py/check_lockfile |
1,457 | def create_lockfile(path):
""" Create lockfile. """
path = os.path.join(path, LOCKFILE)
try:
os.open(path, os.O_CREAT|os.O_EXCL|os.O_WRONLY, 0660)
except __HOLE__, exc:
print 'Cannot create lockfile:', exc
sys.exit(1) | OSError | dataset/ETHPy150Open OpenMDAO/OpenMDAO-Framework/scripts/repo.py/create_lockfile |
1,458 | def remove_lockfile(path):
""" Remove lockfile. """
path = os.path.join(path, LOCKFILE)
try:
os.unlink(path)
except __HOLE__, exc:
print 'Cannot remove lockfile:', exc
sys.exit(1) | OSError | dataset/ETHPy150Open OpenMDAO/OpenMDAO-Framework/scripts/repo.py/remove_lockfile |
1,459 | def test_activities_threads(self):
try:
from futures import ThreadPoolExecutor
except __HOLE__:
from concurrent.futures import ThreadPoolExecutor
main = LocalWorkflow(W, executor=ThreadPoolExecutor)
main.conf_activity('m', tactivity)
main.conf_activity('r', tactivity)
result = main.run(8, r=True, _wait=True)
self.assertEquals(result, 45) | ImportError | dataset/ETHPy150Open severb/flowy/tests/test_local.py/TestLocalWorkflow.test_activities_threads |
1,460 | def test_subworkflows_threads(self):
try:
from futures import ThreadPoolExecutor
except __HOLE__:
from concurrent.futures import ThreadPoolExecutor
sub = LocalWorkflow(TWorkflow)
main = LocalWorkflow(W, executor=ThreadPoolExecutor)
main.conf_workflow('m', sub)
main.conf_workflow('r', sub)
result = main.run(8, r=True, _wait=True)
self.assertEquals(result, 45) | ImportError | dataset/ETHPy150Open severb/flowy/tests/test_local.py/TestLocalWorkflow.test_subworkflows_threads |
1,461 | def test_selfsubworkflows_threads(self):
try:
from futures import ThreadPoolExecutor
except __HOLE__:
from concurrent.futures import ThreadPoolExecutor
sub = LocalWorkflow(W, executor=ThreadPoolExecutor)
sub.conf_activity('m', tactivity)
sub.conf_activity('r', tactivity)
main = LocalWorkflow(W, executor=ThreadPoolExecutor)
main.conf_workflow('m', sub)
main.conf_activity('r', tactivity)
result = main.run(8, r=True, _wait=True)
self.assertEquals(result, 165) | ImportError | dataset/ETHPy150Open severb/flowy/tests/test_local.py/TestLocalWorkflow.test_selfsubworkflows_threads |
1,462 | def _gen_wrapper(ctx, generator):
"""Generator Wrapper that starts/stops our context
"""
try:
yielded = None
while True:
ctx.start()
value = generator.send(yielded)
ctx.stop()
try:
yielded = yield value
except Exception:
ctx.start()
value = generator.throw(*sys.exc_info())
ctx.stop()
yielded = yield value
except (tornado.gen.Return, __HOLE__):
ctx.done()
raise | StopIteration | dataset/ETHPy150Open rhettg/BlueOx/blueox/tornado_utils.py/_gen_wrapper |
1,463 | def coroutine(func):
"""Replacement for tornado.gen.coroutine that manages a blueox context
The difficulty with managing global blueox contexts in an async environment
is contexts will need to start and stop depending on what steps of a
coroutine are running. This decorator wraps the default coroutine decorator
allowing us to stop and restore the context whenever this coroutine runs.
If you don't use this wrapper, unrelated contexts may be grouped together!
"""
@functools.wraps(func)
def wrapper(*args, **kwargs):
try:
ctx = args[0].blueox_ctx
except (AttributeError, __HOLE__):
ctx = None
# Remember, not every coroutine wrapped method will return a generator,
# so we have to manage context switching in multiple places.
if ctx is not None:
ctx.start()
result = func(*args, **kwargs)
if ctx is not None:
ctx.stop()
if isinstance(result, types.GeneratorType):
return _gen_wrapper(ctx, result)
return result
real_coroutine = tornado.gen.coroutine
return real_coroutine(wrapper) | IndexError | dataset/ETHPy150Open rhettg/BlueOx/blueox/tornado_utils.py/coroutine |
1,464 | def store_conf(self, data, conf=None, mode='srv'):
"""See :func:`burpui.misc.parser.interface.BUIparser.store_conf`"""
mconf = None
if not conf:
mconf = self.conf
else:
mconf = conf
if mconf != self.conf and not mconf.startswith('/'):
mconf = os.path.join(self.root, mconf)
if not mconf:
return [[1, 'Sorry, no configuration file defined']]
dirname = os.path.dirname(mconf)
if not os.path.exists(dirname):
try:
os.makedirs(dirname)
except __HOLE__ as e:
return [[1, str(e)]]
if self.clientconfdir in dirname:
ref = '{}.bui.init.back~'.format(mconf)
bak = '{}.bak~'.format(mconf)
else:
ref = '{}.bui.init.back'.format(mconf)
bak = '{}.bak'.format(mconf)
if not os.path.isfile(ref) and os.path.isfile(mconf):
try:
shutil.copy(mconf, ref)
except Exception as e:
return [[2, str(e)]]
elif os.path.isfile(mconf):
try:
shutil.copy(mconf, bak)
except Exception as e:
return [[2, str(e)]]
errs = []
for key in data.keys():
if key in self.files:
d = data.get(key)
if not os.path.isfile(d):
typ = 'strings'
if key in getattr(self, 'multi_{}'.format(mode)):
typ = 'multis'
elif key in getattr(self, 'boolean_{}'.format(mode)):
typ = 'bools'
elif key in getattr(self, 'integer_{}'.format(mode)):
typ = 'integers'
# highlight the wrong parameters
errs.append([
2,
"Sorry, the file '{}' does not exist".format(d),
key,
typ
])
if errs:
return errs
orig = []
try:
with codecs.open(mconf, 'r', 'utf-8') as ff:
orig = [x.rstrip('\n') for x in ff.readlines()]
except:
pass
oldkeys = [self._get_line_key(x) for x in orig]
newkeys = list(set(data.viewkeys()) - set(oldkeys))
already_multi = []
already_file = []
written = []
try:
with codecs.open(mconf, 'w', 'utf-8') as f:
# f.write('# Auto-generated configuration using Burp-UI\n')
for line in orig:
if (self._line_removed(line, data.viewkeys()) and
not self._line_is_comment(line) and
not self._line_is_file_include(line)):
# The line was removed, we comment it
f.write('#{}\n'.format(line))
elif self._line_is_file_include(line):
# The line is a file inclusion, we check if the line
# was already present
ori = self._include_get_file(line)
if ori in data.getlist('includes_ori'):
idx = data.getlist('includes_ori').index(ori)
file = data.getlist('includes')[idx]
self._write_key(f, '.', file)
already_file.append(file)
else:
f.write('#{}\n'.format(line))
elif self._get_line_key(line, False) in data.viewkeys():
# The line is still present or has been un-commented,
# rewrite it with eventual changes
key = self._get_line_key(line, False)
if key not in already_multi:
self._write_key(f, key, data)
if key in getattr(self, 'multi_{}'.format(mode)):
already_multi.append(key)
written.append(key)
else:
# The line was empty or a comment...
f.write('{}\n'.format(line))
# Write the new keys
for key in newkeys:
if (key not in written and
key not in ['includes', 'includes_ori']):
self._write_key(f, key, data)
# Write the rest of file inclusions
for file in data.getlist('includes'):
if file not in already_file:
self._write_key(f, '.', file)
except Exception as e:
return [[2, str(e)]]
return [[0, 'Configuration successfully saved.']] | OSError | dataset/ETHPy150Open ziirish/burp-ui/burpui/misc/parser/burp1.py/Parser.store_conf |
1,465 | @require_GET
@require_can_edit_apps
def get_xform_source(request, domain, app_id, module_id, form_id):
app = get_app(domain, app_id)
try:
form = app.get_module(module_id).get_form(form_id)
except __HOLE__:
raise Http404()
return _get_xform_source(request, app, form) | IndexError | dataset/ETHPy150Open dimagi/commcare-hq/corehq/apps/app_manager/views/forms.py/get_xform_source |
1,466 | def rle(iterable):
"""
Run length encode a list.
"""
iterable = iter(iterable)
runlen = 1
result = []
try:
previous = iterable.next()
except __HOLE__:
return []
for element in iterable:
if element == previous:
runlen = runlen + 1
continue
else:
if isinstance(previous, (types.ListType, types.TupleType)):
previous = rle(previous)
result.append([previous, runlen])
previous = element
runlen = 1
if isinstance(previous, (types.ListType, types.TupleType)):
previous = rle(previous)
result.append([previous, runlen])
return result | StopIteration | dataset/ETHPy150Open kuri65536/python-for-android/python-modules/twisted/twisted/python/dxprofile.py/rle |
1,467 | def InitializeFromConfig(self):
usernames = config_lib.CONFIG.Get("Dataserver.client_credentials")
self.client_users = {}
for user_spec in usernames:
try:
user, pwd, perm = user_spec.split(":", 2)
self.client_users[user] = data_server.DataServerClientInformation(
username=user, password=pwd, permissions=perm)
except __HOLE__:
raise errors.DataServerError(
"User %s from Dataserver.client_credentials is not"
" a valid specification" % user_spec) | ValueError | dataset/ETHPy150Open google/grr/grr/server/data_server/auth.py/ClientCredentials.InitializeFromConfig |
1,468 | def GetPassword(self, username):
try:
return self.client_users[username].password
except __HOLE__:
return None | KeyError | dataset/ETHPy150Open google/grr/grr/server/data_server/auth.py/ClientCredentials.GetPassword |
1,469 | def GetPermissions(self, username):
try:
return self.client_users[username].permissions
except __HOLE__:
return None | KeyError | dataset/ETHPy150Open google/grr/grr/server/data_server/auth.py/ClientCredentials.GetPermissions |
1,470 | def _handle_json_or_file_arg(json_arg):
"""Attempts to read JSON argument from file or string.
:param json_arg: May be a file name containing the JSON, or
a JSON string.
:returns: A list or dictionary parsed from JSON.
:raises: InvalidAttribute if the argument cannot be parsed.
"""
if os.path.isfile(json_arg):
try:
with open(json_arg, 'r') as f:
json_arg = f.read().strip()
except Exception as e:
err = _("Cannot get JSON from file '%(file)s'. "
"Error: %(err)s") % {'err': e, 'file': json_arg}
raise exc.InvalidAttribute(err)
try:
json_arg = json.loads(json_arg)
except __HOLE__ as e:
err = (_("For JSON: '%(string)s', error: '%(err)s'") %
{'err': e, 'string': json_arg})
raise exc.InvalidAttribute(err)
return json_arg | ValueError | dataset/ETHPy150Open openstack/python-ironicclient/ironicclient/v1/node_shell.py/_handle_json_or_file_arg |
1,471 | def test_triggertype_crud(self):
saved = ReactorModelTest._create_save_triggertype()
retrieved = TriggerType.get_by_id(saved.id)
self.assertEqual(saved.name, retrieved.name,
'Same triggertype was not returned.')
# test update
self.assertEqual(retrieved.description, '')
retrieved.description = DUMMY_DESCRIPTION
saved = TriggerType.add_or_update(retrieved)
retrieved = TriggerType.get_by_id(saved.id)
self.assertEqual(retrieved.description, DUMMY_DESCRIPTION, 'Update to trigger failed.')
# cleanup
ReactorModelTest._delete([retrieved])
try:
retrieved = TriggerType.get_by_id(saved.id)
except __HOLE__:
retrieved = None
self.assertIsNone(retrieved, 'managed to retrieve after failure.') | ValueError | dataset/ETHPy150Open StackStorm/st2/st2common/tests/unit/test_db.py/ReactorModelTest.test_triggertype_crud |
1,472 | def test_trigger_crud(self):
triggertype = ReactorModelTest._create_save_triggertype()
saved = ReactorModelTest._create_save_trigger(triggertype)
retrieved = Trigger.get_by_id(saved.id)
self.assertEqual(saved.name, retrieved.name,
'Same trigger was not returned.')
# test update
self.assertEqual(retrieved.description, '')
retrieved.description = DUMMY_DESCRIPTION
saved = Trigger.add_or_update(retrieved)
retrieved = Trigger.get_by_id(saved.id)
self.assertEqual(retrieved.description, DUMMY_DESCRIPTION, 'Update to trigger failed.')
# cleanup
ReactorModelTest._delete([retrieved, triggertype])
try:
retrieved = Trigger.get_by_id(saved.id)
except __HOLE__:
retrieved = None
self.assertIsNone(retrieved, 'managed to retrieve after failure.') | ValueError | dataset/ETHPy150Open StackStorm/st2/st2common/tests/unit/test_db.py/ReactorModelTest.test_trigger_crud |
1,473 | def test_triggerinstance_crud(self):
triggertype = ReactorModelTest._create_save_triggertype()
trigger = ReactorModelTest._create_save_trigger(triggertype)
saved = ReactorModelTest._create_save_triggerinstance(trigger)
retrieved = TriggerInstance.get_by_id(saved.id)
self.assertIsNotNone(retrieved, 'No triggerinstance created.')
ReactorModelTest._delete([retrieved, trigger, triggertype])
try:
retrieved = TriggerInstance.get_by_id(saved.id)
except __HOLE__:
retrieved = None
self.assertIsNone(retrieved, 'managed to retrieve after failure.') | ValueError | dataset/ETHPy150Open StackStorm/st2/st2common/tests/unit/test_db.py/ReactorModelTest.test_triggerinstance_crud |
1,474 | def test_rule_crud(self):
triggertype = ReactorModelTest._create_save_triggertype()
trigger = ReactorModelTest._create_save_trigger(triggertype)
runnertype = ActionModelTest._create_save_runnertype()
action = ActionModelTest._create_save_action(runnertype)
saved = ReactorModelTest._create_save_rule(trigger, action)
retrieved = Rule.get_by_id(saved.id)
self.assertEqual(saved.name, retrieved.name, 'Same rule was not returned.')
# test update
self.assertEqual(retrieved.enabled, True)
retrieved.enabled = False
saved = Rule.add_or_update(retrieved)
retrieved = Rule.get_by_id(saved.id)
self.assertEqual(retrieved.enabled, False, 'Update to rule failed.')
# cleanup
ReactorModelTest._delete([retrieved, trigger, action, runnertype, triggertype])
try:
retrieved = Rule.get_by_id(saved.id)
except __HOLE__:
retrieved = None
self.assertIsNone(retrieved, 'managed to retrieve after failure.') | ValueError | dataset/ETHPy150Open StackStorm/st2/st2common/tests/unit/test_db.py/ReactorModelTest.test_rule_crud |
1,475 | def test_action_crud(self):
runnertype = self._create_save_runnertype(metadata=False)
saved = self._create_save_action(runnertype, metadata=False)
retrieved = Action.get_by_id(saved.id)
self.assertEqual(saved.name, retrieved.name,
'Same Action was not returned.')
# test update
self.assertEqual(retrieved.description, 'awesomeness')
retrieved.description = DUMMY_DESCRIPTION
saved = Action.add_or_update(retrieved)
retrieved = Action.get_by_id(saved.id)
self.assertEqual(retrieved.description, DUMMY_DESCRIPTION, 'Update to action failed.')
# cleanup
self._delete([retrieved])
try:
retrieved = Action.get_by_id(saved.id)
except __HOLE__:
retrieved = None
self.assertIsNone(retrieved, 'managed to retrieve after failure.') | ValueError | dataset/ETHPy150Open StackStorm/st2/st2common/tests/unit/test_db.py/ActionModelTest.test_action_crud |
1,476 | def test_action_with_notify_crud(self):
runnertype = self._create_save_runnertype(metadata=False)
saved = self._create_save_action(runnertype, metadata=False)
# Update action with notification settings
on_complete = NotificationSubSchema(message='Action complete.')
saved.notify = NotificationSchema(on_complete=on_complete)
saved = Action.add_or_update(saved)
# Check if notification settings were set correctly.
retrieved = Action.get_by_id(saved.id)
self.assertEqual(retrieved.notify.on_complete.message, on_complete.message)
# Now reset notify in action to empty and validate it's gone.
retrieved.notify = NotificationSchema(on_complete=None)
saved = Action.add_or_update(retrieved)
retrieved = Action.get_by_id(saved.id)
self.assertEqual(retrieved.notify.on_complete, None)
# cleanup
self._delete([retrieved])
try:
retrieved = Action.get_by_id(saved.id)
except __HOLE__:
retrieved = None
self.assertIsNone(retrieved, 'managed to retrieve after failure.') | ValueError | dataset/ETHPy150Open StackStorm/st2/st2common/tests/unit/test_db.py/ActionModelTest.test_action_with_notify_crud |
1,477 | def test_parameter_schema(self):
runnertype = self._create_save_runnertype(metadata=True)
saved = self._create_save_action(runnertype, metadata=True)
retrieved = Action.get_by_id(saved.id)
# validate generated schema
schema = util_schema.get_schema_for_action_parameters(retrieved)
self.assertDictEqual(schema, PARAM_SCHEMA)
validator = util_schema.get_validator()
validator.check_schema(schema)
# use schema to validate parameters
jsonschema.validate({"r2": "abc", "p1": "def"}, schema, validator)
jsonschema.validate({"r2": "abc", "p1": "def", "r1": {"r1a": "ghi"}}, schema, validator)
self.assertRaises(jsonschema.ValidationError, jsonschema.validate,
'{"r2": "abc", "p1": "def"}', schema, validator)
self.assertRaises(jsonschema.ValidationError, jsonschema.validate,
{"r2": "abc"}, schema, validator)
self.assertRaises(jsonschema.ValidationError, jsonschema.validate,
{"r2": "abc", "p1": "def", "r1": 123}, schema, validator)
# cleanup
self._delete([retrieved])
try:
retrieved = Action.get_by_id(saved.id)
except __HOLE__:
retrieved = None
self.assertIsNone(retrieved, 'managed to retrieve after failure.') | ValueError | dataset/ETHPy150Open StackStorm/st2/st2common/tests/unit/test_db.py/ActionModelTest.test_parameter_schema |
1,478 | def test_kvp_crud(self):
saved = KeyValuePairModelTest._create_save_kvp()
retrieved = KeyValuePair.get_by_name(saved.name)
self.assertEqual(saved.id, retrieved.id,
'Same KeyValuePair was not returned.')
# test update
self.assertEqual(retrieved.value, '0123456789ABCDEF')
retrieved.value = 'ABCDEF0123456789'
saved = KeyValuePair.add_or_update(retrieved)
retrieved = KeyValuePair.get_by_name(saved.name)
self.assertEqual(retrieved.value, 'ABCDEF0123456789',
'Update of key value failed')
# cleanup
KeyValuePairModelTest._delete([retrieved])
try:
retrieved = KeyValuePair.get_by_name(saved.name)
except __HOLE__:
retrieved = None
self.assertIsNone(retrieved, 'managed to retrieve after failure.') | ValueError | dataset/ETHPy150Open StackStorm/st2/st2common/tests/unit/test_db.py/KeyValuePairModelTest.test_kvp_crud |
1,479 | def test_linux(self):
"""
L{_getInstallFunction} chooses the epoll reactor on Linux, or poll if
epoll is unavailable.
"""
install = _getInstallFunction(linux)
try:
from twisted.internet import epollreactor
except __HOLE__:
self.assertIsPoll(install)
else:
self.assertEqual(
install.__module__, 'twisted.internet.epollreactor') | ImportError | dataset/ETHPy150Open nlloyd/SubliminalCollaborator/libs/twisted/internet/test/test_default.py/PollReactorTests.test_linux |
1,480 | def __get__(self, obj, class_):
if self.owning_class is None:
self.owning_class = class_ and class_ or type(obj)
if obj is None:
return self
if self.scalar:
target = getattr(obj, self.target_collection)
return self._scalar_get(target)
else:
try:
# If the owning instance is reborn (orm session resurrect,
# etc.), refresh the proxy cache.
creator_id, proxy = getattr(obj, self.key)
if id(obj) == creator_id:
return proxy
except __HOLE__:
pass
proxy = self._new(_lazy_collection(obj, self.target_collection))
setattr(obj, self.key, (id(obj), proxy))
return proxy | AttributeError | dataset/ETHPy150Open RoseOu/flasky/venv/lib/python2.7/site-packages/sqlalchemy/ext/associationproxy.py/AssociationProxy.__get__ |
1,481 | def __add__(self, iterable):
try:
other = list(iterable)
except __HOLE__:
return NotImplemented
return list(self) + other | TypeError | dataset/ETHPy150Open RoseOu/flasky/venv/lib/python2.7/site-packages/sqlalchemy/ext/associationproxy.py/_AssociationList.__add__ |
1,482 | def __radd__(self, iterable):
try:
other = list(iterable)
except __HOLE__:
return NotImplemented
return other + list(self) | TypeError | dataset/ETHPy150Open RoseOu/flasky/venv/lib/python2.7/site-packages/sqlalchemy/ext/associationproxy.py/_AssociationList.__radd__ |
1,483 | def get(self, key, default=None):
try:
return self[key]
except __HOLE__:
return default | KeyError | dataset/ETHPy150Open RoseOu/flasky/venv/lib/python2.7/site-packages/sqlalchemy/ext/associationproxy.py/_AssociationDict.get |
1,484 | def update(self, *a, **kw):
if len(a) > 1:
raise TypeError('update expected at most 1 arguments, got %i' %
len(a))
elif len(a) == 1:
seq_or_map = a[0]
# discern dict from sequence - took the advice from
# http://www.voidspace.org.uk/python/articles/duck_typing.shtml
# still not perfect :(
if hasattr(seq_or_map, 'keys'):
for item in seq_or_map:
self[item] = seq_or_map[item]
else:
try:
for k, v in seq_or_map:
self[k] = v
except __HOLE__:
raise ValueError(
"dictionary update sequence "
"requires 2-element tuples")
for key, value in kw:
self[key] = value | ValueError | dataset/ETHPy150Open RoseOu/flasky/venv/lib/python2.7/site-packages/sqlalchemy/ext/associationproxy.py/_AssociationDict.update |
1,485 | def get_random_id():
"""Return a random id for javascript fields."""
from random import random
from time import time
try:
from hashlib import sha1 as sha
except __HOLE__:
import sha
sha = sha.new
return sha('%s|%s' % (random(), time())).hexdigest() | ImportError | dataset/ETHPy150Open adieu/allbuttonspressed/pygments/formatters/html.py/get_random_id |
1,486 | def __init__(self, **options):
Formatter.__init__(self, **options)
self.title = self._decodeifneeded(self.title)
self.nowrap = get_bool_opt(options, 'nowrap', False)
self.noclasses = get_bool_opt(options, 'noclasses', False)
self.classprefix = options.get('classprefix', '')
self.cssclass = self._decodeifneeded(options.get('cssclass', 'highlight'))
self.cssstyles = self._decodeifneeded(options.get('cssstyles', ''))
self.prestyles = self._decodeifneeded(options.get('prestyles', ''))
self.cssfile = self._decodeifneeded(options.get('cssfile', ''))
self.noclobber_cssfile = get_bool_opt(options, 'noclobber_cssfile', False)
linenos = options.get('linenos', False)
if linenos == 'inline':
self.linenos = 2
elif linenos:
# compatibility with <= 0.7
self.linenos = 1
else:
self.linenos = 0
self.linenostart = abs(get_int_opt(options, 'linenostart', 1))
self.linenostep = abs(get_int_opt(options, 'linenostep', 1))
self.linenospecial = abs(get_int_opt(options, 'linenospecial', 0))
self.nobackground = get_bool_opt(options, 'nobackground', False)
self.lineseparator = options.get('lineseparator', '\n')
self.lineanchors = options.get('lineanchors', '')
self.anchorlinenos = options.get('anchorlinenos', False)
self.hl_lines = set()
for lineno in get_list_opt(options, 'hl_lines', []):
try:
self.hl_lines.add(int(lineno))
except __HOLE__:
pass
self._create_stylesheet() | ValueError | dataset/ETHPy150Open adieu/allbuttonspressed/pygments/formatters/html.py/HtmlFormatter.__init__ |
1,487 | def _wrap_full(self, inner, outfile):
if self.cssfile:
if os.path.isabs(self.cssfile):
# it's an absolute filename
cssfilename = self.cssfile
else:
try:
filename = outfile.name
if not filename or filename[0] == '<':
# pseudo files, e.g. name == '<fdopen>'
raise AttributeError
cssfilename = os.path.join(os.path.dirname(filename),
self.cssfile)
except AttributeError:
print >>sys.stderr, 'Note: Cannot determine output file name, ' \
'using current directory as base for the CSS file name'
cssfilename = self.cssfile
# write CSS file only if noclobber_cssfile isn't given as an option.
try:
if not os.path.exists(cssfilename) or not self.noclobber_cssfile:
cf = open(cssfilename, "w")
cf.write(CSSFILE_TEMPLATE %
{'styledefs': self.get_style_defs('body')})
cf.close()
except __HOLE__, err:
err.strerror = 'Error writing CSS file: ' + err.strerror
raise
yield 0, (DOC_HEADER_EXTERNALCSS %
dict(title = self.title,
cssfile = self.cssfile,
encoding = self.encoding))
else:
yield 0, (DOC_HEADER %
dict(title = self.title,
styledefs = self.get_style_defs('body'),
encoding = self.encoding))
for t, line in inner:
yield t, line
yield 0, DOC_FOOTER | IOError | dataset/ETHPy150Open adieu/allbuttonspressed/pygments/formatters/html.py/HtmlFormatter._wrap_full |
1,488 | @staticmethod
def _import_management():
"""
Import the 'management' module within each installed app, to register
dispatcher events.
"""
from importlib import import_module
for app_name in settings.INSTALLED_APPS:
try:
import_module('.management', app_name)
except __HOLE__ as exc:
# This is slightly hackish. We want to ignore ImportErrors
# if the "management" module itself is missing -- but we don't
# want to ignore the exception if the management module exists
# but raises an ImportError for some reason. The only way we
# can do this is to check the text of the exception. Note that
# we're a bit broad in how we check the text, because different
# Python implementations may not use the same text.
# CPython uses the text "No module named management"
# PyPy uses "No module named myproject.myapp.management"
msg = exc.args[0]
if not msg.startswith('No module named') \
or 'management' not in msg:
raise | ImportError | dataset/ETHPy150Open r4fek/django-cassandra-engine/django_cassandra_engine/management/commands/sync_cassandra.py/Command._import_management |
1,489 | def fetch_bootstrap(bootstrap_id):
copy_set = [
"{}.bootstrap.tar.xz".format(bootstrap_id),
"{}.active.json".format(bootstrap_id)]
dest_dir = "/genconf/serve/bootstrap/"
container_cache_dir = "/artifacts/"
# If all the targets already exist, no-op
dest_files = [dest_dir + filename for filename in copy_set]
if all(map(os.path.exists, dest_files)):
return
# Make sure the internal cache files exist
src_files = [container_cache_dir + filename for filename in copy_set]
for filename in src_files:
if not os.path.exists(filename):
log.error("Internal Error: %s not found. Should have been in the installer container.", filename)
raise FileNotFoundError()
def cleanup_and_exit():
for filename in dest_files:
try:
os.remove(filename)
except OSError as ex:
log.error("Internal error removing temporary file. Might have corrupted file %s: %s",
filename, ex.strerror)
sys.exit(1)
# Copy out the files, rolling back if it fails
try:
subprocess.check_output(['mkdir', '-p', '/genconf/serve/bootstrap/'])
# Copy across
for filename in copy_set:
subprocess.check_output(['cp', container_cache_dir + filename, dest_dir + filename])
except subprocess.CalledProcessError as ex:
log.error("Copy failed: %s\nOutput:\n%s", ex.cmd, ex.output)
log.error("Removing partial artifacts")
cleanup_and_exit()
except __HOLE__:
log.error("Copy out of installer interrupted. Removing partial files.")
cleanup_and_exit() | KeyboardInterrupt | dataset/ETHPy150Open dcos/dcos/ext/dcos-installer/dcos_installer/action_lib/configure.py/fetch_bootstrap |
1,490 | def get_file_descriptor(self):
"""Return the file descriptor for the given websocket"""
try:
return uwsgi.connection_fd()
except __HOLE__ as e:
self.close()
raise WebSocketError(e) | IOError | dataset/ETHPy150Open jrief/django-websocket-redis/ws4redis/uwsgi_runserver.py/uWSGIWebsocket.get_file_descriptor |
1,491 | def receive(self):
if self._closed:
raise WebSocketError("Connection is already closed")
try:
return uwsgi.websocket_recv_nb()
except __HOLE__ as e:
self.close()
raise WebSocketError(e) | IOError | dataset/ETHPy150Open jrief/django-websocket-redis/ws4redis/uwsgi_runserver.py/uWSGIWebsocket.receive |
1,492 | def flush(self):
try:
uwsgi.websocket_recv_nb()
except __HOLE__:
self.close() | IOError | dataset/ETHPy150Open jrief/django-websocket-redis/ws4redis/uwsgi_runserver.py/uWSGIWebsocket.flush |
1,493 | def send(self, message, binary=None):
try:
uwsgi.websocket_send(message)
except __HOLE__ as e:
self.close()
raise WebSocketError(e) | IOError | dataset/ETHPy150Open jrief/django-websocket-redis/ws4redis/uwsgi_runserver.py/uWSGIWebsocket.send |
1,494 | def search(self, query=None, uris=None, exact=False, **kwargs):
"""
Search the library for tracks where ``field`` contains ``values``.
``field`` can be one of ``uri``, ``track_name``, ``album``, ``artist``,
``albumartist``, ``composer``, ``performer``, ``track_no``, ``genre``,
``date``, ``comment`` or ``any``.
If ``uris`` is given, the search is limited to results from within the
URI roots. For example passing ``uris=['file:']`` will limit the search
to the local backend.
Examples::
# Returns results matching 'a' in any backend
search({'any': ['a']})
# Returns results matching artist 'xyz' in any backend
search({'artist': ['xyz']})
# Returns results matching 'a' and 'b' and artist 'xyz' in any
# backend
search({'any': ['a', 'b'], 'artist': ['xyz']})
# Returns results matching 'a' if within the given URI roots
# "file:///media/music" and "spotify:"
search({'any': ['a']}, uris=['file:///media/music', 'spotify:'])
# Returns results matching artist 'xyz' and 'abc' in any backend
search({'artist': ['xyz', 'abc']})
:param query: one or more queries to search for
:type query: dict
:param uris: zero or more URI roots to limit the search to
:type uris: list of string or :class:`None`
:param exact: if the search should use exact matching
:type exact: :class:`bool`
:rtype: list of :class:`mopidy.models.SearchResult`
.. versionadded:: 1.0
The ``exact`` keyword argument, which replaces :meth:`find_exact`.
.. deprecated:: 1.0
Previously, if the query was empty, and the backend could support
it, all available tracks were returned. This has not changed, but
it is strongly discouraged. No new code should rely on this
behavior.
.. deprecated:: 1.1
Providing the search query via ``kwargs`` is no longer supported.
"""
query = _normalize_query(query or kwargs)
uris is None or validation.check_uris(uris)
query is None or validation.check_query(query)
validation.check_boolean(exact)
if kwargs:
deprecation.warn('core.library.search:kwargs_query')
if not query:
deprecation.warn('core.library.search:empty_query')
futures = {}
for backend, backend_uris in self._get_backends_to_uris(uris).items():
futures[backend] = backend.library.search(
query=query, uris=backend_uris, exact=exact)
# Some of our tests check for LookupError to catch bad queries. This is
# silly and should be replaced with query validation before passing it
# to the backends.
reraise = (TypeError, LookupError)
results = []
for backend, future in futures.items():
try:
with _backend_error_handling(backend, reraise=reraise):
result = future.get()
if result is not None:
validation.check_instance(result, models.SearchResult)
results.append(result)
except __HOLE__:
backend_name = backend.actor_ref.actor_class.__name__
logger.warning(
'%s does not implement library.search() with "exact" '
'support. Please upgrade it.', backend_name)
return results | TypeError | dataset/ETHPy150Open mopidy/mopidy/mopidy/core/library.py/LibraryController.search |
1,495 | def get(self, key, default=None, version=None):
key = self.make_key(key, version=version)
self.validate_key(key)
fname = self._key_to_file(key)
try:
with open(fname, 'rb') as f:
exp = pickle.load(f)
now = time.time()
if exp is not None and exp < now:
self._delete(fname)
else:
return pickle.load(f)
except (IOError, __HOLE__, EOFError, pickle.PickleError):
pass
return default | OSError | dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/Django-1.6.10/django/core/cache/backends/filebased.py/FileBasedCache.get |
1,496 | def set(self, key, value, timeout=DEFAULT_TIMEOUT, version=None):
key = self.make_key(key, version=version)
self.validate_key(key)
fname = self._key_to_file(key)
dirname = os.path.dirname(fname)
if timeout == DEFAULT_TIMEOUT:
timeout = self.default_timeout
self._cull()
try:
if not os.path.exists(dirname):
os.makedirs(dirname)
with open(fname, 'wb') as f:
expiry = None if timeout is None else time.time() + timeout
pickle.dump(expiry, f, pickle.HIGHEST_PROTOCOL)
pickle.dump(value, f, pickle.HIGHEST_PROTOCOL)
except (IOError, __HOLE__):
pass | OSError | dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/Django-1.6.10/django/core/cache/backends/filebased.py/FileBasedCache.set |
1,497 | def delete(self, key, version=None):
key = self.make_key(key, version=version)
self.validate_key(key)
try:
self._delete(self._key_to_file(key))
except (__HOLE__, OSError):
pass | IOError | dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/Django-1.6.10/django/core/cache/backends/filebased.py/FileBasedCache.delete |
1,498 | def _delete(self, fname):
os.remove(fname)
try:
# Remove the 2 subdirs if they're empty
dirname = os.path.dirname(fname)
os.rmdir(dirname)
os.rmdir(os.path.dirname(dirname))
except (IOError, __HOLE__):
pass | OSError | dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/Django-1.6.10/django/core/cache/backends/filebased.py/FileBasedCache._delete |
1,499 | def has_key(self, key, version=None):
key = self.make_key(key, version=version)
self.validate_key(key)
fname = self._key_to_file(key)
try:
with open(fname, 'rb') as f:
exp = pickle.load(f)
now = time.time()
if exp < now:
self._delete(fname)
return False
else:
return True
except (__HOLE__, OSError, EOFError, pickle.PickleError):
return False | IOError | dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/Django-1.6.10/django/core/cache/backends/filebased.py/FileBasedCache.has_key |
Subsets and Splits