Unnamed: 0 (int64, 0–10k) | function (string, 79–138k chars) | label (20 classes) | info (string, 42–261 chars) |
---|---|---|---|
6,100 | def parse_blob_info(field_storage):
    """Parse a BlobInfo record from file upload field_storage.
    Args:
      field_storage: cgi.FieldStorage that represents uploaded blob.
    Returns:
      BlobInfo record as parsed from the field-storage instance.
      None if there was no field_storage.
    Raises:
      BlobInfoParseError when provided field_storage does not contain enough
      information to construct a BlobInfo object.
    """
    if field_storage is None:
        return None
    field_name = field_storage.name
    def get_value(dct, name):
        value = dct.get(name, None)
        if value is None:
            raise BlobInfoParseError(
                'Field %s has no %s.' % (field_name, name))
        return value
    filename = get_value(field_storage.disposition_options, 'filename')
    blob_key_str = get_value(field_storage.type_options, 'blob-key')
    blob_key = BlobKey(blob_key_str)
    upload_content = email.message_from_file(field_storage.file)
    content_type = get_value(upload_content, 'content-type')
    size = get_value(upload_content, 'content-length')
    creation_string = get_value(upload_content, UPLOAD_INFO_CREATION_HEADER)
    md5_hash_encoded = get_value(upload_content, 'content-md5')
    md5_hash = base64.urlsafe_b64decode(md5_hash_encoded)
    try:
        size = int(size)
    except (TypeError, __HOLE__):
        raise BlobInfoParseError(
            '%s is not a valid value for %s size.' % (size, field_name))
    try:
        creation = blobstore._parse_creation(creation_string, field_name)
    except blobstore._CreationFormatError, err:
        raise BlobInfoParseError(str(err))
    return BlobInfo(id=blob_key_str,
                    content_type=content_type,
                    creation=creation,
                    filename=filename,
                    size=size,
                    md5_hash=md5_hash,
                    ) | ValueError | dataset/ETHPy150Open AppScale/appscale/AppServer/google/appengine/ext/ndb/blobstore.py/parse_blob_info |
6,101 | def __getitem__(self, key):
    try:
        if isinstance(key, Column):
            key = key.slug
        i = self.headers.index(key)
        return [row[i] for row in self.rows]
    except __HOLE__:
        raise ValueError(key + " not in column headers") | ValueError | dataset/ETHPy150Open debrouwere/google-analytics/googleanalytics/query.py/Report.__getitem__ |
6,102 | def rev_list(repository, to_ref, from_ref=None, path=None, skip=0,
             max_count=0, author=None, query=None, first_parent=None,
             since=0, no_merges=None):
    """git rev-list command, pygit2 wrapper.
    But this returns a commit list"""
    if repository.is_empty:
        return []
    commits_index_list = []
    commits_dict = {}
    # TODO: use resolve_version
    to_commit = repository.revparse_single(to_ref)
    walk_order = GIT_SORT_TOPOLOGICAL if first_parent else GIT_SORT_TIME
    if to_commit.type == GIT_OBJ_TAG:
        to_commit = repository[to_commit.target]
    next_commit = None
    walker = repository.walk(to_commit.id, walk_order)
    if from_ref:
        try:
            from_commit = repository.revparse_single(from_ref)
            if from_commit.type == GIT_OBJ_TAG:
                from_commit = repository[from_commit.target]
            walker.hide(from_commit.id)
        except (KeyError, __HOLE__):
            from_commit = None
    if max_count:
        length = max_count + skip if skip else max_count
    else:
        length = 0
    for c in walker:
        if all([_check_author(c, author),
                _check_file_change(c, path),
                _check_message(c, query),
                _check_date(c, since),
                _check_no_merges(c, no_merges)]):
            index = c.hex
            if first_parent:
                if next_commit and next_commit.hex != c.hex:
                    continue
                if len(c.parents) == 0:
                    next_commit = None
                elif len(c.parents) >= 1:
                    next_commit = c.parents[0]
        else:
            continue
        if index not in commits_index_list:
            commits_index_list.append(index)
            commits_dict[index] = c
        if length and len(commits_index_list) >= length:
            break
    if skip:
        commits_index_list = commits_index_list[skip:]
    return [format_commit(commits_dict[i], repository)
            for i in commits_index_list] | ValueError | dataset/ETHPy150Open douban/ellen/ellen/git/rev_list.py/rev_list |
6,103 | def _check_path(tree, path):
    try:
        entry = tree[path]
    except __HOLE__:
        return None
    return entry | KeyError | dataset/ETHPy150Open douban/ellen/ellen/git/rev_list.py/_check_path |
6,104 | def __getattr__(self, item):
    try:
        return getattr(self.data, item)
    except __HOLE__:
        raise AttributeError(("'MockSeries' object "
                              "has no attribute '{attr}'".
                              format(attr=item))) | AttributeError | dataset/ETHPy150Open scipy/scipy/scipy/fftpack/tests/test_basic.py/_TestRFFTBase.MockSeries.__getattr__ |
6,105 | def test_complex(self):
    if np.dtype(np.longcomplex).itemsize == np.dtype(complex).itemsize:
        # longdouble == double; so fft is supported
        return
    x = np.random.randn(10).astype(np.longdouble) + \
        1j * np.random.randn(10).astype(np.longdouble)
    for f in [fft, ifft]:
        try:
            f(x)
            raise AssertionError("Type %r not supported but does not fail" %
                                 np.longcomplex)
        except __HOLE__:
            pass | ValueError | dataset/ETHPy150Open scipy/scipy/scipy/fftpack/tests/test_basic.py/TestLongDoubleFailure.test_complex |
6,106 | def test_real(self):
    if np.dtype(np.longdouble).itemsize == np.dtype(np.double).itemsize:
        # longdouble == double; so fft is supported
        return
    x = np.random.randn(10).astype(np.longcomplex)
    for f in [fft, ifft]:
        try:
            f(x)
            raise AssertionError("Type %r not supported but does not fail" %
                                 np.longcomplex)
        except __HOLE__:
            pass | ValueError | dataset/ETHPy150Open scipy/scipy/scipy/fftpack/tests/test_basic.py/TestLongDoubleFailure.test_real |
6,107 | def extract_header(msg_or_header):
    """Given a message or header, return the header."""
    if not msg_or_header:
        return {}
    try:
        # See if msg_or_header is the entire message.
        h = msg_or_header['header']
    except __HOLE__:
        try:
            # See if msg_or_header is just the header
            h = msg_or_header['msg_id']
        except KeyError:
            raise
        else:
            h = msg_or_header
    if not isinstance(h, dict):
        h = dict(h)
    return h | KeyError | dataset/ETHPy150Open jupyter/jupyter_client/jupyter_client/session.py/extract_header |
6,108 | def _signature_scheme_changed(self, name, old, new):
    if not new.startswith('hmac-'):
        raise TraitError("signature_scheme must start with 'hmac-', got %r" % new)
    hash_name = new.split('-', 1)[1]
    try:
        self.digest_mod = getattr(hashlib, hash_name)
    except __HOLE__:
        raise TraitError("hashlib has no such attribute: %s" % hash_name)
    self._new_auth() | AttributeError | dataset/ETHPy150Open jupyter/jupyter_client/jupyter_client/session.py/Session._signature_scheme_changed |
6,109 | def _id_from_session(self, session):
    id_from_session = self.middleware.id_from_session
    try:
        return id_from_session(session)
    except __HOLE__ as e:
        raise IdentificationError(e) | AttributeError | dataset/ETHPy150Open MontmereLimited/django-lean/django_lean/lean_analytics/kissmetrics.py/KissMetrics._id_from_session |
6,110 | def _id_from_user(self, user):
    id_from_user = self.middleware.id_from_user
    try:
        return id_from_user(user)
    except __HOLE__ as e:
        raise IdentificationError(e) | AttributeError | dataset/ETHPy150Open MontmereLimited/django-lean/django_lean/lean_analytics/kissmetrics.py/KissMetrics._id_from_user |
6,111 | @cached_property
def domain(self):
    try:
        domain = self._config.get("domain")
    except __HOLE__:
        raise exceptions.SitesNotConfigured("site has wrong configuration")
    else:
        return domain | AttributeError | dataset/ETHPy150Open niwinz/django-sites/django_sites/base.py/Site.domain |
6,112 | @cached_property
def name(self):
    try:
        name = self._config.get("name")
    except __HOLE__:
        raise exceptions.SitesNotConfigured("site has wrong configuration")
    else:
        return name | AttributeError | dataset/ETHPy150Open niwinz/django-sites/django_sites/base.py/Site.name |
6,113 | def get_site_from_settings():
    """
    Get site instance from settings
    configuration.
    """
    sites = _get_sites_config()
    try:
        current_site_id = getattr(settings, "SITE_ID")
    except __HOLE__:
        raise exceptions.SitesNotConfigured()
    if current_site_id not in sites:
        raise exceptions.SitesNotConfigured()
    return Site(sites[current_site_id]) | AttributeError | dataset/ETHPy150Open niwinz/django-sites/django_sites/base.py/get_site_from_settings |
6,114 | def get_by_id(id):
    """
    Get site instance from settings configuration.
    """
    sites = _get_sites_config()
    try:
        return Site(sites[id])
    except __HOLE__:
        raise exceptions.SiteNotFound("Site with id '{}' not found".format(id)) | KeyError | dataset/ETHPy150Open niwinz/django-sites/django_sites/base.py/get_by_id |
6,115 | @open_file(1,mode='w')
def write_yaml(G, path, encoding='UTF-8', **kwds):
    """Write graph G in YAML format to path.
    YAML is a data serialization format designed for human readability
    and interaction with scripting languages [1]_.
    Parameters
    ----------
    G : graph
        A NetworkX graph
    path : file or string
        File or filename to write.
        Filenames ending in .gz or .bz2 will be compressed.
    encoding: string, optional
        Specify which encoding to use when writing file.
    Examples
    --------
    >>> G=nx.path_graph(4)
    >>> nx.write_yaml(G,'test.yaml')
    References
    ----------
    .. [1] http://www.yaml.org
    """
    try:
        import yaml
    except __HOLE__:
        raise ImportError("write_yaml() requires PyYAML: http://pyyaml.org/")
    yaml.dump(G, path, **kwds) | ImportError | dataset/ETHPy150Open gkno/gkno_launcher/src/networkx/readwrite/nx_yaml.py/write_yaml |
6,116 | @open_file(0,mode='r')
def read_yaml(path):
    """Read graph in YAML format from path.
    YAML is a data serialization format designed for human readability
    and interaction with scripting languages [1]_.
    Parameters
    ----------
    path : file or string
        File or filename to read. Filenames ending in .gz or .bz2
        will be uncompressed.
    Returns
    -------
    G : NetworkX graph
    Examples
    --------
    >>> G=nx.path_graph(4)
    >>> nx.write_yaml(G,'test.yaml')
    >>> G=nx.read_yaml('test.yaml')
    References
    ----------
    .. [1] http://www.yaml.org
    """
    try:
        import yaml
    except __HOLE__:
        raise ImportError("read_yaml() requires PyYAML: http://pyyaml.org/")
    G=yaml.load(path)
    return G
# fixture for nose tests | ImportError | dataset/ETHPy150Open gkno/gkno_launcher/src/networkx/readwrite/nx_yaml.py/read_yaml |
6,117 | def clean(self, value):
    from .us_states import STATES_NORMALIZED
    super(USStateField, self).clean(value)
    if value in EMPTY_VALUES:
        return ''
    try:
        value = value.strip().lower()
    except __HOLE__:
        pass
    else:
        try:
            return STATES_NORMALIZED[value.strip().lower()]
        except KeyError:
            pass
    raise ValidationError(self.error_messages['invalid']) | AttributeError | dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-1.5/django/contrib/localflavor/us/forms.py/USStateField.clean |
6,118 | def get_settings(self):
    """Retrieve existing configurations, or create the config file if it does not exist."""
    try:
        with open(self.file_path, "r") as f:
            blob = json.load(f)
    except FileNotFoundError:
        self.logger.debug("No configuration file. Creating new one at {}."
                          .format(self.file_path))
        with open(self.file_path, "w") as f:
            json.dump({}, f)
        blob = {}
    except __HOLE__:
        overwrite = input("Configuration file is formatted incorrectly. Erase and create a new"
                          " one? [y/n] ")
        if overwrite.lower() == "y":
            self.logger.info("Overwrote incorrectly-formatted configuration file.")
            with open(self.file_path, "w") as f:
                json.dump({}, f)
            blob = {}
        else:
            return None
    return blob | ValueError | dataset/ETHPy150Open molly/GorillaBot/gorillabot/configure.py/Configurator.get_settings |
6,119 | def delete(self):
    """Delete a configuration."""
    settings = self.get_settings()
    name = input("Please choose an existing configuration: ")
    try:
        del settings[name]
        self.save_config(settings)
    except __HOLE__:
        print("No configuration named {}.".format(name)) | KeyError | dataset/ETHPy150Open molly/GorillaBot/gorillabot/configure.py/Configurator.delete |
6,120 | def _get_func_name(func):
    """returns a function's name"""
    try:
        return func.func_name
    except __HOLE__:
        return func.__name__ | AttributeError | dataset/ETHPy150Open manahl/mdf/mdf/nodes.py/_get_func_name |
6,121 | def __getattr__(self, attr):
    # custom node types add additional methods to the _additional_attrs_ dict
    # that are returned here.
    # They're not simply added into the class's __dict__ since it's an extension type
    # and so the type itself is immutable.
    try:
        value = self._additional_attrs_[attr]
        try:
            return value.__get__(self)
        except AttributeError:
            pass
        return value
    except __HOLE__:
        raise AttributeError(attr)
#
# Hook in arithmetic operators. (These are added to _additional_attrs_ in nodetypes.py)
# NB: Cython arithmetic operator methods behave differently; there are no __r*__ variants.
# See: http://docs.cython.org/src/userguide/special_methods.html#arithmetic-methods
# | KeyError | dataset/ETHPy150Open manahl/mdf/mdf/nodes.py/MDFNode.__getattr__ |
6,122 | @staticmethod
def _op(op_name, lhs, rhs=None):
    """
    Implements unary and binary operators.
    NB: lhs must always be an MDFNode.
    """
    try:
        binop = lhs._additional_attrs_[op_name]
    except __HOLE__:
        raise AttributeError
    return binop.__get__(lhs)(rhs) | KeyError | dataset/ETHPy150Open manahl/mdf/mdf/nodes.py/MDFNode._op |
6,123 | def _get_state(self, ctx):
    """returns the NodeState object for this node and context"""
    try:
        return self._states[ctx._id_obj]
    except __HOLE__:
        pass
    # otherwise create a new state for this context and return it
    state = self._states[ctx._id_obj] = NodeState(ctx._id_obj, self._default_dirty_flags_)
    return state | KeyError | dataset/ETHPy150Open manahl/mdf/mdf/nodes.py/MDFNode._get_state |
6,124 | def get_state(self, ctx):
    """
    Return the NodeState object for this node and context.
    This is for debug purposes only. The NodeState object returned
    is immutable and the members are not accessible outside of this
    module intentionally. It can however be printed if debug information
    about the state of the node in a context is required.
    """
    # public API version of _get_state
    # doesn't create a state if one doesn't already exist.
    try:
        return self._states[ctx._id_obj]
    except __HOLE__:
        return None | KeyError | dataset/ETHPy150Open manahl/mdf/mdf/nodes.py/MDFNode.get_state |
6,125 | def clear(self, ctx):
    """
    clears any cached state for a context
    This clears dependencies as well as values and should usually only
    be called when a context is deleted.
    """
    try:
        del self._states[ctx._id_obj]
    except __HOLE__:
        pass | KeyError | dataset/ETHPy150Open manahl/mdf/mdf/nodes.py/MDFNode.clear |
6,126 | def depends_on(self, ctx, other_node, other_ctx):
    node_state = cython.declare(NodeState)
    try:
        node_state = self._states[ctx._id_obj]
    except __HOLE__:
        # if there's no node state, it can't depend on any other node
        return False
    return self._depends_on(node_state, other_node, other_ctx._id_obj) | KeyError | dataset/ETHPy150Open manahl/mdf/mdf/nodes.py/MDFNode.depends_on |
6,127 | def _depends_on(self, node_state, other, other_ctx_id):
    """
    returns True if other appears in this node's dependency
    graph either directly or indirectly.
    Nodes are dependent on themselves for the purpose of this
    function.
    read as: self[node_state.ctx] depends on other[other_ctx]
    """
    try:
        return node_state.depends_on_cache[(other, other_ctx_id)]
    except KeyError:
        pass
    # do a breadth first search of the graph to find any callee
    # that matches the other node
    remaining_callees = cqueue()
    cqueue_push(remaining_callees, (node_state.ctx_id, self))
    seen = set()
    callee = cython.declare(MDFNode)
    callee_state = cython.declare(NodeState)
    ctx_id = cython.declare(int)
    while cqueue_len(remaining_callees) > 0:
        ctx_id, callee = remaining_callees.popleft()
        if (ctx_id, callee) in seen:
            continue
        seen.add((ctx_id, callee))
        if ctx_id == other_ctx_id and callee is other:
            node_state.depends_on_cache[(other, other_ctx_id)] = True
            return True
        try:
            callee_state = callee._states[ctx_id]
        except __HOLE__:
            continue
        # add the callees of this node to the search
        for ctx_id, callees in callee_state.callees.iteritems():
            for callee in callees:
                cqueue_push(remaining_callees, (ctx_id, callee))
    node_state.depends_on_cache[(other, other_ctx_id)] = False
    return False | KeyError | dataset/ETHPy150Open manahl/mdf/mdf/nodes.py/MDFNode._depends_on |
6,128 | def _set_dirty(self, node_state, flags, _depth):
    """
    Updates the dirty flags on this node.
    Marking a node as dirty automatically marks all nodes
    dependent on this node as dirty.
    """
    node = cython.declare(MDFNode)
    caller = cython.declare(MDFNode)
    obj = cython.declare(NodeSetDirtyState)
    to_process = cython.declare(cqueue)
    # start off with a reasonable amount of space and just one entry
    to_process = node_state.set_dirty_queue
    cqueue_clear(to_process)
    # NodeSetDirtyState objects are used instead of a tuple to avoid having to
    # keep converting native types to python types
    cqueue_push(to_process, create_NodeSetDirtyState(_depth, node_state, self, flags))
    depth = cython.declare(int, _depth)
    while cqueue_len(to_process) > 0:
        # pop the current item off the left of the queue
        obj = cqueue_popleft(to_process)
        depth, node_state, node, flags = obj.depth, obj.node_state, obj.node, obj.flags
        # if propagating dirty flags from one node to the calling nodes
        # mask the propagated dirty flags by the calling node's mask
        # as it may want to ignore some flags (this also stops them being
        # propagated further up the graph).
        if depth > 0:
            flags = flags & node._dirty_flags_propagate_mask
        # do nothing if all the dirty flags are already set
        if (node_state.dirty_flags & flags) == flags:
            if _trace_enabled:
                ctx = _get_context(node_state.ctx_id)
                _logger.info("%s%s[%s] already dirty (%s)" % (
                    ("-" * depth) + "> " if depth else "",
                    node.name,
                    ctx,
                    DIRTY_FLAGS.to_string(node_state.dirty_flags)))
            continue
        if _trace_enabled:
            ctx = _get_context(node_state.ctx_id)
            _logger.info("%s%s[%s] marked dirty (%s)" % (
                ("-" * depth) + "> " if depth else "",
                node.name,
                ctx,
                DIRTY_FLAGS.to_string(flags)))
        # update the dirty flag
        node_state.dirty_flags |= flags
        if node._has_on_dirty_callback:
            ctx = _get_context(node_state.ctx_id)
            node.on_set_dirty(ctx, flags)
        # remove any cached value as it will have to be re-calculated
        if flags & ~DIRTY_FLAGS_TIME:
            node_state.has_value = False
            node_state.value = None
        # add this node's callers to the list to process
        for ctx_id, callers in node_state.callers.iteritems():
            for caller in callers:
                try:
                    caller_state = caller._states[ctx_id]
                    cqueue_push(to_process, create_NodeSetDirtyState(depth + 1, caller_state, caller, flags))
                except __HOLE__:
                    continue | KeyError | dataset/ETHPy150Open manahl/mdf/mdf/nodes.py/MDFNode._set_dirty |
6,129 | def _touch(self, node_state, flags=DIRTY_FLAGS_ALL, _quiet=False, _depth=0):
    """
    Mark this node as not dirty and all calling nodes as dirty.
    All shifted contexts that also share this node are also touched.
    If _quiet is True only this node is touched (in ctx and possibly
    in any shifted contexts) but nodes dependent on this one
    are not dirtied.
    """
    if _trace_enabled:
        ctx = _get_context(node_state.ctx_id)
        _logger.info("%s[%s] touched (%s)" % (
            self.name,
            ctx,
            DIRTY_FLAGS.to_string(flags)))
    # clear the flags
    node_state.dirty_flags &= ~flags
    if not _quiet:
        # mark any calling nodes as dirty
        caller = cython.declare(MDFNode)
        caller_state = cython.declare(NodeState)
        for ctx_id, callers in node_state.callers.iteritems():
            for caller in callers:
                try:
                    caller_state = caller._states[ctx_id]
                    caller._set_dirty(caller_state, flags, _depth+1)
                except __HOLE__:
                    continue
# thread_id is passed in to avoid fetching it again later if this has to get another node value | KeyError | dataset/ETHPy150Open manahl/mdf/mdf/nodes.py/MDFNode._touch |
6,130 | def has_value(self, ctx):
    """
    returns True if a cached value exists in this context.
    """
    node_state = cython.declare(NodeState)
    try:
        node_state = self._states[ctx._id_obj]
        return node_state.has_value
    except __HOLE__:
        return False | KeyError | dataset/ETHPy150Open manahl/mdf/mdf/nodes.py/MDFNode.has_value |
6,131 | def was_called(self, ctx):
    """
    returns True if this node has ever been called in this context
    """
    node_state = cython.declare(NodeState)
    try:
        node_state = self._states[ctx._id_obj]
        return node_state.called
    except __HOLE__:
        return False | KeyError | dataset/ETHPy150Open manahl/mdf/mdf/nodes.py/MDFNode.was_called |
6,132 | def set_override(self, ctx, override_node):
    """
    Sets an override node for this node in this context.
    Once an override is set this node delegates its evaluation
    to the override_node whenever it's evaluated in ``ctx``.
    """
    shift_set = ctx.get_shift_set()
    if shift_set and override_node is not shift_set.get(self, None):
        raise Exception("Nodes can only be overriden in the root context, "
                        "or if the context is shifted by the node being overriden")
    # get the root context the override is being applied to
    root_ctx = cython.declare(MDFContext)
    root_ctx = ctx.get_parent() or ctx
    if self in shift_set:
        # get the context only shifted by this node (ctx could be shifted by other nodes as well)
        root_ctx = root_ctx.shift({self : override_node})
    # early out if the context is already shifted by override_node
    node_state = cython.declare(NodeState)
    node_state = self._get_state(root_ctx)
    if node_state.override is override_node:
        return
    # set the override node to be used by get_value
    node_state.override = override_node
    # reset the state for the root context and all shifts of it
    all_contexts = set([root_ctx])
    if root_ctx.get_parent() is not None:
        all_contexts.update(root_ctx.get_parent().get_shifted_contexts())
    for ctx in all_contexts:
        if ctx is root_ctx \
                or ctx.is_shift_of(root_ctx):
            # let any dependencies know the value of this node is invalid
            self.set_dirty(ctx, DIRTY_FLAGS_ALL)
            # clear any cached dependencies as they've changed
            self._clear_dependency_cache(ctx)
            try:
                node_state = self._states[ctx._id_obj]
                node_state.alt_context = None
                node_state.prev_alt_context = None
                node_state.override_cache = None
                node_state.callees.clear()
                node_state.add_dependency_cache.clear()
            except __HOLE__:
                pass | KeyError | dataset/ETHPy150Open manahl/mdf/mdf/nodes.py/MDFNode.set_override |
6,133 | def _get_override(self, ctx, node_state):
    # if called for this context previously return the cached result
    if node_state.override_cache is not None:
        if node_state.override_cache is self:
            return None
        return node_state.override_cache
    # if there's an override set for this node in this context return that
    if node_state.override is not None:
        node_state.override_cache = node_state.override
        return node_state.override
    # find the most shifted context with an override set between ctx
    # and the root context
    parent = cython.declare(MDFContext)
    parent = ctx.get_parent()
    if parent is None:
        node_state.override_cache = self
        return None
    best_match_num_shifts = cython.declare(int)
    best_match_num_shifts = -1
    best_match = cython.declare(MDFContext)
    best_match = None
    shifted_ctx = cython.declare(MDFContext)
    shifted_node_state = cython.declare(NodeState)
    for shifted_ctx in itertools.chain([parent], parent.get_shifted_contexts()):
        try:
            shifted_node_state = self._states[shifted_ctx._id_obj]
        except __HOLE__:
            continue
        if shifted_node_state.override is not None \
                and (shifted_ctx is ctx or ctx.is_shift_of(shifted_ctx)):
            # shifted_ctx is a shifted version of ctx and has an
            # override set. If it's more shifted than any previously
            # encountered use the shift from this context
            num_shifts = len(shifted_ctx.get_shift_set())
            if num_shifts == best_match_num_shifts:
                # if two equally shifted contexts both have an override set for
                # this context then there's no way to sensibly decide which
                # override to use. This won't usually happen as overrides are
                # either set on the root context or as a result of shifting.
                raise Exception("Ambiguous override found for %s: %s vs. %s" % (self.name,
                                                                                best_match,
                                                                                shifted_ctx))
            if num_shifts > best_match_num_shifts:
                best_match = shifted_ctx
                best_match_num_shifts = num_shifts
            # early out if this context is the main context
            # since there can't be a candidate that ctx is a
            # shift of and is more shifted
            if shifted_ctx is ctx:
                break
    # if no contexts with an override were found return None
    if best_match is None:
        node_state.override_cache = self
        return None
    shifted_node_state = self._states[best_match._id_obj]
    node_state.override_cache = shifted_node_state.override
    return shifted_node_state.override | KeyError | dataset/ETHPy150Open manahl/mdf/mdf/nodes.py/MDFNode._get_override |
6,134 | def _get_value(self, ctx, node_state):
    try:
        return self._get_cached_value_and_date(ctx, node_state)[0]
    except __HOLE__:
        if self._default_value is self._no_default_value_:
            raise
        return self._default_value | KeyError | dataset/ETHPy150Open manahl/mdf/mdf/nodes.py/MDFVarNode._get_value |
6,135 | def _get_value(self, ctx, node_state):
    # if there's a timestep func and nothing's changed apart from the
    # date look for a previous value and call the timestep func
    dirty_flags = node_state.dirty_flags
    if self._is_generator \
            and dirty_flags == DIRTY_FLAGS_TIME \
            and node_state.generator is not None:
        # if this node has been valued already for this context
        # check the date and see if it can be updated from that
        try:
            # don't unpack as a tuple as that produces slightly more complicated cython code
            tmp = self._get_cached_value_and_date(ctx, node_state)
            prev_value = tmp[0]
            prev_date = tmp[1]
        except __HOLE__:
            prev_value, prev_date = None, None
        if prev_date is not None:
            date_cmp = cython.declare(int)
            date_cmp = 0 if prev_date == ctx._now else (-1 if prev_date < ctx._now else 1)
            if date_cmp == 0:  # prev_date == ctx._now
                self._touch(node_state, DIRTY_FLAGS_ALL, True)
                return prev_value
            if date_cmp < 0:  # prev_date < ctx._now
                # if a filter's set check if the previous value can be re-used
                if self._filter_func is not None:
                    if _profiling_enabled:
                        with ctx._profile(self) as timer:
                            needs_update = self._filter_func()
                    else:
                        needs_update = self._filter_func()
                    if not needs_update:
                        # re-use the previous value
                        if _trace_enabled:
                            _logger.debug("Re-using previous value of %s[%s]" % (self.name, ctx))
                        self._touch(node_state, DIRTY_FLAGS_ALL, True)
                        return prev_value
                # call the timestep function with or without the context
                if _trace_enabled:
                    _logger.debug("Evaluating next value of %s[%s]" % (self.name, ctx))
                if _profiling_enabled:
                    with ctx._profile(self):
                        new_value = next(node_state.generator)
                else:
                    new_value = next(node_state.generator)
                return new_value
    # if still dirty call func to get the new value
    if _trace_enabled:
        _logger.debug("Evaluating %s[%s] (%s)" % (self.name,
                                                  ctx,
                                                  DIRTY_FLAGS.to_string(dirty_flags)))
    # call the function, set the value and return it
    if _profiling_enabled:
        with ctx._profile(self) as timer:
            value = self._func()
    else:
        value = self._func()
    if self._is_generator:
        gen = value
        # evaluate the generator
        if _profiling_enabled:
            with ctx._profile(self) as timer:
                value = next(gen)
        else:
            value = next(gen)
        node_state.generator = iter(gen)
    # if a filter's set call it just to make sure any dependencies it requires
    # are set up correctly to avoid conditional dependency errors later
    if self._filter_func is not None:
        if _profiling_enabled:
            with ctx._profile(self) as timer:
                self._filter_func()
        else:
            self._filter_func()
    return value | KeyError | dataset/ETHPy150Open manahl/mdf/mdf/nodes.py/MDFEvalNode._get_value |
6,136 | def _get_alt_context(self, ctx):
    """
    returns the context the values for this node[ctx] actually
    belong in. This can be different from ctx if it's a
    shifted context but this node doesn't depend on that shift.
    """
    ctx_shift_set = ctx.get_shift_set()
    # if the context hasn't been shifted then the context to use
    # is trivially this context
    if not ctx_shift_set:
        return ctx
    # if the node's been overriden get the alt context from the override
    node_state = self._get_state(ctx)
    override_node = self._get_override(ctx, node_state)
    if override_node and override_node is not self:
        override_alt_ctx = cython.declare(MDFContext)
        override_alt_ctx = override_node.get_alt_context(ctx)
        return override_alt_ctx.shift({self : override_node})
    # cython forward declarations
    parent = cython.declare(MDFContext)
    shifted_ctx = cython.declare(MDFContext)
    best_match = cython.declare(MDFContext)
    shifted_node_state = cython.declare(NodeState)
    num_shifts = cython.declare(int)
    best_match_num_shifts = cython.declare(int)
    shifted_node = cython.declare(MDFNode)
    # find the most shifted context where this node has been called
    # and where this context is a shift of that shifted context
    parent = ctx.get_parent() or ctx
    best_match = None
    best_match_num_shifts = -1
    for shifted_ctx in itertools.chain([parent], parent.iter_shifted_contexts()):
        try:
            shifted_node_state = self._states[shifted_ctx._id_obj]
        except __HOLE__:
            continue
        if shifted_node_state.called:
            if shifted_ctx is ctx or ctx.is_shift_of(shifted_ctx):
                # ctx is a shift of shifted_ctx so use the dependencies
                # from this context to determine the correct alt context
                num_shifts = len(shifted_ctx.get_shift_set())
                if num_shifts > best_match_num_shifts:
                    best_match = shifted_ctx
                    best_match_num_shifts = num_shifts
                # early out if this context is the main context
                # since there can't be a candidate that ctx is a
                # shift of and is more shifted
                if shifted_ctx is ctx:
                    break
    # if it's never been called before in any related context, use this context
    if best_match is None:
        return ctx
    # get all the contexts that are shifts of the original context (or are the ctx)
    # or that original context is a shift of. i.e. it's a list of all contexts from the root
    # to ctx plus any shifts of ctx.
    #
    # NOTE this is *not* all the shifts and parents of best_match as that could be a much
    # wider set and could include contexts that are actually not shifts or parents of
    # the original context we're trying to get the value in.
    all_shifted_ctxs = cqueue()
    cqueue_push(all_shifted_ctxs, parent)
    for shifted_ctx in parent.iter_shifted_contexts():
        if shifted_ctx is ctx \
                or shifted_ctx.is_shift_of(ctx) \
                or ctx.is_shift_of(shifted_ctx):
            cqueue_push(all_shifted_ctxs, shifted_ctx)
    # sort so the least shifted are at the start. This should be more optimal
    # for the next loop than if they were in a random order.
    def get_shift_degree(x):
        return len(x.get_shift_set())
    cqueue_sort(all_shifted_ctxs, get_shift_degree, False)
    best_match_state = cython.declare(NodeState)
    best_match_state = self._get_state(best_match)
    shift_set = cython.declare(dict)
    shift_set = {}
    for shifted_node, shifted_value in ctx_shift_set.iteritems():
        # if the context is shifted by self then include self in the shift set
        if shifted_node is self:
            shift_set[shifted_node] = shifted_value
            continue
        # if self[best_match] depends on shifted_node[best_match] or
        # on shifted_node[shifted_ctx] where shifted_ctx is any shift
        # of the original ctx then we have to include the shifted node in the
        # shift set
        for shifted_ctx in all_shifted_ctxs:
            if self._depends_on(best_match_state, shifted_node, shifted_ctx._id_obj):
                shift_set[shifted_node] = shifted_value
                break
    return parent.shift(shift_set) | KeyError | dataset/ETHPy150Open manahl/mdf/mdf/nodes.py/MDFEvalNode._get_alt_context |
6,137 | def integer(value):
    try:
        return int(value)
    except __HOLE__:
        return long(value) # why does this help? (CM)
    except OverflowError:
        return long(value) | ValueError | dataset/ETHPy150Open XiaoMi/minos/supervisor/supervisor/datatypes.py/integer |
6,138 | def inet_address(s):
    # returns (host, port) tuple
    host = ''
    port = None
    if ":" in s:
        host, s = s.split(":", 1)
        if not s:
            raise ValueError("no port number specified in %r" % s)
        port = port_number(s)
        host = host.lower()
    else:
        try:
            port = port_number(s)
        except __HOLE__:
            raise ValueError("not a valid port number: %r " %s)
    if not host or host == '*':
        host = DEFAULT_HOST
    return host, port | ValueError | dataset/ETHPy150Open XiaoMi/minos/supervisor/supervisor/datatypes.py/inet_address |
6,139 | def octal_type(arg):
    try:
        return int(arg, 8)
    except __HOLE__:
        raise ValueError('%s is not convertable to an octal type' % arg) | TypeError | dataset/ETHPy150Open XiaoMi/minos/supervisor/supervisor/datatypes.py/octal_type |
6,140 | def name_to_uid(name):
    if name is None:
        return None
    try:
        uid = int(name)
    except ValueError:
        try:
            pwrec = pwd.getpwnam(name)
        except KeyError:
            return None
        uid = pwrec[2]
    else:
        try:
            pwrec = pwd.getpwuid(uid)
        except __HOLE__:
            return None
    return uid | KeyError | dataset/ETHPy150Open XiaoMi/minos/supervisor/supervisor/datatypes.py/name_to_uid |
6,141 | def name_to_gid(name):
    try:
        gid = int(name)
    except __HOLE__:
        try:
            pwrec = grp.getgrnam(name)
        except KeyError:
            return None
        gid = pwrec[2]
    else:
        try:
            pwrec = grp.getgrgid(gid)
        except KeyError:
            return None
    return gid | ValueError | dataset/ETHPy150Open XiaoMi/minos/supervisor/supervisor/datatypes.py/name_to_gid |
6,142 | def signal_number(value):
    result = None
    try:
        result = int(value)
    except (ValueError, TypeError):
        result = getattr(signal, 'SIG'+value, None)
    try:
        result = int(result)
        return result
    except (ValueError, __HOLE__):
        raise ValueError('value %s is not a signal name/number' % value) | TypeError | dataset/ETHPy150Open XiaoMi/minos/supervisor/supervisor/datatypes.py/signal_number |
6,143 | def try_import(module_name):
    try:
        return importlib.import_module(module_name)
    except __HOLE__ as e:
        return _ModulePlaceholder(TryImportError(e)) | ImportError | dataset/ETHPy150Open alexmojaki/funcfinder/funcfinder/utils.py/try_import |
6,144 | def assertDeepEqualIters(a, b):
    """
    Assert that nested iterables have equal elements in order, regardless of iterable type
    like assertEqualIters, but arbitrarily deep and regardless of structure.
    For example,
    ["ab", "cd", "ef", "gh", 1, [[[2]]]]
    is not equal to
    ["ab", ["c", "d"], ("e", "f"), (c for c in "gh"), 1, [[[2]]]]
    not even according to assertEqualIters, but according to this function they are.
    """
    try:
        iter(a)
    except __HOLE__:
        assert a == b
    else:
        # Avoid infinite recursion for single character strings
        if isinstance(a, basestring) and len(a) == 1:
            assert a == b
            return
        for i1, i2 in izip_longest(a, b, fillvalue=object()):
            assertDeepEqualIters(i1, i2) | TypeError | dataset/ETHPy150Open alexmojaki/funcfinder/funcfinder/utils.py/assertDeepEqualIters |
6,145 | def popd(self):
    try:
        path = self._dirstack.pop()
    except __HOLE__:
        return None
    else:
        os.chdir(path)
        return path | IndexError | dataset/ETHPy150Open kdart/pycopia/QA/pycopia/remote/PosixServer.py/PosixAgent.popd |
6,146 | def poll(self, pid):
    """Poll for async process.
    Returns exitstatus if done, -ENOENT if no such pid is managed, or -EAGAIN
    if pid is still running.
    """
    try:
        sts = self._status[pid]
    except __HOLE__:
        return -errno.ENOENT
    if sts is None:
        return -errno.EAGAIN
    else: # finished
        del self._status[pid]
        return sts | KeyError | dataset/ETHPy150Open kdart/pycopia/QA/pycopia/remote/PosixServer.py/PosixAgent.poll |
6,147 | def kill(self, pid):
    """Kills a process that was started by spawn."""
    logging.msg("kill", str(pid))
    try:
        sts = self._status.pop(pid)
    except __HOLE__:
        return -errno.ENOENT
    else:
        if sts is None: # a running process
            pm = proctools.get_procmanager()
            proc = pm.getbypid(pid)
            proc.kill()
            proc.wait()
            sts = self.poll(pid)
            return sts
        else: # already exited
            return sts | KeyError | dataset/ETHPy150Open kdart/pycopia/QA/pycopia/remote/PosixServer.py/PosixAgent.kill |
6,148 | def instance():
    import sys
    try:
        import dbus
        return NotifyDbus()
    except __HOLE__:
        sys.stderr.write("python-dbus not installed. try:"
                         "`sudo pip install python-dbus`.")
    if whereis_exe('notify-send'):
        return NotifySendNotification()
    sys.stderr.write("notify-send not found.")
    return Notification() | ImportError | dataset/ETHPy150Open kivy/plyer/plyer/platforms/linux/notification.py/instance |
6,149 | def elementStream():
    """ Preferred method to construct an ElementStream
    Uses Expat-based stream if available, and falls back to Sux if necessary.
    """
    try:
        es = ExpatElementStream()
        return es
    except __HOLE__:
        if SuxElementStream is None:
            raise Exception("No parsers available :(")
        es = SuxElementStream()
        return es | ImportError | dataset/ETHPy150Open nlloyd/SubliminalCollaborator/libs/twisted/words/xish/domish.py/elementStream |
6,150 | def perspective_persist(self, profile='jcli-prod'):
    path = '%s/%s.smppccs' % (self.config.store_path, profile)
    self.log.info('Persisting current configuration to [%s] profile in %s', profile, path)
    try:
        # Prepare connectors for persistence
        # Will persist config and service status only
        connectors = []
        for c in self.connectors:
            connectors.append({
                'id': c['id'],
                'config': c['config'],
                'service_status': c['service'].running})
        # Write configuration with datetime stamp
        fh = open(path, 'w')
        fh.write('Persisted on %s [Jasmin %s]\n' % (time.strftime("%c"), jasmin.get_release()))
        fh.write(pickle.dumps(connectors, self.pickleProtocol))
        fh.close()
        # Set persistence state to True
        self.persisted = True
    except __HOLE__:
        self.log.error('Cannot persist to %s', path)
        return False
    except Exception, e:
        self.log.error('Unknown error occurred while persisting configuration: %s', e)
        return False
    return True | IOError | dataset/ETHPy150Open jookies/jasmin/jasmin/managers/clients.py/SMPPClientManagerPB.perspective_persist |
6,151 | @defer.inlineCallbacks
def perspective_load(self, profile='jcli-prod'):
    path = '%s/%s.smppccs' % (self.config.store_path, profile)
    self.log.info('Loading/Activating [%s] profile configuration from %s', profile, path)
    try:
        # Load configuration from file
        fh = open(path, 'r')
        lines = fh.readlines()
        fh.close()
        # Init migrator
        cf = ConfigurationMigrator(context='smppccs', header=lines[0], data=''.join(lines[1:]))
        # Remove current configuration
        CIDs = []
        for c in self.connectors:
            CIDs.append(c['id'])
        for cid in CIDs:
            remRet = yield self.perspective_connector_remove(cid)
            if not remRet:
                raise ConfigProfileLoadingError('Error removing connector %s' % cid)
            self.log.info('Removed connector [%s]', cid)
        # Apply configuration
        loadedConnectors = cf.getMigratedData()
        for loadedConnector in loadedConnectors:
            # Add connector
            addRet = yield self.perspective_connector_add(
                pickle.dumps(loadedConnector['config'],
                             self.pickleProtocol))
            if not addRet:
                raise ConfigProfileLoadingError('Error adding connector %s' % loadedConnector['id'])
            # Start it if its service was started when persisted
            if loadedConnector['service_status'] == 1:
                startRet = yield self.perspective_connector_start(loadedConnector['id'])
                if not startRet:
                    self.log.error('Error starting connector %s', loadedConnector['id'])
        # Set persistence state to True
        self.persisted = True
    except __HOLE__, e:
        self.log.error('Cannot load configuration from %s: %s', path, str(e))
        defer.returnValue(False)
    except ConfigProfileLoadingError, e:
        self.log.error('Error while loading configuration: %s', e)
        defer.returnValue(False)
    except Exception, e:
        self.log.error('Unknown error occurred while loading configuration: %s', e)
        defer.returnValue(False)
    defer.returnValue(True) | IOError | dataset/ETHPy150Open jookies/jasmin/jasmin/managers/clients.py/SMPPClientManagerPB.perspective_load |
6,152 | def render(self, data, template=None):
    if not isinstance(data, (list, tuple)):
        data = [data]
    if len(data) == 0:
        return
    try:
        kwargs = {}
        if self.app.pargs.format_json:
            kwargs['sort_keys'] = True
            kwargs['indent'] = 4
        json_data = [row.serialize() for row in data]
        json.dump(
            json_data,
            sys.stdout,
            default=json_util.default,
            **kwargs
        )
    except IOError:
        # Handle pipes that could close before output is done.
        # See http://stackoverflow.com/questions/15793886/
        try:
            sys.stdout.close()
        except __HOLE__:
            pass
        try:
            sys.stderr.close()
        except IOError:
            pass | IOError | dataset/ETHPy150Open newsdev/nyt-clerk/clerk/ext_json.py/ElexJSONOutputHandler.render |
6,153 | def __next__(self):
    try:
        return self.seq[self.pos + 1]
    except __HOLE__:
        return None | IndexError | dataset/ETHPy150Open mne-tools/mne-python/mne/externals/tempita/_looper.py/loop_pos.__next__ |
6,154 | def get_format_fn(self):
    NO_DATA_TEXT = '--'
    CANT_CALCULATE_TEXT = '?'
    class NoData(Exception):
        pass
    class BadData(Exception):
        pass
    def trap_errors(fn):
        def inner(*args, **kwargs):
            try:
                return fn(*args, **kwargs)
            except BadData:
                return CANT_CALCULATE_TEXT
            except NoData:
                return NO_DATA_TEXT
        return inner
    def _raw(data):
        if data['denom']:
            try:
                return round(float(data['num']) / float(data['denom']), 3)
            except (__HOLE__, TypeError):
                raise BadData()
        else:
            raise NoData()
    def _raw_pct(data, round_type=float):
        return round_type(_raw(data) * 100)
    @trap_errors
    def _clean_raw(data):
        return _raw(data)
    @trap_errors
    def _numeric_pct(data):
        return _raw_pct(data, round_type=int)
    @trap_errors
    def _pct(data):
        return '{0:.0f}%'.format(_raw_pct(data))
    _fraction = lambda data: '{num}/{denom}'.format(**data)
    return {
        'percent': _pct,
        'fraction': _fraction,
        'both': lambda data: '{} ({})'.format(_pct(data), _fraction(data)),
        'numeric_percent': _numeric_pct,
        'decimal': _clean_raw,
    }[self.format] | ValueError | dataset/ETHPy150Open dimagi/commcare-hq/corehq/apps/userreports/reports/specs.py/PercentageColumn.get_format_fn |
6,155 | def GetTasks():
    """Pull next set of tasks off the queue."""
    try:
        tasks = task_api.tasks().lease(leaseSecs=settings.LEASE_TIME_SEC,
                                       taskqueue=settings.QUEUE_NAME,
                                       project='s~'+settings.PROJECT_ID,
                                       numTasks=settings.NUM_TASKS_TO_LEASE)
        tasks = tasks.execute().get('items', [])
        if tasks:
            # Update Stats.
            STATS['numTasksProcessing'] += len(tasks)
            STATS['lastLeasedDate'] = datetime.now().strftime(DATETIME_STRSAFE)
        return TASK_PASS, tasks
    except HttpError, http_error:
        logging.error('HttpError %s: \'%s\'',
                      http_error.resp.status,
                      http_error.resp.reason)
        # Check for Rate-limit error.
        if (http_error.resp.status == 403 and
                (http_error.resp.reason == 'rateLimitExceeded' or
                 http_error.resp.reason == 'Rate Limit Exceeded' or
                 http_error.resp.reason == 'userRateLimitExceeded' or
                 http_error.resp.reason == 'User Rate Limit Exceeded')):
            return TASK_RATE_LIMITTED, None
        else:
            return TASK_FAIL, None
    except BadStatusLine, e:
        logging.error('BadStatusLine while trying to lease tasks: %s', e)
        return TASK_FAIL, None
    except socket.error, e:
        logging.error('Socket error: %s', e)
        return TASK_FAIL, None
    except __HOLE__, e:
        logging.error('IO error: %s', e)
        return TASK_FAIL, None
    except Exception, e:
        logging.error('Exception %s/%s', type(e), e)
        return TASK_FAIL, None | IOError | dataset/ETHPy150Open GoogleCloudPlatform/solutions-orchestrator-android-sampleapp-smashpix/daemon/compute_engine_daemon.py/GetTasks |
6,156 | def DoTask(task):
    """Load, process and upload task image and return processed image link."""
    payload = json.loads(base64.b64decode(task['payloadBase64']))
    # Load Image from URL
    url = payload['image_link']
    # Save image temporarily for upload with timestamp as name
    filename = '%s_%s' % (url[url.rfind('/')+1:],
                          datetime.strftime(datetime.now(),
                                            '%Y_%M_%d_%H_%M_%S_%s'))
    filepath = os.path.join('/tmp', filename)
    # Save image to temporary file
    try:
        image_request = urllib2.urlopen(url)
        with open(filepath, 'wb') as file_handle:
            file_handle.write(image_request.read())
    except urllib2.HTTPError, e:
        logging.error('Error loading image link %s : %s', url, e)
        return False
    # Generate Bitified image from given image.
    try:
        processed_image = ConvertToBitifiedImage(filepath)
    except IOError, e:
        logging.error('Error processing image for %s : %s', url, e)
        return False
    filepath_processed_image = filepath+'.png'
    try:
        processed_image.save(filepath_processed_image)
    except __HOLE__, e:
        logging.error('Error saving processed image for %s : %s', url, e)
        return False
    # Upload processed image to bitified image cloud bucket
    contents = file(filepath_processed_image, 'r')
    uri = boto.storage_uri(settings.PROCESSED_IMG_BUCKET + '/' + filename,
                           settings.GOOGLE_STORAGE)
    uri.new_key().set_contents_from_file(contents)
    contents.close()
    # Remove temporary image
    os.remove(filepath)
    logging.info('%s - Successfully created "%s/%s"\n',
                 datetime.now(),
                 uri.bucket_name,
                 uri.object_name)
    return uri.object_name | IOError | dataset/ETHPy150Open GoogleCloudPlatform/solutions-orchestrator-android-sampleapp-smashpix/daemon/compute_engine_daemon.py/DoTask |
6,157 | def __add__(self, other):
    out, i, j, u, v = [], 0, 0, self.value, other.value
    try:
        while True:
            if u[i][0] < v[j][0]:
                i += 1
                out.append( ( i, u[i][1] ) )
            elif u[i][0] > v[j][0]:
                j += 1
                out.append( ( i, v[j][1] ) )
            else:
                i, j = i+1, j+1
                out.append( ( i, u[i][1]+v[j][1] ) )
    except __HOLE__:
        return Vector(tuple(out)) | IndexError | dataset/ETHPy150Open mollyproject/mollyproject/molly/wurfl/vsm.py/Vector.__add__ |
6,158 | def __sub__(self, other):
    out, i, j, u, v = [], 0, 0, self.value, other.value
    try:
        while True:
            if u[i][0] < v[j][0]:
                i += 1
                out.append( ( i, u[i][1] ) )
            elif u[i][0] > v[j][0]:
                j += 1
                out.append( ( i, -v[j][1] ) )
            else:
                i, j = i+1, j+1
                out.append( ( i, u[i][1]-v[j][1] ) )
    except __HOLE__:
        return Vector(tuple(out)) | IndexError | dataset/ETHPy150Open mollyproject/mollyproject/molly/wurfl/vsm.py/Vector.__sub__ |
6,159 | def __mul__(self, other):
    if isinstance(other, Vector):
        out, i, j, u, v = 0, 0, 0, self.value, other.value
        try:
            while True:
                if u[i][0] < v[j][0]:
                    i += 1
                elif u[i][0] > v[j][0]:
                    j += 1
                else:
                    i, j = i+1, j+1
                    out += u[i][1]*v[j][1]
        except __HOLE__:
            return out
    else:
        return Vector(tuple((k,v*other) for (k,v) in self.value)) | IndexError | dataset/ETHPy150Open mollyproject/mollyproject/molly/wurfl/vsm.py/Vector.__mul__ |
6,160 | def startElement(self, name, attrs):
    try:
        fn = self._start_handlers[name]
    except __HOLE__:
        if self._error_mode == self.ERR_MODE_IGNORE:
            fn = self._ignore
        else:
            raise SAXException('No start handler for tag {0!r}'.format(name)) # FIXME: better exception
    self._tag_stack.append(name)
    fn(attrs) | KeyError | dataset/ETHPy150Open cidles/graf-python/src/graf/io.py/SAXHandler.startElement |
6,161 | def endElement(self, name):
    try:
        fn = self._end_handlers[name]
    except __HOLE__:
        if self._error_mode == self.ERR_MODE_IGNORE:
            fn = self._ignore
        else:
            raise SAXException('No end handler for tag {0!r}'.format(name)) # FIXME: better exception
    fn()
    assert self._tag_stack.pop() == name | KeyError | dataset/ETHPy150Open cidles/graf-python/src/graf/io.py/SAXHandler.endElement |
6,162 | def characters(self, ch):
    name = self._tag_stack[-1]
    try:
        fn = self._char_handlers[name]
    except __HOLE__:
        if self._error_mode == self.ERR_MODE_IGNORE:
            fn = self._ignore
        else:
            raise SAXException('No characters handler for tag {0!r}'.format(name)) # FIXME: better exception
    fn(ch) | KeyError | dataset/ETHPy150Open cidles/graf-python/src/graf/io.py/SAXHandler.characters |
6,163 | def dependency_handle(self, attribs):
    try:
        type = attribs[self._g.TYPE_F_ID]
    except __HOLE__:
        type = attribs[self._g.TYPE]
    self._parse_dependency(type, self.graph) | KeyError | dataset/ETHPy150Open cidles/graf-python/src/graf/io.py/GraphHandler.dependency_handle |
6,164 | def feature_start(self, attribs):
    name = attribs.get(self._g.NAME)
    try:
        value = attribs.get(self._g.VALUE)
    except __HOLE__:
        value = ""
    self._feat_name_stack.append(name)
    self._fs_stack[-1][name] = value | KeyError | dataset/ETHPy150Open cidles/graf-python/src/graf/io.py/GraphHandler.feature_start |
6,165 | def send(address, subject, template_name, context):
    if is_test_email(address):
        return False
    templateLoader = jinja2.FileSystemLoader(searchpath="totalimpactwebapp/templates")
    templateEnv = jinja2.Environment(loader=templateLoader)
    html_template = templateEnv.get_template(template_name + ".html")
    html_to_send = html_template.render(context)
    mailer = mandrill.Mandrill(os.getenv("MANDRILL_APIKEY"))
    addressee = {"email": address}
    try:
        addressee["name"] = context["name"]
    except KeyError:
        pass
    msg = {
        "html": html_to_send,
        "subject": subject,
        "from_email": "[email protected]",
        "from_name": "The Impactstory team",
        "to": [addressee],  # must be a list
        "track_opens": True,
        "track_clicks": True
    }
    try:
        msg["tags"] = context["tags"]
    except __HOLE__:
        pass
    mailer.messages.send(msg)
    logger.info(u"Sent an email to " + address)
    return msg | KeyError | dataset/ETHPy150Open Impactstory/total-impact-webapp/emailer.py/send |
6,166 | def extend_script_args(args, shortcut):
    try:
        args.append(shortcut['scriptargument'])
    except __HOLE__:
        pass
    try:
        args.extend(shortcut['scriptarguments'])
    except KeyError:
        pass | KeyError | dataset/ETHPy150Open ContinuumIO/menuinst/menuinst/win32.py/extend_script_args |
6,167 | def emit(self, record):
    try:
        msg = self.format(record)
        stream = self.stream
        if record.levelno in self.store_msg_loglevels:
            self._store_msg(msg, record.levelno)
        else:
            self._write_msg(stream, msg, record)
    except (__HOLE__, SystemExit):
        raise
    except:
        self.handleError(record) | KeyboardInterrupt | dataset/ETHPy150Open KeepSafe/ks-email-parser/email_parser/utils.py/ProgressConsoleHandler.emit |
6,168 | def _expand(type):
    """
    Expand an NE class name.
    :type type: str
    :rtype: str
    """
    try:
        return short2long[type]
    except __HOLE__:
        return type | KeyError | dataset/ETHPy150Open nltk/nltk/nltk/sem/relextract.py/_expand |
6,169 | def class_abbrev(type):
    """
    Abbreviate an NE class name.
    :type type: str
    :rtype: str
    """
    try:
        return long2short[type]
    except __HOLE__:
        return type | KeyError | dataset/ETHPy150Open nltk/nltk/nltk/sem/relextract.py/class_abbrev |
6,170 | def _join(lst, sep=' ', untag=False):
    """
    Join a list into a string, turning tags tuples into tag strings or just words.
    :param untag: if ``True``, omit the tag from tagged input strings.
    :type lst: list
    :rtype: str
    """
    try:
        return sep.join(lst)
    except __HOLE__:
        if untag:
            return sep.join(tup[0] for tup in lst)
        from nltk.tag import tuple2str
        return sep.join(tuple2str(tup) for tup in lst) | TypeError | dataset/ETHPy150Open nltk/nltk/nltk/sem/relextract.py/_join |
6,171 | def descape_entity(m, defs=htmlentitydefs.entitydefs):
    """
    Translate one entity to its ISO Latin value.
    Inspired by example from effbot.org
    """
    #s = 'mcglashan_&_sarrail'
    #l = ['mcglashan', '&', 'sarrail']
    #pattern = re.compile("&(\w+?);")
    #new = list2sym(l)
    #s = pattern.sub(descape_entity, s)
    #print s, new
    try:
        return defs[m.group(1)]
    except __HOLE__:
        return m.group(0) # use as is | KeyError | dataset/ETHPy150Open nltk/nltk/nltk/sem/relextract.py/descape_entity |
6,172 | def in_demo(trace=0, sql=True):
    """
    Select pairs of organizations and locations whose mentions occur with an
    intervening occurrence of the preposition "in".
    If the sql parameter is set to True, then the entity pairs are loaded into
    an in-memory database, and subsequently pulled out using an SQL "SELECT"
    query.
    """
    from nltk.corpus import ieer
    if sql:
        try:
            import sqlite3
            connection = sqlite3.connect(":memory:")
            connection.text_factory = sqlite3.OptimizedUnicode
            cur = connection.cursor()
            cur.execute("""create table Locations
                        (OrgName text, LocationName text, DocID text)""")
        except __HOLE__:
            import warnings
            warnings.warn("Cannot import sqlite; sql flag will be ignored.")
    IN = re.compile(r'.*\bin\b(?!\b.+ing)')
    print()
    print("IEER: in(ORG, LOC) -- just the clauses:")
    print("=" * 45)
    for file in ieer.fileids():
        for doc in ieer.parsed_docs(file):
            if trace:
                print(doc.docno)
                print("=" * 15)
            for rel in extract_rels('ORG', 'LOC', doc, corpus='ieer', pattern=IN):
                print(clause(rel, relsym='IN'))
                if sql:
                    try:
                        rtuple = (rel['subjtext'], rel['objtext'], doc.docno)
                        cur.execute("""insert into Locations
                                    values (?, ?, ?)""", rtuple)
                        connection.commit()
                    except NameError:
                        pass
    if sql:
        try:
            cur.execute("""select OrgName from Locations
                        where LocationName = 'Atlanta'""")
            print()
            print("Extract data from SQL table: ORGs in Atlanta")
            print("-" * 15)
            for row in cur:
                print(row)
        except NameError:
            pass
############################################
# Example of has_role(PER, LOC)
############################################ | ImportError | dataset/ETHPy150Open nltk/nltk/nltk/sem/relextract.py/in_demo |
6,173 | def has_features(fcollection):
    """Returns true for a FeatureCollection-like structure."""
    try:
        return 'features' in fcollection
        # and is_featurelike(fcollection['features'][0])
    except (AttributeError, __HOLE__):
        return False | TypeError | dataset/ETHPy150Open bkg/django-spillway/spillway/collections.py/has_features |
6,174 | def is_featurelike(feature):
    """Returns true for a Feature-like structure."""
    try:
        return 'geometry' in feature and 'properties' in feature
    except (__HOLE__, TypeError):
        return False | AttributeError | dataset/ETHPy150Open bkg/django-spillway/spillway/collections.py/is_featurelike |
6,175 | def __getstate__(self):
    # weakref.proxy don't pickle well, so we use weakref.ref
    # manually and don't pickle the weakref.
    # We restore the weakref when we unpickle.
    ret = [self.prev(), self.next()]
    try:
        ret.append(self.key)
    except __HOLE__:
        pass
    return ret | AttributeError | dataset/ETHPy150Open rizar/attention-lvcsr/libs/Theano/theano/misc/ordered_set.py/Link.__getstate__ |
6,176 | def run(self, commandString, config):
    commandLine = commandString.split()
    if commandString == "{{BREAKAPPLICATION}}":
        return False
    if not commandString == "{{BREAKAPPLICATION}}" and len(commandLine) > 0 and not commandLine[0] == "ls":
        try:
            print ""
            test = subprocess.Popen(commandLine, stdout=subprocess.PIPE)
            output = test.communicate()[0]
            print output
        except __HOLE__ as e:
            if not self.callPlugin(commandString, config):
                print e,
    if commandLine[0] == "ls":
        self.callPlugin(commandString, config)
    print ""
    return True | OSError | dataset/ETHPy150Open jeffersonmourak/pyTerm/CommandsList.py/CommandInspector.run |
6,177 | def parse(self, cli_obj, arg):
    try:
        value = int(arg)
    except __HOLE__:
        return self.__parsePath(cli_obj, arg)
    if value < 0:
        msg = 'file index may not be negative'
        raise cli.CommandArgumentsError(msg)
    if value >= cli_obj.review.getNumEntries():
        msg = 'file index must be less than %s' % \
              (cli_obj.review.getNumEntries())
        raise cli.CommandArgumentsError(msg)
    return value | ValueError | dataset/ETHPy150Open facebookarchive/git-review/src/gitreview/review/cli_reviewer.py/FileIndexArgument.parse |
6,178 | def runParsed(self, cli_obj, name, args):
    try:
        cli_obj.review.next()
    except __HOLE__:
        cli_obj.outputError('no more files')
    cli_obj.indexUpdated() | IndexError | dataset/ETHPy150Open facebookarchive/git-review/src/gitreview/review/cli_reviewer.py/NextCommand.runParsed |
6,179 | def runParsed(self, cli_obj, name, args):
    try:
        cli_obj.review.prev()
    except __HOLE__:
        cli_obj.outputError('no more files')
    cli_obj.indexUpdated() | IndexError | dataset/ETHPy150Open facebookarchive/git-review/src/gitreview/review/cli_reviewer.py/PrevCommand.runParsed |
6,180 | def runParsed(self, cli_obj, name, args):
    try:
        cli_obj.review.goto(args.index)
    except __HOLE__:
        cli_obj.outputError('invalid index %s' % (args.index,))
    cli_obj.indexUpdated() | IndexError | dataset/ETHPy150Open facebookarchive/git-review/src/gitreview/review/cli_reviewer.py/GotoCommand.runParsed |
6,181 | def runParsed(self, cli_obj, name, args):
    try:
        files = self.__getDiffFiles(cli_obj, args)
    except NoCurrentEntryError, ex:
        cli_obj.outputError(ex)
        return 1
    except git.NoSuchBlobError, ex:
        # Convert the "blob" error message to "file", just to be more
        # user-friendly for developers who aren't familiar with git
        # terminology.
        cli_obj.outputError('no such file %r' % (ex.name,))
        return 1
    except git.NotABlobError, ex:
        cli_obj.outputError('not a file %r' % (ex.name,))
        return 1
    cmd = cli_obj.getDiffCommand(*files)
    try:
        p = subprocess.Popen(cmd)
    except __HOLE__, ex:
        cli_obj.outputError('failed to invoke %r: %s' % (cmd[0], ex))
        return 1
    ret = p.wait()
    cli_obj.setSuggestedCommand('next')
    return ret | OSError | dataset/ETHPy150Open facebookarchive/git-review/src/gitreview/review/cli_reviewer.py/DiffCommand.runParsed |
6,182 | def runParsed(self, cli_obj, name, args):
    if args.path is None:
        # If no path was specified, pick the path from the current entry
        try:
            current_entry = cli_obj.review.getCurrentEntry()
        except NoCurrentEntryError, ex:
            cli_obj.outputError(ex)
            return 1
        # If this is a deleted file, view the old version
        # Otherwise, view the new version
        if current_entry.status == git.diff.Status.DELETED:
            commit = 'parent'
            path = current_entry.old.path
        else:
            commit = 'child'
            path = current_entry.new.path
    else:
        commit, path = args.path
    try:
        file = cli_obj.review.getFile(commit, path)
    except git.NoSuchBlobError, ex:
        # Convert the "blob" error message to "file", just to be more
        # user-friendly for developers who aren't familiar with git
        # terminology.
        cli_obj.outputError('no such file %r' % (ex.name,))
        return 1
    except git.NotABlobError, ex:
        cli_obj.outputError('not a file %r' % (ex.name,))
        return 1
    cmd = cli_obj.getViewCommand(file)
    try:
        p = subprocess.Popen(cmd)
    except __HOLE__, ex:
        cli_obj.outputError('failed to invoke %r: %s' % (cmd[0], ex))
        return 1
    ret = p.wait()
    cli_obj.setSuggestedCommand('next')
    return ret | OSError | dataset/ETHPy150Open facebookarchive/git-review/src/gitreview/review/cli_reviewer.py/ViewCommand.runParsed |
6,183 | def runParsed(self, cli_obj, name, args):
    if args.alias is None:
        # Show all aliases
        sorted_aliases = sorted(cli_obj.review.commitAliases.iteritems(),
                                key=lambda x: x[0])
        for (alias, commit) in sorted_aliases:
            cli_obj.output('%s: %s' % (alias, commit))
    elif args.commit is None:
        # Show the specified alias
        try:
            commit = cli_obj.review.commitAliases[args.alias]
            cli_obj.output('%s: %s' % (args.alias, commit))
        except __HOLE__:
            cli_obj.outputError('unknown alias %r' % (args.alias,))
            return 1
    else:
        # Set the specified alias
        try:
            cli_obj.review.setCommitAlias(args.alias, args.commit)
        except git.NoSuchObjectError, ex:
            cli_obj.outputError(ex)
            return 1
    return 0 | KeyError | dataset/ETHPy150Open facebookarchive/git-review/src/gitreview/review/cli_reviewer.py/AliasCommand.runParsed |
6,184 | def runParsed(self, cli_obj, name, args):
    try:
        cli_obj.review.unsetCommitAlias(args.alias)
    except __HOLE__:
        cli_obj.outputError('unknown alias %r' % (args.alias,))
        return 1
    return 0 | KeyError | dataset/ETHPy150Open facebookarchive/git-review/src/gitreview/review/cli_reviewer.py/UnaliasCommand.runParsed |
6,185 | def listTree(self, commit, dirname=None):
    key = (commit, dirname)
    try:
        return self.__treeCache[key]
    except __HOLE__:
        result = self.__repo.listTree(commit, dirname=dirname)
        self.__treeCache[key] = result
        return result | KeyError | dataset/ETHPy150Open facebookarchive/git-review/src/gitreview/review/cli_reviewer.py/RepoCache.listTree |
6,186 | def completeFilename(self, commit, text):
    """
    Complete a filename within the given commit.
    """
    # Don't use os.path.split() or dirname() here, since that performs
    # some canonicalization like stripping out extra slashes. We need to
    # return matches that start with the exact text specified.
    idx = text.rfind(os.path.sep)
    if idx < 0:
        dirname = ''
        basename = text
    else:
        dirname = text[:idx+1]
        basename = text[idx+1:]
    # Expand commit name aliases
    commit = self.review.expandCommitName(commit)
    matches = []
    try:
        tree_entries = self.repoCache.listTree(commit, dirname)
    except __HOLE__, ex:
        return []
    for entry in tree_entries:
        if entry.name.startswith(basename):
            matches.append(entry)
    # If there is only 1 match, and it is a blob, add a space
    # TODO: It would be nicer to honor user's inputrc settings
    if len(matches) == 1 and matches[0].type == git.OBJ_BLOB:
        return [dirname + matches[0].name + ' ']
    string_matches = []
    for entry in matches:
        full_match = dirname + entry.name
        if entry.type == git.OBJ_TREE:
            full_match += os.path.sep
        string_matches.append(full_match)
    return string_matches | OSError | dataset/ETHPy150Open facebookarchive/git-review/src/gitreview/review/cli_reviewer.py/CliReviewer.completeFilename |
6,187 | def initialize(self, connection):
    try:
        self.server_version_info = \
            self._get_server_version_info(connection)
    except NotImplementedError:
        self.server_version_info = None
    try:
        self.default_schema_name = \
            self._get_default_schema_name(connection)
    except __HOLE__:
        self.default_schema_name = None
    try:
        self.default_isolation_level = \
            self.get_isolation_level(connection.connection)
    except NotImplementedError:
        self.default_isolation_level = None
    self.returns_unicode_strings = self._check_unicode_returns(connection)
    if self.description_encoding is not None and \
            self._check_unicode_description(connection):
        self._description_decoder = self.description_encoding = None
    self.do_rollback(connection.connection) | NotImplementedError | dataset/ETHPy150Open RoseOu/flasky/venv/lib/python2.7/site-packages/sqlalchemy/engine/default.py/DefaultDialect.initialize |
6,188 | def _make_excp(self, error, status):
    """
    Raise LiquidWebException.
    """
    exc_type = error.get('error_class')
    message = error.get('full_message')
    try:
        _type = EXCEPTIONS_FIELDS[exc_type]
        fields = _type.get('fields')
        extra = {}
    except __HOLE__:
        fields = []
    for field in fields:
        extra[field] = error.get(field)
    return APIException(exc_type, message, status, extra=extra) | KeyError | dataset/ETHPy150Open apache/libcloud/libcloud/common/liquidweb.py/LiquidWebResponse._make_excp |
6,189 | def flatten(it):
    if it:
        try:
            return reduce(operator.add,
                          map(force_list, (x for x in it if x)))
        except __HOLE__:
            return []
    return it | TypeError | dataset/ETHPy150Open celery/cell/cell/utils/__init__.py/flatten |
6,190 | def first(it, default=None):
    try:
        next(it)
    except __HOLE__:
        return default | StopIteration | dataset/ETHPy150Open celery/cell/cell/utils/__init__.py/first |
6,191 | def first_reply(replies, key):
    try:
        return next(replies)
    except __HOLE__:
        raise KeyError(key) | StopIteration | dataset/ETHPy150Open celery/cell/cell/utils/__init__.py/first_reply |
6,192 | def get_commit_for_refspec(repo, branch_or_tag_or_sha):
    try:
        commit = repo.revparse_single(branch_or_tag_or_sha)
        if commit.type == GIT_OBJ_TAG:
            commit = commit.peel(GIT_OBJ_COMMIT)
        return commit
    except __HOLE__:
        raise NotFound("no such branch, tag, or commit SHA") | KeyError | dataset/ETHPy150Open hulu/restfulgit/restfulgit/porcelain/retrieval.py/get_commit_for_refspec |
6,193 | def get_object_from_path(repo, tree, path):
    path_segments = path.split("/")
    ctree = tree
    for i, path_seg in enumerate(path_segments):
        if ctree.type != GIT_OBJ_TREE:
            raise NotFound("invalid path; traversal unexpectedly encountered a non-tree")
        if not path_seg and i == len(path_segments) - 1:  # allow trailing slash in paths to directories
            continue
        try:
            ctree = repo[ctree[path_seg].id]
        except __HOLE__:
            raise NotFound("invalid path; no such object")
    return ctree | KeyError | dataset/ETHPy150Open hulu/restfulgit/restfulgit/porcelain/retrieval.py/get_object_from_path |
6,194 | def get_blame(repo, file_path, newest_commit, oldest_refspec=None, min_line=1, max_line=None):  # pylint: disable=R0913
    kwargs = {
        'flags': (GIT_BLAME_TRACK_COPIES_SAME_COMMIT_MOVES | GIT_BLAME_TRACK_COPIES_SAME_COMMIT_COPIES),
        'newest_commit': newest_commit.id,
    }
    if oldest_refspec is not None:
        oldest_commit = get_commit_for_refspec(repo, oldest_refspec)
        kwargs['oldest_commit'] = oldest_commit.id
    if min_line > 1:
        kwargs['min_line'] = min_line
    if max_line is not None:
        kwargs['max_line'] = max_line
    try:
        return repo.blame(file_path, **kwargs)
    except KeyError as no_such_file_err:  # pragma: no cover
        raise NotFound(no_such_file_err.message)
    except __HOLE__:  # pragma: no cover
        raise BadRequest("path resolved to non-blob object") | ValueError | dataset/ETHPy150Open hulu/restfulgit/restfulgit/porcelain/retrieval.py/get_blame |
6,195 | def _get_common_ancestor_or_none(repo, left_oid, right_oid):
    try:
        return repo.merge_base(left_oid, right_oid)
    except __HOLE__:
        # Couldn't find merge base
        return None | KeyError | dataset/ETHPy150Open hulu/restfulgit/restfulgit/porcelain/retrieval.py/_get_common_ancestor_or_none |
6,196 | def test_validate_url_good(self):
    self.settings.url = 'http://frozen.pizza.reviews/'
    try:
        self.settings.save()
    except __HOLE__:
        assert 0 | ValidationError | dataset/ETHPy150Open CenterForOpenScience/osf.io/website/addons/forward/tests/test_models.py/TestSettingsValidation.test_validate_url_good |
6,197 | def test_validate_redirect_bool_good(self):
    self.settings.redirect_bool = False
    try:
        self.settings.save()
    except __HOLE__:
        assert 0 | ValidationError | dataset/ETHPy150Open CenterForOpenScience/osf.io/website/addons/forward/tests/test_models.py/TestSettingsValidation.test_validate_redirect_bool_good |
6,198 | def test_validate_redirect_secs_good(self):
    self.settings.redirect_secs = 20
    try:
        self.settings.save()
    except __HOLE__:
        assert 0 | ValidationError | dataset/ETHPy150Open CenterForOpenScience/osf.io/website/addons/forward/tests/test_models.py/TestSettingsValidation.test_validate_redirect_secs_good |
6,199 | def test_label_sanitary(self):
    self.settings.label = 'safe'
    try:
        self.settings.save()
    except __HOLE__:
        assert False | ValidationError | dataset/ETHPy150Open CenterForOpenScience/osf.io/website/addons/forward/tests/test_models.py/TestSettingsValidation.test_label_sanitary |