Unnamed: 0 (int64, 0 to 10k) | function (string, length 79 to 138k) | label (string, 20 classes) | info (string, length 42 to 261) |
---|---|---|---|
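Each `function` cell holds a Python snippet from the ETH Py150 corpus in which one exception class has been masked with the token `__HOLE__`; the `label` cell gives the masked exception name (one of 20 classes), and the `info` cell records the snippet's source path. A minimal sketch of how a row might be consumed, restoring the masked `except` clause (the `fill_hole` helper and the dict literal below are illustrative assumptions, not part of the dataset):

```python
def fill_hole(row):
    """Substitute the masked exception class back into the function body."""
    return row["function"].replace("__HOLE__", row["label"])

# Hypothetical row dict, abbreviated from row 4,909 below.
row = {
    "function": ("def label(self, key):\n"
                 "    try:\n"
                 "        return self.segments[key]\n"
                 "    except __HOLE__:\n"
                 "        return key"),
    "label": "KeyError",
    "info": "dataset/ETHPy150Open MontmereLimited/django-lean/django_lean/lean_segments/segments.py/BaseSegments.label",
}

print(fill_hole(row))  # prints the snippet with `except KeyError:` restored
```

The rows below each pair one such snippet with its label and source path.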
4,900 |
def _new(self, im):
    new = Image()
    new.im = im
    new.mode = im.mode
    new.size = im.size
    new.palette = self.palette
    if im.mode == "P":
        new.palette = ImagePalette.ImagePalette()
    try:
        new.info = self.info.copy()
    except __HOLE__:
        # fallback (pre-1.5.2)
        new.info = {}
        for k, v in self.info:
            new.info[k] = v
    return new
|
AttributeError
|
dataset/ETHPy150Open kleientertainment/ds_mod_tools/pkg/win32/Python27/Lib/site-packages/PIL/Image.py/Image._new
|
4,901 |
def convert(self, mode=None, data=None, dither=None,
            palette=WEB, colors=256):
    "Convert to other pixel format"
    if not mode:
        # determine default mode
        if self.mode == "P":
            self.load()
            if self.palette:
                mode = self.palette.mode
            else:
                mode = "RGB"
        else:
            return self.copy()
    self.load()
    if data:
        # matrix conversion
        if mode not in ("L", "RGB"):
            raise ValueError("illegal conversion")
        im = self.im.convert_matrix(mode, data)
        return self._new(im)
    if mode == "P" and palette == ADAPTIVE:
        im = self.im.quantize(colors)
        return self._new(im)
    # colourspace conversion
    if dither is None:
        dither = FLOYDSTEINBERG
    try:
        im = self.im.convert(mode, dither)
    except __HOLE__:
        try:
            # normalize source image and try again
            im = self.im.convert(getmodebase(self.mode))
            im = im.convert(mode, dither)
        except KeyError:
            raise ValueError("illegal conversion")
    return self._new(im)
|
ValueError
|
dataset/ETHPy150Open kleientertainment/ds_mod_tools/pkg/win32/Python27/Lib/site-packages/PIL/Image.py/Image.convert
|
4,902 |
def getpalette(self):
    "Get palette contents."
    self.load()
    try:
        return map(ord, self.im.getpalette())
    except __HOLE__:
        return None # no palette

##
# Returns the pixel value at a given position.
#
# @param xy The coordinate, given as (x, y).
# @return The pixel value. If the image is a multi-layer image,
#     this method returns a tuple.
|
ValueError
|
dataset/ETHPy150Open kleientertainment/ds_mod_tools/pkg/win32/Python27/Lib/site-packages/PIL/Image.py/Image.getpalette
|
4,903 |
def putalpha(self, alpha):
    "Set alpha layer"
    self.load()
    if self.readonly:
        self._copy()
    if self.mode not in ("LA", "RGBA"):
        # attempt to promote self to a matching alpha mode
        try:
            mode = getmodebase(self.mode) + "A"
            try:
                self.im.setmode(mode)
            except (AttributeError, ValueError):
                # do things the hard way
                im = self.im.convert(mode)
                if im.mode not in ("LA", "RGBA"):
                    raise ValueError # sanity check
                self.im = im
            self.mode = self.im.mode
        except (KeyError, ValueError):
            raise ValueError("illegal image mode")
    if self.mode == "LA":
        band = 1
    else:
        band = 3
    if isImageType(alpha):
        # alpha layer
        if alpha.mode not in ("1", "L"):
            raise ValueError("illegal image mode")
        alpha.load()
        if alpha.mode == "1":
            alpha = alpha.convert("L")
    else:
        # constant alpha
        try:
            self.im.fillband(band, alpha)
        except (__HOLE__, ValueError):
            # do things the hard way
            alpha = new("L", self.size, alpha)
        else:
            return
    self.im.putband(alpha.im, band)

##
# Copies pixel data to this image. This method copies data from a
# sequence object into the image, starting at the upper left
# corner (0, 0), and continuing until either the image or the
# sequence ends. The scale and offset values are used to adjust
# the sequence values: <b>pixel = value*scale + offset</b>.
#
# @param data A sequence object.
# @param scale An optional scale value. The default is 1.0.
# @param offset An optional offset value. The default is 0.0.
|
AttributeError
|
dataset/ETHPy150Open kleientertainment/ds_mod_tools/pkg/win32/Python27/Lib/site-packages/PIL/Image.py/Image.putalpha
|
4,904 |
def resize(self, size, resample=NEAREST):
    "Resize image"
    if resample not in (NEAREST, BILINEAR, BICUBIC, ANTIALIAS):
        raise ValueError("unknown resampling filter")
    self.load()
    if self.mode in ("1", "P"):
        resample = NEAREST
    if resample == ANTIALIAS:
        # requires stretch support (imToolkit & PIL 1.1.3)
        try:
            im = self.im.stretch(size, resample)
        except __HOLE__:
            raise ValueError("unsupported resampling filter")
    else:
        im = self.im.resize(size, resample)
    return self._new(im)

##
# Returns a rotated copy of this image. This method returns a
# copy of this image, rotated the given number of degrees counter
# clockwise around its centre.
#
# @def rotate(angle, filter=NEAREST)
# @param angle In degrees counter clockwise.
# @param filter An optional resampling filter. This can be
#     one of <b>NEAREST</b> (use nearest neighbour), <b>BILINEAR</b>
#     (linear interpolation in a 2x2 environment), or <b>BICUBIC</b>
#     (cubic spline interpolation in a 4x4 environment).
#     If omitted, or if the image has mode "1" or "P", it is
#     set <b>NEAREST</b>.
# @param expand Optional expansion flag. If true, expands the output
#     image to make it large enough to hold the entire rotated image.
#     If false or omitted, make the output image the same size as the
#     input image.
# @return An Image object.
|
AttributeError
|
dataset/ETHPy150Open kleientertainment/ds_mod_tools/pkg/win32/Python27/Lib/site-packages/PIL/Image.py/Image.resize
|
4,905 |
def save(self, fp, format=None, **params):
    "Save image to file or stream"
    if isStringType(fp):
        filename = fp
    else:
        if hasattr(fp, "name") and isStringType(fp.name):
            filename = fp.name
        else:
            filename = ""
    # may mutate self!
    self.load()
    self.encoderinfo = params
    self.encoderconfig = ()
    preinit()
    ext = string.lower(os.path.splitext(filename)[1])
    if not format:
        try:
            format = EXTENSION[ext]
        except __HOLE__:
            init()
            try:
                format = EXTENSION[ext]
            except KeyError:
                raise KeyError(ext) # unknown extension
    try:
        save_handler = SAVE[string.upper(format)]
    except KeyError:
        init()
        save_handler = SAVE[string.upper(format)] # unknown format
    if isStringType(fp):
        import __builtin__
        fp = __builtin__.open(fp, "wb")
        close = 1
    else:
        close = 0
    try:
        save_handler(self, fp, filename)
    finally:
        # do what we can to clean up
        if close:
            fp.close()

##
# Seeks to the given frame in this sequence file. If you seek
# beyond the end of the sequence, the method raises an
# <b>EOFError</b> exception. When a sequence file is opened, the
# library automatically seeks to frame 0.
# <p>
# Note that in the current version of the library, most sequence
# formats only allows you to seek to the next frame.
#
# @param frame Frame number, starting at 0.
# @exception EOFError If the call attempts to seek beyond the end
#     of the sequence.
# @see #Image.tell
|
KeyError
|
dataset/ETHPy150Open kleientertainment/ds_mod_tools/pkg/win32/Python27/Lib/site-packages/PIL/Image.py/Image.save
|
4,906 |
def thumbnail(self, size, resample=NEAREST):
    "Create thumbnail representation (modifies image in place)"
    # FIXME: the default resampling filter will be changed
    # to ANTIALIAS in future versions
    # preserve aspect ratio
    x, y = self.size
    if x > size[0]: y = max(y * size[0] / x, 1); x = size[0]
    if y > size[1]: x = max(x * size[1] / y, 1); y = size[1]
    size = x, y
    if size == self.size:
        return
    self.draft(None, size)
    self.load()
    try:
        im = self.resize(size, resample)
    except __HOLE__:
        if resample != ANTIALIAS:
            raise
        im = self.resize(size, NEAREST) # fallback
    self.im = im.im
    self.mode = im.mode
    self.size = size
    self.readonly = 0

# FIXME: the different tranform methods need further explanation
# instead of bloating the method docs, add a separate chapter.
##
# Transforms this image. This method creates a new image with the
# given size, and the same mode as the original, and copies data
# to the new image using the given transform.
# <p>
# @def transform(size, method, data, resample=NEAREST)
# @param size The output size.
# @param method The transformation method. This is one of
#     <b>EXTENT</b> (cut out a rectangular subregion), <b>AFFINE</b>
#     (affine transform), <b>PERSPECTIVE</b> (perspective
#     transform), <b>QUAD</b> (map a quadrilateral to a
#     rectangle), or <b>MESH</b> (map a number of source quadrilaterals
#     in one operation).
# @param data Extra data to the transformation method.
# @param resample Optional resampling filter. It can be one of
#     <b>NEAREST</b> (use nearest neighbour), <b>BILINEAR</b>
#     (linear interpolation in a 2x2 environment), or
#     <b>BICUBIC</b> (cubic spline interpolation in a 4x4
#     environment). If omitted, or if the image has mode
#     "1" or "P", it is set to <b>NEAREST</b>.
# @return An Image object.
|
ValueError
|
dataset/ETHPy150Open kleientertainment/ds_mod_tools/pkg/win32/Python27/Lib/site-packages/PIL/Image.py/Image.thumbnail
|
4,907 |
def fromarray(obj, mode=None):
    arr = obj.__array_interface__
    shape = arr['shape']
    ndim = len(shape)
    try:
        strides = arr['strides']
    except __HOLE__:
        strides = None
    if mode is None:
        try:
            typekey = (1, 1) + shape[2:], arr['typestr']
            mode, rawmode = _fromarray_typemap[typekey]
        except KeyError:
            # print typekey
            raise TypeError("Cannot handle this data type")
    else:
        rawmode = mode
    if mode in ["1", "L", "I", "P", "F"]:
        ndmax = 2
    elif mode == "RGB":
        ndmax = 3
    else:
        ndmax = 4
    if ndim > ndmax:
        raise ValueError("Too many dimensions.")
    size = shape[1], shape[0]
    if strides is not None:
        obj = obj.tostring()
    return frombuffer(mode, size, obj, "raw", rawmode, 0, 1)
|
KeyError
|
dataset/ETHPy150Open kleientertainment/ds_mod_tools/pkg/win32/Python27/Lib/site-packages/PIL/Image.py/fromarray
|
4,908 |
def open(fp, mode="r"):
    "Open an image file, without loading the raster data"
    if mode != "r":
        raise ValueError("bad mode")
    if isStringType(fp):
        import __builtin__
        filename = fp
        fp = __builtin__.open(fp, "rb")
    else:
        filename = ""
    prefix = fp.read(16)
    preinit()
    for i in ID:
        try:
            factory, accept = OPEN[i]
            if not accept or accept(prefix):
                fp.seek(0)
                return factory(fp, filename)
        except (SyntaxError, IndexError, __HOLE__):
            pass
    if init():
        for i in ID:
            try:
                factory, accept = OPEN[i]
                if not accept or accept(prefix):
                    fp.seek(0)
                    return factory(fp, filename)
            except (SyntaxError, IndexError, TypeError):
                pass
    raise IOError("cannot identify image file")

#
# Image processing.

##
# Creates a new image by interpolating between two input images, using
# a constant alpha.
#
# <pre>
#     out = image1 * (1.0 - alpha) + image2 * alpha
# </pre>
#
# @param im1 The first image.
# @param im2 The second image. Must have the same mode and size as
#     the first image.
# @param alpha The interpolation alpha factor. If alpha is 0.0, a
#     copy of the first image is returned. If alpha is 1.0, a copy of
#     the second image is returned. There are no restrictions on the
#     alpha value. If necessary, the result is clipped to fit into
#     the allowed output range.
# @return An Image object.
|
TypeError
|
dataset/ETHPy150Open kleientertainment/ds_mod_tools/pkg/win32/Python27/Lib/site-packages/PIL/Image.py/open
|
4,909 |
def label(self, key):
    try:
        return self.segments[key]
    except __HOLE__:
        return key
|
KeyError
|
dataset/ETHPy150Open MontmereLimited/django-lean/django_lean/lean_segments/segments.py/BaseSegments.label
|
4,910 |
def write_version(name):
    '''
    Removes the previous configuration file, then creates a new one and writes
    the name line. This function is intended to be used from states.

    If :mod:`syslog_ng.set_config_file
    <salt.modules.syslog_ng.set_config_file>`, is called before, this function
    will use the set config file.

    CLI Example:

    .. code-block:: bash

        salt '*' syslog_ng.write_version name="3.6"
    '''
    line = '@version: {0}'.format(name)
    try:
        if os.path.exists(__SYSLOG_NG_CONFIG_FILE):
            log.debug(
                'Removing previous configuration file: {0}'.format(
                    __SYSLOG_NG_CONFIG_FILE
                )
            )
            os.remove(__SYSLOG_NG_CONFIG_FILE)
            log.debug('Configuration file successfully removed')
        header = _format_generated_config_header()
        _write_config(config=header, newlines=1)
        _write_config(config=line, newlines=2)
        return _format_state_result(name, result=True)
    except __HOLE__ as err:
        log.error(
            'Failed to remove previous configuration file \'{0}\': {1}'
            .format(__SYSLOG_NG_CONFIG_FILE, str(err))
        )
        return _format_state_result(name, result=False)
|
OSError
|
dataset/ETHPy150Open saltstack/salt/salt/modules/syslog_ng.py/write_version
|
4,911 |
def get_page(self, url):
    """
    Get the HTML for an URL, possibly from an in-memory cache.

    XXX TODO Note: this cache is never actually cleared. It's assumed that
    the data won't get stale over the lifetime of a locator instance (not
    necessarily true for the default_locator).
    """
    # http://peak.telecommunity.com/DevCenter/EasyInstall#package-index-api
    scheme, netloc, path, _, _, _ = urlparse(url)
    if scheme == 'file' and os.path.isdir(url2pathname(path)):
        url = urljoin(ensure_slash(url), 'index.html')
    if url in self._page_cache:
        result = self._page_cache[url]
        logger.debug('Returning %s from cache: %s', url, result)
    else:
        host = netloc.split(':', 1)[0]
        result = None
        if host in self._bad_hosts:
            logger.debug('Skipping %s due to bad host %s', url, host)
        else:
            req = Request(url, headers={'Accept-encoding': 'identity'})
            try:
                logger.debug('Fetching %s', url)
                resp = self.opener.open(req, timeout=self.timeout)
                logger.debug('Fetched %s', url)
                headers = resp.info()
                content_type = headers.get('Content-Type', '')
                if HTML_CONTENT_TYPE.match(content_type):
                    final_url = resp.geturl()
                    data = resp.read()
                    encoding = headers.get('Content-Encoding')
                    if encoding:
                        decoder = self.decoders[encoding]  # fail if not found
                        data = decoder(data)
                    encoding = 'utf-8'
                    m = CHARSET.search(content_type)
                    if m:
                        encoding = m.group(1)
                    try:
                        data = data.decode(encoding)
                    except UnicodeError:
                        data = data.decode('latin-1')  # fallback
                    result = Page(data, final_url)
                    self._page_cache[final_url] = result
            except __HOLE__ as e:
                if e.code != 404:
                    logger.exception('Fetch failed: %s: %s', url, e)
            except URLError as e:
                logger.exception('Fetch failed: %s: %s', url, e)
                with self._lock:
                    self._bad_hosts.add(host)
            except Exception as e:
                logger.exception('Fetch failed: %s: %s', url, e)
            finally:
                self._page_cache[url] = result  # even if None (failure)
    return result
|
HTTPError
|
dataset/ETHPy150Open francelabs/datafari/windows/python/Lib/site-packages/pip/_vendor/distlib/locators.py/SimpleScrapingLocator.get_page
|
4,912 |
def get_distribution_names(self):
    """
    Return all the distribution names known to this locator.
    """
    result = set()
    for locator in self.locators:
        try:
            result |= locator.get_distribution_names()
        except __HOLE__:
            pass
    return result

# We use a legacy scheme simply because most of the dists on PyPI use legacy
# versions which don't conform to PEP 426 / PEP 440.
|
NotImplementedError
|
dataset/ETHPy150Open francelabs/datafari/windows/python/Lib/site-packages/pip/_vendor/distlib/locators.py/AggregatingLocator.get_distribution_names
|
4,913 |
def load_settings(globals):
    """Loads settings from the configuration file into the caller's locals
    dict. This is intended to be used from within a Django settings.py to load
    dynamic settings like this:

    >>> from opus.lib.conf import load_settings
    >>> load_settings()

    """
    try:
        settings_file = os.environ["OPUS_SETTINGS_FILE"]
        if not settings_file:
            raise KeyError()
    except __HOLE__:
        raise ImportError("Cannot find Opus settings file, OPUS_SETTINGS_FILE was undefined")
    try:
        with open(settings_file,'r') as f:
            objs = json.load(f)
    except IOError, e:
        print repr(settings_file)
        raise
    # Encode all strings, see docstring explanation in recurse_encode
    objs = recurse_encode(objs)
    for name, value in objs.iteritems():
        globals[name] = value
|
KeyError
|
dataset/ETHPy150Open bmbouter/Opus/opus/lib/conf/__init__.py/load_settings
|
4,914 |
def pack(self, kid='', owner='', **kwargs):
    keys = self.keyjar.get_signing_key(jws.alg2keytype(self.sign_alg),
                                       owner=owner, kid=kid)
    if not keys:
        raise NoSuitableSigningKeys('kid={}'.format(kid))
    key = keys[0]  # Might be more then one if kid == ''
    if key.kid:
        kwargs['kid'] = key.kid
    iat = utc_time_sans_frac()
    if not 'exp' in kwargs:
        kwargs['exp'] = iat + self.lifetime
    try:
        _encrypt = kwargs['encrypt']
    except __HOLE__:
        _encrypt = self.encrypt
    else:
        del kwargs['encrypt']
    _jwt = self.message_type(iss=self.iss, iat=iat, **kwargs)
    if 'jti' in self.message_type.c_param:
        try:
            _jti = kwargs['jti']
        except:
            _jti = uuid.uuid4().hex
        _jwt['jti'] = _jti
    _jws = _jwt.to_jwt([key], self.sign_alg)
    if _encrypt:
        return self._encrypt(_jws)
    else:
        return _jws
|
KeyError
|
dataset/ETHPy150Open rohe/pyoidc/src/oic/utils/jwt.py/JWT.pack
|
4,915 |
def _gen_graph(self, modules, title='Dependency Graph'):
    """
    Invoke graphviz and generate graph
    """
    try:
        graph = Digraph(comment=title, strict=True, format='jpeg')
        logging.debug('Creating graph ..')
    except __HOLE__:
        logging.warning('graphviz module does not support strict graphs, please upgrade python graphviz !!')
        graph = Digraph(comment=title, format='jpeg')
    except:
        logging.error('Failed to create dependency graph !!')
        return None
    for name in modules:
        m = self.modules.get(name, None)
        if m is None:
            continue
        for imp in m.imports:
            graph.node(imp)
            graph.edge(name, imp)
        for inc in m.includes:
            graph.node(inc)
            graph.edge(name, inc)
        for dep in m.depends:
            graph.node(dep)
            graph.edge(dep, name)
    return self._apply_style(graph)
|
TypeError
|
dataset/ETHPy150Open CiscoDevNet/yang-explorer/server/explorer/utils/dygraph.py/DYGraph._gen_graph
|
4,916 |
@app.route('/<ip_addr>')
def lookup_ip(ip_addr):
    try:
        ip = ipaddress.ip_address(ip_addr)
    except __HOLE__:
        abort(400)
    if ip.version == 4:
        gi_city = GI_CITY
        gi_org = GI_ORG
        r = gi_city.record_by_addr(ip.exploded)
        org = gi_org.name_by_addr(ip.exploded)
    else:
        gi_city = GI_CITY_V6
        gi_org = GI_ORG_V6
        r = gi_city.record_by_addr_v6(ip.exploded)
        org = gi_org.name_by_addr_v6(ip.exploded)
    if org:
        m = re.search(r'^(AS[0-9]+) (.+)$', org)
        if m and isinstance(r, dict):
            r['asn'] = m.group(1)
            r['org'] = m.group(2)
    return Response(json.dumps(r), mimetype='application/json')
|
ValueError
|
dataset/ETHPy150Open kz26/balise/main.py/lookup_ip
|
4,917 |
def is_frozen(G):
    """Return True if graph is frozen.

    Parameters
    ----------
    G : graph
        A NetworkX graph

    See Also
    --------
    freeze
    """
    try:
        return G.frozen
    except __HOLE__:
        return False
|
AttributeError
|
dataset/ETHPy150Open networkx/networkx/networkx/classes/function.py/is_frozen
|
4,918 |
def skip(self):
    """ Determine whether or not this object should be skipped.

    If this model instance is a parent of a single subclassed
    instance, skip it. The subclassed instance will create this
    parent instance for us.

    TODO: Allow the user to force its creation?
    """
    if self.skip_me is not None:
        return self.skip_me

    def get_skip_version():
        """ Return which version of the skip code should be run

        Django's deletion code was refactored in r14507 which
        was just two days before 1.3 alpha 1 (r14519)
        """
        if not hasattr(self, '_SKIP_VERSION'):
            version = django.VERSION
            # no, it isn't lisp. I swear.
            self._SKIP_VERSION = (
                version[0] > 1 or (  # django 2k... someday :)
                    version[0] == 1 and (  # 1.x
                        version[1] >= 4 or  # 1.4+
                        version[1] == 3 and not (  # 1.3.x
                            (version[3] == 'alpha' and version[1] == 0)
                        )
                    )
                )
            ) and 2 or 1  # NOQA
        return self._SKIP_VERSION

    if get_skip_version() == 1:
        try:
            # Django trunk since r7722 uses CollectedObjects instead of dict
            from django.db.models.query import CollectedObjects
            sub_objects = CollectedObjects()
        except __HOLE__:
            # previous versions don't have CollectedObjects
            sub_objects = {}
        self.instance._collect_sub_objects(sub_objects)
        sub_objects = sub_objects.keys()
    elif get_skip_version() == 2:
        from django.db.models.deletion import Collector
        from django.db import router
        cls = self.instance.__class__
        using = router.db_for_write(cls, instance=self.instance)
        collector = Collector(using=using)
        collector.collect([self.instance], collect_related=False)
        # collector stores its instances in two places. I *think* we
        # only need collector.data, but using the batches is needed
        # to perfectly emulate the old behaviour
        # TODO: check if batches are really needed. If not, remove them.
        sub_objects = sum([list(i) for i in collector.data.values()], [])
        if hasattr(collector, 'batches'):
            # Django 1.6 removed batches for being dead code
            # https://github.com/django/django/commit/a170c3f755351beb35f8166ec3c7e9d524d9602
            for batch in collector.batches.values():
                # batch.values can be sets, which must be converted to lists
                sub_objects += sum([list(i) for i in batch.values()], [])
    sub_objects_parents = [so._meta.parents for so in sub_objects]
    if [self.model in p for p in sub_objects_parents].count(True) == 1:
        # since this instance isn't explicitly created, it's variable name
        # can't be referenced in the script, so record None in context dict
        pk_name = self.instance._meta.pk.name
        key = '%s_%s' % (self.model.__name__, getattr(self.instance, pk_name))
        self.context[key] = None
        self.skip_me = True
    else:
        self.skip_me = False
    return self.skip_me
|
ImportError
|
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/django-extensions-1.5.0/django_extensions/management/commands/dumpscript.py/InstanceCode.skip
|
4,919 |
def get_many_to_many_lines(self, force=False):
    """ Generates lines that define many to many relations for this instance. """
    lines = []
    for field, rel_items in self.many_to_many_waiting_list.items():
        for rel_item in list(rel_items):
            try:
                pk_name = rel_item._meta.pk.name
                key = '%s_%s' % (rel_item.__class__.__name__, getattr(rel_item, pk_name))
                value = "%s" % self.context[key]
                lines.append('%s.%s.add(%s)' % (self.variable_name, field.name, value))
                self.many_to_many_waiting_list[field].remove(rel_item)
            except __HOLE__:
                if force:
                    item_locator = orm_item_locator(rel_item)
                    self.context["__extra_imports"][rel_item._meta.object_name] = rel_item.__module__
                    lines.append('%s.%s.add( %s )' % (self.variable_name, field.name, item_locator))
                    self.many_to_many_waiting_list[field].remove(rel_item)
    if lines:
        lines.append("")
    return lines
|
KeyError
|
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/django-extensions-1.5.0/django_extensions/management/commands/dumpscript.py/InstanceCode.get_many_to_many_lines
|
4,920 |
def get_attribute_value(item, field, context, force=False, skip_autofield=True):
    """ Gets a string version of the given attribute's value, like repr() might. """
    # Find the value of the field, catching any database issues
    try:
        value = getattr(item, field.name)
    except __HOLE__:
        raise SkipValue('Could not find object for %s.%s, ignoring.\n' % (item.__class__.__name__, field.name))
    # AutoField: We don't include the auto fields, they'll be automatically recreated
    if skip_autofield and isinstance(field, AutoField):
        raise SkipValue()
    # Some databases (eg MySQL) might store boolean values as 0/1, this needs to be cast as a bool
    elif isinstance(field, BooleanField) and value is not None:
        return repr(bool(value))
    # Post file-storage-refactor, repr() on File/ImageFields no longer returns the path
    elif isinstance(field, FileField):
        return repr(force_unicode(value))
    # ForeignKey fields, link directly using our stored python variable name
    elif isinstance(field, ForeignKey) and value is not None:
        # Special case for contenttype foreign keys: no need to output any
        # content types in this script, as they can be generated again
        # automatically.
        # NB: Not sure if "is" will always work
        if field.rel.to is ContentType:
            return 'ContentType.objects.get(app_label="%s", model="%s")' % (value.app_label, value.model)
        # Generate an identifier (key) for this foreign object
        pk_name = value._meta.pk.name
        key = '%s_%s' % (value.__class__.__name__, getattr(value, pk_name))
        if key in context:
            variable_name = context[key]
            # If the context value is set to None, this should be skipped.
            # This identifies models that have been skipped (inheritance)
            if variable_name is None:
                raise SkipValue()
            # Return the variable name listed in the context
            return "%s" % variable_name
        elif value.__class__ not in context["__avaliable_models"] or force:
            context["__extra_imports"][value._meta.object_name] = value.__module__
            item_locator = orm_item_locator(value)
            return item_locator
        else:
            raise DoLater('(FK) %s.%s\n' % (item.__class__.__name__, field.name))
    elif isinstance(field, (DateField, DateTimeField)) and value is not None:
        return "dateutil.parser.parse(\"%s\")" % value.isoformat()
    # A normal field (e.g. a python built-in)
    else:
        return repr(value)
|
ObjectDoesNotExist
|
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/django-extensions-1.5.0/django_extensions/management/commands/dumpscript.py/get_attribute_value
|
4,921 |
def installed_apps(parser, token):
    try:
        tag_name, arg = token.contents.split(None, 1)
    except __HOLE__:
        raise template.TemplateSyntaxError, "%r tag requires arguments" % token.contents.split()[0]
    m = re.search(r'as (\w+)', arg)
    if not m:
        raise template.TemplateSyntaxError, "%r tag had invalid arguments" % tag_name
    var_name = m.groups()[0]
    return InstalledAppsNode(var_name)
|
ValueError
|
dataset/ETHPy150Open svetlyak40wt/django-apps/django_apps/templatetags/apps_tags.py/installed_apps
|
4,922 |
def format_pymux_string(pymux, cli, string, window=None, pane=None):
    """
    Apply pymux sting formatting. (Similar to tmux.)
    E.g. #P is replaced by the index of the active pane.

    We try to stay compatible with tmux, if possible.
    One thing that we won't support (for now) is colors, because our styling
    works different. (With a Style class.) On the other hand, in the future, we
    could allow things like `#[token=Token.Title.PID]`. This gives a clean
    separation of semantics and colors, making it easy to write different color
    schemes.
    """
    arrangement = pymux.arrangement

    if window is None:
        window = arrangement.get_active_window(cli)
    if pane is None:
        pane = window.active_pane

    def id_of_pane():
        return '%s' % (pane.pane_id, )

    def index_of_pane():
        try:
            return '%s' % (window.get_pane_index(pane), )
        except ValueError:
            return '/'

    def index_of_window():
        return '%s' % (window.index, )

    def name_of_window():
        return window.name or '(noname)'

    def window_flags():
        z = 'Z' if window.zoom else ''
        if window == arrangement.get_active_window(cli):
            return '*' + z
        elif window == arrangement.get_previous_active_window(cli):
            return '-' + z
        else:
            return z + ' '

    def name_of_session():
        return pymux.session_name

    def title_of_pane():
        return pane.process.screen.title

    def hostname():
        return socket.gethostname()

    def literal():
        return '#'

    format_table = {
        '#D': id_of_pane,
        '#F': window_flags,
        '#I': index_of_window,
        '#P': index_of_pane,
        '#S': name_of_session,
        '#T': title_of_pane,
        '#W': name_of_window,
        '#h': hostname,
        '##': literal,
    }

    # Date/time formatting.
    try:
        if six.PY2:
            string = datetime.datetime.now().strftime(
                string.encode('utf-8')).decode('utf-8')
        else:
            string = datetime.datetime.now().strftime(string)
    except __HOLE__:  # strftime format ends with raw %
        string = '<ValueError>'

    # Apply '#' formatting.
    for symbol, f in format_table.items():
        if symbol in string:
            string = string.replace(symbol, f())

    return string
|
ValueError
|
dataset/ETHPy150Open jonathanslenders/pymux/pymux/format.py/format_pymux_string
|
4,923 |
def _request(self, url, method='GET', params=None, api_call=None):
    """Internal request method"""
    method = method.lower()
    params = params or {}
    func = getattr(self.client, method)
    params, files = _transparent_params(params)
    requests_args = {}
    for k, v in self.client_args.items():
        # Maybe this should be set as a class variable and only done once?
        if k in ('timeout', 'allow_redirects', 'stream', 'verify'):
            requests_args[k] = v
    if method == 'get':
        requests_args['params'] = params
    else:
        requests_args.update({
            'data': params,
            'files': files,
        })
    response = func(url, **requests_args)
    content = response.content.decode('utf-8')
    # create stash for last function intel
    self._last_call = {
        'api_call': api_call,
        'api_error': None,
        'cookies': response.cookies,
        'headers': response.headers,
        'status_code': response.status_code,
        'url': response.url,
        'content': content,
    }
    # Wrap the json loads in a try, and defer an error
    # Twitter will return invalid json with an error code in the headers
    json_error = False
    try:
        try:
            # try to get json
            content = content.json()
        except AttributeError:
            # if unicode detected
            content = json.loads(content)
    except __HOLE__:
        json_error = True
        content = {}
    if response.status_code > 304:
        # If there is no error message, use a default.
        errors = content.get('errors',
                             [{'message': 'An error occurred processing your request.'}])
        if errors and isinstance(errors, list):
            error_message = errors[0]['message']
        else:
            error_message = errors
        self._last_call['api_error'] = error_message
        ExceptionType = TwythonError
        if response.status_code == 429:
            # Twitter API 1.1, always return 429 when rate limit is exceeded
            ExceptionType = TwythonRateLimitError
        elif response.status_code == 401 or 'Bad Authentication data' in error_message:
            # Twitter API 1.1, returns a 401 Unauthorized or
            # a 400 "Bad Authentication data" for invalid/expired app keys/user tokens
            ExceptionType = TwythonAuthError
        raise ExceptionType(error_message,
                            error_code=response.status_code,
                            retry_after=response.headers.get('retry-after'))
    # if we have a json error here, then it's not an official Twitter API error
    if json_error and not response.status_code in (200, 201, 202):
        raise TwythonError('Response was not valid JSON, unable to decode.')
    return content
|
ValueError
|
dataset/ETHPy150Open splunk/splunk-app-twitter/twitter2/bin/twython/api.py/Twython._request
|
4,924 |
def obtain_access_token(self):
    """Returns an OAuth 2 access token to make OAuth 2 authenticated read-only calls.

    :rtype: string
    """
    if self.oauth_version != 2:
        raise TwythonError('This method can only be called when your OAuth version is 2.0.')
    data = {'grant_type': 'client_credentials'}
    basic_auth = HTTPBasicAuth(self.app_key, self.app_secret)
    try:
        response = self.client.post(self.request_token_url,
                                    data=data, auth=basic_auth)
        content = response.content.decode('utf-8')
        try:
            content = content.json()
        except __HOLE__:
            content = json.loads(content)
        access_token = content['access_token']
    except (ValueError, requests.exceptions.RequestException):
        raise TwythonAuthError('Unable to obtain OAuth 2 access token.')
    else:
        return access_token
|
AttributeError
|
dataset/ETHPy150Open splunk/splunk-app-twitter/twitter2/bin/twython/api.py/Twython.obtain_access_token
|
4,925 |
def search_gen(self, search_query, **params):
    """Returns a generator of tweets that match a specified query.

    Documentation: https://dev.twitter.com/docs/api/1.1/get/search/tweets

    :param search_query: Query you intend to search Twitter for
    :param \*\*params: Extra parameters to send with your search request
    :rtype: generator

    Usage::

        >>> from twython import Twython
        >>> twitter = Twython(APP_KEY, APP_SECRET, OAUTH_TOKEN, OAUTH_TOKEN_SECRET)
        >>> search = twitter.search_gen('python')
        >>> for result in search:
        >>>     print result

    """
    content = self.search(q=search_query, **params)
    if not content.get('statuses'):
        raise StopIteration
    for tweet in content['statuses']:
        yield tweet
    try:
        if not 'since_id' in params:
            params['since_id'] = (int(content['statuses'][0]['id_str']) + 1)
    except (TypeError, __HOLE__):
        raise TwythonError('Unable to generate next page of search results, `page` is not a number.')
    for tweet in self.search_gen(search_query, **params):
        yield tweet
|
ValueError
|
dataset/ETHPy150Open splunk/splunk-app-twitter/twitter2/bin/twython/api.py/Twython.search_gen
|
4,926 |
@extensionclassmethod(Observable)
def catch_exception(cls, *args):
    """Continues an observable sequence that is terminated by an
    exception with the next observable sequence.

    1 - res = Observable.catch_exception(xs, ys, zs)
    2 - res = Observable.catch_exception([xs, ys, zs])

    Returns an observable sequence containing elements from consecutive
    source sequences until a source sequence terminates successfully.
    """
    if isinstance(args[0], list) or isinstance(args[0], Enumerable):
        sources = args[0]
    else:
        sources = list(args)

    #return Enumerable.catch_exception(Enumerable.for_each(sources))

    def subscribe(observer):
        e = iter(sources)
        is_disposed = [False]
        last_exception = [None]
        subscription = SerialDisposable()

        def action(action1, state=None):
            def on_error(exn):
                last_exception[0] = exn
                action1()

            if is_disposed[0]:
                return
            try:
                current = next(e)
            except __HOLE__:
                if last_exception[0]:
                    observer.on_error(last_exception[0])
                else:
                    observer.on_completed()
            except Exception as ex:
                observer.on_error(ex)
            else:
                d = SingleAssignmentDisposable()
                subscription.disposable = d
                d.disposable = current.subscribe(
                    observer.on_next,
                    on_error,
                    observer.on_completed
                )

        cancelable = immediate_scheduler.schedule_recursive(action)

        def dispose():
            is_disposed[0] = True

        return CompositeDisposable(subscription, cancelable, Disposable(dispose))
    return AnonymousObservable(subscribe)
|
StopIteration
|
dataset/ETHPy150Open ReactiveX/RxPY/rx/linq/observable/catch.py/catch_exception
|
4,927 |
@requires_version('scipy', '0.11')
def test_add_patch_info():
    """Test adding patch info to source space"""
    # let's setup a small source space
    src = read_source_spaces(fname_small)
    src_new = read_source_spaces(fname_small)
    for s in src_new:
        s['nearest'] = None
        s['nearest_dist'] = None
        s['pinfo'] = None
    # test that no patch info is added for small dist_limit
    try:
        add_source_space_distances(src_new, dist_limit=0.00001)
    except __HOLE__:  # what we throw when scipy version is wrong
        pass
    else:
        assert_true(all(s['nearest'] is None for s in src_new))
        assert_true(all(s['nearest_dist'] is None for s in src_new))
        assert_true(all(s['pinfo'] is None for s in src_new))
    # now let's use one that works
    add_source_space_distances(src_new)
    for s1, s2 in zip(src, src_new):
        assert_array_equal(s1['nearest'], s2['nearest'])
        assert_allclose(s1['nearest_dist'], s2['nearest_dist'], atol=1e-7)
        assert_equal(len(s1['pinfo']), len(s2['pinfo']))
        for p1, p2 in zip(s1['pinfo'], s2['pinfo']):
            assert_array_equal(p1, p2)
|
RuntimeError
|
dataset/ETHPy150Open mne-tools/mne-python/mne/tests/test_source_space.py/test_add_patch_info
|
4,928 |
@testing.requires_testing_data
@requires_version('scipy', '0.11')
def test_add_source_space_distances_limited():
    """Test adding distances to source space with a dist_limit"""
    tempdir = _TempDir()
    src = read_source_spaces(fname)
    src_new = read_source_spaces(fname)
    del src_new[0]['dist']
    del src_new[1]['dist']
    n_do = 200  # limit this for speed
    src_new[0]['vertno'] = src_new[0]['vertno'][:n_do].copy()
    src_new[1]['vertno'] = src_new[1]['vertno'][:n_do].copy()
    out_name = op.join(tempdir, 'temp-src.fif')
    try:
        add_source_space_distances(src_new, dist_limit=0.007)
    except __HOLE__:  # what we throw when scipy version is wrong
        raise SkipTest('dist_limit requires scipy > 0.13')
    write_source_spaces(out_name, src_new)
    src_new = read_source_spaces(out_name)
    for so, sn in zip(src, src_new):
        assert_array_equal(so['dist_limit'], np.array([-0.007], np.float32))
        assert_array_equal(sn['dist_limit'], np.array([0.007], np.float32))
        do = so['dist']
        dn = sn['dist']
        # clean out distances > 0.007 in C code
        do.data[do.data > 0.007] = 0
        do.eliminate_zeros()
        # make sure we have some comparable distances
        assert_true(np.sum(do.data < 0.007) > 400)
        # do comparison over the region computed
        d = (do - dn)[:sn['vertno'][n_do - 1]][:, :sn['vertno'][n_do - 1]]
        assert_allclose(np.zeros_like(d.data), d.data, rtol=0, atol=1e-6)
|
RuntimeError
|
dataset/ETHPy150Open mne-tools/mne-python/mne/tests/test_source_space.py/test_add_source_space_distances_limited
|
4,929 |
def reverts(action):
    try:
        return list(_INVERSES[action])
    except __HOLE__:
        return []
|
KeyError
|
dataset/ETHPy150Open openstack/anvil/anvil/actions/states.py/reverts
|
4,930 |
def receiver(self):
    buf = bytearray()
    sock = self._sock
    wait_read = gethub().do_read
    add_chunk = buf.extend
    pos = [0]

    def readmore():
        while True:
            wait_read(sock)
            try:
                if pos[0]*2 > len(buf):
                    del buf[:pos[0]]
                    pos[0] = 0
                bytes = sock.recv(self.BUFSIZE)
                if not bytes:
                    raise EOFError()
                add_chunk(bytes)
            except socket.error as e:
                if e.errno in (errno.EAGAIN, errno.EINTR):
                    continue
                else:
                    raise
            else:
                break

    def readchar():
        if len(buf) <= pos[0]:
            readmore()
        c = buf[pos[0]]
        pos[0] += 1
        return c

    def readline():
        if len(buf) < 2 or pos[0] >= len(buf):
            readmore()
        while True:
            try:
                idx = buf.index(b'\r\n', pos[0])
            except __HOLE__:
                pass
            else:
                break
            readmore()
        res = buf[pos[0]:idx]
        pos[0] = idx + 2
        return res

    def readslice(ln):
        while len(buf) - pos[0] < ln:
            readmore()
        res = buf[pos[0]:pos[0]+ln]
        pos[0] += ln
        return res

    def readone():
        ch = readchar()
        if ch == 42:  # b'*'
            cnt = int(readline())
            return [readone() for i in range(cnt)]
        elif ch == 43:  # b'+'
            return readline().decode('ascii')
        elif ch == 45:  # b'-'
            return RedisError(readline().decode('ascii'))
        elif ch == 58:  # b':'
            return int(readline())
        elif ch == 36:  # b'$'
            ln = int(readline())
            if ln < 0:
                return None
            res = readslice(ln)
            assert readline() == b''
            return res
        else:
            raise NotImplementedError(ch)

    while True:
        self.produce(readone())
|
ValueError
|
dataset/ETHPy150Open tailhook/zorro/zorro/redis.py/RedisChannel.receiver
|
4,931 |
def findPanels(self) :
    """
    find panels from grabbed websites
    the attacker may do a lot of vulnerabilty
    tests on the admin area
    """
    print "[~] Finding admin panels"
    adminList = ['admin/', 'site/admin', 'admin.php/', 'up/admin/', 'central/admin/', 'whm/admin/', 'whmcs/admin/', 'support/admin/', 'upload/admin/', 'video/admin/', 'shop/admin/', 'shoping/admin/', 'wp-admin/', 'wp/wp-admin/', 'blog/wp-admin/', 'admincp/', 'admincp.php/', 'vb/admincp/', 'forum/admincp/', 'up/admincp/', 'administrator/', 'administrator.php/', 'joomla/administrator/', 'jm/administrator/', 'site/administrator/', 'install/', 'vb/install/', 'dimcp/', 'clientes/', 'admin_cp/', 'login/', 'login.php', 'site/login', 'site/login.php', 'up/login/', 'up/login.php', 'cp.php', 'up/cp', 'cp', 'master', 'adm', 'member', 'control', 'webmaster', 'myadmin', 'admin_cp', 'admin_site']
    clearScr()
    for site in self.sites :
        for admin in adminList :
            try :
                if urllib.urlopen(site + admin).getcode() == 200 :
                    print " [*] Found admin panel -> ", site + admin
            except __HOLE__ :
                pass

############################
#find ZIP files
|
IOError
|
dataset/ETHPy150Open x3omdax/PenBox/Versions/V1.3/penbox.py/TNscan.findPanels
|
4,932 |
def findZip(self) :
    """
    find zip files from grabbed websites
    it may contain useful informations
    """
    zipList = ['backup.tar.gz', 'backup/backup.tar.gz', 'backup/backup.zip', 'vb/backup.zip', 'site/backup.zip', 'backup.zip', 'backup.rar', 'backup.sql', 'vb/vb.zip', 'vb.zip', 'vb.sql', 'vb.rar', 'vb1.zip', 'vb2.zip', 'vbb.zip', 'vb3.zip', 'upload.zip', 'up/upload.zip', 'joomla.zip', 'joomla.rar', 'joomla.sql', 'wordpress.zip', 'wp/wordpress.zip', 'blog/wordpress.zip', 'wordpress.rar']
    clearScr()
    print "[~] Finding zip file"
    for site in self.sites :
        for zip1 in zipList :
            try:
                if urllib.urlopen(site + zip1).getcode() == 200 :
                    print " [*] Found zip file -> ", site + zip1
            except __HOLE__ :
                pass

############################
#find upload directories
|
IOError
|
dataset/ETHPy150Open x3omdax/PenBox/Versions/V1.3/penbox.py/TNscan.findZip
|
4,933 |
def findUp(self) :
    """
    find upload forms from grabbed
    websites the attacker may succeed to
    upload malicious files like webshells
    """
    upList = ['up.php', 'up1.php', 'up/up.php', 'site/up.php', 'vb/up.php', 'forum/up.php','blog/up.php', 'upload.php', 'upload1.php', 'upload2.php', 'vb/upload.php', 'forum/upload.php', 'blog/upload.php', 'site/upload.php', 'download.php']
    clearScr()
    print "[~] Finding Upload"
    for site in self.sites :
        for up in upList :
            try :
                if (urllib.urlopen(site + up).getcode() == 200) :
                    html = urllib.urlopen(site + up).readlines()
                    for line in html :
                        if re.findall('type=file', line) :
                            print " [*] Found upload -> ", site+up
            except __HOLE__ :
                pass

############################
#find users
|
IOError
|
dataset/ETHPy150Open x3omdax/PenBox/Versions/V1.3/penbox.py/TNscan.findUp
|
4,934 |
def ParseRate(rate):
    """Parses a rate string in the form number/unit, or the literal 0.

    The unit is one of s (seconds), m (minutes), h (hours) or d (days).

    Args:
        rate: the rate string.

    Returns:
        a floating point number representing the rate/second.

    Raises:
        MalformedQueueConfiguration: if the rate is invalid
    """
    if rate == "0":
        return 0.0
    elements = rate.split('/')
    if len(elements) != 2:
        raise MalformedQueueConfiguration('Rate "%s" is invalid.' % rate)
    number, unit = elements
    try:
        number = float(number)
    except __HOLE__:
        raise MalformedQueueConfiguration('Rate "%s" is invalid:'
                                          ' "%s" is not a number.' %
                                          (rate, number))
    if unit not in 'smhd':
        raise MalformedQueueConfiguration('Rate "%s" is invalid:'
                                          ' "%s" is not one of s, m, h, d.' %
                                          (rate, unit))
    if unit == 's':
        return number
    if unit == 'm':
        return number/60
    if unit == 'h':
        return number/(60 * 60)
    if unit == 'd':
        return number/(24 * 60 * 60)
|
ValueError
|
dataset/ETHPy150Open AppScale/appscale/AppServer/google/appengine/api/queueinfo.py/ParseRate
|
4,935 |
def ParseTotalStorageLimit(limit):
    """Parses a string representing the storage bytes limit.

    Optional limit suffixes are:
        B (bytes), K (kilobytes), M (megabytes), G (gigabytes), T (terabytes)

    Args:
        limit: The storage bytes limit string.

    Returns:
        An int representing the storage limit in bytes.

    Raises:
        MalformedQueueConfiguration: if the limit argument isn't a valid python
        double followed by an optional suffix.
    """
    limit = limit.strip()
    if not limit:
        raise MalformedQueueConfiguration('Total Storage Limit must not be empty.')
    try:
        if limit[-1] in BYTE_SUFFIXES:
            number = float(limit[0:-1])
            for c in BYTE_SUFFIXES:
                if limit[-1] != c:
                    number = number * 1024
                else:
                    return int(number)
        else:
            return int(limit)
    except __HOLE__:
        raise MalformedQueueConfiguration('Total Storage Limit "%s" is invalid.' %
                                          limit)
|
ValueError
|
dataset/ETHPy150Open AppScale/appscale/AppServer/google/appengine/api/queueinfo.py/ParseTotalStorageLimit
|
4,936 |
def ParseTaskAgeLimit(age_limit):
    """Parses a string representing the task's age limit (maximum allowed age).

    The string must be a non-negative integer or floating point number followed by
    one of s, m, h, or d (seconds, minutes, hours or days respectively).

    Args:
        age_limit: The task age limit string.

    Returns:
        An int representing the age limit in seconds.

    Raises:
        MalformedQueueConfiguration: if the limit argument isn't a valid python
        double followed by a required suffix.
    """
    age_limit = age_limit.strip()
    if not age_limit:
        raise MalformedQueueConfiguration('Task Age Limit must not be empty.')
    unit = age_limit[-1]
    if unit not in "smhd":
        raise MalformedQueueConfiguration('Task Age_Limit must be in s (seconds), '
                                          'm (minutes), h (hours) or d (days)')
    try:
        number = float(age_limit[0:-1])
        if unit == 's':
            return int(number)
        if unit == 'm':
            return int(number * 60)
        if unit == 'h':
            return int(number * 3600)
        if unit == 'd':
            return int(number * 86400)
    except __HOLE__:
        raise MalformedQueueConfiguration('Task Age_Limit "%s" is invalid.' %
                                          age_limit)
|
ValueError
|
dataset/ETHPy150Open AppScale/appscale/AppServer/google/appengine/api/queueinfo.py/ParseTaskAgeLimit
|
4,937 |
def on_request(self, request):
    if not self.ssl:
        return request
    try:
        record, remaining = tls.parse_tls(request)
        message = record.messages[0]
        if not self.clienthello_adjusted:
            self.clienthello_adjusted = True
            hello = message.obj
            # Force a full handshake (and thus a key exchange) by preventing
            # session resumption by clearing session ID and SessionTicket.
            hello.session_id = []
            for ext in hello.extension_list:
                if ext.type == Extension.TYPE.SESSIONTICKET:
                    ext.raw_data = []
            # Retain in ClientHello only cipher suites which require the
            # server to send a ServerKeyExchange message: ephemeral (EC)DH
            # and RSA_EXPORT cipher suites. Also retain pseudo/signalling
            # cipher suites because they don't affect this attack/test.
            hello.ciphers = [c for c in hello.ciphers
                             if ("_DHE_" in str(c) or
                                 "_ECDHE_" in str(c) or
                                 "_RSA_EXPORT_" in str(c) or
                                 str(c).endswith("_SCSV"))]
            return record.to_bytes()
        if self.signature_tampered:
            # The client MUST reply with an alert and close the connection.
            # Just closing the connection is also acceptable.
            if not self.first_alert_received_after_tampering:
                if isinstance(message, Alert):
                    self.first_alert_received_after_tampering = message
                    return request
            self.vuln_detected = True
            self.log(
                logging.CRITICAL,
                ("Client is vulnerable to server key substitution"
                 " attack! Client reply: %s" % str(message)))
            self.connection.vuln_notify(
                util.vuln.VULN_TLS_SERVER_KEY_REPLACEMENT)
            self.log_attack_event()
            self.connection.close()
            return request
    except __HOLE__:
        # Failed to parse TLS, this is probably due to a short read of a TLS
        # record.
        pass
    return request
|
ValueError
|
dataset/ETHPy150Open google/nogotofail/nogotofail/mitm/connection/handlers/connection/serverkeyreplace.py/ServerKeyReplacementMITM.on_request
|
4,938 |
def update_latest_symlink(outdir, latest_symlink):
    """Updates the 'latest' symlink to point to the given outdir."""
    if os.path.lexists(latest_symlink):
        try:
            os.remove(latest_symlink)
        except OSError as err:
            return err
    try:
        os.symlink(os.path.basename(auto_outdir), latest_symlink)
    except __HOLE__ as err:
        return err
    return None
|
OSError
|
dataset/ETHPy150Open m-lab/operator/tools/fetch.py/update_latest_symlink
|
4,939 |
def log(self, level, message):
    logline = self._logline(message)
    try:
        self.assem_logger.log(level, logline)
    except __HOLE__ as e:
        LOG.error(e)
|
IOError
|
dataset/ETHPy150Open openstack/solum/solum/uploaders/tenant_logger.py/TenantLogger.log
|
4,940 |
@defer.inlineCallbacks
def action_get_request(self, data):
    from .http import Request
    try:
        reqid = data['reqid']
        req = yield Request.load_request(reqid)
    except __HOLE__:
        raise PappyException("Request with given ID does not exist")
    dat = json.loads(req.to_json())
    defer.returnValue(dat)
|
KeyError
|
dataset/ETHPy150Open roglew/pappy-proxy/pappyproxy/comm.py/CommServer.action_get_request
|
4,941 |
@defer.inlineCallbacks
def action_get_response(self, data):
    from .http import Request, Response
    try:
        reqid = data['reqid']
        req = yield Request.load_request(reqid)
    except __HOLE__:
        raise PappyException("Request with given ID does not exist, cannot fetch associated response.")
    if req.response:
        rsp = yield Response.load_response(req.response.rspid)
        dat = json.loads(rsp.to_json())
    else:
        dat = {}
    defer.returnValue(dat)
|
KeyError
|
dataset/ETHPy150Open roglew/pappy-proxy/pappyproxy/comm.py/CommServer.action_get_response
|
4,942 |
def base64_b64decode(instr):
    '''
    Decode a base64-encoded string using the "modern" Python interface

    .. versionadded:: 2016.3.0

    CLI Example:

    .. code-block:: bash

        salt '*' hashutil.base64_b64decode 'Z2V0IHNhbHRlZA=='
    '''
    if six.PY3:
        b = salt.utils.to_bytes(instr)
        data = base64.b64decode(b)
        try:
            return salt.utils.to_str(data)
        except __HOLE__:
            return data
    return base64.b64decode(instr)
|
UnicodeDecodeError
|
dataset/ETHPy150Open saltstack/salt/salt/modules/hashutil.py/base64_b64decode
|
4,943 |
def base64_decodestring(instr):
    '''
    Decode a base64-encoded string using the "legacy" Python interface

    .. versionadded:: 2014.7.0

    CLI Example:

    .. code-block:: bash

        salt '*' hashutil.base64_decodestring instr='Z2V0IHNhbHRlZAo='
    '''
    if six.PY3:
        b = salt.utils.to_bytes(instr)
        data = base64.decodebytes(b)
        try:
            return salt.utils.to_str(data)
        except __HOLE__:
            return data
    return base64.decodestring(instr)
|
UnicodeDecodeError
|
dataset/ETHPy150Open saltstack/salt/salt/modules/hashutil.py/base64_decodestring
|
4,944 |
def test_push_pop(self):
    # 1) Push 256 random numbers and pop them off, verifying all's OK.
    heap = []
    data = []
    self.check_invariant(heap)
    for i in range(256):
        item = random.random()
        data.append(item)
        self.module.heappush(heap, item)
        self.check_invariant(heap)
    results = []
    while heap:
        item = self.module.heappop(heap)
        self.check_invariant(heap)
        results.append(item)
    data_sorted = data[:]
    data_sorted.sort()
    self.assertEqual(data_sorted, results)
    # 2) Check that the invariant holds for a sorted array
    self.check_invariant(results)
    self.assertRaises(TypeError, self.module.heappush, [])
    try:
        self.assertRaises(TypeError, self.module.heappush, None, None)
        self.assertRaises(TypeError, self.module.heappop, None)
    except __HOLE__:
        pass
|
AttributeError
|
dataset/ETHPy150Open azoft-dev-team/imagrium/env/Lib/test/test_heapq.py/TestHeap.test_push_pop
|
4,945 |
def heapiter(self, heap):
    # An iterator returning a heap's elements, smallest-first.
    try:
        while 1:
            yield self.module.heappop(heap)
    except __HOLE__:
        pass
|
IndexError
|
dataset/ETHPy150Open azoft-dev-team/imagrium/env/Lib/test/test_heapq.py/TestHeap.heapiter
|
4,946 |
def open(self, url):
    try:
        content, headers_dict = self.url_mappings[url]
    except __HOLE__:
        raise IOError # simulate a connection error
    if callable(content):
        content = content()
    if not hasattr(content, 'read'):
        f = StringIO(content)
    headers = self.headers
    if not headers_dict and headers_dict != {}:
        headers_dict = {}
    headers.update(headers_dict)
    return MockResponseWrapper(url, f, headers)

# Mock sources
# Mock sources
|
KeyError
|
dataset/ETHPy150Open ofri/Open-Knesset/mks/mock.py/MockReader.open
|
4,947 |
def main(restarted=False):
    es = ExploitSearch()
    if restarted:
        es.intro = '\n'
    try:
        es.cmdloop()
    except __HOLE__:
        main(True)
|
KeyboardInterrupt
|
dataset/ETHPy150Open Gioyik/getExploit/getExploit.py/main
|
4,948 |
def __iter__(self):
    it = iter(self.table)
    hdr = next(it)
    shdr = sorted(hdr)
    indices = asindices(hdr, shdr)
    transform = rowgetter(*indices)
    # yield the transformed header
    yield tuple(shdr)
    # construct the transformed data
    missing = self.missing
    for row in it:
        try:
            yield transform(row)
        except __HOLE__:
            # row is short, let's be kind and fill in any missing fields
            yield tuple(row[i] if i < len(row) else missing
                        for i in indices)
|
IndexError
|
dataset/ETHPy150Open alimanfoo/petl/petl/transform/headers.py/SortHeaderView.__iter__
|
4,949 |
def parse_json_file(filename):
    """open file and parse its content as json"""
    with open(filename, 'r') as jsonfile:
        content = jsonfile.read()
    try:
        result = json.loads(content)
    except __HOLE__:
        _logger.error(
            "Parsing file %s failed. Check syntax with a JSON validator:"
            "\nhttp://jsonlint.com/?json=%s", filename, quote(content))
        sys.exit(1)
    return result
|
ValueError
|
dataset/ETHPy150Open MA3STR0/kimsufi-crawler/crawler.py/parse_json_file
|
4,950 |
@coroutine
def run(self):
    """Run a crawler iteration"""
    http_client = AsyncHTTPClient()
    # request OVH availability API asynchronously
    try:
        response = yield http_client.fetch(self.API_URL, request_timeout=REQUEST_TIMEOUT)
    except __HOLE__ as ex:
        # Internal Server Error
        self.HTTP_ERRORS.append(ex)
        if len(self.HTTP_ERRORS) > 3:
            _logger.error("Too many HTTP Errors: %s", self.HTTP_ERRORS)
        return
    except Exception as gex:
        # Also catch other errors.
        _logger.error("Socket Error: %s", str(gex))
        return
    if self.HTTP_ERRORS:
        del self.HTTP_ERRORS[:]
    response_json = json.loads(response.body.decode('utf-8'))
    if not response_json or not response_json['answer']:
        return
    availability = response_json['answer']['availability']
    for item in availability:
        # get server type of availability item
        server_type = self.SERVER_TYPES.get(item['reference'])
        # return if this server type is not in mapping
        if not server_type:
            continue
        # make a flat list of zones where servers of this type are available
        available_zones = set([
            e['zone'] for e in item['zones']
            if e['availability'] not in ['unavailable', 'unknown']])
        _logger.debug('%s is available in %s', server_type, available_zones)
        # iterate over all regions and update availability states
        for region, places in self.REGIONS.items():
            server_available = bool(available_zones.intersection(places))
            state_id = '%s_available_in_%s' % (server_type, region)
            message = {
                'title': "{0} is available".format(server_type),
                'text': "Server {server} is available in {region}".format(
                    server=server_type, region=region.capitalize()),
                'url': "http://www.kimsufi.com/en/index.xml"
            }
            if 'sys' in item['reference'] or 'bk' in item['reference']:
                message['url'] = 'http://www.soyoustart.com/de/essential-server/'
            self.update_state(state_id, server_available, message)
|
HTTPError
|
dataset/ETHPy150Open MA3STR0/kimsufi-crawler/crawler.py/Crawler.run
|
4,951 |
def get_git_shas_for_service(service, deploy_groups, soa_dir):
    """Returns a list of 2-tuples of the form (sha, timestamp) for each deploy tag in a service's git
    repository"""
    if service is None:
        return []
    git_url = get_git_url(service=service, soa_dir=soa_dir)
    all_deploy_groups = {config.get_deploy_group() for config in get_instance_config_for_service(
        service=service,
        soa_dir=soa_dir,
    )}
    deploy_groups, _ = validate_given_deploy_groups(all_deploy_groups, deploy_groups)
    previously_deployed_shas = {}
    for ref, sha in list_remote_refs(git_url).items():
        regex_match = extract_tags(ref)
        try:
            deploy_group = regex_match['deploy_group']
            tstamp = regex_match['tstamp']
        except __HOLE__:
            pass
        else:
            # note that all strings are greater than ''
            if deploy_group in deploy_groups and tstamp > previously_deployed_shas.get(sha, ''):
                previously_deployed_shas[sha] = tstamp
    return previously_deployed_shas.items()
|
KeyError
|
dataset/ETHPy150Open Yelp/paasta/paasta_tools/cli/cmds/rollback.py/get_git_shas_for_service
|
4,952 |
def test_impersonation():
    from hbased import Hbase as thrift_hbase

    c = make_logged_in_client(username='test_hbase', is_superuser=False)
    grant_access('test_hbase', 'test_hbase', 'hbase')
    user = User.objects.get(username='test_hbase')

    proto = MockProtocol()
    client = thrift_hbase.Client(proto)

    impersonation_enabled = is_impersonation_enabled()
    get_conf()[_CNF_HBASE_IMPERSONATION_ENABLED] = 'FALSE'
    try:
        client.getTableNames(doas=user.username)
    except __HOLE__:
        pass  # We don't mock everything
    finally:
        get_conf()[_CNF_HBASE_IMPERSONATION_ENABLED] = impersonation_enabled
    assert_equal({}, proto.get_headers())

    get_conf()[_CNF_HBASE_IMPERSONATION_ENABLED] = 'TRUE'
    try:
        client.getTableNames(doas=user.username)
    except AttributeError:
        pass  # We don't mock everything
    finally:
        get_conf()[_CNF_HBASE_IMPERSONATION_ENABLED] = impersonation_enabled
    assert_equal({'doAs': u'test_hbase'}, proto.get_headers())
|
AttributeError
|
dataset/ETHPy150Open cloudera/hue/apps/hbase/src/hbase/tests.py/test_impersonation
|
4,953 |
def log_line_parser(self, raw_log):
    '''given a raw access log line, return a dict of the good parts'''
    d = {}
    try:
        log_source = None
        split_log = raw_log[16:].split(' ')
        (unused,
         server,
         client_ip,
         lb_ip,
         timestamp,
         method,
         request,
         http_version,
         code,
         referrer,
         user_agent,
         auth_token,
         bytes_in,
         bytes_out,
         etag,
         trans_id,
         headers,
         processing_time) = (unquote(x) for x in split_log[:18])
        if len(split_log) > 18:
            log_source = split_log[18]
    except __HOLE__:
        self.logger.debug(_('Bad line data: %s') % repr(raw_log))
        return {}
    if server != self.server_name:
        # incorrect server name in log line
        self.logger.debug(_('Bad server name: found "%(found)s" ' \
                            'expected "%(expected)s"') %
                          {'found': server, 'expected': self.server_name})
        return {}
    try:
        parsed_url = urlparse(request)
        request = parsed_url.path
        query = parsed_url.query
        (version, account, container_name, object_name) = \
            split_path(request, 2, 4, True)
    except ValueError, e:
        self.logger.debug(_('Invalid path: %(error)s from data: %(log)s') %
                          {'error': e, 'log': repr(raw_log)})
        return {}
    if version != 'v1':
        # "In the wild" places this can be caught are with auth systems
        # that use the same endpoint as the rest of the Swift API (eg
        # tempauth or swauth). But if the Swift API ever does change, this
        # protects that too.
        self.logger.debug(_('Unexpected Swift version string: found ' \
                            '"%s" expected "v1"') % version)
        return {}
    if query != "":
        args = query.split('&')
        d['query'] = query
        # Count each query argument. This is used later to aggregate
        # the number of format, prefix, etc. queries.
        for q in args:
            if '=' in q:
                k, v = q.split('=', 1)
            else:
                k = q
            # Certain keys will get summmed in stats reporting
            # (format, path, delimiter, etc.). Save a "1" here
            # to indicate that this request is 1 request for
            # its respective key.
            if k in LISTING_PARAMS:
                d[k] = 1
    d['client_ip'] = client_ip
    d['lb_ip'] = lb_ip
    d['method'] = method
    d['request'] = request
    d['http_version'] = http_version
    d['code'] = code
    d['referrer'] = referrer
    d['user_agent'] = user_agent
    d['auth_token'] = auth_token
    d['bytes_in'] = bytes_in
    d['bytes_out'] = bytes_out
    d['etag'] = etag
    d['trans_id'] = trans_id
    d['processing_time'] = processing_time
    day, month, year, hour, minute, second = timestamp.split('/')
    d['day'] = day
    month = ('%02s' % month_map.index(month)).replace(' ', '0')
    d['month'] = month
    d['year'] = year
    d['hour'] = hour
    d['minute'] = minute
    d['second'] = second
    d['tz'] = '+0000'
    d['account'] = account
    d['container_name'] = container_name
    d['object_name'] = object_name
    d['bytes_out'] = int(d['bytes_out'].replace('-', '0'))
    d['bytes_in'] = int(d['bytes_in'].replace('-', '0'))
    d['code'] = int(d['code'])
    d['log_source'] = log_source
    return d
|
ValueError
|
dataset/ETHPy150Open notmyname/slogging/slogging/access_processor.py/AccessLogProcessor.log_line_parser
|
4,954 |
def __getattr__(self, name):
    try:
        return self.instance.metadata.get(metadata_type__name=name).value
    except __HOLE__:
        raise AttributeError(
            _('\'metadata\' object has no attribute \'%s\'') % name
        )
|
ObjectDoesNotExist
|
dataset/ETHPy150Open mayan-edms/mayan-edms/mayan/apps/metadata/classes.py/DocumentMetadataHelper.__getattr__
|
4,955 |
def exception_str(value):
    '''
    Formats Exception object to a string. Unlike default str():

    - can handle unicode strings in exception arguments
    - tries to format arguments as str(), not as repr()
    '''
    try:
        return ', '.join([smart_str(b) for b in value])
    except (TypeError, __HOLE__):  # happens for non-iterable values
        try:
            return smart_str(value)
        except UnicodeEncodeError:
            try:
                return repr(value)
            except Exception:
                return '<Unprintable value>'
|
AttributeError
|
dataset/ETHPy150Open isagalaev/django_errorlog/django_errorlog/models.py/exception_str
|
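Why the hole sits next to TypeError: the join assumes `value` is iterable, and iterating a non-iterable raises TypeError (on Python 2 some exception objects can also trip AttributeError inside smart_str). Illustrative sketch with str() in place of Django's smart_str:

    def exc_to_str(value):
        try:
            return ', '.join(str(b) for b in value)
        except (TypeError, AttributeError):  # happens for non-iterable values
            return str(value)

    exc_to_str(['bad', 'input'])  # 'bad, input'
    exc_to_str(42)                # '42' -- int is not iterable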
4,956 |
def put_stream(self, bucket, label, stream_object, params={}):
        ## QUESTION: do we enforce that buckets have to be 'claimed' first?
## NB this method doesn't care if it has been
po, json_payload = self._get_object(bucket)
if label in json_payload.keys():
creation_date = None
else:
# New upload - record creation date
creation_date = datetime.now().isoformat().split(".")[0] ## '2010-07-08T19:56:47'
if params.has_key('_label'):
json_payload[label] = {"_label":params['_label']}
else:
json_payload[label] = {"_label":label}
hash_vals = po.add_bytestream_by_path(label, stream_object)
stat_vals = po.stat(label)
# Userland parameters for the file
cleaned_params = dict( [ (k, params[k]) for k in params if not k.startswith("_")])
json_payload[label].update(cleaned_params)
try:
json_payload[label]['_content_length'] = int(stat_vals.st_size)
except __HOLE__:
print "Error getting filesize from os.stat().st_size into an integer..."
if creation_date:
json_payload[label]['_creation_date'] = creation_date
json_payload[label]['_last_modified'] = creation_date
else:
# Modification date
json_payload[label]['_last_modified'] = datetime.now().isoformat().split(".")[0]
# Hash details:
if hash_vals:
json_payload[label]['_checksum'] = "%s:%s" % (hash_vals['type'], hash_vals['checksum'])
json_payload.sync()
return json_payload.state[label]
|
TypeError
|
dataset/ETHPy150Open okfn/ofs/ofs/local/pairtreestore.py/PTOFS.put_stream
|
4,957 |
@staticmethod
def GetHGRevision():
"""Attempts to retrieve the current mercurial revision number from the local
filesystem.
"""
filename = os.path.join(os.path.dirname(__file__), '../../hg_revision.txt')
try:
with open(filename) as f:
return f.read().strip()
except __HOLE__:
return None
|
IOError
|
dataset/ETHPy150Open viewfinderco/viewfinder/backend/base/environ.py/ServerEnvironment.GetHGRevision
|
4,958 |
def _makeDgModGhostObject(mayaType, dagMod, dgMod):
# we create a dummy object of this type in a dgModifier (or dagModifier)
# as the dgModifier.doIt() method is never called, the object
# is never actually created in the scene
# Note: at one point, if we didn't call the dgMod/dagMod.deleteNode method,
# and we call this function while loading a scene (for instance, if the scene requires
# a plugin that isn't loaded, and defines custom node types), then the nodes were still
# somehow created, despite never explicitly calling doIt()...
# ... however, this seems to no longer be the case, and the deleteNode calls are apparently
# harmful
if type(dagMod) is not api.MDagModifier or type(dgMod) is not api.MDGModifier :
raise ValueError, "Need a valid MDagModifier and MDGModifier or cannot return a valid MObject"
# Regardless of whether we're making a DG or DAG node, make a parent first -
# for some reason, this ensures good cleanup (don't ask me why...??)
parent = dagMod.createNode ( 'transform', api.MObject())
try :
# DependNode
obj = dgMod.createNode ( mayaType )
except __HOLE__:
# DagNode
try:
obj = dagMod.createNode ( mayaType, parent )
except Exception, err:
_logger.debug("Error trying to create ghost node for '%s': %s" % (mayaType, err))
return None
if api.isValidMObject(obj) :
return obj
else :
_logger.debug("Error trying to create ghost node for '%s'" % mayaType)
return None
|
RuntimeError
|
dataset/ETHPy150Open CountZer0/PipelineConstructionSet/python/maya/site-packages/pymel-1.0.3/pymel/internal/apicache.py/_makeDgModGhostObject
|
4,959 |
def _buildApiClassInfo(self):
_logger.debug("Starting ApiCache._buildApiClassInfo...")
from pymel.internal.parsers import ApiDocParser
self.apiClassInfo = {}
parser = ApiDocParser(api, enumClass=ApiEnum)
for name, obj in inspect.getmembers( api, lambda x: type(x) == type and x.__name__.startswith('M') ):
if not name.startswith( 'MPx' ):
try:
info = parser.parse(name)
self.apiClassInfo[ name ] = info
                except (IOError, __HOLE__, IndexError), e:
_logger.warn( "failed to parse docs for %r:\n%s" % (name, e) )
_logger.debug("...finished ApiCache._buildApiClassInfo")
|
ValueError
|
dataset/ETHPy150Open CountZer0/PipelineConstructionSet/python/maya/site-packages/pymel-1.0.3/pymel/internal/apicache.py/ApiCache._buildApiClassInfo
|
4,960 |
def static_serve(request, path, client):
"""
Given a request for a media asset, this view does the necessary wrangling
to get the correct thing delivered to the user. This can also emulate the
combo behavior seen when SERVE_REMOTE == False and EMULATE_COMBO == True.
"""
if msettings['SERVE_REMOTE']:
# We're serving from S3, redirect there.
url = client.remote_media_url().strip('/') + '/%(path)s'
return redirect(url, permanent=True)
if not msettings['SERVE_REMOTE'] and msettings['EMULATE_COMBO']:
# Combo emulation is on and we're serving media locally. Try to see if
# the given path matches a combo file defined in the JOINED dict in
# the MEDIASYNC settings dict.
combo_match = _find_combo_match(path)
if combo_match:
# We found a combo file match. Combine it and serve the result.
return combo_serve(request, combo_match, client)
# No combo file, but we're serving locally. Use the standard (inefficient)
# Django static serve view.
resp = serve(request, path, document_root=client.media_root, show_indexes=True)
try:
resp.content = client.process(resp.content, resp['Content-Type'], path)
except __HOLE__:
# HTTPNotModifiedResponse lacks the "Content-Type" key.
pass
return resp
|
KeyError
|
dataset/ETHPy150Open sunlightlabs/django-mediasync/mediasync/views.py/static_serve
|
4,961 |
def rebuild_servers():
"""
The function the rebuilds the set of servers
"""
try:
global Servers
servers = btcnet_info.get_pools().copy()
for filter_f in filters:
servers = filter_f(servers)
Servers = list(servers)
except __HOLE__ as Error:
logging.warn(Error)
except Exception as Error:
logging.error(traceback.format_exc())
|
ValueError
|
dataset/ETHPy150Open c00w/bitHopper/bitHopper/Logic/ServerLogic.py/rebuild_servers
|
4,962 |
def create_container(image,
command=None,
hostname=None,
user=None,
detach=True,
stdin_open=False,
tty=False,
mem_limit=None,
ports=None,
environment=None,
dns=None,
volumes=None,
volumes_from=None,
name=None,
cpu_shares=None,
cpuset=None,
binds=None):
'''
Create a new container
image
image to create the container from
command
command to execute while starting
hostname
hostname of the container
user
user to run docker as
detach
daemon mode, Default is ``True``
environment
environment variable mapping ``({'foo':'BAR'})``
ports
port redirections ``({'222': {}})``
volumes
list of volume mappings in either local volume, bound volume, or read-only
bound volume form::
(['/var/lib/mysql/', '/usr/local/etc/ssl:/etc/ssl', '/etc/passwd:/etc/passwd:ro'])
binds
complete dictionary of bound volume mappings::
{ '/usr/local/etc/ssl/certs/internal.crt': {
'bind': '/etc/ssl/certs/com.example.internal.crt',
'ro': True
},
'/var/lib/mysql': {
'bind': '/var/lib/mysql/',
'ro': False
}
}
This dictionary is suitable for feeding directly into the Docker API, and all
keys are required.
(see http://docker-py.readthedocs.org/en/latest/volumes/)
tty
attach ttys, Default is ``False``
stdin_open
let stdin open, Default is ``False``
name
name given to container
cpu_shares
CPU shares (relative weight)
cpuset
CPUs in which to allow execution ('0-3' or '0,1')
CLI Example:
.. code-block:: bash
salt '*' docker.create_container o/ubuntu volumes="['/s','/m:/f']"
'''
log.trace("modules.dockerio.create_container() called for image " + image)
status = base_status.copy()
client = _get_client()
# In order to permit specification of bind volumes in the volumes field,
# we'll look through it for bind-style specs and move them. This is purely
# for CLI convenience and backwards-compatibility, as states.dockerio
# should parse volumes before this, and the binds argument duplicates this.
# N.B. this duplicates code in states.dockerio._parse_volumes()
if isinstance(volumes, list):
for volume in volumes:
if ':' in volume:
volspec = volume.split(':')
source = volspec[0]
target = volspec[1]
ro = False
try:
if len(volspec) > 2:
ro = volspec[2] == "ro"
except __HOLE__:
pass
binds[source] = {'bind': target, 'ro': ro}
volumes.remove(volume)
try:
if salt.utils.version_cmp(client.version()['ApiVersion'], '1.18') == 1:
container_info = client.create_container(
image=image,
command=command,
hostname=hostname,
user=user,
detach=detach,
stdin_open=stdin_open,
tty=tty,
ports=ports,
environment=environment,
dns=dns,
volumes=volumes,
volumes_from=volumes_from,
name=name,
cpu_shares=cpu_shares,
cpuset=cpuset,
host_config=docker.utils.create_host_config(binds=binds,
mem_limit=mem_limit)
)
else:
container_info = client.create_container(
image=image,
command=command,
hostname=hostname,
user=user,
detach=detach,
stdin_open=stdin_open,
tty=tty,
mem_limit=mem_limit,
ports=ports,
environment=environment,
dns=dns,
volumes=volumes,
volumes_from=volumes_from,
name=name,
cpu_shares=cpu_shares,
cpuset=cpuset,
host_config=docker.utils.create_host_config(binds=binds)
)
log.trace("docker.client.create_container returned: " + str(container_info))
container = container_info['Id']
callback = _valid
comment = 'Container created'
out = {
'info': _get_container_infos(container),
'out': container_info
}
__salt__['mine.send']('dockerng.ps', verbose=True, all=True, host=True)
return callback(status, id_=container, comment=comment, out=out)
except Exception as e:
_invalid(status, id_=image, out=traceback.format_exc())
raise e
__salt__['mine.send']('dockerng.ps', verbose=True, all=True, host=True)
return status
|
IndexError
|
dataset/ETHPy150Open saltstack/salt/salt/modules/dockerio.py/create_container
|
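The IndexError label belongs to the optional third 'ro' component of a 'src:dst[:ro]' volume spec. A standalone sketch of the same parse, where the try/except stands in for the length check used above:

    def parse_volume(volume):
        volspec = volume.split(':')
        source, target = volspec[0], volspec[1]
        ro = False
        try:
            # volspec[2] only exists for the three-part form.
            ro = volspec[2] == 'ro'
        except IndexError:
            pass
        return source, target, ro

    parse_volume('/srv/ssl:/etc/ssl:ro')  # ('/srv/ssl', '/etc/ssl', True)
    parse_volume('/data:/var/data')       # ('/data', '/var/data', False)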
4,963 |
def start(container,
binds=None,
port_bindings=None,
lxc_conf=None,
publish_all_ports=None,
links=None,
privileged=False,
dns=None,
volumes_from=None,
network_mode=None,
restart_policy=None,
cap_add=None,
cap_drop=None):
'''
Start the specified container
container
container id
CLI Example:
.. code-block:: bash
salt '*' docker.start <container id>
'''
if binds:
if not isinstance(binds, dict):
raise SaltInvocationError('binds must be formatted as a dictionary')
client = _get_client()
status = base_status.copy()
try:
dcontainer = _get_container_infos(container)['Id']
if not is_running(container):
bindings = None
if port_bindings is not None:
try:
bindings = {}
for key, val in six.iteritems(port_bindings):
bindings[key] = (val.get('HostIp', ''), val['HostPort'])
except __HOLE__:
raise SaltInvocationError(
'port_bindings must be formatted as a dictionary of '
'dictionaries'
)
client.start(dcontainer,
binds=binds,
port_bindings=bindings,
lxc_conf=lxc_conf,
publish_all_ports=publish_all_ports,
links=links,
privileged=privileged,
dns=dns,
volumes_from=volumes_from,
network_mode=network_mode,
restart_policy=restart_policy,
cap_add=cap_add,
cap_drop=cap_drop)
if is_running(dcontainer):
_valid(status,
comment='Container {0} was started'.format(container),
id_=container)
else:
_invalid(status)
else:
_valid(status,
comment='Container {0} was already started'.format(container),
id_=container)
except Exception:
_invalid(status,
id_=container,
out=traceback.format_exc(),
comment=(
'An exception occurred while starting '
'your container {0}').format(container))
__salt__['mine.send']('dockerng.ps', verbose=True, all=True, host=True)
return status
|
AttributeError
|
dataset/ETHPy150Open saltstack/salt/salt/modules/dockerio.py/start
|
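The AttributeError hole fires when a port_bindings value is not a dict: calling .get() on, say, a bare string raises AttributeError, which the function converts into a usage error. Minimal sketch:

    def normalize_bindings(port_bindings):
        bindings = {}
        for key, val in port_bindings.items():
            # val must be a dict; anything else has no .get and raises
            # AttributeError (a missing 'HostPort' key would be KeyError).
            bindings[key] = (val.get('HostIp', ''), val['HostPort'])
        return bindings

    normalize_bindings({'22/tcp': {'HostPort': '2222'}})  # {'22/tcp': ('', '2222')}
    # normalize_bindings({'22/tcp': '2222'})  # AttributeError: 'str' object has no attribute 'get'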
4,964 |
def get_images(name=None, quiet=False, all=True):
'''
List docker images
name
repository name
quiet
only show image id, Default is ``False``
all
show all images, Default is ``True``
CLI Example:
.. code-block:: bash
salt '*' docker.get_images <name> [quiet=True|False] [all=True|False]
'''
client = _get_client()
status = base_status.copy()
try:
infos = client.images(name=name, quiet=quiet, all=all)
for i in range(len(infos)):
inf = infos[i]
try:
inf['Human_Size'] = _sizeof_fmt(int(inf['Size']))
except ValueError:
pass
try:
ts = int(inf['Created'])
dts = datetime.datetime.fromtimestamp(ts)
inf['Human_IsoCreated'] = dts.isoformat()
inf['Human_Created'] = dts.strftime(
'%Y-%m-%d %H:%M:%S')
except Exception:
pass
try:
inf['Human_VirtualSize'] = (
_sizeof_fmt(int(inf['VirtualSize'])))
except __HOLE__:
pass
_valid(status, out=infos)
except Exception:
_invalid(status, out=traceback.format_exc())
return status
|
ValueError
|
dataset/ETHPy150Open saltstack/salt/salt/modules/dockerio.py/get_images
|
4,965 |
def find_resource(manager, name_or_id):
"""Helper for the _find_* methods."""
# first try to get entity as integer id
try:
if isinstance(name_or_id, int) or name_or_id.isdigit():
return manager.get(int(name_or_id))
except exceptions.NotFound:
pass
# now try to get entity as uuid
try:
uuid.UUID(str(name_or_id))
return manager.get(name_or_id)
except (__HOLE__, exceptions.NotFound):
pass
try:
try:
return manager.find(human_id=name_or_id)
except exceptions.NotFound:
pass
# finally try to find entity by name
try:
resource = getattr(manager, 'resource_class', None)
name_attr = resource.NAME_ATTR if resource else 'name'
kwargs = {name_attr: name_or_id}
return manager.find(**kwargs)
except exceptions.NotFound:
msg = "No %s with a name or ID of '%s' exists." % \
(manager.resource_class.__name__.lower(), name_or_id)
raise exceptions.CommandError(msg)
except exceptions.NoUniqueMatch:
msg = ("Multiple %s matches found for '%s', use an ID to be more"
" specific." % (manager.resource_class.__name__.lower(),
name_or_id))
raise exceptions.CommandError(msg)
|
ValueError
|
dataset/ETHPy150Open kwminnick/rackspace-dns-cli/dnsclient/utils.py/find_resource
|
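uuid.UUID() raises ValueError for any string that is not a well-formed UUID, which is exactly what the hole above catches before falling through to name-based lookup. Sketch:

    import uuid

    def looks_like_uuid(name_or_id):
        try:
            uuid.UUID(str(name_or_id))
            return True
        except ValueError:
            return False

    looks_like_uuid('6ba7b810-9dad-11d1-80b4-00c04fd430c8')  # True
    looks_like_uuid('my-dns-zone')                           # False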
4,966 |
def command(self, cmd):
""" Run gphoto2 command """
# Test to see if there is a running command already
if self._proc and self._proc.poll():
raise error.InvalidCommand("Command already running")
else:
# Build the command.
run_cmd = [self._gphoto2, '--port', self.port]
run_cmd.extend(listify(cmd))
self.logger.debug("gphoto2 command: {}".format(run_cmd))
try:
self._proc = subprocess.Popen(
run_cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, universal_newlines=True)
except __HOLE__ as e:
raise error.InvalidCommand("Can't send command to gphoto2. {} \t {}".format(e, run_cmd))
except ValueError as e:
raise error.InvalidCommand("Bad parameters to gphoto2. {} \t {}".format(e, run_cmd))
except Exception as e:
raise error.PanError(e)
|
OSError
|
dataset/ETHPy150Open panoptes/POCS/panoptes/camera/camera.py/AbstractGPhotoCamera.command
|
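subprocess.Popen raises OSError when the executable cannot be found or run (on Python 3 usually FileNotFoundError, an OSError subclass) and ValueError for invalid arguments, matching the two except clauses above. A reduced sketch; the wrapper name is illustrative:

    import subprocess

    def run_tool(argv):
        try:
            proc = subprocess.Popen(argv, stdout=subprocess.PIPE,
                                    stderr=subprocess.STDOUT,
                                    universal_newlines=True)
        except OSError as e:
            # e.g. the binary is not on PATH
            raise RuntimeError("can't run %r: %s" % (argv, e))
        return proc.communicate()[0]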
4,967 |
def GetDefaultAPIProxy():
try:
runtime = __import__('google.appengine.runtime', globals(), locals(),
['apiproxy'])
return APIProxyStubMap(runtime.apiproxy)
except (__HOLE__, ImportError):
return APIProxyStubMap()
|
AttributeError
|
dataset/ETHPy150Open AppScale/appscale/AppServer/google/appengine/api/apiproxy_stub_map.py/GetDefaultAPIProxy
|
4,968 |
def get_saved_policy(table='filter', chain=None, conf_file=None, family='ipv4'):
'''
Return the current policy for the specified table/chain
CLI Examples:
.. code-block:: bash
salt '*' iptables.get_saved_policy filter INPUT
salt '*' iptables.get_saved_policy filter INPUT \\
conf_file=/etc/iptables.saved
IPv6:
salt '*' iptables.get_saved_policy filter INPUT family=ipv6
salt '*' iptables.get_saved_policy filter INPUT \\
conf_file=/etc/iptables.saved family=ipv6
'''
if not chain:
return 'Error: Chain needs to be specified'
rules = _parse_conf(conf_file, family=family)
try:
return rules[table][chain]['policy']
except __HOLE__:
return None
|
KeyError
|
dataset/ETHPy150Open saltstack/salt/salt/modules/iptables.py/get_saved_policy
|
4,969 |
def get_policy(table='filter', chain=None, family='ipv4'):
'''
Return the current policy for the specified table/chain
CLI Example:
.. code-block:: bash
salt '*' iptables.get_policy filter INPUT
IPv6:
salt '*' iptables.get_policy filter INPUT family=ipv6
'''
if not chain:
return 'Error: Chain needs to be specified'
rules = _parse_conf(in_mem=True, family=family)
try:
return rules[table][chain]['policy']
except __HOLE__:
return None
|
KeyError
|
dataset/ETHPy150Open saltstack/salt/salt/modules/iptables.py/get_policy
|
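Both lookups above rely on a single try/except KeyError covering a miss at any level of the nested rules dict, which is tidier than chaining 'in' checks. Self-contained version:

    rules = {'filter': {'INPUT': {'policy': 'ACCEPT'}}}

    def get_chain_policy(rules, table, chain):
        try:
            return rules[table][chain]['policy']
        except KeyError:  # unknown table, unknown chain, or no policy key
            return None

    get_chain_policy(rules, 'filter', 'INPUT')    # 'ACCEPT'
    get_chain_policy(rules, 'nat', 'PREROUTING')  # None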
4,970 |
def messagize(self, pkt, ha):
"""
Returns duple of (message, remote) converted from packet pkt and ha
Override in subclass
"""
msg = pkt.packed.decode("ascii")
try:
remote = self.haRemotes[ha]
except __HOLE__ as ex:
            console.concise(("{0}: Dropping packet received from unknown remote "
                    "ha '{1}'.\n{2}\n".format(self.name, ha, pkt.packed)))
            return (msg, None)  # unknown remote: return here, 'remote' is unbound below
return (msg, remote)
|
KeyError
|
dataset/ETHPy150Open ioflo/ioflo/ioflo/aio/proto/stacking.py/UdpStack.messagize
|
4,971 |
def _load_config():
try:
with open('config.json') as config_file:
return json.load(config_file)
except __HOLE__:
print('Please check your config.json file!')
return {}
|
IOError
|
dataset/ETHPy150Open Alephbet/gimel/config.py/_load_config
|
4,972 |
def same_origin(url1, url2):
"""
Checks if two URLs are 'same-origin'
"""
PROTOCOL_TO_PORT = {
'http': 80,
'https': 443,
}
p1, p2 = urlparse(url1), urlparse(url2)
try:
o1 = (p1.scheme, p1.hostname, p1.port or PROTOCOL_TO_PORT[p1.scheme])
o2 = (p2.scheme, p2.hostname, p2.port or PROTOCOL_TO_PORT[p2.scheme])
return o1 == o2
except (ValueError, __HOLE__):
return False
|
KeyError
|
dataset/ETHPy150Open django-tastypie/django-tastypie/tastypie/authentication.py/same_origin
|
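Two distinct failures funnel into the except above: KeyError for a scheme missing from PROTOCOL_TO_PORT (e.g. ftp://), and ValueError, which the parse result's .port property raises for a malformed explicit port. A sketch of the origin triple:

    try:
        from urllib.parse import urlparse   # Python 3
    except ImportError:
        from urlparse import urlparse       # Python 2

    PROTOCOL_TO_PORT = {'http': 80, 'https': 443}

    def origin(url):
        p = urlparse(url)
        # Unknown scheme -> KeyError; bad explicit port -> ValueError.
        return (p.scheme, p.hostname, p.port or PROTOCOL_TO_PORT[p.scheme])

    origin('https://example.com/a')  # ('https', 'example.com', 443)
    # origin('ftp://example.com/')   # KeyError: 'ftp'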
4,973 |
def is_authenticated(self, request, **kwargs):
"""
Checks a user's basic auth credentials against the current
Django auth backend.
Should return either ``True`` if allowed, ``False`` if not or an
``HttpResponse`` if you need something custom.
"""
try:
username, password = self.extract_credentials(request)
except __HOLE__:
return self._unauthorized()
if not username or not password:
return self._unauthorized()
if self.backend:
user = self.backend.authenticate(
username=username,
password=password
)
else:
user = authenticate(username=username, password=password)
if user is None:
return self._unauthorized()
if not self.check_active(user):
return False
request.user = user
return True
|
ValueError
|
dataset/ETHPy150Open django-tastypie/django-tastypie/tastypie/authentication.py/BasicAuthentication.is_authenticated
|
4,974 |
def get_identifier(self, request):
"""
Provides a unique string identifier for the requestor.
This implementation returns the user's basic auth username.
"""
try:
username = self.extract_credentials(request)[0]
except __HOLE__:
username = ''
return username or 'nouser'
|
ValueError
|
dataset/ETHPy150Open django-tastypie/django-tastypie/tastypie/authentication.py/BasicAuthentication.get_identifier
|
4,975 |
def extract_credentials(self, request):
try:
data = self.get_authorization_data(request)
except __HOLE__:
username = request.GET.get('username') or request.POST.get('username')
api_key = request.GET.get('api_key') or request.POST.get('api_key')
else:
username, api_key = data.split(':', 1)
return username, api_key
|
ValueError
|
dataset/ETHPy150Open django-tastypie/django-tastypie/tastypie/authentication.py/ApiKeyAuthentication.extract_credentials
|
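Several of the surrounding tastypie methods share this failure mode: data.split(':', 1) must yield exactly two parts for the two-name unpacking, so a missing ':' raises the ValueError every caller catches. Sketch:

    def split_credentials(data):
        username, api_key = data.split(':', 1)  # ValueError if no ':'
        return username, api_key

    split_credentials('alice:s3cr3t')  # ('alice', 's3cr3t')
    # split_credentials('alice')       # ValueError: not enough values to unpack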
4,976 |
def is_authenticated(self, request, **kwargs):
"""
Finds the user and checks their API key.
Should return either ``True`` if allowed, ``False`` if not or an
``HttpResponse`` if you need something custom.
"""
try:
username, api_key = self.extract_credentials(request)
except __HOLE__:
return self._unauthorized()
if not username or not api_key:
return self._unauthorized()
username_field = get_username_field()
User = get_user_model()
lookup_kwargs = {username_field: username}
try:
user = User.objects.select_related('api_key').get(**lookup_kwargs)
except (User.DoesNotExist, User.MultipleObjectsReturned):
return self._unauthorized()
if not self.check_active(user):
return False
key_auth_check = self.get_key(user, api_key)
if key_auth_check and not isinstance(key_auth_check, HttpUnauthorized):
request.user = user
return key_auth_check
|
ValueError
|
dataset/ETHPy150Open django-tastypie/django-tastypie/tastypie/authentication.py/ApiKeyAuthentication.is_authenticated
|
4,977 |
def get_identifier(self, request):
"""
Provides a unique string identifier for the requestor.
This implementation returns the user's username.
"""
try:
username = self.extract_credentials(request)[0]
except __HOLE__:
username = ''
return username or 'nouser'
|
ValueError
|
dataset/ETHPy150Open django-tastypie/django-tastypie/tastypie/authentication.py/ApiKeyAuthentication.get_identifier
|
4,978 |
def is_authenticated(self, request, **kwargs):
"""
Finds the user and checks their API key.
Should return either ``True`` if allowed, ``False`` if not or an
``HttpResponse`` if you need something custom.
"""
try:
self.get_authorization_data(request)
except __HOLE__:
return self._unauthorized()
digest_response = python_digest.parse_digest_credentials(request.META['HTTP_AUTHORIZATION'])
# FIXME: Should the nonce be per-user?
if not python_digest.validate_nonce(digest_response.nonce, settings.SECRET_KEY):
return self._unauthorized()
user = self.get_user(digest_response.username)
api_key = self.get_key(user)
if user is False or api_key is False:
return self._unauthorized()
expected = python_digest.calculate_request_digest(
request.method,
python_digest.calculate_partial_digest(digest_response.username, self.realm, api_key),
digest_response)
if not digest_response.response == expected:
return self._unauthorized()
if not self.check_active(user):
return False
request.user = user
return True
|
ValueError
|
dataset/ETHPy150Open django-tastypie/django-tastypie/tastypie/authentication.py/DigestAuthentication.is_authenticated
|
4,979 |
def get_identifier(self, request):
"""
Provides a unique string identifier for the requestor.
This implementation returns a combination of IP address and hostname.
"""
try:
return request._authentication_backend.get_identifier(request)
except __HOLE__:
return 'nouser'
|
AttributeError
|
dataset/ETHPy150Open django-tastypie/django-tastypie/tastypie/authentication.py/MultiAuthentication.get_identifier
|
4,980 |
def _DetermineFormat(self):
"""Determines whether the feed is in a form that we understand, and
if so, returns True."""
if self._zip:
# If zip was passed to __init__ then path isn't used
assert not self._path
return True
if not isinstance(self._path, basestring) and hasattr(self._path, 'read'):
# A file-like object, used for testing with a StringIO file
self._zip = zipfile.ZipFile(self._path, mode='r')
return True
if not os.path.exists(self._path):
self._problems.FeedNotFound(self._path)
return False
if self._path.endswith('.zip'):
try:
self._zip = zipfile.ZipFile(self._path, mode='r')
except __HOLE__: # self._path is a directory
pass
except zipfile.BadZipfile:
self._problems.UnknownFormat(self._path)
return False
if not self._zip and not os.path.isdir(self._path):
self._problems.UnknownFormat(self._path)
return False
return True
|
IOError
|
dataset/ETHPy150Open google/transitfeed/transitfeed/loader.py/Loader._DetermineFormat
|
4,981 |
def _ReadCsvDict(self, file_name, cols, required, deprecated):
"""Reads lines from file_name, yielding a dict of unicode values."""
assert file_name.endswith(".txt")
table_name = file_name[0:-4]
contents = self._GetUtf8Contents(file_name)
if not contents:
return
eol_checker = util.EndOfLineChecker(StringIO.StringIO(contents),
file_name, self._problems)
# The csv module doesn't provide a way to skip trailing space, but when I
# checked 15/675 feeds had trailing space in a header row and 120 had spaces
# after fields. Space after header fields can cause a serious parsing
    # problem, so warn. Space after body fields can cause a problem for time,
# integer and id fields; they will be validated at higher levels.
reader = csv.reader(eol_checker, skipinitialspace=True)
raw_header = reader.next()
header_occurrences = util.defaultdict(lambda: 0)
header = []
valid_columns = [] # Index into raw_header and raw_row
for i, h in enumerate(raw_header):
h_stripped = h.strip()
if not h_stripped:
self._problems.CsvSyntax(
description="The header row should not contain any blank values. "
"The corresponding column will be skipped for the "
"entire file.",
context=(file_name, 1, [''] * len(raw_header), raw_header),
type=problems.TYPE_ERROR)
continue
elif h != h_stripped:
self._problems.CsvSyntax(
description="The header row should not contain any "
"space characters.",
context=(file_name, 1, [''] * len(raw_header), raw_header),
type=problems.TYPE_WARNING)
header.append(h_stripped)
valid_columns.append(i)
header_occurrences[h_stripped] += 1
for name, count in header_occurrences.items():
if count > 1:
self._problems.DuplicateColumn(
header=name,
file_name=file_name,
count=count)
self._schedule._table_columns[table_name] = header
# check for unrecognized columns, which are often misspellings
header_context = (file_name, 1, [''] * len(header), header)
valid_cols = cols + [deprecated_name for (deprecated_name, _) in deprecated]
unknown_cols = set(header) - set(valid_cols)
if len(unknown_cols) == len(header):
self._problems.CsvSyntax(
description="The header row did not contain any known column "
"names. The file is most likely missing the header row "
"or not in the expected CSV format.",
context=(file_name, 1, [''] * len(raw_header), raw_header),
type=problems.TYPE_ERROR)
else:
for col in unknown_cols:
# this is provided in order to create a nice colored list of
# columns in the validator output
self._problems.UnrecognizedColumn(file_name, col, header_context)
# check for missing required columns
missing_cols = set(required) - set(header)
for col in missing_cols:
# this is provided in order to create a nice colored list of
# columns in the validator output
self._problems.MissingColumn(file_name, col, header_context)
# check for deprecated columns
for (deprecated_name, new_name) in deprecated:
if deprecated_name in header:
self._problems.DeprecatedColumn(file_name, deprecated_name, new_name,
header_context)
line_num = 1 # First line read by reader.next() above
for raw_row in reader:
line_num += 1
if len(raw_row) == 0: # skip extra empty lines in file
continue
if len(raw_row) > len(raw_header):
self._problems.OtherProblem('Found too many cells (commas) in line '
'%d of file "%s". Every row in the file '
'should have the same number of cells as '
'the header (first line) does.' %
(line_num, file_name),
(file_name, line_num),
type=problems.TYPE_WARNING)
if len(raw_row) < len(raw_header):
self._problems.OtherProblem('Found missing cells (commas) in line '
'%d of file "%s". Every row in the file '
'should have the same number of cells as '
'the header (first line) does.' %
(line_num, file_name),
(file_name, line_num),
type=problems.TYPE_WARNING)
# raw_row is a list of raw bytes which should be valid utf-8. Convert each
# valid_columns of raw_row into Unicode.
valid_values = []
unicode_error_columns = [] # index of valid_values elements with an error
for i in valid_columns:
try:
valid_values.append(raw_row[i].decode('utf-8'))
except UnicodeDecodeError:
# Replace all invalid characters with REPLACEMENT CHARACTER (U+FFFD)
valid_values.append(codecs.getdecoder("utf8")
(raw_row[i], errors="replace")[0])
unicode_error_columns.append(len(valid_values) - 1)
except __HOLE__:
break
# The error report may contain a dump of all values in valid_values so
# problems can not be reported until after converting all of raw_row to
# Unicode.
for i in unicode_error_columns:
self._problems.InvalidValue(header[i], valid_values[i],
'Unicode error',
(file_name, line_num,
valid_values, header))
# We strip ALL whitespace from around values. This matches the behavior
# of both the Google and OneBusAway GTFS parser.
valid_values = [value.strip() for value in valid_values]
d = dict(zip(header, valid_values))
yield (d, line_num, header, valid_values)
# TODO: Add testing for this specific function
|
IndexError
|
dataset/ETHPy150Open google/transitfeed/transitfeed/loader.py/Loader._ReadCsvDict
|
4,982 |
def _ReadCSV(self, file_name, cols, required, deprecated):
"""Reads lines from file_name, yielding a list of unicode values
corresponding to the column names in cols."""
contents = self._GetUtf8Contents(file_name)
if not contents:
return
eol_checker = util.EndOfLineChecker(StringIO.StringIO(contents),
file_name, self._problems)
reader = csv.reader(eol_checker) # Use excel dialect
header = reader.next()
header = map(lambda x: x.strip(), header) # trim any whitespace
header_occurrences = util.defaultdict(lambda: 0)
for column_header in header:
header_occurrences[column_header] += 1
for name, count in header_occurrences.items():
if count > 1:
self._problems.DuplicateColumn(
header=name,
file_name=file_name,
count=count)
# check for unrecognized columns, which are often misspellings
header_context = (file_name, 1, [''] * len(header), header)
valid_cols = cols + [deprecated_name for (deprecated_name, _) in deprecated]
unknown_cols = set(header).difference(set(valid_cols))
for col in unknown_cols:
# this is provided in order to create a nice colored list of
# columns in the validator output
self._problems.UnrecognizedColumn(file_name, col, header_context)
# check for missing required columns
col_index = [-1] * len(cols)
for i in range(len(cols)):
if cols[i] in header:
col_index[i] = header.index(cols[i])
elif cols[i] in required:
self._problems.MissingColumn(file_name, cols[i], header_context)
# check for deprecated columns
for (deprecated_name, new_name) in deprecated:
if deprecated_name in header:
self._problems.DeprecatedColumn(file_name, deprecated_name, new_name,
header_context)
row_num = 1
for row in reader:
row_num += 1
if len(row) == 0: # skip extra empty lines in file
continue
if len(row) > len(header):
self._problems.OtherProblem('Found too many cells (commas) in line '
'%d of file "%s". Every row in the file '
'should have the same number of cells as '
'the header (first line) does.' %
(row_num, file_name), (file_name, row_num),
type=problems.TYPE_WARNING)
if len(row) < len(header):
self._problems.OtherProblem('Found missing cells (commas) in line '
'%d of file "%s". Every row in the file '
'should have the same number of cells as '
'the header (first line) does.' %
(row_num, file_name), (file_name, row_num),
type=problems.TYPE_WARNING)
result = [None] * len(cols)
unicode_error_columns = [] # A list of column numbers with an error
for i in range(len(cols)):
ci = col_index[i]
if ci >= 0:
if len(row) <= ci: # handle short CSV rows
result[i] = u''
else:
try:
result[i] = row[ci].decode('utf-8').strip()
except __HOLE__:
# Replace all invalid characters with
# REPLACEMENT CHARACTER (U+FFFD)
result[i] = codecs.getdecoder("utf8")(row[ci],
errors="replace")[0].strip()
unicode_error_columns.append(i)
for i in unicode_error_columns:
self._problems.InvalidValue(cols[i], result[i],
'Unicode error',
(file_name, row_num, result, cols))
yield (result, row_num, cols)
|
UnicodeDecodeError
|
dataset/ETHPy150Open google/transitfeed/transitfeed/loader.py/Loader._ReadCSV
|
4,983 |
def _FileContents(self, file_name):
results = None
if self._zip:
try:
results = self._zip.read(file_name)
      except KeyError: # file not found in archive
self._problems.MissingFile(file_name)
return None
else:
try:
data_file = open(os.path.join(self._path, file_name), 'rb')
results = data_file.read()
except __HOLE__: # file not found
self._problems.MissingFile(file_name)
return None
if not results:
self._problems.EmptyFile(file_name)
return results
|
IOError
|
dataset/ETHPy150Open google/transitfeed/transitfeed/loader.py/Loader._FileContents
|
4,984 |
def _LoadStopTimes(self):
stop_time_class = self._gtfs_factory.StopTime
for (row, row_num, cols) in self._ReadCSV('stop_times.txt',
stop_time_class._FIELD_NAMES,
stop_time_class._REQUIRED_FIELD_NAMES,
stop_time_class._DEPRECATED_FIELD_NAMES):
file_context = ('stop_times.txt', row_num, row, cols)
self._problems.SetFileContext(*file_context)
(trip_id, arrival_time, departure_time, stop_id, stop_sequence,
stop_headsign, pickup_type, drop_off_type, shape_dist_traveled,
timepoint) = row
try:
sequence = int(stop_sequence)
except (__HOLE__, ValueError):
self._problems.InvalidValue('stop_sequence', stop_sequence,
'This should be a number.')
continue
if sequence < 0:
self._problems.InvalidValue('stop_sequence', sequence,
'Sequence numbers should be 0 or higher.')
if stop_id not in self._schedule.stops:
self._problems.InvalidValue('stop_id', stop_id,
'This value wasn\'t defined in stops.txt')
continue
stop = self._schedule.stops[stop_id]
if trip_id not in self._schedule.trips:
self._problems.InvalidValue('trip_id', trip_id,
'This value wasn\'t defined in trips.txt')
continue
trip = self._schedule.trips[trip_id]
# If self._problems.Report returns then StopTime.__init__ will return
# even if the StopTime object has an error. Thus this code may add a
# StopTime that didn't validate to the database.
# Trip.GetStopTimes then tries to make a StopTime from the invalid data
# and calls the problem reporter for errors. An ugly solution is to
# wrap problems and a better solution is to move all validation out of
# __init__. For now make sure Trip.GetStopTimes gets a problem reporter
# when called from Trip.Validate.
stop_time = stop_time_class(self._problems, stop,
arrival_time, departure_time, stop_headsign, pickup_type,
drop_off_type, shape_dist_traveled, stop_sequence=sequence,
timepoint=timepoint)
trip._AddStopTimeObjectUnordered(stop_time, self._schedule)
self._problems.ClearContext()
# stop_times are validated in Trip.ValidateChildren, called by
# Schedule.Validate
|
TypeError
|
dataset/ETHPy150Open google/transitfeed/transitfeed/loader.py/Loader._LoadStopTimes
|
4,985 |
def get(self, request, **kwargs):
params = self.get_params(request)
# Get the request's view and context
view = self.get_view(request)
context = self.get_context(request)
# Configure the query options used for retrieving the results.
query_options = {
'export_type': HTMLExporter.short_name,
'query_name': self._get_query_name(request),
}
query_options.update(**kwargs)
query_options.update(params)
try:
row_data = utils.get_result_rows(context, view, query_options)
except __HOLE__:
raise Http404
return process_results(
request, PREVIEW_RESULT_PROCESSOR_NAME, row_data)
# POST mimics GET to support sending large request bodies for on-the-fly
# context and view data.
|
ValueError
|
dataset/ETHPy150Open chop-dbhi/serrano/serrano/resources/preview.py/PreviewResource.get
|
4,986 |
def _mkdir_p(path):
"""mkdir -p path"""
try:
os.makedirs(path)
except __HOLE__ as exc:
if exc.errno == errno.EEXIST and os.path.isdir(path):
pass
else:
raise
else:
logger.info("New: %s%s", path, os.path.sep)
|
OSError
|
dataset/ETHPy150Open hustlzp/Flask-Boost/flask_boost/cli.py/_mkdir_p
|
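The errno.EEXIST dance above is the Python 2 idiom; on Python 3.2+ the same race-free behaviour is a single call:

    import os
    os.makedirs('/tmp/build/artifacts', exist_ok=True)  # no error if it already exists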
4,987 |
@test.raises(TypeError)
def test_render_bad_out(self):
self.app.setup()
self.app.run()
try:
self.app.render(dict(foo='bar'), out='bogus type')
except __HOLE__ as e:
self.eq(e.args[0], "Argument 'out' must be a 'file' like object")
raise
|
TypeError
|
dataset/ETHPy150Open datafolklabs/cement/tests/core/foundation_tests.py/FoundationTestCase.test_render_bad_out
|
4,988 |
def test_none_member(self):
class Test(object):
var = None
self.app.setup()
self.app.args.parsed_args = Test()
try:
self.app._parse_args()
except __HOLE__:
pass
|
SystemExit
|
dataset/ETHPy150Open datafolklabs/cement/tests/core/foundation_tests.py/FoundationTestCase.test_none_member
|
4,989 |
@test.raises(SystemExit)
def test_close_with_code(self):
app = self.make_app(APP, exit_on_close=True)
app.setup()
app.run()
try:
app.close(114)
except __HOLE__ as e:
self.eq(e.code, 114)
raise
|
SystemExit
|
dataset/ETHPy150Open datafolklabs/cement/tests/core/foundation_tests.py/FoundationTestCase.test_close_with_code
|
4,990 |
@test.raises(AssertionError)
def test_close_with_bad_code(self):
self.app.setup()
self.app.run()
try:
self.app.close('Not An Int')
except __HOLE__ as e:
self.eq(e.args[0], "Invalid exit status code (must be integer)")
raise
|
AssertionError
|
dataset/ETHPy150Open datafolklabs/cement/tests/core/foundation_tests.py/FoundationTestCase.test_close_with_bad_code
|
4,991 |
@test.raises(AssertionError)
def test_run_forever(self):
class Controller(CementBaseController):
class Meta:
label = 'base'
@expose()
def runit(self):
raise Exception("Fake some error")
app = self.make_app(base_controller=Controller, argv=['runit'])
def handler(signum, frame):
raise AssertionError('It ran forever!')
# set the signal handler and a 5-second alarm
signal.signal(signal.SIGALRM, handler)
signal.alarm(5)
try:
# this will run indefinitely
with app as app:
app.run_forever()
except __HOLE__ as e:
self.eq(e.args[0], 'It ran forever!')
raise
finally:
signal.alarm(0)
|
AssertionError
|
dataset/ETHPy150Open datafolklabs/cement/tests/core/foundation_tests.py/FoundationTestCase.test_run_forever
|
4,992 |
def messageReceived(self, message):
try:
msgtype, topic, payload = self._messageSplit(message)
except __HOLE__:
log.msg('invalid message received <%s>' % message)
return
if topic not in self.channels:
return
if topic in self.endpoints:
session = self.endpoints[topic]
if msgtype == 'uns':
del self.endpoints[topic]
session._close()
elif msgtype == 'msg':
if payload:
session.messageReceived(payload)
else:
if msgtype == 'sub':
session_args = (self.channels[topic], self.session.server,
self, topic)
session = self.session.server.create_session(
session_id=None, register=False,
session_factory=(MultiplexChannelSession,
session_args, {}))
session.set_handler(self.handler)
session.verify_state()
self.endpoints[topic] = session
|
ValueError
|
dataset/ETHPy150Open flaviogrossi/sockjs-cyclone/sockjs/cyclone/conn.py/MultiplexConnection.messageReceived
|
4,993 |
def handleResponse(self, response_body_bytes):
try:
json_response = json.loads(response_body_bytes)
except __HOLE__:
# logger.info("Invalid JSON response from %s",
# self.transport.getHost())
self.transport.abortConnection()
return
certificate = self.transport.getPeerCertificate()
self.callback((json_response, certificate))
self.transport.abortConnection()
self.timer.cancel()
|
ValueError
|
dataset/ETHPy150Open matrix-org/synapse/synapse/crypto/keyclient.py/SynapseKeyClientProtocol.handleResponse
|
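json.loads raises ValueError on malformed input (on Python 3.5+ specifically json.JSONDecodeError, a ValueError subclass), so catching ValueError works across versions -- the same guard used above. Sketch:

    import json

    def parse_json(body):
        try:
            return json.loads(body)
        except ValueError:  # malformed JSON
            return None

    parse_json('{"ok": true}')  # {'ok': True}
    parse_json('not json')      # None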
4,994 |
def is_volume_pro_available():
"""Returns `True` if there is a volume pro card available.
"""
try:
map = tvtk.VolumeProMapper()
except __HOLE__:
return False
else:
return map.number_of_boards > 0
|
AttributeError
|
dataset/ETHPy150Open enthought/mayavi/mayavi/modules/volume.py/is_volume_pro_available
|
4,995 |
def find_volume_mappers():
res = []
for name in dir(tvtk):
if 'Volume' in name and 'Mapper' in name and 'OpenGL' not in name:
try:
klass = getattr(tvtk, name)
inst = klass()
except __HOLE__:
pass
else:
res.append(name)
ignores = ['VolumeTextureMapper3D', 'VolumeProMapper']
for name in ignores:
if name in res:
res.remove(name)
return res
|
TypeError
|
dataset/ETHPy150Open enthought/mayavi/mayavi/modules/volume.py/find_volume_mappers
|
4,996 |
def size_filter(image_url):
try:
file = urlopen(image_url)
except __HOLE__:
return None
data = file.read(1024)
file.close()
parser = ImageParser()
parser.feed(data)
if parser.image:
if parser.image.size[0] > MIN_LINK_THUMB_WIDTH and \
parser.image.size[1] > MIN_LINK_THUMB_HEIGHT:
print image_url, parser.image.size
return image_url
|
HTTPError
|
dataset/ETHPy150Open linkfloyd/linkfloyd/linkfloyd/experimental/imgparsing/parser.py/size_filter
|
4,997 |
def QuerySetMock(model, *return_value):
"""
Get a SharedMock that returns self for most attributes and a new copy of
itself for any method that ordinarily generates QuerySets.
Set the results to two items:
>>> class Post(object): pass
>>> objects = QuerySetMock(Post, 'return', 'values')
>>> assert list(objects.filter()) == list(objects.all())
Force an exception:
>>> objects = QuerySetMock(Post, Exception())
Chain calls:
>>> objects.all().filter(filter_arg='dummy')
"""
def make_get(self, model):
def _get(*a, **k):
results = list(self)
if len(results) > 1:
raise model.MultipleObjectsReturned
try:
return results[0]
except __HOLE__:
raise model.DoesNotExist
return _get
def make_qs_returning_method(self):
def _qs_returning_method(*a, **k):
return copy.deepcopy(self)
return _qs_returning_method
def make_getitem(self):
def _getitem(k):
if isinstance(k, slice):
self.__start = k.start
self.__stop = k.stop
else:
return list(self)[k]
return self
return _getitem
def make_iterator(self):
def _iterator(*a, **k):
if len(return_value) == 1 and isinstance(return_value[0], Exception):
raise return_value[0]
start = getattr(self, '__start', None)
stop = getattr(self, '__stop', None)
for x in return_value[start:stop]:
yield x
return _iterator
actual_model = model
if actual_model:
model = mock.MagicMock(spec=actual_model())
else:
model = mock.MagicMock()
m = SharedMock(reserved=['count', 'exists'] + QUERYSET_RETURNING_METHODS)
m.__start = None
m.__stop = None
m.__iter__.side_effect = lambda: iter(m.iterator())
m.__getitem__.side_effect = make_getitem(m)
if hasattr(m, "__nonzero__"):
# Python 2
m.__nonzero__.side_effect = lambda: bool(return_value)
m.exists.side_effect = m.__nonzero__
else:
# Python 3
m.__bool__.side_effect = lambda: bool(return_value)
m.exists.side_effect = m.__bool__
m.__len__.side_effect = lambda: len(return_value)
m.count.side_effect = m.__len__
m.model = model
m.get = make_get(m, actual_model)
for method_name in QUERYSET_RETURNING_METHODS:
setattr(m, method_name, make_qs_returning_method(m))
# Note since this is a SharedMock, *all* auto-generated child
# attributes will have the same side_effect ... might not make
# sense for some like count().
m.iterator.side_effect = make_iterator(m)
return m
|
IndexError
|
dataset/ETHPy150Open dcramer/mock-django/mock_django/query.py/QuerySetMock
|
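make_get() above leans on list indexing: an empty result set makes results[0] raise IndexError, which is remapped to the model's DoesNotExist, mirroring the contract of Django's real QuerySet.get(). Reduced sketch with stand-in exception classes:

    class DoesNotExist(Exception):
        pass

    def get_one(results):
        results = list(results)
        if len(results) > 1:
            raise ValueError('multiple objects returned')
        try:
            return results[0]
        except IndexError:
            raise DoesNotExist()

    get_one([42])   # 42
    # get_one([])   # DoesNotExist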
4,998 |
def process_request(self, req, resp):
if req.content_length in (None, 0):
# Nothing to do
return
body = req.stream.read()
if not body:
raise falcon.HTTPBadRequest('Empty request body',
'A valid JSON document is required.')
try:
req.context['doc'] = json.loads(body.decode('utf-8'))
except (__HOLE__, UnicodeDecodeError):
raise falcon.HTTPError(falcon.HTTP_753,
'Malformed JSON',
'Could not decode the request body. The '
'JSON was incorrect or not encoded as '
'UTF-8.')
# did they pass text var
if 'text' not in req.context['doc']:
raise falcon.HTTPBadRequest(
'Error',
'Missing json var text, come on you had one var to pass')
# did they pass an empty text var
if not req.context['doc']['text']:
raise falcon.HTTPBadRequest(
'Error',
'Missing empty var text, come on you had one var to pass')
|
ValueError
|
dataset/ETHPy150Open mikelynn2/sentimentAPI/sentimentAPI.py/JSONTranslator.process_request
|
4,999 |
def identify(self, environ):
logger = environ.get('repoze.who.logger','')
logger.info("formplugin identify")
#logger and logger.info("environ keys: %s" % environ.keys())
query = parse_dict_querystring(environ)
# If the extractor finds a special query string on any request,
# it will attempt to find the values in the input body.
if query.get(self.login_form_qs):
form = parse_formvars(environ)
from StringIO import StringIO
# we need to replace wsgi.input because we've read it
# this smells funny
environ['wsgi.input'] = StringIO()
form.update(query)
qinfo = {}
for key, val in form.items():
if key.startswith("_") and key.endswith("_"):
qinfo[key[1:-1]] = val
if qinfo:
environ["s2repoze.qinfo"] = qinfo
try:
login = form['login']
password = form['password']
except __HOLE__:
return None
del query[self.login_form_qs]
query.update(qinfo)
environ['QUERY_STRING'] = urllib.urlencode(query)
environ['repoze.who.application'] = HTTPFound(
construct_url(environ))
credentials = {'login':login, 'password':password}
max_age = form.get('max_age', None)
if max_age is not None:
credentials['max_age'] = max_age
return credentials
return None
# IChallenger
|
KeyError
|
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/pysaml2-2.4.0/src/s2repoze/plugins/formswithhidden.py/FormHiddenPlugin.identify
|