Unnamed: 0 (int64, 0–10k) | function (string, lengths 79–138k) | label (string, 20 classes) | info (string, lengths 42–261)
---|---|---|---|
4,300 |
def decompressing_file(FILE_TO_DECOMPRESS):
    try:
        f_in = gzip.open(FILE_TO_DECOMPRESS, 'rb')
        contend = f_in.read()
        f_in.close()
        file_dec = FILE_TO_DECOMPRESS.split('.')[0] + '.txt'
        f_out = open(file_dec, 'w')
        f_out.writelines(contend)
        f_out.close()
        return file_dec
    except __HOLE__ as e:
        print "I/O error({0}): {1} for file {2}".format(e.errno, e.strerror, FILE_TO_DECOMPRESS)
|
IOError
|
dataset/ETHPy150Open bt3gl/Neat-Problems-in-Python-and-Flask/Version-Control/src/system_operations.py/decompressing_file
|
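For context: the masked exception above is the file-I/O error raised by gzip and plain file access. A minimal runnable Python 3 sketch of the same pattern (the with-statement cleanup is an editorial substitution, not part of the dataset row):

import gzip

def decompress_file(path):
    try:
        with gzip.open(path, 'rb') as f_in:
            data = f_in.read()
        out_path = path.split('.')[0] + '.txt'
        with open(out_path, 'wb') as f_out:
            f_out.write(data)
        return out_path
    except IOError as e:  # alias of OSError in Python 3
        print("I/O error({0}): {1} for file {2}".format(e.errno, e.strerror, path))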
4,301 |
def handle_label(self, project_name, **options):
    # Determine the project_name a bit naively -- by looking at the name of
    # the parent directory.
    directory = os.getcwd()
    # Check that the project_name cannot be imported.
    try:
        import_module(project_name)
    except __HOLE__:
        pass
    else:
        raise CommandError("%r conflicts with the name of an existing Python module and cannot be used as a project name. Please try another name." % project_name)
    copy_helper(self.style, 'project', project_name, directory)
    # Create a random SECRET_KEY hash, and put it in the main settings.
    main_settings_file = os.path.join(directory, project_name, 'settings.py')
    settings_contents = open(main_settings_file, 'r').read()
    fp = open(main_settings_file, 'w')
    secret_key = ''.join([choice('abcdefghijklmnopqrstuvwxyz0123456789!@#$%^&*(-_=+)') for i in range(50)])
    settings_contents = re.sub(r"(?<=SECRET_KEY = ')'", secret_key + "'", settings_contents)
    fp.write(settings_contents)
    fp.close()
|
ImportError
|
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-1.2/django/core/management/commands/startproject.py/Command.handle_label
|
4,302 |
def ensure_asyncio():
    try:
        import asyncio
    except __HOLE__:
        file, pathname, description = imp.find_module('trollius')
        try:
            asyncio = imp.load_module('asyncio', file, pathname, description)
        finally:
            if file is not None:
                try:
                    file.close()
                except:
                    pass
    sys.modules['asyncio'] = asyncio
|
ImportError
|
dataset/ETHPy150Open foxdog-studios/pyddp/ddp/utils.py/ensure_asyncio
|
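The ensure_asyncio row masks the ImportError raised when a module is absent. The same fall-back idiom in a self-contained sketch (ujson here is just an illustrative stand-in for the trollius substitution above):

try:
    import ujson as json  # optional third-party implementation
except ImportError:
    import json           # stdlib fallback with a compatible interface

print(json.dumps({'ok': True}))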
4,303 |
def _has_property(self, subject, name, *args):
    if args:
        try:
            value = getattr(subject, name)
        except __HOLE__:
            return False, 'property {0!r} not found'.format(name)
        else:
            expected_value = default_matcher(args[0])
            result, _ = expected_value._match(value)
            if not result:
                return False, 'property {0!r} {1!r} not found'.format(name, expected_value)
            return True, 'property {0!r} {1!r} found'.format(name, expected_value)
    if not hasattr(subject, name):
        return False, 'property {0!r} not found'.format(name)
    return True, 'property {0!r} found'.format(name)
|
AttributeError
|
dataset/ETHPy150Open jaimegildesagredo/expects/expects/matchers/built_in/have_properties.py/_PropertyMatcher._has_property
|
4,304 |
def __init__(self, *args, **kwargs):
    try:
        self._expected = (), dict(*args, **kwargs)
    except (__HOLE__, ValueError):
        self._expected = args, kwargs
|
TypeError
|
dataset/ETHPy150Open jaimegildesagredo/expects/expects/matchers/built_in/have_properties.py/have_properties.__init__
|
4,305 |
def get_buf_by_path(self, path):
    try:
        p = utils.to_rel_path(path)
    except __HOLE__:
        return
    buf_id = self.paths_to_ids.get(p)
    if buf_id:
        return self.bufs.get(buf_id)
|
ValueError
|
dataset/ETHPy150Open Floobits/floobits-sublime/floo/common/handlers/floo_handler.py/FlooHandler.get_buf_by_path
|
4,306 |
def _on_delete_buf(self, data):
    buf_id = data['id']
    try:
        buf = self.bufs.get(buf_id)
        if buf:
            del self.paths_to_ids[buf['path']]
            del self.bufs[buf_id]
    except __HOLE__:
        msg.debug('KeyError deleting buf id ', buf_id)
    # TODO: if data['unlink'] == True, add to ignore?
    action = 'removed'
    path = utils.get_full_path(data['path'])
    if data.get('unlink', False):
        action = 'deleted'
        try:
            utils.rm(path)
        except Exception as e:
            msg.debug('Error deleting ', path, ': ', str_e(e))
    user_id = data.get('user_id')
    username = self.get_username_by_id(user_id)
    msg.log(username, ' ', action, ' ', path)
|
KeyError
|
dataset/ETHPy150Open Floobits/floobits-sublime/floo/common/handlers/floo_handler.py/FlooHandler._on_delete_buf
|
4,307 |
def _on_saved(self, data):
    buf_id = data['id']
    buf = self.bufs.get(buf_id)
    if not buf:
        return
    on_view_load = self.on_load.get(buf_id)
    if on_view_load:
        try:
            del on_view_load['patch']
        except __HOLE__:
            pass
    view = self.get_view(data['id'])
    if view:
        self.save_view(view)
    elif 'buf' in buf:
        utils.save_buf(buf)
    username = self.get_username_by_id(data['user_id'])
    msg.log('%s saved buffer %s' % (username, buf['path']))
|
KeyError
|
dataset/ETHPy150Open Floobits/floobits-sublime/floo/common/handlers/floo_handler.py/FlooHandler._on_saved
|
4,308 |
def _rate_limited_upload(self, paths_iter, total_bytes, bytes_uploaded=0.0, upload_func=None):
    reactor.tick()
    upload_func = upload_func or (lambda x: self._upload(utils.get_full_path(x)))
    if len(self.proto) > 0:
        self.upload_timeout = utils.set_timeout(self._rate_limited_upload, 10, paths_iter, total_bytes, bytes_uploaded, upload_func)
        return
    bar_len = 20
    try:
        p = next(paths_iter)
        size = upload_func(p)
        bytes_uploaded += size
        try:
            percent = (bytes_uploaded / total_bytes)
        except ZeroDivisionError:
            percent = 0.5
        bar = ' |' + ('|' * int(bar_len * percent)) + (' ' * int((1 - percent) * bar_len)) + '|'
        editor.status_message('Uploading... %2.2f%% %s' % (percent * 100, bar))
    except __HOLE__:
        editor.status_message('Uploading... 100% ' + ('|' * bar_len) + '| complete')
        msg.log('All done uploading')
        return
    self.upload_timeout = utils.set_timeout(self._rate_limited_upload, 50, paths_iter, total_bytes, bytes_uploaded, upload_func)
|
StopIteration
|
dataset/ETHPy150Open Floobits/floobits-sublime/floo/common/handlers/floo_handler.py/FlooHandler._rate_limited_upload
|
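The row above advances an iterator by hand and uses StopIteration as the "everything uploaded" signal. A stripped-down, runnable sketch of that control flow (byte counts are faked with len()):

def drain(paths_iter, total_bytes):
    uploaded = 0
    while True:
        try:
            path = next(paths_iter)
        except StopIteration:
            print('All done uploading')
            return uploaded
        uploaded += len(path)  # stand-in for the real per-file upload size
        print('Uploading... %2.2f%%' % (100.0 * uploaded / total_bytes))

drain(iter(['a.txt', 'bb.txt']), total_bytes=11)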
4,309 |
def _upload(self, path, text=None):
    size = 0
    try:
        if text is None:
            with open(path, 'rb') as buf_fd:
                buf = buf_fd.read()
        else:
            try:
                # work around python 3 encoding issue
                buf = text.encode('utf8')
            except Exception as e:
                msg.debug('Error encoding buf ', path, ': ', str_e(e))
                # We're probably in python 2 so it's ok to do this
                buf = text
        size = len(buf)
        encoding = 'utf8'
        rel_path = utils.to_rel_path(path)
        existing_buf = self.get_buf_by_path(path)
        if existing_buf:
            if text is None:
                buf_md5 = hashlib.md5(buf).hexdigest()
                if existing_buf['md5'] == buf_md5:
                    msg.log(path, ' already exists and has the same md5. Skipping.')
                    return size
                existing_buf['md5'] = buf_md5
            msg.log('Setting buffer ', rel_path)
            try:
                buf = buf.decode('utf-8')
            except Exception:
                buf = base64.b64encode(buf).decode('utf-8')
                encoding = 'base64'
            existing_buf['buf'] = buf
            existing_buf['encoding'] = encoding
            self.send({
                'name': 'set_buf',
                'id': existing_buf['id'],
                'buf': buf,
                'md5': existing_buf['md5'],
                'encoding': encoding,
            })
            self.send({'name': 'saved', 'id': existing_buf['id']})
            return size
        try:
            buf = buf.decode('utf-8')
        except Exception:
            buf = base64.b64encode(buf).decode('utf-8')
            encoding = 'base64'
        msg.log('Creating buffer ', rel_path, ' (', len(buf), ' bytes)')
        event = {
            'name': 'create_buf',
            'buf': buf,
            'path': rel_path,
            'encoding': encoding,
        }
        self.send(event)
    except (IOError, __HOLE__):
        msg.error('Failed to open ', path)
    except Exception as e:
        msg.error('Failed to create buffer ', path, ': ', str_e(e))
    return size
|
OSError
|
dataset/ETHPy150Open Floobits/floobits-sublime/floo/common/handlers/floo_handler.py/FlooHandler._upload
|
4,310 |
def quit(self, quit_command='qa!'):
    """Send a quit command to Nvim.
    By default, the quit command is 'qa!' which will make Nvim quit without
    saving anything.
    """
    try:
        self.command(quit_command)
    except __HOLE__:
        # sending a quit command will raise an IOError because the
        # connection is closed before a response is received. Safe to
        # ignore it.
        pass
|
IOError
|
dataset/ETHPy150Open neovim/python-client/neovim/api/nvim.py/Nvim.quit
|
4,311 |
def main(argv=sys.argv[1:]):
    """Parses the command line comments."""
    usage = 'usage: %prog [options] FILE\n\n' + __doc__
    parser = OptionParser(usage)
    # options
    parser.add_option("-f", "--force",
                      action='store_true', default=False,
                      help="make changes even if they cannot undone before saving the new file")
    parser.add_option("-m", "--min_level",
                      default='NONE',
                      help="minimum level of logging statements to modify [default: no minimum]")
    parser.add_option("-M", "--max_level",
                      default='NONE',
                      help="maximum level of logging statements to modify [default: no maximum]")
    parser.add_option("-o", "--output-file",
                      default=None,
                      help="where to output the result [default: overwrite the input file]")
    parser.add_option("-r", "--restore",
                      action='store_true', default=False,
                      help="restore logging statements previously commented out and replaced with pass statements")
    parser.add_option("-v", "--verbose",
                      action='store_true', default=False,
                      help="print informational messages about changes made")
    (options, args) = parser.parse_args(argv)
    if len(args) != 1:
        parser.error("expected 1 argument but got %d arguments: %s" % (len(args), ' '.join(args)))
    input_fn = args[0]
    if not options.output_file:
        options.output_file = input_fn
    # validate min/max level
    LEVEL_CHOICES = LEVELS + ['NONE']
    min_level_value = 0 if options.min_level == 'NONE' else get_level_value(options.min_level)
    if options.min_level is None:
        parser.error("min level must be an integer or one of these values: %s" % ', '.join(LEVEL_CHOICES))
    max_level_value = sys.maxint if options.max_level == 'NONE' else get_level_value(options.max_level)
    if options.max_level is None:
        parser.error("max level must be an integer or one of these values: %s" % ', '.join(LEVEL_CHOICES))
    if options.verbose:
        logging.getLogger().setLevel(logging.INFO)
    try:
        return modify_logging(input_fn, options.output_file,
                              min_level_value, max_level_value,
                              options.restore, options.force)
    except __HOLE__ as e:
        logging.error(str(e))
        return -1

# matches two main groups: 1) leading whitespace and 2) all following text
|
IOError
|
dataset/ETHPy150Open neovim/python-client/scripts/logging_statement_modifier.py/main
|
4,312 |
def first_arg_to_level_name(arg):
    """Decide what level the argument specifies and return it. The argument
    must contain (case-insensitive) one of the values in LEVELS or be an integer
    constant. Otherwise None will be returned."""
    try:
        return int(arg)
    except __HOLE__:
        arg = arg.upper()
        for level in LEVELS:
            if level in arg:
                return level
        return None
|
ValueError
|
dataset/ETHPy150Open neovim/python-client/scripts/logging_statement_modifier.py/first_arg_to_level_name
|
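int() raises ValueError on non-numeric strings, which is what the hole above catches before falling back to name matching. A runnable sketch of the same function:

LEVELS = ['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL']

def first_arg_to_level_name(arg):
    try:
        return int(arg)        # integral constants pass straight through
    except ValueError:         # not a number: try to match a level name
        arg = arg.upper()
        for level in LEVELS:
            if level in arg:
                return level
        return None

assert first_arg_to_level_name('20') == 20
assert first_arg_to_level_name('logging.warning(...)') == 'WARNING'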
4,313 |
def get_level_value(level):
    """Returns the logging value associated with a particular level name. The
    argument must be present in LEVELS_DICT or be an integer constant.
    Otherwise None will be returned."""
    try:
        # integral constants also work: they are the level value
        return int(level)
    except __HOLE__:
        try:
            return LEVELS_DICT[level.upper()]
        except KeyError:
            logging.warning("level '%s' cannot be translated to a level value (not present in LEVELS_DICT)" % level)
            return None
|
ValueError
|
dataset/ETHPy150Open neovim/python-client/scripts/logging_statement_modifier.py/get_level_value
|
4,314 |
def _loop_until(self, predicate):
    self._until_predicate = predicate
    try:
        # this runs self._next ONE time, but
        # self._next re-runs itself until
        # the predicate says to quit.
        self._loop.add_callback(self._next)
        self._loop.start()
    except __HOLE__:
        self.close("user interruption")
|
KeyboardInterrupt
|
dataset/ETHPy150Open bokeh/bokeh/bokeh/client/_connection.py/ClientConnection._loop_until
|
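The masked KeyboardInterrupt is how a blocking loop.start() reacts to Ctrl-C. A dependency-free sketch of the same shutdown pattern (time.sleep stands in for one tick of the Tornado loop):

import time

def loop_until(predicate):
    try:
        while not predicate():
            time.sleep(0.1)  # one 'tick' of the pretend event loop
    except KeyboardInterrupt:
        print('close: user interruption')

loop_until(lambda: True)  # returns immediately; Ctrl-C during the loop is caught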
4,315 |
@gen.coroutine
def _pop_message(self):
    while True:
        if self._socket is None:
            raise gen.Return(None)
        # log.debug("Waiting for fragment...")
        fragment = None
        try:
            fragment = yield self._socket.read_message()
        except Exception as e:
            # this happens on close, so debug level since it's "normal"
            log.debug("Error reading from socket %r", e)
        # log.debug("... got fragment %r", fragment)
        if fragment is None:
            # XXX Tornado doesn't give us the code and reason
            log.info("Connection closed by server")
            raise gen.Return(None)
        try:
            message = yield self._receiver.consume(fragment)
            if message is not None:
                log.debug("Received message %r" % message)
                raise gen.Return(message)
        except (MessageError, ProtocolError, __HOLE__) as e:
            log.error("%r", e, exc_info=True)
            self.close(why="error parsing message from server")
|
ValidationError
|
dataset/ETHPy150Open bokeh/bokeh/bokeh/client/_connection.py/ClientConnection._pop_message
|
4,316 |
def do_full_push(self):
    try:
        bundle = {"labels": {}}
        for key, value in self.wallet.labels.iteritems():
            encoded = self.encode(key)
            bundle["labels"][encoded] = self.encode(value)
        params = json.dumps(bundle)
        connection = httplib.HTTPConnection(self.target_host)
        connection.request("POST", ("/api/wallets/%s/labels/batch.json?auth_token=%s" % (self.wallet_id, self.auth_token())), params, {'Content-Type': 'application/json'})
        response = connection.getresponse()
        if response.reason == httplib.responses[httplib.NOT_FOUND]:
            return
        try:
            response = json.loads(response.read())
        except __HOLE__ as e:
            return False
        if "error" in response:
            QMessageBox.warning(None, _("Error"), _("Could not sync labels: %s" % response["error"]))
            return False
        return True
    except socket.gaierror as e:
        print_error('Error connecting to service: %s ' % e)
        return False
|
ValueError
|
dataset/ETHPy150Open bitxbay/BitXBay/electru/build/lib/electrum_plugins/labels.py/Plugin.do_full_push
|
4,317 |
def do_full_pull(self, force = False):
    try:
        connection = httplib.HTTPConnection(self.target_host)
        connection.request("GET", ("/api/wallets/%s/labels.json?auth_token=%s" % (self.wallet_id, self.auth_token())), "", {'Content-Type': 'application/json'})
        response = connection.getresponse()
        if response.reason == httplib.responses[httplib.NOT_FOUND]:
            return
        try:
            response = json.loads(response.read())
        except __HOLE__ as e:
            return False
        if "error" in response:
            QMessageBox.warning(None, _("Error"), _("Could not sync labels: %s" % response["error"]))
            return False
        for label in response:
            decoded_key = self.decode(label["external_id"])
            decoded_label = self.decode(label["text"])
            if force or not self.wallet.labels.get(decoded_key):
                self.wallet.labels[decoded_key] = decoded_label
        return True
    except socket.gaierror as e:
        print_error('Error connecting to service: %s ' % e)
        return False
|
ValueError
|
dataset/ETHPy150Open bitxbay/BitXBay/electru/build/lib/electrum_plugins/labels.py/Plugin.do_full_pull
|
4,318 |
def upgrade(self):
    if not os.path.exists(self.folder_path):
        print("Error: instance folder does not exist")
        sys.exit(1)
    try:
        actual_path = iepy.setup(self.folder_path, _safe_mode=True)
    except __HOLE__ as err:
        print(err)
        sys.exit(1)
    finally:
        self.folder_path = actual_path
        self.abs_folder_path = os.path.abspath(self.folder_path)
    from django.conf import settings
    self.old_version = settings.IEPY_VERSION
    if settings.IEPY_VERSION == iepy.__version__:
        print("Iepy instance '{}' is already up to date.".format(self.folder_path))
        return
    print("Upgrading iepy instance '{}' from {} to {}".format(
        self.folder_path, self.old_version, iepy.__version__))
    self.creating = False
    self.old_version_path = self.download_old_iepy_version()
    self._run_steps()
|
ValueError
|
dataset/ETHPy150Open machinalis/iepy/iepy/instantiation/instance_admin.py/InstanceManager.upgrade
|
4,319 |
def RenderBranch(self, path, request):
    """Renders tree leafs for flows."""
    # Retrieve the user's GUI mode preferences.
    self.user = request.user
    try:
        user_record = aff4.FACTORY.Open(
            aff4.ROOT_URN.Add("users").Add(self.user), "GRRUser",
            token=request.token)
        user_preferences = user_record.Get(user_record.Schema.GUI_SETTINGS)
    except __HOLE__:
        user_preferences = aff4.GRRUser.SchemaCls.GUI_SETTINGS()
    flow_behaviors_to_render = (self.flow_behaviors_to_render +
                                user_preferences.mode)
    categories, flows = self.EnumerateCategories(path, request,
                                                 flow_behaviors_to_render)
    for category in sorted(categories):
        self.AddElement(category)
    for name, friendly_name in sorted(flows):
        self.AddElement(name, behaviour="leaf", friendly_name=friendly_name)
|
IOError
|
dataset/ETHPy150Open google/grr/grr/gui/plugins/flow_management.py/FlowTree.RenderBranch
|
4,320 |
def Layout(self, request, response):
    """Update the progress bar based on the progress reported."""
    self.flow_name = request.REQ.get("flow_path", "").split("/")[-1]
    try:
        flow_class = flow.GRRFlow.classes[self.flow_name]
        if not aff4.issubclass(flow_class, flow.GRRFlow):
            return response
    except __HOLE__:
        return response
    self.states = []
    # Fill in information about each state
    for state_method in flow_class.__dict__.values():
        try:
            next_states = state_method.next_states
            # Only show the first line of the doc string.
            try:
                func_doc = state_method.func_doc.split("\n")[0].strip()
            except AttributeError:
                func_doc = ""
            self.states.append((state_method.func_name,
                                func_doc, ", ".join(next_states)))
        except AttributeError:
            pass
    # Now fill in information about each arg to this flow.
    prototypes = []
    for type_descriptor in flow_class.args_type.type_infos:
        if not type_descriptor.hidden:
            prototypes.append("%s" % (type_descriptor.name))
    self.prototype = "%s(%s)" % (flow_class.__name__, ", ".join(prototypes))
    self.flow_doc = flow_class.__doc__
    return super(FlowInformation, self).Layout(request, response)
|
KeyError
|
dataset/ETHPy150Open google/grr/grr/gui/plugins/flow_management.py/FlowInformation.Layout
|
4,321 |
def RenderAjax(self, request, response):
    """Parse the flow args from the form and launch the flow."""
    self.flow_name = self._GetFlowName(request)
    self.client_id = request.REQ.get("client_id", None)
    self.dom_node = request.REQ.get("dom_node")
    flow_cls = flow.GRRFlow.classes.get(self.flow_name)
    if flow_cls is not None:
        self.args = forms.SemanticProtoFormRenderer(
            flow_cls.args_type(), prefix="args").ParseArgs(request)
        try:
            self.args.Validate()
        except __HOLE__ as e:
            return self.CallJavascript(
                response, "SemanticProtoFlowForm.RenderAjaxError", error=str(e))
        self.runner_args = forms.SemanticProtoFormRenderer(
            flow_runner.FlowRunnerArgs(), prefix="runner_").ParseArgs(request)
        self.runner_args.Validate()
        self.flow_id = flow.GRRFlow.StartFlow(client_id=self.client_id,
                                              flow_name=self.flow_name,
                                              token=request.token,
                                              args=self.args,
                                              runner_args=self.runner_args)
    self.args_html = semantic.FindRendererForObject(self.args).RawHTML(request)
    self.runner_args_html = semantic.FindRendererForObject(
        self.runner_args).RawHTML(request)
    response = renderers.TemplateRenderer.Layout(
        self, request, response, apply_template=self.ajax_template)
    return self.CallJavascript(response, "SemanticProtoFlowForm.RenderAjax",
                               renderer=self.__class__.__name__,
                               dom_node=self.dom_node)
|
ValueError
|
dataset/ETHPy150Open google/grr/grr/gui/plugins/flow_management.py/SemanticProtoFlowForm.RenderAjax
|
4,322 |
def Layout(self, request, response):
    try:
        self.icon, self.title = self.state_map[str(self.proxy)]
    except (__HOLE__, ValueError):
        pass
    super(FlowStateIcon, self).Layout(request, response)
|
KeyError
|
dataset/ETHPy150Open google/grr/grr/gui/plugins/flow_management.py/FlowStateIcon.Layout
|
4,323 |
def _GetCreationTime(self, obj):
    try:
        return obj.state.context.get("create_time")
    except __HOLE__:
        return obj.Get(obj.Schema.LAST, 0)
|
AttributeError
|
dataset/ETHPy150Open google/grr/grr/gui/plugins/flow_management.py/ListFlowsTable._GetCreationTime
|
4,324 |
def BuildTable(self, start_row, end_row, request):
    """Renders the table."""
    depth = request.REQ.get("depth", 0)
    flow_urn = self.state.get("value", request.REQ.get("value"))
    if flow_urn is None:
        client_id = request.REQ.get("client_id")
        if not client_id: return
        flow_urn = rdf_client.ClientURN(client_id).Add("flows")
    flow_root = aff4.FACTORY.Open(flow_urn, mode="r", token=request.token)
    root_children_paths = sorted(flow_root.ListChildren(),
                                 key=lambda x: x.age, reverse=True)
    additional_rows = (depth == 0 and len(root_children_paths) > end_row)
    if not depth:
        root_children_paths = root_children_paths[start_row:end_row]
    # TODO(user): should be able to specify aff4_type="GRRFlow" here.
    # Currently this doesn't work because symlinks get filtered out.
    # This is an aff4.FACTORY.MultiOpen's bug.
    root_children = aff4.FACTORY.MultiOpen(
        root_children_paths, token=request.token)
    root_children = sorted(root_children, key=self._GetCreationTime,
                           reverse=True)
    level2_children = dict(aff4.FACTORY.MultiListChildren(
        [f.urn for f in root_children], token=request.token))
    self.size = len(root_children)
    row_index = start_row
    for flow_obj in root_children:
        if level2_children.get(flow_obj.urn, None):
            row_type = "branch"
        else:
            row_type = "leaf"
        row = {}
        last = flow_obj.Get(flow_obj.Schema.LAST)
        if last:
            row["Last Active"] = last
        if isinstance(flow_obj, aff4.AFF4Object.GRRFlow):
            row_name = (flow_obj.symlink_urn or flow_obj.urn).Basename()
            try:
                if flow_obj.Get(flow_obj.Schema.CLIENT_CRASH):
                    row["State"] = "CLIENT_CRASHED"
                else:
                    row["State"] = flow_obj.state.context.state
                row["Flow Name"] = flow_obj.state.context.args.flow_name
                row["Creation Time"] = flow_obj.state.context.create_time
                row["Creator"] = flow_obj.state.context.creator
            except __HOLE__:
                row["Flow Name"] = "Failed to open flow."
        elif isinstance(flow_obj, aff4.AFF4Object.GRRHunt):
            row_name = flow_obj.urn.Dirname()
            row["Flow Name"] = "Hunt"
        else:
            # A logs collection, skip, it will be rendered separately
            continue
        self.columns[1].AddElement(
            # If flow object is symlinked, we want to use symlink path in the
            # table. This way UI logic can make reasonable assumptions about
            # client's flows URNs.
            row_index, flow_obj.symlink_urn or flow_obj.urn, depth, row_type,
            row_name)
        self.AddRow(row, row_index)
        row_index += 1
    return additional_rows
|
AttributeError
|
dataset/ETHPy150Open google/grr/grr/gui/plugins/flow_management.py/ListFlowsTable.BuildTable
|
4,325 |
def Layout(self, request, response):
    """Introspect the Schema for flow objects."""
    try:
        self.state["flow"] = session_id = request.REQ["flow"]
        self.fd = aff4.FACTORY.Open(session_id, token=request.token,
                                    age=aff4.ALL_TIMES)
        self.classes = self.RenderAFF4Attributes(self.fd, request)
        self.path = self.fd.urn
    except (__HOLE__, IOError):
        self.path = None
    # Skip our parent's Layout method and install parent's javascript code.
    response = super(fileview.AFF4Stats, self).Layout(request, response)
    return self.CallJavascript(response, "AFF4Stats.Layout",
                               historical_renderer=self.historical_renderer,
                               historical_renderer_state=self.state)
|
KeyError
|
dataset/ETHPy150Open google/grr/grr/gui/plugins/flow_management.py/ShowFlowInformation.Layout
|
4,326 |
@wsgi.action("disassociate")
def _disassociate_host_and_project(self, req, id, body):
    context = req.environ['nova.context']
    authorize(context)
    # NOTE(shaohe-feng): back-compatible with db layer hard-code
    # admin permission checks. call db API objects.Network.associate
    nova_context.require_admin_context(context)
    try:
        self.network_api.associate(context, id, host=None, project=None)
    except exception.NetworkNotFound:
        msg = _("Network not found")
        raise exc.HTTPNotFound(explanation=msg)
    except __HOLE__:
        msg = _('Disassociate network is not implemented by the '
                'configured Network API')
        raise exc.HTTPNotImplemented(explanation=msg)
    return webob.Response(status_int=202)
|
NotImplementedError
|
dataset/ETHPy150Open BU-NU-CLOUD-SP16/Trusted-Platform-Module-nova/nova/api/openstack/compute/legacy_v2/contrib/os_networks.py/NetworkController._disassociate_host_and_project
|
4,327 |
def add(self, req, body):
    context = req.environ['nova.context']
    authorize(context)
    # NOTE(shaohe-feng): back-compatible with db layer hard-code
    # admin permission checks. call db API objects.Network.associate
    nova_context.require_admin_context(context)
    if not body:
        raise exc.HTTPUnprocessableEntity()
    network_id = body.get('id', None)
    project_id = context.project_id
    try:
        self.network_api.add_network_to_project(
            context, project_id, network_id)
    except __HOLE__:
        msg = (_("VLAN support must be enabled"))
        raise exc.HTTPNotImplemented(explanation=msg)
    except (exception.NoMoreNetworks,
            exception.NetworkNotFoundForUUID) as e:
        raise exc.HTTPBadRequest(explanation=e.format_message())
    return webob.Response(status_int=202)
|
NotImplementedError
|
dataset/ETHPy150Open BU-NU-CLOUD-SP16/Trusted-Platform-Module-nova/nova/api/openstack/compute/legacy_v2/contrib/os_networks.py/NetworkController.add
|
4,328 |
def legal_moves(self, position):
    """ Obtain legal moves and where they lead.
    Parameters
    ----------
    position : tuple of int (x, y)
        the position to start at
    Returns
    -------
    legal_moves_dict : dict mapping strings (moves) to positions (x, y)
        the legal moves and where they would lead.
    """
    legal_moves_dict = {}
    for move, new_pos in self.neighbourhood(position).items():
        try:
            if not self.maze[new_pos]:
                legal_moves_dict[move] = new_pos
        except __HOLE__:
            # If we’re outside the maze, it is not a legal move.
            pass
    return legal_moves_dict
|
IndexError
|
dataset/ETHPy150Open ASPP/pelita/pelita/datamodel.py/CTFUniverse.legal_moves
|
4,329 |
def cache(time_to_wait = None, resource_manager = None):
    """ cache(time in float seconds) -> decorator
    Given "time" seconds (float), this decorator will cache during that
    time the output of the decorated function. This way, if someone calls
    the cache object with the same input within the next "time" time, the
    function will not be called and the output will be returned instead.
    """
    class cache_obj(object):
        def __init__(self, func):
            super(cache_obj, self).__init__()
            self.func = (func,)
            self.lock = threading.RLock()
            self.dictionaries_per_inst = {
                # inst : { # if it's not an inst, None is the key
                #     'dict': {},
                #     'list': []
                # }
            }
            self._time = time_to_wait
            self._inst = None
            _cache_registry.append(self)

        def __get__(self, inst, owner):
            if inst is not None:
                self._inst = weakref.ref(inst)
            return self

        def _generate_key(self, args, kargs):
            if kargs == {}:
                try:
                    hash(args)
                except TypeError:
                    pass
                else:
                    key = _HasheableKey(args)
                    return key
            try:
                pickled_key = pickle.dumps((args, kargs))
            except:
                key = _NotPicklableKey((args, kargs))
            else:
                key = _PicklableKey(pickled_key)
            return key

        def _get_time(self):
            # For testing purposes
            return time_module.time()

        def __call__(self, *args, **kargs):
            key = self._generate_key(args, kargs)
            current_time = self._get_time()
            found, (obj, storage_time) = key.load(self._get_dictionaries())
            if found:
                if self.time is None or current_time - storage_time < self.time:
                    return obj
            if self._inst != None:
                args = (self._inst(),) + args
            return_value = self.func[0](*args, **kargs)
            key.save(
                self._get_dictionaries(),
                (return_value, current_time)
            )
            return return_value

        def _save_to_cache(self, key, value):
            return_value, current_time = value
            key.save(self._get_dictionaries(), (return_value, current_time))

        def _get_dictionaries(self, inst = "this.is.not.an.instance"):
            if inst == "this.is.not.an.instance":
                inst = self._inst
            if not inst in self.dictionaries_per_inst:
                self.lock.acquire()
                try:
                    # Double ask, just to avoid acquiring and releasing
                    # without need
                    if not inst in self.dictionaries_per_inst:
                        self.dictionaries_per_inst[inst] = {
                            'dict' : {},
                            'list' : []
                        }
                finally:
                    self.lock.release()
            return self.dictionaries_per_inst[inst]

        def get_time(self):
            return self._time

        def set_time(self, value):
            self._time = value
        time = property(get_time, set_time)

        def _remove_obj(self, key, inst):
            try:
                returnValue, _ = key.pop(self._get_dictionaries(inst))
            except __HOLE__:
                return

    def wrapped_decorator(func):
        o = cache_obj(func)
        o.__name__ = func.__name__
        o.__doc__ = func.__doc__
        return o
    return wrapped_decorator
|
KeyError
|
dataset/ETHPy150Open weblabdeusto/weblabdeusto/server/src/voodoo/cache.py/cache
|
4,330 |
def __call__(self, *args):
    try:
        if self._inst is not None:
            args = (self._inst(),) + args
        if args in self.cache:
            return self.cache[args]
        else:
            return_value = self.func(*args)
            self.cache[args] = return_value
            return return_value
    except __HOLE__:
        print("Using fast_cache with func {0}, a function that might receive unhashable arguments!!!".format(self.func), file=sys.stderr)
        return self.func(*args)
|
TypeError
|
dataset/ETHPy150Open weblabdeusto/weblabdeusto/server/src/voodoo/cache.py/fast_cache.__call__
|
4,331 |
@property
def status(self):
    try:
        return self.result.status
    except __HOLE__:
        return _('Undecided')
|
ObjectDoesNotExist
|
dataset/ETHPy150Open pinax/symposion/symposion/proposals/models.py/ProposalBase.status
|
4,332 |
def test_issue_7754(self):
    try:
        old_cwd = os.getcwd()
    except OSError:
        # Jenkins throws an OSError from os.getcwd()??? Let's not worry
        # about it
        old_cwd = None
    config_dir = os.path.join(integration.TMP, 'issue-7754')
    if not os.path.isdir(config_dir):
        os.makedirs(config_dir)
    os.chdir(config_dir)
    config_file_name = 'master'
    with salt.utils.fopen(self.get_config_file_path(config_file_name), 'r') as fhr:
        config = yaml.load(fhr.read())
        config['log_file'] = 'file:///dev/log/LOG_LOCAL3'
        with salt.utils.fopen(os.path.join(config_dir, config_file_name), 'w') as fhw:
            fhw.write(
                yaml.dump(config, default_flow_style=False)
            )
    ret = self.run_script(
        self._call_binary_,
        '--out pprint --config-dir {0} \'*\' foo {0}/foo'.format(
            config_dir
        ),
        catch_stderr=True,
        with_retcode=True
    )
    try:
        self.assertIn('minion', '\n'.join(ret[0]))
        self.assertIn('sub_minion', '\n'.join(ret[0]))
        self.assertFalse(os.path.isdir(os.path.join(config_dir, 'file:')))
    except __HOLE__:
        if os.path.exists('/dev/log') and ret[2] != 2:
            # If there's a syslog device and the exit code was not 2, 'No
            # such file or directory', raise the error
            raise
        self.assertIn(
            'Failed to setup the Syslog logging handler', '\n'.join(ret[1])
        )
        self.assertEqual(ret[2], 2)
    finally:
        if old_cwd is not None:
            self.chdir(old_cwd)
        if os.path.isdir(config_dir):
            shutil.rmtree(config_dir)
|
AssertionError
|
dataset/ETHPy150Open saltstack/salt/tests/integration/shell/cp.py/CopyTest.test_issue_7754
|
4,333 |
def decode(self, jwt, key='', verify=True, algorithms=None, options=None,
           **kwargs):
    payload, signing_input, header, signature = self._load(jwt)
    decoded = super(PyJWT, self).decode(jwt, key, verify, algorithms,
                                        options, **kwargs)
    try:
        payload = json.loads(decoded.decode('utf-8'))
    except __HOLE__ as e:
        raise DecodeError('Invalid payload string: %s' % e)
    if not isinstance(payload, Mapping):
        raise DecodeError('Invalid payload string: must be a json object')
    if verify:
        merged_options = merge_dict(self.options, options)
        self._validate_claims(payload, merged_options, **kwargs)
    return payload
|
ValueError
|
dataset/ETHPy150Open jpadilla/pyjwt/jwt/api_jwt.py/PyJWT.decode
|
4,334 |
def _validate_iat(self, payload, now, leeway):
    try:
        iat = int(payload['iat'])
    except __HOLE__:
        raise DecodeError('Issued At claim (iat) must be an integer.')
    if iat > (now + leeway):
        raise InvalidIssuedAtError('Issued At claim (iat) cannot be in'
                                   ' the future.')
|
ValueError
|
dataset/ETHPy150Open jpadilla/pyjwt/jwt/api_jwt.py/PyJWT._validate_iat
|
4,335 |
def _validate_nbf(self, payload, now, leeway):
    try:
        nbf = int(payload['nbf'])
    except __HOLE__:
        raise DecodeError('Not Before claim (nbf) must be an integer.')
    if nbf > (now + leeway):
        raise ImmatureSignatureError('The token is not yet valid (nbf)')
|
ValueError
|
dataset/ETHPy150Open jpadilla/pyjwt/jwt/api_jwt.py/PyJWT._validate_nbf
|
4,336 |
def _validate_exp(self, payload, now, leeway):
    try:
        exp = int(payload['exp'])
    except __HOLE__:
        raise DecodeError('Expiration Time claim (exp) must be an'
                          ' integer.')
    if exp < (now - leeway):
        raise ExpiredSignatureError('Signature has expired')
|
ValueError
|
dataset/ETHPy150Open jpadilla/pyjwt/jwt/api_jwt.py/PyJWT._validate_exp
|
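The three PyJWT validators above share one shape: coerce the claim with int(), turn the ValueError into a decode error, then compare against now with leeway. A self-contained sketch using plain ValueError in place of PyJWT's DecodeError and ExpiredSignatureError:

import time

def validate_exp(payload, leeway=0):
    try:
        exp = int(payload['exp'])
    except ValueError:
        raise ValueError('Expiration Time claim (exp) must be an integer.')
    if exp < (time.time() - leeway):
        raise ValueError('Signature has expired')

validate_exp({'exp': int(time.time()) + 60})  # ok: expires in a minute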
4,337 |
def _apply_skill(self, skill):
    if skill.auto_start:
        skill.invoke(self)
    else:
        self.enhance_soul(skill)
    try:
        self.fight.update_skills()
    except __HOLE__:
        pass
|
AttributeError
|
dataset/ETHPy150Open genzgd/Lampost-Mud/lampost/lpmud/entity.py/EntityLP._apply_skill
|
4,338 |
def remove_skill(self, skill_id):
    try:
        skill = self.skills.pop(skill_id)
        if skill.auto_start:
            skill.revoke(self)
        else:
            self.diminish_soul(skill)
        self.fight.update_skills()
    except __HOLE__:
        raise ActionError('{} does not have that skill'.format(self.name))
|
KeyError
|
dataset/ETHPy150Open genzgd/Lampost-Mud/lampost/lpmud/entity.py/EntityLP.remove_skill
|
4,339 |
def check_status(self):
    if self.health <= 0:
        self._cancel_actions()
        self.fight.end_all()
        self.die()
    else:
        self.start_refresh()
    self.status_change()
    try:
        self.last_opponent.status_change()
    except __HOLE__:
        pass
|
AttributeError
|
dataset/ETHPy150Open genzgd/Lampost-Mud/lampost/lpmud/entity.py/EntityLP.check_status
|
4,340 |
def _cancel_actions(self):
    if self._current_action:
        unregister(self._current_action[2])
        del self._current_action
    try:
        del self._action_target
    except __HOLE__:
        pass
    if self._next_command:
        del self._next_command
|
AttributeError
|
dataset/ETHPy150Open genzgd/Lampost-Mud/lampost/lpmud/entity.py/EntityLP._cancel_actions
|
4,341 |
def parse_body(self):
    try:
        js = json.loads(self.body)
        return js
    except __HOLE__:
        return self.body
|
ValueError
|
dataset/ETHPy150Open secondstory/dewpoint/libcloud/drivers/vpsnet.py/VPSNetResponse.parse_body
|
4,342 |
def parse_error(self):
    try:
        errors = json.loads(self.body)['errors'][0]
    except __HOLE__:
        return self.body
    else:
        return "\n".join(errors)
|
ValueError
|
dataset/ETHPy150Open secondstory/dewpoint/libcloud/drivers/vpsnet.py/VPSNetResponse.parse_error
|
4,343 |
def __init__(self, *args, **kwargs):
    self._fname = None
    if 'file_path' in kwargs:
        self.file_path = kwargs.pop('file_path')
    else:
        self.file_path = getattr(settings, 'EMAIL_FILE_PATH', None)
    # Make sure self.file_path is a string.
    if not isinstance(self.file_path, basestring):
        raise ImproperlyConfigured('Path for saving emails is invalid: %r' % self.file_path)
    self.file_path = os.path.abspath(self.file_path)
    # Make sure that self.file_path is an directory if it exists.
    if os.path.exists(self.file_path) and not os.path.isdir(self.file_path):
        raise ImproperlyConfigured('Path for saving email messages exists, but is not a directory: %s' % self.file_path)
    # Try to create it, if it not exists.
    elif not os.path.exists(self.file_path):
        try:
            os.makedirs(self.file_path)
        except __HOLE__, err:
            raise ImproperlyConfigured('Could not create directory for saving email messages: %s (%s)' % (self.file_path, err))
    # Make sure that self.file_path is writable.
    if not os.access(self.file_path, os.W_OK):
        raise ImproperlyConfigured('Could not write to directory: %s' % self.file_path)
    # Finally, call super().
    # Since we're using the console-based backend as a base,
    # force the stream to be None, so we don't default to stdout
    kwargs['stream'] = None
    super(EmailBackend, self).__init__(*args, **kwargs)
|
OSError
|
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-1.2/django/core/mail/backends/filebased.py/EmailBackend.__init__
|
4,344 |
def __django_version_setup():
    """Selects a particular Django version to load."""
    django_version = _config_handle.django_version
    if django_version is not None:
        from google.appengine.dist import use_library
        use_library('django', str(django_version))
    else:
        from google.appengine.dist import _library
        version, explicit = _library.installed.get('django', ('0.96', False))
        if not explicit:
            logging.warn('You are using the default Django version (%s). '
                         'The default Django version will change in an '
                         'App Engine release in the near future. '
                         'Please call use_library() to explicitly select a '
                         'Django version. '
                         'For more information see %s',
                         version,
                         'https://developers.google.com/appengine/docs/python/tools/'
                         'libraries#Django')
        try:
            import django
            if not hasattr(django, 'VERSION'):
                from django import v0_96
        except __HOLE__:
            pass
|
ImportError
|
dataset/ETHPy150Open AppScale/appscale/AppServer/google/appengine/ext/webapp/__init__.py/__django_version_setup
|
4,345 |
def _django_setup():
    """Imports and configures Django.
    This can be overridden by defining a function named
    webapp_django_setup() in the app's appengine_config.py file (see
    lib_config docs). Such a function should import and configure
    Django.
    In the Python 2.5 runtime, you can also just configure the Django version to
    be used by setting webapp_django_version in that file.
    Finally, calling use_library('django', <version>) in that file
    should also work:
      # Example taken from from
      # https://developers.google.com/appengine/docs/python/tools/libraries#Django
      import os
      os.environ['DJANGO_SETTINGS_MODULE'] = 'settings'
      from google.appengine.dist import use_library
      use_library('django', '1.2')
    In the Python 2.7 runtime, the Django version is specified in you app.yaml
    file and use_library is not supported.
    If your application also imports Django directly it should ensure
    that the same code is executed before your app imports Django
    (directly or indirectly). Perhaps the simplest way to ensure that
    is to include the following in your main.py (and in each alternate
    main script):
      from google.appengine.ext.webapp import template
      import django
    This will ensure that whatever Django setup code you have included
    in appengine_config.py is executed, as a side effect of importing
    the webapp.template module.
    """
    if os.environ.get('APPENGINE_RUNTIME') != 'python27':
        __django_version_setup()
    import django
    import django.conf
    try:
        raise ImportError
        # TODO: Right now the below line raises a
        # django.core.exceptions.ImproperlyConfigured exception. Need to investigate
        # why and address accordingly.
        #getattr(django.conf.settings, 'FAKE_ATTR', None)
    except (__HOLE__, EnvironmentError), e:
        if os.getenv(django.conf.ENVIRONMENT_VARIABLE):
            logging.warning(e)
        try:
            django.conf.settings.configure(
                DEBUG=False,
                TEMPLATE_DEBUG=False,
                TEMPLATE_LOADERS=(
                    'django.template.loaders.filesystem.load_template_source',
                ),
            )
        except (EnvironmentError, RuntimeError):
            pass
|
ImportError
|
dataset/ETHPy150Open AppScale/appscale/AppServer/google/appengine/ext/webapp/__init__.py/_django_setup
|
4,346 |
def read(self, n=None):
    self.nbytes = n
    try:
        str = next(self.read_it)
    except __HOLE__:
        str = ""
    return str

# required by postgres2 driver, but not used
|
StopIteration
|
dataset/ETHPy150Open PyTables/PyTables/bench/postgres_backend.py/StreamChar.read
|
4,347 |
def _toint(self, it):
    try:
        return int(it)
    except __HOLE__:
        return it
|
ValueError
|
dataset/ETHPy150Open kdart/pycopia/core/pycopia/OS/Win32/procfs.py/ProcStat._toint
|
4,348 |
def get_stat(self, name):
    if not self.stats:
        raise ValueError, "no stats - run read(pid)"
    try:
        return self.stats[self._STATINDEX[name]]
    except __HOLE__:
        raise ValueError, "no attribute %s" % name
|
KeyError
|
dataset/ETHPy150Open kdart/pycopia/core/pycopia/OS/Win32/procfs.py/ProcStat.get_stat
|
4,349 |
def __getattr__(self, name):
    try:
        return self.get_stat(name)
    except __HOLE__, err:
        raise AttributeError, err
|
ValueError
|
dataset/ETHPy150Open kdart/pycopia/core/pycopia/OS/Win32/procfs.py/ProcStat.__getattr__
|
4,350 |
def __getitem__(self, name):
    try:
        return getattr(self, name)
    except __HOLE__, err:
        raise KeyError, err
|
AttributeError
|
dataset/ETHPy150Open kdart/pycopia/core/pycopia/OS/Win32/procfs.py/ProcStat.__getitem__
|
4,351 |
def tree(self):
    self.read()
    for p in self._ptable.values():
        try:
            self._ptable[p.ppid]._children.append(p.pid)
        except __HOLE__: # no child list yet
            self._ptable[p.ppid]._children = sortedlist([p.pid])
    pslist = self._tree_helper(self._ptable[0], 0, [])
    return "\n".join(pslist)

# recursive helper to indent according to child depth
|
AttributeError
|
dataset/ETHPy150Open kdart/pycopia/core/pycopia/OS/Win32/procfs.py/ProcStatTable.tree
|
4,352 |
def __getattr__ (self, key):
    try:
        return getattr (self.cursor, key)
    except __HOLE__:
        try:
            val = self.cursor[unicode (key)]
            if (type (val) == list) or (type (val) == dict):
                return MongoWrapper (self.cursor[unicode (key)])
            else:
                return val
        except KeyError:
            return None
|
AttributeError
|
dataset/ETHPy150Open dotskapes/dotSkapes/models/001_db.py/MongoWrapper.__getattr__
|
4,353 |
def validate_positive_float(option, value):
    """Validates that 'value' is a float, or can be converted to one, and is
    positive.
    """
    errmsg = "%s must be an integer or float" % (option,)
    try:
        value = float(value)
    except __HOLE__:
        raise ValueError(errmsg)
    except TypeError:
        raise TypeError(errmsg)
    # float('inf') doesn't work in 2.4 or 2.5 on Windows, so just cap floats at
    # one billion - this is a reasonable approximation for infinity
    if not 0 < value < 1e9:
        raise ValueError("%s must be greater than 0 and "
                         "less than one billion" % (option,))
    return value
|
ValueError
|
dataset/ETHPy150Open mongodb/mongo-python-driver/pymongo/common.py/validate_positive_float
|
4,354 |
def validate_read_preference_mode(dummy, name):
    """Validate read preference mode for a MongoReplicaSetClient.
    """
    try:
        return read_pref_mode_from_name(name)
    except __HOLE__:
        raise ValueError("%s is not a valid read preference" % (name,))
|
ValueError
|
dataset/ETHPy150Open mongodb/mongo-python-driver/pymongo/common.py/validate_read_preference_mode
|
4,355 |
def validate_uuid_representation(dummy, value):
    """Validate the uuid representation option selected in the URI.
    """
    try:
        return _UUID_REPRESENTATIONS[value]
    except __HOLE__:
        raise ValueError("%s is an invalid UUID representation. "
                         "Must be one of "
                         "%s" % (value, tuple(_UUID_REPRESENTATIONS)))
|
KeyError
|
dataset/ETHPy150Open mongodb/mongo-python-driver/pymongo/common.py/validate_uuid_representation
|
4,356 |
def validate_auth_mechanism_properties(option, value):
    """Validate authMechanismProperties."""
    value = validate_string(option, value)
    props = {}
    for opt in value.split(','):
        try:
            key, val = opt.split(':')
        except __HOLE__:
            raise ValueError("auth mechanism properties must be "
                             "key:value pairs like SERVICE_NAME:"
                             "mongodb, not %s." % (opt,))
        if key not in _MECHANISM_PROPS:
            raise ValueError("%s is not a supported auth "
                             "mechanism property. Must be one of "
                             "%s." % (key, tuple(_MECHANISM_PROPS)))
        props[key] = val
    return props
|
ValueError
|
dataset/ETHPy150Open mongodb/mongo-python-driver/pymongo/common.py/validate_auth_mechanism_properties
|
4,357 |
def get_validated_options(options, warn=True):
    """Validate each entry in options and raise a warning if it is not valid.
    Returns a copy of options with invalid entries removed
    """
    validated_options = {}
    for opt, value in iteritems(options):
        lower = opt.lower()
        try:
            validator = URI_VALIDATORS.get(lower, raise_config_error)
            value = validator(opt, value)
        except (__HOLE__, ConfigurationError) as exc:
            if warn:
                warnings.warn(str(exc))
            else:
                raise
        else:
            validated_options[lower] = value
    return validated_options
|
ValueError
|
dataset/ETHPy150Open mongodb/mongo-python-driver/pymongo/common.py/get_validated_options
|
4,358 |
def makedir(path, ignored=None, uid=-1, gid=-1):
    ignored = ignored or []
    try:
        os.makedirs(path)
    except __HOLE__ as error:
        if error.errno in ignored:
            pass
        else:
            # re-raise the original exception
            raise
    else:
        os.chown(path, uid, gid)
|
OSError
|
dataset/ETHPy150Open ceph/ceph-deploy/ceph_deploy/hosts/remotes.py/makedir
|
4,359 |
def get_file(path):
    """ fetch remote file """
    try:
        with file(path, 'rb') as f:
            return f.read()
    except __HOLE__:
        pass
|
IOError
|
dataset/ETHPy150Open ceph/ceph-deploy/ceph_deploy/hosts/remotes.py/get_file
|
4,360 |
def make_mon_removed_dir(path, file_name):
    """ move old monitor data """
    try:
        os.makedirs('/var/lib/ceph/mon-removed')
    except __HOLE__, e:
        if e.errno != errno.EEXIST:
            raise
    shutil.move(path, os.path.join('/var/lib/ceph/mon-removed/', file_name))
|
OSError
|
dataset/ETHPy150Open ceph/ceph-deploy/ceph_deploy/hosts/remotes.py/make_mon_removed_dir
|
4,361 |
def safe_mkdir(path, uid=-1, gid=-1):
    """ create path if it doesn't exist """
    try:
        os.mkdir(path)
    except __HOLE__, e:
        if e.errno == errno.EEXIST:
            pass
        else:
            raise
    else:
        os.chown(path, uid, gid)
|
OSError
|
dataset/ETHPy150Open ceph/ceph-deploy/ceph_deploy/hosts/remotes.py/safe_mkdir
|
4,362 |
def safe_makedirs(path, uid=-1, gid=-1):
    """ create path recursively if it doesn't exist """
    try:
        os.makedirs(path)
    except __HOLE__, e:
        if e.errno == errno.EEXIST:
            pass
        else:
            raise
    else:
        os.chown(path, uid, gid)
|
OSError
|
dataset/ETHPy150Open ceph/ceph-deploy/ceph_deploy/hosts/remotes.py/safe_makedirs
|
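The three ceph-deploy helpers above share the EAFP idiom: attempt the mkdir and treat only errno.EEXIST as benign. A Python 3 sketch of the same pattern (on Python 3.2+, os.makedirs(path, exist_ok=True) does this in one call):

import errno
import os
import tempfile

def safe_makedirs(path):
    try:
        os.makedirs(path)
    except OSError as e:
        if e.errno != errno.EEXIST:  # anything but 'already exists' is fatal
            raise

demo = os.path.join(tempfile.gettempdir(), 'safe-makedirs-demo', 'nested')
safe_makedirs(demo)
safe_makedirs(demo)  # idempotent: second call is a no-op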
4,363 |
def do_test_verbosity(self, parser, line, expected_verbosity):
    try:
        args = parser.parse_args(line.split())
    except __HOLE__:
        self.fail("Parsing arguments failed")
    self.assertEqual(args.verbosity, expected_verbosity)
|
SystemExit
|
dataset/ETHPy150Open ViDA-NYU/reprozip/scripts/test_bug_23058.py/Test23058.do_test_verbosity
|
4,364 |
def popitem(self):
    """
    Remove and return last item (key, value) duple
    If odict is empty raise KeyError
    """
    try:
        key = self._keys[-1]
    except __HOLE__:
        raise KeyError('Empty odict.')
    value = dict.__getitem__(self, key)
    del self[key]
    return (key, value)
|
IndexError
|
dataset/ETHPy150Open ioflo/ioflo/ioflo/aid/odicting.py/odict.popitem
|
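popitem above translates the IndexError from an empty key list into the KeyError a mapping caller expects. The same translation, as a runnable function over plain data structures:

def popitem(keys, mapping):
    """Pop the last-inserted (key, value) pair; raise KeyError when empty."""
    try:
        key = keys[-1]
    except IndexError:               # empty key list
        raise KeyError('Empty odict.')
    keys.pop()
    return (key, mapping.pop(key))

assert popitem(['a', 'b'], {'a': 1, 'b': 2}) == ('b', 2)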
4,365 |
def pop(self, key, *pa, **kwa):
    """
    If key exists remove and return the indexed element of the key element
    list else return the optional following positional argument.
    If the optional positional arg is not provided and key does not exit
    then raise KeyError. If provided the index keyword arg determines
    which value in the key element list to return. Default is last element.
    """
    index = kwa.get('index', -1)
    try:
        val = super(modict, self).pop(key)
    except __HOLE__:
        if pa:
            return pa[0]
        else:
            raise
    return val[index]
|
KeyError
|
dataset/ETHPy150Open ioflo/ioflo/ioflo/aid/odicting.py/modict.pop
|
4,366 |
def poplist(self, key, *pa):
    """
    If key exists remove and return keyed item's value list,
    else return the optional following positional argument.
    If the optional positional arg is not provided and key does not exit
    then raise KeyError.
    """
    try:
        val = super(modict, self).pop(key)
    except __HOLE__:
        if pa:
            return pa[0]
        else:
            raise
    return val
|
KeyError
|
dataset/ETHPy150Open ioflo/ioflo/ioflo/aid/odicting.py/modict.poplist
|
4,367 |
@expose('error.html')
def error(self, status):
    try:
        status = int(status)
    except __HOLE__:  # pragma: no cover
        status = 500
    message = getattr(status_map.get(status), 'explanation', '')
    return dict(status=status, message=message)
|
ValueError
|
dataset/ETHPy150Open openstack/wsme/tests/pecantest/test/controllers/root.py/RootController.error
|
4,368 |
def __init__(self, maxsize=0):
    try:
        import threading
    except __HOLE__:
        import dummy_threading as threading
    self._init(maxsize)
    # mutex must be held whenever the queue is mutating. All methods
    # that acquire mutex must release it before returning. mutex
    # is shared between the three conditions, so acquiring and
    # releasing the conditions also acquires and releases mutex.
    self.mutex = threading.Lock()
    # Notify not_empty whenever an item is added to the queue; a
    # thread waiting to get is notified then.
    self.not_empty = threading.Condition(self.mutex)
    # Notify not_full whenever an item is removed from the queue;
    # a thread waiting to put is notified then.
    self.not_full = threading.Condition(self.mutex)
    # Notify all_tasks_done whenever the number of unfinished tasks
    # drops to zero; thread waiting to join() is notified to resume
    self.all_tasks_done = threading.Condition(self.mutex)
    self.unfinished_tasks = 0
|
ImportError
|
dataset/ETHPy150Open babble/babble/include/jython/Lib/Queue.py/Queue.__init__
|
4,369 |
def load(self, stream):
    """ Load properties from an open file stream """
    # For the time being only accept file input streams
    if type(stream) is not file:
        raise TypeError,'Argument should be a file object!'
    # Check for the opened mode
    if stream.mode != 'r':
        raise ValueError,'Stream should be opened in read-only mode!'
    try:
        lines = stream.readlines()
        self.__parse(lines)
    except __HOLE__, e:
        raise
|
IOError
|
dataset/ETHPy150Open GluuFederation/community-edition-setup/Properties.py/Properties.load
|
4,370 |
def store(self, out, header=""):
    """ Write the properties list to the stream 'out' along
    with the optional 'header' """
    if out.mode[0] != 'w':
        raise ValueError,'Steam should be opened in write mode!'
    try:
        out.write(''.join(('#',header,'\n')))
        # Write timestamp
        tstamp = time.strftime('%a %b %d %H:%M:%S %Z %Y', time.localtime())
        out.write(''.join(('#',tstamp,'\n')))
        # Write properties from the pristine dictionary
        for prop, val in self._origprops.items():
            out.write(''.join((prop,'=',self.escape(val),'\n')))
        out.close()
    except __HOLE__, e:
        raise
|
IOError
|
dataset/ETHPy150Open GluuFederation/community-edition-setup/Properties.py/Properties.store
|
4,371 |
def __getattr__(self, name):
    """ For attributes not found in self, redirect
    to the properties dictionary """
    try:
        return self.__dict__[name]
    except __HOLE__:
        if hasattr(self._props,name):
            return getattr(self._props, name)
|
KeyError
|
dataset/ETHPy150Open GluuFederation/community-edition-setup/Properties.py/Properties.__getattr__
|
4,372 |
def static_image(relative_path, alt, **html_attrs):
    """Create an <img> tag for a path relative to the public directory.
    If keyword arg ``use_cache`` is false, don't use the global dimensions
    cache.
    """
    use_cache = html_attrs.pop("use_cache", True)
    if "width" not in html_attrs or "height" not in html_attrs:
        try:
            path = Path(config["pylons.paths"]["public_files"], relative_path)
            width, height = get_dimensions(path, use_cache)
        except __HOLE__:
            pass
        else:
            if width:
                html_attrs.setdefault("width", width)
            if height:
                html_attrs.setdefault("height", height)
    # @@MO Temporary kludge due to url_for ambiguity in Routes 1.
    src = "/" + relative_path
    return image(src, alt=alt, **html_attrs)
|
IOError
|
dataset/ETHPy150Open mikeorr/WebHelpers2/unfinished/multimedia.py/static_image
|
4,373 |
def open_image(image_path):
    """Open an image file in PIL, return the Image object.
    Return None if PIL doesn't recognize the file type.
    """
    try:
        im = Image.open(image_path)
    except __HOLE__, e:
        if str(e) == "cannot identify image file":
            return None
        else:
            raise
    except:
        m = "caught exception identifying '%s', assuming non-image:\n%s"
        e = traceback.format_exc()
        warn(m, image_path, e)
        return None
    return im
|
IOError
|
dataset/ETHPy150Open mikeorr/WebHelpers2/unfinished/multimedia.py/open_image
|
4,374 |
def make_thumb(image_path, width):
    """Make a thumbnail and save it in the same directory as the original.
    See get_thumb_path() for the arguments.
    @return The thumbnail filename, or None if PIL
    didn't recognize the image type.
    Does NOT work with PDF originals; use make_thumb_from_pdf for those.
    """
    dst = get_thumb_path(image_path, width)
    im = open_image(image_path)
    if im is None:
        return None
    orig_width, orig_height = im.size
    height = choose_height(width, orig_width, orig_height)
    if im.mode == 'P':
        im = im.convert()  # Convert GIF palette to RGB mode.
    try:
        im.thumbnail((width, height), Image.ANTIALIAS)
    except __HOLE__, e:
        reason = str(e)
        if RX_DECODER_NOT_AVAILABLE.search(reason):
            return None  # PIL error, cannot thumbnail.
        else:
            raise
    im.save(dst, THUMB_PIL_TYPE)
    return dst
|
IOError
|
dataset/ETHPy150Open mikeorr/WebHelpers2/unfinished/multimedia.py/make_thumb
|
4,375 |
def test_empty_cell(self):
    def f(): print(a)
    try:
        f.__closure__[0].cell_contents
    except __HOLE__:
        pass
    else:
        self.fail("shouldn't be able to read an empty cell")
    a = 12
|
ValueError
|
dataset/ETHPy150Open amrdraz/kodr/app/brython/www/src/Lib/test/test_funcattrs.py/FunctionPropertiesTest.test_empty_cell
|
4,376 |
def test___code__(self):
    num_one, num_two = 7, 8
    def a(): pass
    def b(): return 12
    def c(): return num_one
    def d(): return num_two
    def e(): return num_one, num_two
    for func in [a, b, c, d, e]:
        self.assertEqual(type(func.__code__), types.CodeType)
    self.assertEqual(c(), 7)
    self.assertEqual(d(), 8)
    d.__code__ = c.__code__
    self.assertEqual(c.__code__, d.__code__)
    self.assertEqual(c(), 7)
    # self.assertEqual(d(), 7)
    try:
        b.__code__ = c.__code__
    except __HOLE__:
        pass
    else:
        self.fail("__code__ with different numbers of free vars should "
                  "not be possible")
    try:
        e.__code__ = d.__code__
    except ValueError:
        pass
    else:
        self.fail("__code__ with different numbers of free vars should "
                  "not be possible")
|
ValueError
|
dataset/ETHPy150Open amrdraz/kodr/app/brython/www/src/Lib/test/test_funcattrs.py/FunctionPropertiesTest.test___code__
|
4,377 |
def test_func_default_args(self):
    def first_func(a, b):
        return a+b
    def second_func(a=1, b=2):
        return a+b
    self.assertEqual(first_func.__defaults__, None)
    self.assertEqual(second_func.__defaults__, (1, 2))
    first_func.__defaults__ = (1, 2)
    self.assertEqual(first_func.__defaults__, (1, 2))
    self.assertEqual(first_func(), 3)
    self.assertEqual(first_func(3), 5)
    self.assertEqual(first_func(3, 5), 8)
    del second_func.__defaults__
    self.assertEqual(second_func.__defaults__, None)
    try:
        second_func()
    except __HOLE__:
        pass
    else:
        self.fail("__defaults__ does not update; deleting it does not "
                  "remove requirement")
|
TypeError
|
dataset/ETHPy150Open amrdraz/kodr/app/brython/www/src/Lib/test/test_funcattrs.py/FunctionPropertiesTest.test_func_default_args
|
4,378 |
def test___func___non_method(self):
    # Behavior should be the same when a method is added via an attr
    # assignment
    self.fi.id = types.MethodType(id, self.fi)
    self.assertEqual(self.fi.id(), id(self.fi))
    # Test usage
    try:
        self.fi.id.unknown_attr
    except __HOLE__:
        pass
    else:
        self.fail("using unknown attributes should raise AttributeError")
    # Test assignment and deletion
    self.cannot_set_attr(self.fi.id, 'unknown_attr', 2, AttributeError)
|
AttributeError
|
dataset/ETHPy150Open amrdraz/kodr/app/brython/www/src/Lib/test/test_funcattrs.py/InstancemethodAttrTest.test___func___non_method
|
4,379 |
def test_set_attr(self):
    self.b.known_attr = 7
    self.assertEqual(self.b.known_attr, 7)
    try:
        self.fi.a.known_attr = 7
    except __HOLE__:
        pass
    else:
        self.fail("setting attributes on methods should raise error")
|
AttributeError
|
dataset/ETHPy150Open amrdraz/kodr/app/brython/www/src/Lib/test/test_funcattrs.py/ArbitraryFunctionAttrTest.test_set_attr
|
4,380 |
def test_delete_unknown_attr(self):
    try:
        del self.b.unknown_attr
    except __HOLE__:
        pass
    else:
        self.fail("deleting unknown attribute should raise TypeError")
|
AttributeError
|
dataset/ETHPy150Open amrdraz/kodr/app/brython/www/src/Lib/test/test_funcattrs.py/ArbitraryFunctionAttrTest.test_delete_unknown_attr
|
4,381 |
def test_unset_attr(self):
    for func in [self.b, self.fi.a]:
        try:
            func.non_existent_attr
        except __HOLE__:
            pass
        else:
            self.fail("using unknown attributes should raise "
                      "AttributeError")
|
AttributeError
|
dataset/ETHPy150Open amrdraz/kodr/app/brython/www/src/Lib/test/test_funcattrs.py/ArbitraryFunctionAttrTest.test_unset_attr
|
4,382 |
def test_delete___dict__(self):
    try:
        del self.b.__dict__
    except __HOLE__:
        pass
    else:
        self.fail("deleting function dictionary should raise TypeError")
|
TypeError
|
dataset/ETHPy150Open amrdraz/kodr/app/brython/www/src/Lib/test/test_funcattrs.py/FunctionDictsTest.test_delete___dict__
|
4,383 |
def got_cancel(self, index, begin, length):
    try:
        self.buffer.remove((index, begin, length))
    except __HOLE__:
        pass
|
ValueError
|
dataset/ETHPy150Open Cclleemm/FriendlyTorrent/src/tornado/BitTornado/BT1/Uploader.py/Upload.got_cancel
|
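list.remove raises ValueError when the element is absent; swallowing it makes cancelling a request idempotent. A runnable sketch of the row above:

def got_cancel(buffer, index, begin, length):
    try:
        buffer.remove((index, begin, length))
    except ValueError:   # request was never queued (or already served)
        pass

buf = [(0, 0, 16384)]
got_cancel(buf, 0, 0, 16384)  # removes the pending request
got_cancel(buf, 0, 0, 16384)  # duplicate cancel is a harmless no-op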
4,384 |
def __init__(self, filename, width=None, height=None, kind='direct',
             mask=None, lazy=True, srcinfo=None):
    client, uri = srcinfo
    cache = self.source_filecache.setdefault(client, {})
    pdffname = cache.get(filename)
    if pdffname is None:
        tmpf, pdffname = tempfile.mkstemp(suffix='.pdf')
        os.close(tmpf)
        client.to_unlink.append(pdffname)
        cache[filename] = pdffname
        cmd = [progname, os.path.abspath(filename), '-A', pdffname]
        try:
            subprocess.call(cmd)
        except __HOLE__, e:
            log.error("Failed to run command: %s", ' '.join(cmd))
            raise
    self.load_xobj((client, pdffname))
    pdfuri = uri.replace(filename, pdffname)
    pdfsrc = client, pdfuri
    VectorPdf.__init__(self, pdfuri, width, height, kind, mask, lazy, pdfsrc)
|
OSError
|
dataset/ETHPy150Open rst2pdf/rst2pdf/rst2pdf/extensions/inkscape_r2p.py/InkscapeImage.__init__
|
4,385 |
@classmethod
def raster(self, filename, client):
    """Returns a URI to a rasterized version of the image"""
    cache = self.source_filecache.setdefault(client, {})
    pngfname = cache.get(filename+'_raster')
    if pngfname is None:
        tmpf, pngfname = tempfile.mkstemp(suffix='.png')
        os.close(tmpf)
        client.to_unlink.append(pngfname)
        cache[filename+'_raster'] = pngfname
        cmd = [progname, os.path.abspath(filename), '-e', pngfname, '-d', str(client.def_dpi)]
        try:
            subprocess.call(cmd)
            return pngfname
        except __HOLE__, e:
            log.error("Failed to run command: %s", ' '.join(cmd))
            raise
    return None
|
OSError
|
dataset/ETHPy150Open rst2pdf/rst2pdf/rst2pdf/extensions/inkscape_r2p.py/InkscapeImage.raster
|
4,386 |
def formatday(
self, day, weekday,
day_template='happenings/partials/calendar/day_cell.html',
noday_template='happenings/partials/calendar/day_noday_cell.html',
popover_template='happenings/partials/calendar/popover.html',
):
"""Return a day as a table cell."""
super(EventCalendar, self).formatday(day, weekday)
now = get_now()
context = self.get_context()
context['events'] = []
context['day'] = day
context['day_url'] = self.get_day_url(day)
context['month_start_date'] = date(self.yr, self.mo, 1)
context['weekday'] = weekday
context['cssclass'] = self.cssclasses[weekday]
context['popover_template'] = popover_template
context['num_events'] = len(self.count.get(day, [])),
try:
processed_date = date(self.yr, self.mo, day)
except __HOLE__:
# day is out of range for month
processed_date = None
context['month_start_date'] = date(self.yr, self.mo, 1)
if day == 0:
template = noday_template
else:
template = day_template
if now.date() == processed_date:
context['is_current_day'] = True
if processed_date and (day in self.count):
for item in self.count[day]:
self.pk = item[1]
self.title = item[0]
for event in self.events:
if event.pk == self.pk:
event.check_if_cancelled(processed_date)
# allows use of event.last_check_if_cancelled and populates event.title.extra
context['events'].append(event)
return render_to_string(template, context)
|
ValueError
|
dataset/ETHPy150Open wreckage/django-happenings/happenings/utils/calendars.py/EventCalendar.formatday
|
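formatday relies on datetime.date rejecting out-of-range days with ValueError — calendar grids pad rows with day 0, which is never a valid day of the month; a minimal check:

from datetime import date

for day in (0, 15, 32):
    try:
        print(day, '->', date(2024, 2, day))
    except ValueError:
        print(day, '-> day is out of range for month')  # 0 and 32 land here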
4,387 |
def _load_assets(self):
try:
with open(self.config['STATS_FILE']) as f:
return json.load(f)
except __HOLE__:
raise IOError(
'Error reading {0}. Are you sure webpack has generated '
'the file and the path is correct?'.format(
self.config['STATS_FILE']))
|
IOError
|
dataset/ETHPy150Open owais/django-webpack-loader/webpack_loader/loader.py/WebpackLoader._load_assets
|
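The loader record catches the IOError from open() and re-raises it with a friendlier message; the same pattern in isolation (the stats path is hypothetical, and on Python 3 IOError is an alias of OSError):

import json

STATS_FILE = 'webpack-stats.json'  # hypothetical path

try:
    with open(STATS_FILE) as f:
        stats = json.load(f)
except IOError as e:
    raise IOError('Error reading {0}. Are you sure webpack has generated '
                  'the file and the path is correct? ({1})'.format(STATS_FILE, e))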
4,388 |
def sort_fields(fields):
"""Sort fields by their column_number but put children after parents.
"""
fathers = [(key, val) for key, val in
sorted(fields.items(), key=lambda k: k[1]['column_number'])
if 'auto_generated' not in val]
children = [(key, val) for key, val in
sorted(fields.items(), key=lambda k: k[1]['column_number'])
if 'auto_generated' in val]
children.reverse()
fathers_keys = [father[0] for father in fathers]
for child in children:
try:
index = fathers_keys.index(child[1]['parent_ids'][0])
except __HOLE__:
index = -1
if index >= 0:
fathers.insert(index + 1, child)
else:
fathers.append(child)
return fathers
|
ValueError
|
dataset/ETHPy150Open bigmlcom/python/bigml/util.py/sort_fields
|
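sort_fields maps a missing parent id to index -1 via the ValueError that list.index raises; the idiom on its own:

fathers_keys = ['a', 'b', 'c']

def position(key):
    try:
        return fathers_keys.index(key)  # ValueError when key is absent
    except ValueError:
        return -1                       # sentinel: caller appends at the end

print(position('b'))  # 1
print(position('z'))  # -1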
4,389 |
def cast(input_data, fields):
"""Checks expected type in input data values, strips affixes and casts
"""
for (key, value) in input_data.items():
if (
(fields[key]['optype'] == 'numeric' and
isinstance(value, basestring)) or
(fields[key]['optype'] != 'numeric' and
not isinstance(value, basestring))):
try:
if fields[key]['optype'] == 'numeric':
value = strip_affixes(value, fields[key])
input_data.update({key:
map_type(fields[key]
['optype'])(value)})
except __HOLE__:
raise ValueError(u"Mismatch input data type in field "
u"\"%s\" for value %s." %
(fields[key]['name'],
value))
|
ValueError
|
dataset/ETHPy150Open bigmlcom/python/bigml/util.py/cast
|
4,390 |
def maybe_save(resource_id, path,
code=None, location=None,
resource=None, error=None):
"""Builds the resource dict response and saves it if a path is provided.
The resource is saved in a local repo json file in the given path.
"""
resource = resource_structure(code, resource_id, location, resource, error)
if path is not None and resource_id is not None:
try:
resource_json = json.dumps(resource)
except __HOLE__:
print "The resource has an invalid JSON format"
try:
resource_file_name = "%s%s%s" % (path, os.sep,
resource_id.replace('/', '_'))
with open(resource_file_name, "wb", 0) as resource_file:
resource_file.write(resource_json.encode('UTF-8'))
except IOError:
print "Failed writing resource to %s" % resource_file_name
return resource
|
ValueError
|
dataset/ETHPy150Open bigmlcom/python/bigml/util.py/maybe_save
|
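In maybe_save, json.dumps raises ValueError most commonly for a circular reference (a merely unserializable object raises TypeError instead); a minimal reproduction:

import json

resource = {'code': 200}
resource['self'] = resource  # introduce a cycle

try:
    json.dumps(resource)
except ValueError as e:
    print('The resource has an invalid JSON format:', e)  # Circular reference detected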
4,391 |
def synchronized(obj, lock=None):
assert not isinstance(obj, SynchronizedBase), 'object already synchronized'
if isinstance(obj, ctypes._SimpleCData):
return Synchronized(obj, lock)
elif isinstance(obj, ctypes.Array):
if obj._type_ is ctypes.c_char:
return SynchronizedString(obj, lock)
return SynchronizedArray(obj, lock)
else:
cls = type(obj)
try:
scls = class_cache[cls]
except __HOLE__:
names = [field[0] for field in cls._fields_]
d = dict((name, make_property(name)) for name in names)
classname = 'Synchronized' + cls.__name__
scls = class_cache[cls] = type(classname, (SynchronizedBase,), d)
return scls(obj, lock)
|
KeyError
|
dataset/ETHPy150Open deanhiller/databus/webapp/play1.3.x/python/Lib/multiprocessing/sharedctypes.py/synchronized
|
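Both sharedctypes records use the EAFP cache idiom: try the lookup first and build-and-store the value only when KeyError fires; a simplified sketch of the class cache:

class_cache = {}

def synchronized_class(cls):
    try:
        return class_cache[cls]  # cache hit: no class creation
    except KeyError:             # cache miss: build the wrapper type once
        scls = class_cache[cls] = type('Synchronized' + cls.__name__, (cls,), {})
        return scls

class Point(object):
    pass

assert synchronized_class(Point) is synchronized_class(Point)  # second call is cached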
4,392 |
def make_property(name):
try:
return prop_cache[name]
except __HOLE__:
d = {}
exec template % ((name,)*7) in d
prop_cache[name] = d[name]
return d[name]
|
KeyError
|
dataset/ETHPy150Open deanhiller/databus/webapp/play1.3.x/python/Lib/multiprocessing/sharedctypes.py/make_property
|
4,393 |
def test_elementwise_multiply_broadcast(self):
A = array([4])
B = array([[-9]])
C = array([1,-1,0])
D = array([[7,9,-9]])
E = array([[3],[2],[1]])
F = array([[8,6,3],[-4,3,2],[6,6,6]])
G = [1, 2, 3]
H = np.ones((3, 4))
J = H.T
K = array([[0]])
# Rank 1 arrays can't be cast as spmatrices (A and C) so leave
# them out.
Bsp = self.spmatrix(B)
Dsp = self.spmatrix(D)
Esp = self.spmatrix(E)
Fsp = self.spmatrix(F)
Hsp = self.spmatrix(H)
Hspp = self.spmatrix(H[0,None])
Jsp = self.spmatrix(J)
Jspp = self.spmatrix(J[:,0,None])
Ksp = self.spmatrix(K)
matrices = [A, B, C, D, E, F, G, H, J, K]
spmatrices = [Bsp, Dsp, Esp, Fsp, Hsp, Hspp, Jsp, Jspp, Ksp]
# sparse/sparse
for i in spmatrices:
for j in spmatrices:
try:
dense_mult = np.multiply(i.todense(), j.todense())
except __HOLE__:
assert_raises(ValueError, i.multiply, j)
continue
sp_mult = i.multiply(j)
if isspmatrix(sp_mult):
assert_almost_equal(sp_mult.todense(), dense_mult)
else:
assert_almost_equal(sp_mult, dense_mult)
# sparse/dense
for i in spmatrices:
for j in matrices:
try:
dense_mult = np.multiply(i.todense(), j)
except ValueError:
assert_raises(ValueError, i.multiply, j)
continue
sp_mult = i.multiply(j)
if isspmatrix(sp_mult):
assert_almost_equal(sp_mult.todense(), dense_mult)
else:
assert_almost_equal(sp_mult, dense_mult)
|
ValueError
|
dataset/ETHPy150Open scipy/scipy/scipy/sparse/tests/test_base.py/_TestCommon.test_elementwise_multiply_broadcast
|
4,394 |
def test_copy(self):
# Check whether the copy=True and copy=False keywords work
A = self.datsp
# check that copy preserves format
assert_equal(A.copy().format, A.format)
assert_equal(A.__class__(A,copy=True).format, A.format)
assert_equal(A.__class__(A,copy=False).format, A.format)
assert_equal(A.copy().todense(), A.todense())
assert_equal(A.__class__(A,copy=True).todense(), A.todense())
assert_equal(A.__class__(A,copy=False).todense(), A.todense())
# check that XXX_matrix.toXXX() works
toself = getattr(A,'to' + A.format)
assert_equal(toself().format, A.format)
assert_equal(toself(copy=True).format, A.format)
assert_equal(toself(copy=False).format, A.format)
assert_equal(toself().todense(), A.todense())
assert_equal(toself(copy=True).todense(), A.todense())
assert_equal(toself(copy=False).todense(), A.todense())
# check whether the data is copied?
# TODO: deal with non-indexable types somehow
B = A.copy()
try:
B[0,0] += 1
assert_(B[0,0] != A[0,0])
except __HOLE__:
# not all sparse matrices can be indexed
pass
except TypeError:
# not all sparse matrices can be indexed
pass
|
NotImplementedError
|
dataset/ETHPy150Open scipy/scipy/scipy/sparse/tests/test_base.py/_TestCommon.test_copy
|
4,395 |
def test_binary_ufunc_overrides(self):
# data
a = np.array([[1, 2, 3],
[4, 5, 0],
[7, 8, 9]])
b = np.array([[9, 8, 7],
[6, 0, 0],
[3, 2, 1]])
c = 1.0
d = 1 + 2j
e = 5
asp = self.spmatrix(a)
bsp = self.spmatrix(b)
a_items = dict(dense=a, scalar=c, cplx_scalar=d, int_scalar=e, sparse=asp)
b_items = dict(dense=b, scalar=c, cplx_scalar=d, int_scalar=e, sparse=bsp)
@dec.skipif(not HAS_NUMPY_UFUNC, "feature requires Numpy with __numpy_ufunc__")
def check(i, j, dtype):
ax = a_items[i]
bx = b_items[j]
if issparse(ax):
ax = ax.astype(dtype)
if issparse(bx):
bx = bx.astype(dtype)
a = todense(ax)
b = todense(bx)
def check_one(ufunc, allclose=False):
# without out argument
expected = ufunc(a, b)
got = ufunc(ax, bx)
if allclose:
assert_allclose(todense(got), expected,
rtol=5e-15, atol=0)
else:
assert_array_equal(todense(got), expected)
# with out argument
out = np.zeros(got.shape, dtype=got.dtype)
out.fill(np.nan)
got = ufunc(ax, bx, out=out)
assert_(got is out)
if allclose:
assert_allclose(todense(got), expected,
rtol=5e-15, atol=0)
else:
assert_array_equal(todense(got), expected)
out = csr_matrix(got.shape, dtype=out.dtype)
out[0,:] = 999
got = ufunc(ax, bx, out=out)
assert_(got is out)
if allclose:
assert_allclose(todense(got), expected,
rtol=5e-15, atol=0)
else:
assert_array_equal(todense(got), expected)
# -- associative
# multiply
check_one(np.multiply)
# add
if isscalarlike(ax) or isscalarlike(bx):
try:
check_one(np.add)
except __HOLE__:
# Not implemented for all spmatrix types
pass
else:
check_one(np.add)
# maximum
check_one(np.maximum)
# minimum
check_one(np.minimum)
# -- non-associative
# dot
check_one(np.dot)
# subtract
if isscalarlike(ax) or isscalarlike(bx):
try:
check_one(np.subtract)
except NotImplementedError:
# Not implemented for all spmatrix types
pass
else:
check_one(np.subtract)
# divide
with np.errstate(divide='ignore', invalid='ignore'):
if isscalarlike(bx):
# Rounding error may be different, as the sparse implementation
# computes a/b -> a * (1/b) if b is a scalar
check_one(np.divide, allclose=True)
else:
check_one(np.divide)
# true_divide
if isscalarlike(bx):
check_one(np.true_divide, allclose=True)
else:
check_one(np.true_divide)
for i in a_items.keys():
for j in b_items.keys():
for dtype in [np.int_, np.float_, np.complex_]:
if i == 'sparse' or j == 'sparse':
yield check, i, j, dtype
|
NotImplementedError
|
dataset/ETHPy150Open scipy/scipy/scipy/sparse/tests/test_base.py/_TestCommon.test_binary_ufunc_overrides
|
4,396 |
def test_non_unit_stride_2d_indexing(self):
# Regression test -- used to silently ignore the stride.
v0 = np.random.rand(50, 50)
try:
v = self.spmatrix(v0)[0:25:2, 2:30:3]
except __HOLE__:
# if unsupported
raise nose.SkipTest("feature not implemented")
assert_array_equal(v.todense(),
v0[0:25:2, 2:30:3])
|
ValueError
|
dataset/ETHPy150Open scipy/scipy/scipy/sparse/tests/test_base.py/_TestSlicing.test_non_unit_stride_2d_indexing
|
4,397 |
def _possibly_unimplemented(cls, require=True):
"""
Construct a class that either runs tests as usual (require=True),
or each method raises SkipTest if it encounters a common error.
"""
if require:
return cls
else:
def wrap(fc):
def wrapper(*a, **kw):
try:
return fc(*a, **kw)
except (NotImplementedError, TypeError, __HOLE__,
IndexError, AttributeError):
raise nose.SkipTest("feature not implemented")
wrapper.__name__ = fc.__name__
return wrapper
new_dict = dict(cls.__dict__)
for name, func in cls.__dict__.items():
if name.startswith('test_'):
new_dict[name] = wrap(func)
return type(cls.__name__ + "NotImplemented",
cls.__bases__,
new_dict)
|
ValueError
|
dataset/ETHPy150Open scipy/scipy/scipy/sparse/tests/test_base.py/_possibly_unimplemented
|
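_possibly_unimplemented converts common "not implemented" failures into skipped tests; a stripped-down sketch of the same wrapping trick, substituting unittest.SkipTest for the nose dependency:

import unittest

def skip_if_unimplemented(fc):
    def wrapper(*a, **kw):
        try:
            return fc(*a, **kw)
        except (NotImplementedError, TypeError, ValueError,
                IndexError, AttributeError):
            raise unittest.SkipTest('feature not implemented')
    wrapper.__name__ = fc.__name__
    return wrapper

@skip_if_unimplemented
def test_fancy_indexing():
    raise NotImplementedError  # stands in for an unsupported sparse operation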
4,398 |
def test_scalar_idx_dtype(self):
# Check that index dtype takes into account all parameters
# passed to sparsetools, including the scalar ones
indptr = np.zeros(2, dtype=np.int32)
indices = np.zeros(0, dtype=np.int32)
vals = np.zeros((0, 1, 1))
a = bsr_matrix((vals, indices, indptr), shape=(1, 2**31-1))
b = bsr_matrix((vals, indices, indptr), shape=(1, 2**31))
c = bsr_matrix((1, 2**31-1))
d = bsr_matrix((1, 2**31))
assert_equal(a.indptr.dtype, np.int32)
assert_equal(b.indptr.dtype, np.int64)
assert_equal(c.indptr.dtype, np.int32)
assert_equal(d.indptr.dtype, np.int64)
try:
vals2 = np.zeros((0, 1, 2**31-1))
vals3 = np.zeros((0, 1, 2**31))
e = bsr_matrix((vals2, indices, indptr), shape=(1, 2**31-1))
f = bsr_matrix((vals3, indices, indptr), shape=(1, 2**31))
assert_equal(e.indptr.dtype, np.int32)
assert_equal(f.indptr.dtype, np.int64)
except (MemoryError, __HOLE__):
# May fail on 32-bit Python
e = 0
f = 0
# These shouldn't fail
for x in [a, b, c, d, e, f]:
x + x
|
ValueError
|
dataset/ETHPy150Open scipy/scipy/scipy/sparse/tests/test_base.py/TestBSR.test_scalar_idx_dtype
|
4,399 |
def clean(self, value):
super(CAProvinceField, self).clean(value)
if value in EMPTY_VALUES:
return ''
try:
value = value.strip().lower()
except __HOLE__:
pass
else:
# Load data in memory only when it is required, see also #17275
from .ca_provinces import PROVINCES_NORMALIZED
try:
return PROVINCES_NORMALIZED[value.strip().lower()]
except KeyError:
pass
raise ValidationError(self.error_messages['invalid'])
|
AttributeError
|
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-1.5/django/contrib/localflavor/ca/forms.py/CAProvinceField.clean
|
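CAProvinceField.clean leans on AttributeError to detect non-string input — only objects with .strip() and .lower() reach the lookup; the guard in isolation:

def normalize(value):
    try:
        value = value.strip().lower()  # AttributeError for non-strings
    except AttributeError:
        return None                    # e.g. an int slipped through validation
    return value

print(normalize('  ON '))  # 'on'
print(normalize(42))       # None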