Unnamed: 0 (int64, 0–10k) | function (string, 79–138k chars) | label (string, 20 classes) | info (string, 42–261 chars)
---|---|---|---|
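Each row below pairs a Python function whose `except` clause has been masked as `__HOLE__` with the exception class (`label`) that fills the mask, plus a provenance path (`info`). As rough orientation, a minimal sketch of consuming such a table with the `datasets` library; the hub ID is hypothetical, since this dump does not name one:

```python
from datasets import load_dataset

# Hypothetical dataset ID: this dump does not name the actual hub entry.
ds = load_dataset("example-org/exception-type-prediction", split="train")
for row in ds.select(range(3)):
    masked = row["function"]   # source containing `except __HOLE__:`
    answer = row["label"]      # e.g. "ValueError"
    print(answer, "<-", row["info"])
```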
4,600 |
def generate_matches(self, nodes):
"""
Generator yielding matches for a sequence of nodes.
Args:
nodes: sequence of nodes
Yields:
(count, results) tuples where:
count: the match comprises nodes[:count];
results: dict containing named submatches.
"""
if self.content is None:
# Shortcut for special case (see __init__.__doc__)
for count in xrange(self.min, 1 + min(len(nodes), self.max)):
r = {}
if self.name:
r[self.name] = nodes[:count]
yield count, r
elif self.name == "bare_name":
yield self._bare_name_matches(nodes)
else:
# The reason for this is that hitting the recursion limit usually
# results in some ugly messages about how RuntimeErrors are being
# ignored. We don't do this on non-CPython implementation because
# they don't have this problem.
if hasattr(sys, "getrefcount"):
save_stderr = sys.stderr
sys.stderr = StringIO()
try:
for count, r in self._recursive_matches(nodes, 0):
if self.name:
r[self.name] = nodes[:count]
yield count, r
except __HOLE__:
# We fall back to the iterative pattern matching scheme if the recursive
# scheme hits the recursion limit.
for count, r in self._iterative_matches(nodes):
if self.name:
r[self.name] = nodes[:count]
yield count, r
finally:
if hasattr(sys, "getrefcount"):
sys.stderr = save_stderr
|
RuntimeError
|
dataset/ETHPy150Open ctxis/canape/CANAPE.Scripting/Lib/lib2to3/pytree.py/WildcardPattern.generate_matches
|
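The snippet's fallback is worth isolating: try a recursive strategy and switch to an iterative one when the interpreter's recursion limit is hit. A minimal sketch of the same pattern; note that Python 3.5+ raises `RecursionError`, a subclass of `RuntimeError`, so catching `RuntimeError` covers both old and new interpreters:

```python
import sys

def depth_recursive(node):
    # Recurse into nested lists; very deep inputs exhaust the stack.
    return 1 + max((depth_recursive(child) for child in node), default=0)

def depth_iterative(node):
    # Explicit stack: immune to the recursion limit.
    best, stack = 0, [(node, 1)]
    while stack:
        current, d = stack.pop()
        best = max(best, d)
        stack.extend((child, d + 1) for child in current)
    return best

def depth(node):
    try:
        return depth_recursive(node)
    except RuntimeError:          # RecursionError on deep inputs
        return depth_iterative(node)

deep = []
for _ in range(sys.getrecursionlimit() * 2):
    deep = [deep]
print(depth(deep))                # works despite the recursion limit
```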
4,601 |
def _detect_environment():
# ## -eventlet-
if 'eventlet' in sys.modules:
try:
from eventlet.patcher import is_monkey_patched as is_eventlet
import socket
if is_eventlet(socket):
return 'eventlet'
except __HOLE__:
pass
# ## -gevent-
if 'gevent' in sys.modules:
try:
from gevent import socket as _gsocket
import socket
if socket.socket is _gsocket.socket:
return 'gevent'
except ImportError:
pass
return 'default'
|
ImportError
|
dataset/ETHPy150Open celery/kombu/kombu/syn.py/_detect_environment
|
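The same `ImportError` guard generalizes to any optional dependency: attempt the import inside `try` and treat failure as "feature absent". A small sketch under that assumption:

```python
def detect_json_backend():
    """Prefer a faster third-party JSON module when it is importable."""
    try:
        import ujson            # optional accelerator; may be missing
        return 'ujson'
    except ImportError:
        import json             # stdlib fallback is always present
        return 'json'

print(detect_json_backend())
```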
4,602 |
def interaction_plot(x, trace, response, func=np.mean, ax=None, plottype='b',
xlabel=None, ylabel=None, colors=[], markers=[],
linestyles=[], legendloc='best', legendtitle=None,
**kwargs):
"""
Interaction plot for factor level statistics.
Note: if categorical factors are supplied, levels will be internally
recoded to integers. This ensures matplotlib compatibility.
uses pandas.DataFrame to calculate an `aggregate` statistic for each
level of the factor or group given by `trace`.
Parameters
----------
x : array-like
The `x` factor levels constitute the x-axis. If a `pandas.Series` is
given its name will be used in `xlabel` if `xlabel` is None.
trace : array-like
The `trace` factor levels will be drawn as lines in the plot.
If `trace` is a `pandas.Series` its name will be used as the
`legendtitle` if `legendtitle` is None.
response : array-like
The response or dependent variable. If a `pandas.Series` is given
its name will be used in `ylabel` if `ylabel` is None.
func : function
Anything accepted by `pandas.DataFrame.aggregate`. This is applied to
the response variable grouped by the trace levels.
plottype : str {'line', 'scatter', 'both'}, optional
The type of plot to return. Can be 'l', 's', or 'b'
ax : axes, optional
Matplotlib axes instance
xlabel : str, optional
Label to use for `x`. Default is 'X'. If `x` is a `pandas.Series` it
will use the series names.
ylabel : str, optional
Label to use for `response`. Default is 'func of response'. If
`response` is a `pandas.Series` it will use the series names.
colors : list, optional
If given, must have length == number of levels in trace.
linestyles : list, optional
If given, must have length == number of levels in trace.
markers : list, optional
If given, must have length == number of levels in trace.
kwargs
These will be passed to the plot command used, either `plot` or `scatter`.
If you want to control the overall plotting options, use kwargs.
Returns
-------
fig : Figure
The figure given by `ax.figure` or a new instance.
Examples
--------
>>> import numpy as np
>>> np.random.seed(12345)
>>> weight = np.random.randint(1,4,size=60)
>>> duration = np.random.randint(1,3,size=60)
>>> days = np.log(np.random.randint(1,30, size=60))
>>> fig = interaction_plot(weight, duration, days,
... colors=['red','blue'], markers=['D','^'], ms=10)
>>> import matplotlib.pyplot as plt
>>> plt.show()
.. plot::
import numpy as np
from statsmodels.graphics.factorplots import interaction_plot
np.random.seed(12345)
weight = np.random.randint(1,4,size=60)
duration = np.random.randint(1,3,size=60)
days = np.log(np.random.randint(1,30, size=60))
fig = interaction_plot(weight, duration, days,
colors=['red','blue'], markers=['D','^'], ms=10)
import matplotlib.pyplot as plt
#plt.show()
"""
from pandas import DataFrame
fig, ax = utils.create_mpl_ax(ax)
response_name = ylabel or getattr(response, 'name', 'response')
ylabel = '%s of %s' % (get_function_name(func), response_name)
xlabel = xlabel or getattr(x, 'name', 'X')
legendtitle = legendtitle or getattr(trace, 'name', 'Trace')
ax.set_ylabel(ylabel)
ax.set_xlabel(xlabel)
x_values = x_levels = None
if isinstance(x[0], str):
x_levels = [l for l in np.unique(x)]
x_values = lrange(len(x_levels))
x = _recode(x, dict(zip(x_levels, x_values)))
data = DataFrame(dict(x=x, trace=trace, response=response))
plot_data = data.groupby(['trace', 'x']).aggregate(func).reset_index()
# return data
# check plot args
n_trace = len(plot_data['trace'].unique())
if linestyles:
try:
assert len(linestyles) == n_trace
except AssertionError as err:
raise ValueError("Must be a linestyle for each trace level")
else: # set a default
linestyles = ['-'] * n_trace
if markers:
try:
assert len(markers) == n_trace
except __HOLE__ as err:
raise ValueError("Must be a linestyle for each trace level")
else: # set a default
markers = ['.'] * n_trace
if colors:
try:
assert len(colors) == n_trace
except AssertionError as err:
raise ValueError("Must be a color for each trace level")
else: # set a default
#TODO: how to get n_trace different colors?
colors = rainbow(n_trace)
if plottype == 'both' or plottype == 'b':
for i, (values, group) in enumerate(plot_data.groupby(['trace'])):
# trace label
label = str(group['trace'].values[0])
ax.plot(group['x'], group['response'], color=colors[i],
marker=markers[i], label=label,
linestyle=linestyles[i], **kwargs)
elif plottype == 'line' or plottype == 'l':
for i, (values, group) in enumerate(plot_data.groupby(['trace'])):
# trace label
label = str(group['trace'].values[0])
ax.plot(group['x'], group['response'], color=colors[i],
label=label, linestyle=linestyles[i], **kwargs)
elif plottype == 'scatter' or plottype == 's':
for i, (values, group) in enumerate(plot_data.groupby(['trace'])):
# trace label
label = str(group['trace'].values[0])
ax.scatter(group['x'], group['response'], color=colors[i],
label=label, marker=markers[i], **kwargs)
else:
raise ValueError("Plot type %s not understood" % plottype)
ax.legend(loc=legendloc, title=legendtitle)
ax.margins(.1)
if all([x_levels, x_values]):
ax.set_xticks(x_values)
ax.set_xticklabels(x_levels)
return fig
|
AssertionError
|
dataset/ETHPy150Open statsmodels/statsmodels/statsmodels/graphics/factorplots.py/interaction_plot
|
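The validation idiom above (assert a length, then convert `AssertionError` into `ValueError`) can be written without the detour; a sketch of the equivalent explicit check, which also survives `python -O` (asserts are stripped under `-O`):

```python
def check_per_trace(items, n_trace, what):
    # Raise ValueError directly instead of routing through assert.
    if items and len(items) != n_trace:
        raise ValueError('Must supply one %s per trace level' % what)

check_per_trace(['-', '--'], 2, 'linestyle')   # passes silently
# check_per_trace(['-'], 2, 'linestyle')       # would raise ValueError
```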
4,603 |
def _post_hook(self, msg, urls):
body = json.dumps(msg)
headers = { "Content-Length": str(len(body)),
"Content-Type": "application/json" }
client = HTTPClient()
for url in urls:
try:
client.fetch(url, method="POST", headers=headers,
body=body)
except __HOLE__:
# for now we ignore all http errors.
pass
|
HTTPError
|
dataset/ETHPy150Open benoitc/gaffer/gaffer/webhooks.py/WebHooks._post_hook
|
4,604 |
def realm_list(request, id=None):
"""List all realms, or the given realm if id!=None."""
if id == None:
realms = Policy.objects.all()
else:
try:
realms = [Policy.objects.get(id=id)]
except ValueError:
# id wasn't an int, which it should be
realms = []
except __HOLE__:
# id not found in database
realms = []
return render_to_response(
'dcmux/realms.xml',
{
"realms_uri": uri_lookup(request, "realms"),
"realms": realms,
},
mimetype="application/xml",
)
|
ObjectDoesNotExist
|
dataset/ETHPy150Open bmbouter/Opus/opus/project/dcmux/views.py/realm_list
|
4,605 |
def image_list(request, id=None):
"""List all aggregate images, or the given image if id!=None."""
if id == None:
images = AggregateImage.objects.all()
else:
try:
images = [AggregateImage.objects.get(id=id)]
except __HOLE__:
# id wasn't an int, which it should be
images = []
except ObjectDoesNotExist:
# id not found in database
images = []
return render_to_response(
'dcmux/images.xml',
{
"images_uri": uri_lookup(request, "images"),
"images": images,
},
mimetype="application/xml",
)
|
ValueError
|
dataset/ETHPy150Open bmbouter/Opus/opus/project/dcmux/views.py/image_list
|
4,606 |
def instance_list(request, id=None):
"""List all instances, or the given instance if id!=None."""
if id == None:
instances = Instance.objects.all()
else:
try:
instances = [Instance.objects.get(id=id)]
except __HOLE__:
# id wasn't an int, which it should be
instances = []
except ObjectDoesNotExist:
# id not found in database
instances = []
return render_to_response(
'dcmux/instances.xml',
{
"images_uri": uri_lookup(request, "images"),
"instances_uri": uri_lookup(request, "instances"),
"instances": instances,
},
mimetype="application/xml",
)
|
ValueError
|
dataset/ETHPy150Open bmbouter/Opus/opus/project/dcmux/views.py/instance_list
|
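The three views above share one lookup shape: a malformed id raises `ValueError`, a missing record raises `ObjectDoesNotExist`, and both collapse to an empty result. A framework-free sketch with a plain dict, `KeyError` standing in for Django's `ObjectDoesNotExist`:

```python
RECORDS = {1: 'alpha', 2: 'beta'}

def record_list(raw_id=None):
    if raw_id is None:
        return list(RECORDS.values())
    try:
        return [RECORDS[int(raw_id)]]
    except ValueError:   # raw_id was not an int, which it should be
        return []
    except KeyError:     # id not found in the store
        return []

print(record_list())        # ['alpha', 'beta']
print(record_list('2'))     # ['beta']
print(record_list('oops'))  # []
```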
4,607 |
def instance_create(request):
"""Creates an instance with the policy given by realm_id.
Uses image_id and realm_id from the application/x-www-form-urlencoded
format. Both of these fields are required.
"""
# Get multipart-form data: image_id, realm_id, hwp_name, name
try:
image_id = request.POST["image_id"]
realm_id = request.POST["realm_id"]
except __HOLE__:
return HttpResponseBadRequest("Both an image_id and a realm_id must " \
"be specified.")
# Get the libcloud driver
try:
policy = Policy.objects.get(id=realm_id)
except ObjectDoesNotExist:
return HttpResponseBadRequest("The requested realm_id was not found.")
provider = policy.get_next_provider(image_id)
driver = provider.get_client()
# Get the RealImage object
try:
aggregate_image = AggregateImage.objects.get(id=image_id)
except ObjectDoesNotExist:
return HttpResponseBadRequest("The requested image_id was not found.")
try:
real_image = RealImage.objects.get(
aggregate_image=aggregate_image,
provider=provider,
)
except ObjectDoesNotExist:
return HttpResponseBadRequest("There is no aggregate image image " \
"matching this provider.")
# Get the libcloud node object
image = None
for node in driver.list_images():
if node.id == real_image.image_id:
image = node
break
if not image:
#TODO: This should probably return an HTTP error code instead of
# raising an exception
raise ValueError("Image was not found in the provider: %s" % \
real_image
)
# Get an instance size
size = driver.list_sizes()[0]
# Add instance to database
# We do this before the actual creating of the image so we can get the
# instance_id
database_instance = Instance(
image = aggregate_image,
owner_id = "",
name = "",
provider = provider,
instance_id = -1,
policy = policy,
)
database_instance.save()
# Start the instance!
instance = driver.create_node(
image=image,
name="dcmux-%s" % database_instance.id,
size=size,
)
# Update instance_id in database
database_instance.instance_id = instance.id
database_instance.save()
return render_to_response(
'dcmux/instances.xml',
{
"images_uri": uri_lookup(request, "images"),
"instances_uri": uri_lookup(request, "instances"),
"instances": [instance],
},
mimetype="application/xml",
)
|
KeyError
|
dataset/ETHPy150Open bmbouter/Opus/opus/project/dcmux/views.py/instance_create
|
4,608 |
def test():
"""Interactive test run."""
try:
while 1:
x, digs = input('Enter (x, digs): ')
print x, fix(x, digs), sci(x, digs)
except (EOFError, __HOLE__):
pass
|
KeyboardInterrupt
|
dataset/ETHPy150Open babble/babble/include/jython/Lib/fpformat.py/test
|
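A Python 3 sketch of the same interactive-loop shape: Ctrl-D ends `input()` with `EOFError`, Ctrl-C raises `KeyboardInterrupt`, and catching both lets the loop end quietly:

```python
def interactive_doubler():
    try:
        while True:
            line = input('Enter a number: ')
            print(float(line) * 2)
    except (EOFError, KeyboardInterrupt):
        print('\nbye')

# interactive_doubler()  # uncomment to try at a terminal
```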
4,609 |
def handle(self, addrport, **options):
# setup unbuffered I/O
sys.stdout = os.fdopen(sys.stdout.fileno(), 'w', 0)
sys.stderr = os.fdopen(sys.stderr.fileno(), 'w', 0)
interactive_debug_listen()
import django
from tornado import httpserver, web
try:
addr, port = addrport.split(':')
except __HOLE__:
addr, port = '', addrport
if not addr:
addr = '127.0.0.1'
if not port.isdigit():
raise CommandError("%r is not a valid port number." % (port,))
xheaders = options.get('xheaders', True)
no_keep_alive = options.get('no_keep_alive', False)
quit_command = 'CTRL-C'
if settings.DEBUG:
logging.basicConfig(level=logging.INFO,
format='%(asctime)s %(levelname)-8s %(message)s')
def inner_run():
from django.conf import settings
from django.utils import translation
translation.activate(settings.LANGUAGE_CODE)
print("Validating Django models.py...")
self.validate(display_num_errors=True)
print("\nDjango version %s" % (django.get_version()))
print("Tornado server is running at http://%s:%s/" % (addr, port))
print("Quit the server with %s." % (quit_command,))
if settings.USING_RABBITMQ:
queue_client = get_queue_client()
# Process notifications received via RabbitMQ
queue_client.register_json_consumer('notify_tornado', process_notification)
queue_client.register_json_consumer('tornado_return', respond_send_message)
try:
urls = (r"/notify_tornado",
r"/json/get_events",
r"/json/events",
r"/api/v1/events",
)
# Application is an instance of Django's standard wsgi handler.
application = web.Application([(url, AsyncDjangoHandler) for url in urls]
+ get_sockjs_router().urls,
debug=django.conf.settings.DEBUG,
# Disable Tornado's own request logging, since we have our own
log_function=lambda x: None)
# start tornado web server in single-threaded mode
http_server = httpserver.HTTPServer(application,
xheaders=xheaders,
no_keep_alive=no_keep_alive)
http_server.listen(int(port), address=addr)
if django.conf.settings.DEBUG:
ioloop.IOLoop.instance().set_blocking_log_threshold(5)
setup_event_queue()
add_client_gc_hook(missedmessage_hook)
setup_tornado_rabbitmq()
ioloop.IOLoop.instance().start()
except KeyboardInterrupt:
sys.exit(0)
inner_run()
#
# Modify the base Tornado handler for Django
#
|
ValueError
|
dataset/ETHPy150Open zulip/zulip/zerver/management/commands/runtornado.py/Command.handle
|
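The `addrport.split(':')` trick leans on tuple unpacking raising `ValueError` when the colon is absent; a self-contained sketch of that parse:

```python
def parse_addrport(addrport, default_addr='127.0.0.1'):
    try:
        addr, port = addrport.split(':')
    except ValueError:            # no colon: the whole string is the port
        addr, port = '', addrport
    if not addr:
        addr = default_addr
    if not port.isdigit():
        raise SystemExit('%r is not a valid port number.' % port)
    return addr, int(port)

print(parse_addrport('9993'))          # ('127.0.0.1', 9993)
print(parse_addrport('0.0.0.0:8080'))  # ('0.0.0.0', 8080)
```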
4,610 |
def get_response(self, request):
"Returns an HttpResponse object for the given HttpRequest"
try:
try:
# Setup default url resolver for this thread.
urlconf = settings.ROOT_URLCONF
urlresolvers.set_urlconf(urlconf)
resolver = urlresolvers.RegexURLResolver(r'^/', urlconf)
response = None
# Apply request middleware
for middleware_method in self._request_middleware:
response = middleware_method(request)
if response:
break
if hasattr(request, "urlconf"):
# Reset url resolver with a custom urlconf.
urlconf = request.urlconf
urlresolvers.set_urlconf(urlconf)
resolver = urlresolvers.RegexURLResolver(r'^/', urlconf)
### ADDED BY ZULIP
request._resolver = resolver
### END ADDED BY ZULIP
callback, callback_args, callback_kwargs = resolver.resolve(
request.path_info)
# Apply view middleware
if response is None:
for middleware_method in self._view_middleware:
response = middleware_method(request, callback, callback_args, callback_kwargs)
if response:
break
### THIS BLOCK MODIFIED BY ZULIP
if response is None:
from ...decorator import RespondAsynchronously
try:
response = callback(request, *callback_args, **callback_kwargs)
if response is RespondAsynchronously:
async_request_stop(request)
return None
clear_handler_by_id(self.handler_id)
except Exception as e:
clear_handler_by_id(self.handler_id)
# If the view raised an exception, run it through exception
# middleware, and if the exception middleware returns a
# response, use that. Otherwise, reraise the exception.
for middleware_method in self._exception_middleware:
response = middleware_method(request, e)
if response:
break
if response is None:
raise
if response is None:
try:
view_name = callback.__name__
except AttributeError:
view_name = callback.__class__.__name__ + '.__call__'
raise ValueError("The view %s.%s returned None." %
(callback.__module__, view_name))
# If the response supports deferred rendering, apply template
# response middleware and the render the response
if hasattr(response, 'render') and callable(response.render):
for middleware_method in self._template_response_middleware:
response = middleware_method(request, response)
response = response.render()
except http.Http404 as e:
if settings.DEBUG:
from django.views import debug
response = debug.technical_404_response(request, e)
else:
try:
callback, param_dict = resolver.resolve404()
response = callback(request, **param_dict)
except:
try:
response = self.handle_uncaught_exception(request, resolver, sys.exc_info())
finally:
signals.got_request_exception.send(sender=self.__class__, request=request)
except exceptions.PermissionDenied:
logging.warning(
'Forbidden (Permission denied): %s', request.path,
extra={
'status_code': 403,
'request': request
})
try:
callback, param_dict = resolver.resolve403()
response = callback(request, **param_dict)
except:
try:
response = self.handle_uncaught_exception(request,
resolver, sys.exc_info())
finally:
signals.got_request_exception.send(
sender=self.__class__, request=request)
except __HOLE__:
# See https://code.djangoproject.com/ticket/4701
raise
except Exception as e:
exc_info = sys.exc_info()
signals.got_request_exception.send(sender=self.__class__, request=request)
return self.handle_uncaught_exception(request, resolver, exc_info)
finally:
# Reset urlconf on the way out for isolation
urlresolvers.set_urlconf(None)
### ZULIP CHANGE: The remainder of this function was moved
### into its own function, just below, so we can call it from
### finish().
response = self.apply_response_middleware(request, response, resolver)
return response
### Copied from get_response (above in this file)
|
SystemExit
|
dataset/ETHPy150Open zulip/zulip/zerver/management/commands/runtornado.py/AsyncDjangoHandler.get_response
|
4,611 |
def contribute_to_state(self, state):
try:
contribute = state.contribute_to_state
except __HOLE__:
# set default state attributes.
return self.contribute_to_object(state, {
'actor': self,
'agent': self.agent,
'connection': self.connection,
'log': self.log,
'Next': self.Next,
'NoRouteError': self.NoRouteError,
'NoReplyError': self.NoReplyError,
'add_binding': self._add_binding,
'remove_binding': self._remove_binding,
})
else:
return contribute(self)
|
AttributeError
|
dataset/ETHPy150Open celery/cell/cell/actors.py/Actor.contribute_to_state
|
4,612 |
def lookup_action(self, name):
try:
if not name:
method = self.default_receive
else:
method = getattr(self.state, name)
except __HOLE__:
raise KeyError(name)
if not callable(method) or name.startswith('_'):
raise KeyError(method)
return method
|
AttributeError
|
dataset/ETHPy150Open celery/cell/cell/actors.py/Actor.lookup_action
|
4,613 |
def __call__(self, *args, **kw):
if not args:
raise WrongNumberOfArguments(
'No arguments given to %s' % self.func)
try:
meth = getattr(self.parent.state, args[0]).__name__
except __HOLE__:
if kw.get('typed', True):
raise
else:
meth = args[0]
return self.func(meth, *args[1:], **kw)
|
AttributeError
|
dataset/ETHPy150Open celery/cell/cell/actors.py/ActorProxy.state.__call__
|
4,614 |
def setup_user_files():
""" Returns nothing
Create a path for the users prefs files to be stored in their
home folder. Create default config files and place them in the relevant
directory.
"""
print('Setting up dmenu-extended prefs files...')
try:
os.makedirs(path_plugins)
print('Plugins directory created')
except OSError:
print('Plugins directory exists - skipped')
try:
os.makedirs(path_cache)
print('Cache directory created')
except OSError:
print('Cache directory exists - skipped')
try:
os.makedirs(path_prefs)
print('prefs directory created')
except __HOLE__:
print('prefs directory exists - skipped')
# If relevant binaries exist, swap them out for the more appropriate items
# It has been decided against setting gnome-open or gvfs-open as a default
# file handler to prevent intermittent failure to open a text editor
# required to edit the configuration file.
# if os.path.exists('/usr/bin/gnome-open'):
# default_prefs['fileopener'] = 'gnome-open'
# default_prefs['webbrowser'] = 'gnome-open'
# default_prefs['filebrowser'] = 'gnome-open'
if os.path.exists('/usr/bin/gnome-terminal'):
default_prefs['terminal'] = 'gnome-terminal'
if os.path.exists('/usr/bin/urxvt'):
default_prefs['terminal'] = 'urxvt'
# Dump the prefs file
if os.path.exists(file_prefs) == False:
with open(file_prefs,'w') as f:
json.dump(default_prefs, f, sort_keys=True, indent=4)
print('Preferences file created at: ' + file_prefs)
else:
print('Existing preferences file found, will not overwrite.')
# Create package __init__ - for easy access to the plugins
with open(path_plugins + '/__init__.py','w') as f:
f.write('import os\n')
f.write('import glob\n')
f.write('__all__ = [ os.path.basename(f)[:-3] for f in glob.glob(os.path.dirname(__file__)+"/*.py")]')
|
OSError
|
dataset/ETHPy150Open markjones112358/dmenu-extended/dmenu_extended.py/setup_user_files
|
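Catching `OSError` around `os.makedirs` was the pre-3.2 way to tolerate an existing directory; since Python 3.2 the intent can be stated directly, as in this sketch:

```python
import os
import tempfile

def ensure_dirs(*paths):
    for path in paths:
        # exist_ok=True replaces the try/except OSError dance while
        # still raising for real failures such as permission errors.
        os.makedirs(path, exist_ok=True)

base = tempfile.mkdtemp()
ensure_dirs(os.path.join(base, 'plugins'),
            os.path.join(base, 'cache'),
            os.path.join(base, 'prefs'))
```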
4,615 |
def get_plugins(self, force=False):
""" Returns a list of loaded plugins
This method will load plugins in the plugins directory if they
haven't already been loaded. Optionally, you may force the
reloading of plugins by setting the parameter 'force' to true.
"""
if self.plugins_loaded == False:
self.plugins_loaded = load_plugins(self.debug)
elif force:
if self.debug:
print("Forced reloading of plugins")
# For Python2/3 compatibility
try:
# Python2
reload(plugins)
except __HOLE__:
# Python3
from imp import reload
reload(plugins)
self.plugins_loaded = load_plugins(self.debug)
return self.plugins_loaded
|
NameError
|
dataset/ETHPy150Open markjones112358/dmenu-extended/dmenu_extended.py/dmenu.get_plugins
|
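A sketch of the same 2/3 shim in isolation: the `reload` builtin exists only on Python 2, so the `NameError` selects the import path (the snippet above pulls it from the long-deprecated `imp`; `importlib` is the current home):

```python
import types

def reload_module(module):
    """Reload a module on either Python 2 or Python 3."""
    try:
        return reload(module)                     # Python 2 builtin
    except NameError:                             # removed in Python 3
        from importlib import reload as reload3   # modern location
        return reload3(module)

import json
assert isinstance(reload_module(json), types.ModuleType)
```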
4,616 |
def system_path(self):
"""
Array containing system paths
"""
# Get the PATH environmental variable
path = os.environ.get('PATH')
# If we're in Python <3 (less-than-three), we want this to be a unicode string
# In python 3, all strings are unicode already, trying to decode gives AttributeError
try:
path = path.decode(sys.getfilesystemencoding())
except __HOLE__:
pass
# Split and remove duplicates
path = list(set(path.split(':')))
# Some paths contain an extra separator, remove the empty path
try:
path.remove('')
except ValueError:
pass
return path
|
AttributeError
|
dataset/ETHPy150Open markjones112358/dmenu-extended/dmenu_extended.py/dmenu.system_path
|
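The decode-or-pass guard reduced to its core: call `.decode()` and read `AttributeError` as "already text" (on Python 3, `str` has no `decode` method):

```python
import sys

def ensure_text(value):
    try:
        return value.decode(sys.getfilesystemencoding())
    except AttributeError:   # already a str; nothing to decode
        return value

print(ensure_text(b'/usr/bin'), ensure_text('/usr/local/bin'))
```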
4,617 |
def try_remove(self, needle, haystack):
"""
Gracefully try to remove an item from an array. If not found, raise no
errors. This is a convenience function to reduce code size.
"""
try:
haystack.remove(needle)
except __HOLE__:
pass
|
ValueError
|
dataset/ETHPy150Open markjones112358/dmenu-extended/dmenu_extended.py/dmenu.try_remove
|
4,618 |
def cache_build(self):
self.load_preferences()
valid_extensions = []
if 'valid_extensions' in self.prefs:
for extension in self.prefs['valid_extensions']:
if extension == '*':
valid_extensions = True
break
elif extension == '':
valid_extensions.append('')
elif extension[0] != '.':
extension = '.' + extension
valid_extensions.append(extension.lower())
applications = []
# Holds what binaries have been found
binaries = []
# Holds the directly searchable "# Htop (htop;)" lines
aliased_items = []
# Holds the [command, name] pairs for future lookup
aliases = []
# If we're going to include the applications or we want them for
# filtering purposes, scan the .desktop files and get the applications
if self.prefs['include_applications'] or self.prefs['filter_binaries']:
applications = self.scan_applications()
# Do we want to add binaries into the cache?
if self.prefs['include_binaries'] is True:
if self.prefs['filter_binaries'] is True:
binaries_raw = self.scan_binaries()
filterlist = [x['command'] for x in applications] + [x['descriptor'] for x in applications]
for item in filterlist:
if item in binaries_raw:
binaries.append(item)
else:
binaries = self.scan_binaries()
binaries = list(set(binaries))
# Do we want to add applications from .desktop files into the cache?
if self.prefs['include_applications']:
if self.prefs['alias_applications']:
if os.path.exists(file_cache_aliases):
os.remove(file_cache_aliases)
for app in applications:
command = app['command']
if app['terminal']:
command += ';'
if app['name'].lower() != app['command'].lower():
title = self.format_alias(app['name'], command)
self.try_remove(app['command'], binaries)
aliased_items.append(title)
aliases.append([title, command])
else:
binaries.append(command)
if app['terminal']:
# Remove any non-terminal invoking versions from cache
self.try_remove(app['command'], binaries)
else:
for app in applications:
command = app['command']
# Add the "run in terminal" indicator to the command
if app['terminal']:
command += ';'
binaries.append(command)
# Remove any non-terminal invoking versions from cache
if app['terminal']:
self.try_remove(app['command'], binaries)
binaries = list(set(binaries))
watch_folders = []
if 'watch_folders' in self.prefs:
watch_folders = self.prefs['watch_folders']
watch_folders = map(lambda x: x.replace('~', os.path.expanduser('~')), watch_folders)
if self.debug:
print('Done!')
print('Watch folders:')
print('Loading the list of folders to be excluded from the index...')
ignore_folders = []
if 'ignore_folders' in self.prefs:
for exclude_folder in self.prefs['ignore_folders']:
ignore_folders.append(exclude_folder.replace('~', os.path.expanduser('~')))
if self.debug:
print('Done!')
print('Excluded folders:')
print('First 5 items: ')
print(ignore_folders[:5])
print(str(len(ignore_folders)) + ' ignore_folders loaded in total')
print('')
filenames = []
foldernames = []
follow_symlinks = False
try:
if 'follow_symlinks' in self.prefs:
follow_symlinks = self.prefs['follow_symlinks']
except:
pass
if self.debug:
if follow_symlinks:
print('Indexing will not follow linked folders')
else:
print('Indexing will follow linked folders')
print('Scanning files and folders, this may take a while...')
for watchdir in watch_folders:
for root, dirs , files in os.walk(watchdir, followlinks=follow_symlinks):
dirs[:] = [d for d in dirs if os.path.join(root,d) not in ignore_folders]
if self.prefs['scan_hidden_folders'] or root.find('/.') == -1:
for name in files:
if self.prefs['include_hidden_files'] or name.startswith('.') == False:
if valid_extensions == True or os.path.splitext(name)[1].lower() in valid_extensions:
filenames.append(os.path.join(root,name))
for name in dirs:
if self.prefs['include_hidden_folders'] or name.startswith('.') == False:
foldernames.append(os.path.join(root,name) + '/')
foldernames = list(filter(lambda x: x not in ignore_folders, foldernames))
include_items = []
if 'include_items' in self.prefs:
include_items = []
for item in self.prefs['include_items']:
if type(item) == list:
if len(item) > 1:
title = self.prefs['indicator_alias']
title += ' ' + item[0]
aliased_items.append(title)
aliases.append([title, item[1]])
else:
if self.debug:
print("There are aliased items in the configuration with no command.")
else:
include_items.append(item)
else:
include_items = []
# Remove any manually added include items differing by a colon
# e.g. ["htop", "htop;"] becomes just ["htop;"]
for item in include_items:
if item[-1] == ';' and item[0:-1] in binaries:
binaries.remove(item[0:-1])
plugins = self.plugins_available()
# Save the alias lookup file and aliased_items
self.save_json(file_cache_aliasesLookup, aliases)
self.cache_save(aliased_items, file_cache_aliases)
self.cache_save(binaries, file_cache_binaries)
self.cache_save(foldernames, file_cache_folders)
self.cache_save(filenames, file_cache_files)
other = self.sort_shortest(include_items + aliased_items + binaries + foldernames + filenames)
if 'exclude_items' in self.prefs:
for item in self.prefs['exclude_items']:
try:
other.remove(item)
except __HOLE__:
pass
other += ['rebuild cache']
self.cache_save(other, file_cache)
out = plugins
out += other
if self.debug:
print('Done!')
print('Cache building has finished.')
print('')
return out
|
ValueError
|
dataset/ETHPy150Open markjones112358/dmenu-extended/dmenu_extended.py/dmenu.cache_build
|
4,619 |
def run(debug=False):
d = dmenu()
if debug:
d.debug = True
cache = d.cache_load()
out = d.menu(cache,'Open:').strip()
if len(out) > 0:
if debug:
print("Menu closed with user input: " + out)
# Check if the action relates to a plugin
plugins = load_plugins(debug)
plugin_hook = False
for plugin in plugins:
if hasattr(plugin['plugin'], 'is_submenu') and plugin['plugin'].is_submenu == True:
pluginTitle = d.prefs['indicator_submenu'] + ' ' + plugin['plugin'].title.strip()
else:
pluginTitle = plugin['plugin'].title.strip()
if out[:len(pluginTitle)] == pluginTitle:
plugin_hook = plugin["plugin"]
# Check for plugin call
if plugin_hook != False:
plugin_hook.run(out[len(pluginTitle):])
if d.debug:
print("This command refers to a plugin")
else:
if d.debug:
print("This command is not related to a plugin")
# Check to see if the command begins with the alias indicator
if out[0:len(d.prefs['indicator_alias'])] == d.prefs['indicator_alias']:
out = d.retrieve_aliased_command(out)
if d.debug:
print("An aliased command was called")
print("The command was swapped out for: " + str(out))
else:
# Check for store modifications
# Don't allow command aliases that add new commands
if out[0] in "+-":
action = out[0]
out = out[1:]
aliased = False
# Check for aliased command
if out.find(d.prefs['indicator_alias']) != -1 and action == '+':
aliased = True
tmp = out.split(d.prefs['indicator_alias'])
# out = [tmp[1].lstrip(), tmp[0].rstrip()]
command = tmp[0].rstrip()
if command != '':
out = tmp[1].lstrip() + ' (' + command.replace(';', '') + ')'
else:
out = tmp[1].lstrip()
if len(out) == 0:
item = command
else:
item = [out, command]
elif out[:len(d.prefs['indicator_alias'])] == d.prefs['indicator_alias']:
item = out[len(d.prefs['indicator_alias']):].lstrip()
aliased = True
else:
item = out
found_in_store = False
for store_item in d.prefs['include_items']:
if d.debug:
print("is " + str(store_item) + " = " + str(item) + " ?")
if type(store_item) == list and out == store_item[0]:
found_in_store = True
break;
elif item == store_item:
found_in_store = True
break;
if action == '+' and found_in_store:
option = d.prefs['indicator_submenu'] + " Remove from store"
answer = d.menu("Item '" + str(item) + "' already in store\n"+option)
if answer != option:
sys.exit()
action = '-'
elif action == '-' and found_in_store == False:
option = d.prefs['indicator_submenu'] + " Add to store"
answer = d.menu("Item '" + (item) + "' was not found in store\n"+option)
if answer != option:
sys.exit()
action = '+'
if action == '+':
d.prefs['include_items'].append(item)
# Add the item to the alias lookup file
if aliased:
aliases = d.load_json(file_cache_aliasesLookup)
if item not in aliases:
aliases.append([
d.prefs['indicator_alias'] + ' ' + item[0],
item[1]
])
d.save_json(file_cache_aliasesLookup, aliases)
elif action == '-':
if aliased:
to_remove = None
for include_item in d.prefs['include_items']:
if include_item[0] == out:
to_remove = include_item
if to_remove is not None:
if d.debug:
print("Item found and is")
print(to_remove)
d.prefs['include_items'].remove(to_remove)
else:
if d.debug:
print("Couldn't remove the item (item could not be located)")
else:
d.prefs['include_items'].remove(item)
else:
d.message_close()
d.menu("An error occured while servicing your request.\nYou may need to delete your configuration file.")
sys.exit()
d.save_preferences()
# Recreate the cache
cache_scanned = d.cache_open(file_cache)[:-1]
if cache_scanned == False:
d.cache_regenerate()
d.message_close()
sys.exit()
else:
cache_scanned = cache_scanned.split("\n")
if action == '+':
if d.debug:
print("Adding item to store: " + out)
d.message_open("Adding item to store: " + out)
if aliased:
cache_scanned = [d.prefs['indicator_alias'] + ' ' + out] + cache_scanned
else:
cache_scanned = [out] + cache_scanned
cache_scanned.sort(key=len)
else:
if aliased:
to_remove = d.prefs['indicator_alias'] + ' ' + out
if d.debug:
print("Removing item from store: " + to_remove)
else:
to_remove = out
d.message_open("Removing item from store: " + to_remove)
try:
cache_scanned.remove(to_remove)
except __HOLE__:
if d.debug:
print("Couldnt actually remove item from the cache")
else:
pass
d.cache_save(cache_scanned, file_cache)
d.message_close()
if action == '+':
if aliased == True:
message = "New item (" + command + " aliased as '" + out + "') added to cache."
else:
message = "New item (" + out + ") added to cache."
else:
message = "Existing item (" + out + ") removed from cache."
d.menu(message)
sys.exit()
# Detect if the command is a web address and pass to handle_command
if out[:7] == 'http://' or out[:8] == 'https://':
handle_command(d, out)
elif out.find(':') != -1:
tmp = out.split(':')
if len(tmp) != 2:
if d.debug:
print('Input command not understood')
sys.exit()
else:
cmds = list(map(lambda x: x.strip(), tmp))
run_withshell = False
shell_hold = False
if cmds[0][-1] == ';':
if cmds[0][-2] == ';':
shell_hold = True
if d.debug:
print('Will hold')
else:
if d.debug:
print("Won't hold")
cmds[0] = cmds[0].replace(';','')
run_withshell = True
if cmds[0] == '':
items = list(filter(lambda x: x.find(cmds[1]) != -1, cache.split('\n')))
item = d.menu(items)
handle_command(d, item)
elif cmds[0] in d.scan_binaries():
if d.debug:
print('Item[0] (' + cmds[0] + ') found in binaries')
# Get paths from cache
items = list(filter(lambda x: x.find('/') != -1, cache.split('\n')))
# If extension passed, filter by this
if cmds[1] != '':
items = list(filter(lambda x: x.find(cmds[1]) != -1, items))
filename = d.menu(items)
filename = os.path.expanduser(filename)
command = cmds[0] + " '" + filename + "'"
if run_withshell:
d.open_terminal(command, shell_hold)
else:
d.execute(command)
elif cmds[0].find('/') != -1:
# Path came first, assume user wants of open it with a bin
if cmds[1] != '':
command = cmds[1] + " '" + os.path.expanduser(cmds[0]) + "'"
else:
binary = d.menu(d.scan_binaries())
command = binary + " '" + os.path.expanduser(cmds[0]) + "'"
d.execute(command)
else:
d.menu(["Cant find " + cmds[0] + ", is it installed?"])
if d.debug:
print('Input command not understood')
sys.exit()
elif out == "rebuild cache":
result = d.cache_regenerate()
if result == 0:
d.menu(['Cache could not be saved'])
elif result == 2:
d.menu(['Cache rebuilt','Performance issues were detected - some paths contained invalid characters'])
else:
d.menu(['Success!'])
else:
handle_command(d, out)
|
ValueError
|
dataset/ETHPy150Open markjones112358/dmenu-extended/dmenu_extended.py/run
|
4,620 |
def __contains__(self, key):
try:
if key in self._dict:
state = self._dict[key]
o = state.obj()
else:
return False
except __HOLE__:
return False
else:
return o is not None
|
KeyError
|
dataset/ETHPy150Open zzzeek/sqlalchemy/lib/sqlalchemy/orm/identity.py/WeakInstanceDict.__contains__
|
4,621 |
def add(self, state):
key = state.key
# inline of self.__contains__
if key in self._dict:
try:
existing_state = self._dict[key]
if existing_state is not state:
o = existing_state.obj()
if o is not None:
raise sa_exc.InvalidRequestError(
"Can't attach instance "
"%s; another instance with key %s is already "
"present in this session." % (
orm_util.state_str(state), state.key))
else:
return False
except __HOLE__:
pass
self._dict[key] = state
self._manage_incoming_state(state)
return True
|
KeyError
|
dataset/ETHPy150Open zzzeek/sqlalchemy/lib/sqlalchemy/orm/identity.py/WeakInstanceDict.add
|
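Both identity-map methods guard a race: a weakly referenced entry can be collected (and pruned by its weakref callback) between the `in` check and the lookup, so the `KeyError` reads as "not present". A condensed sketch of that membership test; the immediate collection after `del` relies on CPython's reference counting:

```python
import weakref

class WeakValueRegistry(object):
    def __init__(self):
        self._dict = {}

    def add(self, key, obj):
        self._dict[key] = weakref.ref(obj)

    def __contains__(self, key):
        try:
            ref = self._dict[key]    # entry may vanish concurrently
        except KeyError:
            return False
        return ref() is not None     # a dead reference counts as absent

class Thing(object):                 # instances support weak references
    pass

registry = WeakValueRegistry()
thing = Thing()
registry.add('a', thing)
print('a' in registry)   # True
del thing
print('a' in registry)   # False once the object is collected
```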
4,622 |
def test_unicode(self):
G = nx.Graph()
try: # Python 3.x
name1 = chr(2344) + chr(123) + chr(6543)
name2 = chr(5543) + chr(1543) + chr(324)
except __HOLE__: # Python 2.6+
name1 = unichr(2344) + unichr(123) + unichr(6543)
name2 = unichr(5543) + unichr(1543) + unichr(324)
G.add_edge(name1, 'Radiohead', {name2: 3})
fd, fname = tempfile.mkstemp()
nx.write_multiline_adjlist(G, fname)
H = nx.read_multiline_adjlist(fname)
assert_equal(G.adj, H.adj)
os.close(fd)
os.unlink(fname)
|
ValueError
|
dataset/ETHPy150Open gkno/gkno_launcher/src/networkx/readwrite/tests/test_adjlist.py/TestAdjlist.test_unicode
|
4,623 |
def test_latin1_error(self):
G = nx.Graph()
try: # Python 3.x
name1 = chr(2344) + chr(123) + chr(6543)
name2 = chr(5543) + chr(1543) + chr(324)
except __HOLE__: # Python 2.6+
name1 = unichr(2344) + unichr(123) + unichr(6543)
name2 = unichr(5543) + unichr(1543) + unichr(324)
G.add_edge(name1, 'Radiohead', {name2: 3})
fd, fname = tempfile.mkstemp()
assert_raises(UnicodeEncodeError,
nx.write_multiline_adjlist,
G, fname, encoding = 'latin-1')
os.close(fd)
os.unlink(fname)
|
ValueError
|
dataset/ETHPy150Open gkno/gkno_launcher/src/networkx/readwrite/tests/test_adjlist.py/TestAdjlist.test_latin1_error
|
4,624 |
def test_latin1(self):
G = nx.Graph()
try: # Python 3.x
blurb = chr(1245) # just to trigger the exception
name1 = 'Bj' + chr(246) + 'rk'
name2 = chr(220) + 'ber'
except __HOLE__: # Python 2.6+
name1 = 'Bj' + unichr(246) + 'rk'
name2 = unichr(220) + 'ber'
G.add_edge(name1, 'Radiohead', {name2: 3})
fd, fname = tempfile.mkstemp()
nx.write_multiline_adjlist(G, fname, encoding = 'latin-1')
H = nx.read_multiline_adjlist(fname, encoding = 'latin-1')
assert_equal(G.adj, H.adj)
os.close(fd)
os.unlink(fname)
|
ValueError
|
dataset/ETHPy150Open gkno/gkno_launcher/src/networkx/readwrite/tests/test_adjlist.py/TestAdjlist.test_latin1
|
4,625 |
def db_command(self, cmd, dbname):
# try without auth first if server allows it (i.e. version >= 3.0.0)
if self.try_on_auth_failures():
need_auth = False
else:
need_auth = self.command_needs_auth(dbname, cmd)
log_verbose("Server '%s': DB Command requested on db %s, need auth ? %s, command: %s" %
(self.id, dbname, need_auth, document_pretty_string(cmd)))
db = self.get_db(dbname, no_auth=not need_auth)
try:
return db.command(cmd)
except (__HOLE__,Exception), e:
if is_auth_error(e) and self.try_on_auth_failures():
db = self.get_db(dbname, no_auth=False)
return db.command(cmd)
else:
raise
###########################################################################
|
RuntimeError
|
dataset/ETHPy150Open mongolab/mongoctl/mongoctl/objects/server.py/Server.db_command
|
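The wrapper retries exactly once with credentials when an unauthenticated attempt is rejected. A driver-agnostic sketch of that shape; `AuthError` and the `run` callable are placeholders, not mongoctl or pymongo APIs:

```python
class AuthError(Exception):
    """Placeholder for a driver's authorization failure."""

def run_with_auth_fallback(run, credentials):
    # Try the cheap anonymous call first; retry authenticated only
    # when the server actually demands it.
    try:
        return run(auth=None)
    except AuthError:
        return run(auth=credentials)

def fake_server(auth=None):
    if auth != 'secret':
        raise AuthError('unauthorized')
    return {'ok': 1}

print(run_with_auth_fallback(fake_server, 'secret'))  # {'ok': 1}
```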
4,626 |
def needs_to_auth(self, dbname):
"""
Determines if the server needs to authenticate to the database.
NOTE: we stopped depending on is_auth() since it's only a configuration
and may not be accurate
"""
log_debug("Checking if server '%s' needs to auth on db '%s'...." %
(self.id, dbname))
try:
client = self.get_mongo_client()
db = client.get_database(dbname)
db.collection_names()
result = False
except (__HOLE__,Exception), e:
log_exception(e)
result = "authorized" in str(e)
log_debug("needs_to_auth check for server '%s' on db '%s' : %s" %
(self.id, dbname, result))
return result
###########################################################################
|
RuntimeError
|
dataset/ETHPy150Open mongolab/mongoctl/mongoctl/objects/server.py/Server.needs_to_auth
|
4,627 |
def get_status(self, admin=False):
status = {}
## check if the server is online
try:
ping(self.get_mongo_client())
status['connection'] = True
# grab status summary if it was specified + if i am not an arbiter
if admin:
server_summary = self.get_server_status_summary()
status["serverStatusSummary"] = server_summary
except (__HOLE__, Exception), e:
log_exception(e)
status['connection'] = False
status['error'] = "%s" % e
if "timed out" in status['error']:
status['timedOut'] = True
return status
###########################################################################
|
RuntimeError
|
dataset/ETHPy150Open mongolab/mongoctl/mongoctl/objects/server.py/Server.get_status
|
4,628 |
def get_rs_config(self):
rs_conf = None
try:
if self.version_greater_than_3_0():
rs_conf = self.db_command(SON([('replSetGetConfig', 1)]), "admin")["config"]
else:
rs_conf = self.get_db('local')['system.replset'].find_one()
except (Exception,__HOLE__), e:
log_debug("Error whille trying to read rs config from "
"server '%s': %s" % (self.id, e))
log_exception(e)
if type(e) == MongoctlException:
raise e
else:
log_verbose("Cannot get rs config from server '%s'. "
"cause: %s" % (self.id, e))
log_verbose("get_rs_config() for server '%s': Returning: %s" % (self.id, document_pretty_string(rs_conf)))
return rs_conf
###########################################################################
|
RuntimeError
|
dataset/ETHPy150Open mongolab/mongoctl/mongoctl/objects/server.py/Server.get_rs_config
|
4,629 |
def save(self, *args, **kwargs):
if not self.pk:
try:
# Get the last slide order of the list
last_slide = list(self.plugin.slides_list)[-1]
self.order = last_slide.order + 1
except __HOLE__:
self.order = 1
return super(Slide, self).save(*args, **kwargs)
|
IndexError
|
dataset/ETHPy150Open ionyse/ionyweb/ionyweb/plugin_app/plugin_slideshow/models.py/Slide.save
|
4,630 |
def read(fname):
try:
return open(os.path.join(os.path.dirname(__file__), fname)).read()
except __HOLE__:
return ''
|
IOError
|
dataset/ETHPy150Open bitmazk/cmsplugin-image-gallery/setup.py/read
|
4,631 |
def test_reading_and_writing_to_file_like_objects(self):
"""
Tests reading and writing to and from file like objects.
"""
# Create some random document.
document = ProvDocument()
document.entity(EX2_NS["test"])
objects = [io.BytesIO, io.StringIO]
Registry.load_serializers()
formats = Registry.serializers.keys()
for obj in objects:
for format in formats:
try:
buf = obj()
document.serialize(destination=buf, format=format)
buf.seek(0, 0)
new_document = ProvDocument.deserialize(source=buf,
format=format)
self.assertEqual(document, new_document)
except __HOLE__:
# Some serializers might not implement serialize or
# deserialize method
pass # and this is fine in the context of this test
finally:
buf.close()
# def test_document_unification(self):
# # TODO: Improve testing of this...
# document = ProvDocument()
# bundle = document.bundle(identifier=EX_NS['b'])
# e1 = bundle.entity(EX_NS['e'])
# e2 = bundle.entity(EX_NS['e'])
# unified = document.unified()
#
# self.assertEqual(len(unified._bundles[0]._records), 1)
|
NotImplementedError
|
dataset/ETHPy150Open trungdong/prov/prov/tests/test_extras.py/TestExtras.test_reading_and_writing_to_file_like_objects
|
4,632 |
@cached_property
def max_w(self):
try:
max_w = int(self.request.GET.get('max_w')) or None
except (TypeError, __HOLE__):
pass
else:
orig_w = getattr(self.orig_image, 'width', None) or 0
if not orig_w or max_w < orig_w:
return max_w
return None
|
ValueError
|
dataset/ETHPy150Open theatlantic/django-cropduster/cropduster/standalone/views.py/CropDusterStandaloneIndex.max_w
|
4,633 |
def _url(self, endpoint, path=None):
"""The complete URL we will end up querying. Depending on the
endpoint we pass in this will result in different URL's with
different prefixes.
:param endpoint: The PuppetDB API endpoint we want to query.
:type endpoint: :obj:`string`
:param path: An additional path if we don't wish to query the\
bare endpoint.
:type path: :obj:`string`
:returns: A URL constructed from :func:`base_url` with the\
appropriate API version/prefix and the rest of the path added\
to it.
:rtype: :obj:`string`
"""
log.debug('_url called with endpoint: {0} and path: {1}'.format(
endpoint, path))
try:
endpoint = ENDPOINTS[endpoint]
except __HOLE__:
# If we reach this we're trying to query an endpoint that doesn't
# exist. This shouldn't happen unless someone made a booboo.
raise APIError
url = '{base_url}/{endpoint}'.format(
base_url=self.base_url,
endpoint=endpoint,
)
if path is not None:
url = '{0}/{1}'.format(url, path)
return url
|
KeyError
|
dataset/ETHPy150Open voxpupuli/pypuppetdb/pypuppetdb/api/__init__.py/BaseAPI._url
|
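Translating the low-level `KeyError` into a domain error keeps callers away from the lookup table's internals. A sketch with illustrative endpoint paths and `APIError` as a stand-in for the library's own class:

```python
ENDPOINTS = {'nodes': 'pdb/query/v4/nodes',    # illustrative paths only
             'facts': 'pdb/query/v4/facts'}

class APIError(Exception):
    """Stand-in for the client library's error type."""

def endpoint_url(base_url, endpoint, path=None):
    try:
        suffix = ENDPOINTS[endpoint]
    except KeyError:
        # An unknown endpoint name is a programming error at this layer.
        raise APIError('unknown endpoint: %r' % endpoint)
    url = '%s/%s' % (base_url, suffix)
    return url if path is None else '%s/%s' % (url, path)

print(endpoint_url('https://pdb:8081', 'nodes', 'web01'))
```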
4,634 |
def nodes(self, unreported=2, with_status=False, **kwargs):
"""Query for nodes by either name or query. If both aren't
provided this will return a list of all nodes. This method
also fetches the nodes status and event counts of the latest
report from puppetdb.
:param with_status: (optional) include the node status in the\
returned nodes
:type with_status: :bool:
:param unreported: (optional) number of hours after which a node gets
marked as unreported
:type unreported: :obj:`None` or integer
:param \*\*kwargs: The rest of the keyword arguments are passed
to the _query function
:returns: A generator yielding Nodes.
:rtype: :class:`pypuppetdb.types.Node`
"""
nodes = self._query('nodes', **kwargs)
# If we happen to only get one node back it
# won't be inside a list so iterating over it
# goes boom. Therefore we wrap a list around it.
if type(nodes) == dict:
nodes = [nodes, ]
if with_status:
latest_events = self.event_counts(
query='["=", "latest_report?", true]',
summarize_by='certname'
)
for node in nodes:
node['unreported_time'] = None
node['status'] = None
node['events'] = None
latest_report_hash = None
if with_status:
status = [s for s in latest_events
if s['subject']['title'] == node['certname']]
try:
node['status'] = node['latest_report_status']
latest_report_hash = node['latest_report_hash']
if status:
node['events'] = status = status[0]
if status['noops'] > 0:
node['status'] = 'noop'
except __HOLE__:
if status:
node['events'] = status = status[0]
if status['successes'] > 0:
node['status'] = 'changed'
if status['noops'] > 0:
node['status'] = 'noop'
if status['failures'] > 0:
node['status'] = 'failed'
else:
node['status'] = 'unchanged'
# node report age
if node['report_timestamp'] is not None:
try:
last_report = json_to_datetime(
node['report_timestamp'])
last_report = last_report.replace(tzinfo=None)
now = datetime.utcnow()
unreported_border = now - timedelta(hours=unreported)
if last_report < unreported_border:
delta = (datetime.utcnow() - last_report)
node['status'] = 'unreported'
node['unreported_time'] = '{0}d {1}h {2}m'.format(
delta.days,
int(delta.seconds / 3600),
int((delta.seconds % 3600) / 60)
)
except AttributeError:
node['status'] = 'unreported'
if not node['report_timestamp']:
node['status'] = 'unreported'
yield Node(self,
name=node['certname'],
deactivated=node['deactivated'],
expired=node['expired'],
report_timestamp=node['report_timestamp'],
catalog_timestamp=node['catalog_timestamp'],
facts_timestamp=node['facts_timestamp'],
status=node['status'],
events=node['events'],
unreported_time=node['unreported_time'],
report_environment=node['report_environment'],
catalog_environment=node['catalog_environment'],
facts_environment=node['facts_environment'],
latest_report_hash=latest_report_hash
)
|
KeyError
|
dataset/ETHPy150Open voxpupuli/pypuppetdb/pypuppetdb/api/__init__.py/BaseAPI.nodes
|
4,635 |
def UpdateIndexYaml(self, openfile=open):
"""Update index.yaml.
Args:
openfile: Used for dependency injection.
We only ever write to index.yaml if either:
- it doesn't exist yet; or
- it contains an 'AUTOGENERATED' comment.
All indexes *before* the AUTOGENERATED comment will be written
back unchanged. All indexes *after* the AUTOGENERATED comment
will be updated with the latest query counts (query counts are
reset by --clear_datastore). Indexes that aren't yet in the file
will be appended to the AUTOGENERATED section.
We keep track of some data in order to avoid doing repetitive work:
- if index.yaml is fully manual, we keep track of its mtime to
avoid parsing it over and over;
- we keep track of the number of keys in the history dict since
the last time we updated index.yaml (or decided there was
nothing to update).
"""
index_yaml_file = os.path.join(self.root_path, 'index.yaml')
try:
index_yaml_mtime = os.path.getmtime(index_yaml_file)
except os.error:
index_yaml_mtime = None
index_yaml_changed = (index_yaml_mtime != self.index_yaml_mtime)
self.index_yaml_mtime = index_yaml_mtime
datastore_stub = apiproxy_stub_map.apiproxy.GetStub('datastore_v3')
query_history = datastore_stub.QueryHistory()
history_changed = (len(query_history) != self.last_history_size)
self.last_history_size = len(query_history)
if not (index_yaml_changed or history_changed):
logging.debug('No need to update index.yaml')
return
if self.index_yaml_is_manual and not index_yaml_changed:
logging.debug('Will not update manual index.yaml')
return
if index_yaml_mtime is None:
index_yaml_data = None
else:
try:
fh = open(index_yaml_file, 'r')
except __HOLE__:
index_yaml_data = None
else:
try:
index_yaml_data = fh.read()
finally:
fh.close()
self.index_yaml_is_manual = (index_yaml_data is not None and
AUTO_MARKER not in index_yaml_data)
if self.index_yaml_is_manual:
logging.info('Detected manual index.yaml, will not update')
return
if index_yaml_data is None:
all_indexes = None
else:
try:
all_indexes = datastore_index.ParseIndexDefinitions(index_yaml_data)
except yaml_errors.EventListenerError, e:
logging.error('Error parsing %s:\n%s', index_yaml_file, e)
return
except Exception, err:
logging.error('Error parsing %s:\n%s.%s: %s', index_yaml_file,
err.__class__.__module__, err.__class__.__name__, err)
return
if index_yaml_data is None:
manual_part, automatic_part = 'indexes:\n', ''
manual_indexes = None
else:
manual_part, automatic_part = index_yaml_data.split(AUTO_MARKER, 1)
try:
manual_indexes = datastore_index.ParseIndexDefinitions(manual_part)
except Exception, err:
logging.error('Error parsing manual part of %s: %s',
index_yaml_file, err)
return
automatic_part = GenerateIndexFromHistory(query_history,
all_indexes, manual_indexes)
try:
fh = openfile(index_yaml_file, 'w')
except IOError, err:
logging.error('Can\'t write index.yaml: %s', err)
return
try:
logging.info('Updating %s', index_yaml_file)
fh.write(manual_part)
fh.write(AUTO_MARKER)
fh.write(AUTO_COMMENT)
fh.write(automatic_part)
finally:
fh.close()
try:
self.index_yaml_mtime = os.path.getmtime(index_yaml_file)
except os.error, err:
logging.error('Can\'t stat index.yaml we just wrote: %s', err)
self.index_yaml_mtime = None
|
IOError
|
dataset/ETHPy150Open CollabQ/CollabQ/.google_appengine/google/appengine/tools/dev_appserver_index.py/IndexYamlUpdater.UpdateIndexYaml
|
4,636 |
def SetupIndexes(app_id, root_path):
"""Ensure that the set of existing composite indexes matches index.yaml.
Note: this is similar to the algorithm used by the admin console for
the same purpose.
Args:
app_id: Application ID being served.
root_path: Path to the root of the application.
"""
index_yaml_file = os.path.join(root_path, 'index.yaml')
try:
fh = open(index_yaml_file, 'r')
except __HOLE__:
index_yaml_data = None
else:
try:
index_yaml_data = fh.read()
finally:
fh.close()
indexes = []
if index_yaml_data is not None:
index_defs = datastore_index.ParseIndexDefinitions(index_yaml_data)
if index_defs is not None:
indexes = index_defs.indexes
if indexes is None:
indexes = []
requested_indexes = datastore_index.IndexDefinitionsToProtos(app_id, indexes)
existing_indexes = datastore_admin.GetIndices(app_id)
requested = dict((x.definition().Encode(), x) for x in requested_indexes)
existing = dict((x.definition().Encode(), x) for x in existing_indexes)
created = 0
for key, index in requested.iteritems():
if key not in existing:
datastore_admin.CreateIndex(index)
created += 1
deleted = 0
for key, index in existing.iteritems():
if key not in requested:
datastore_admin.DeleteIndex(index)
deleted += 1
if created or deleted:
logging.info("Created %d and deleted %d index(es); total %d",
created, deleted, len(requested))
|
IOError
|
dataset/ETHPy150Open CollabQ/CollabQ/.google_appengine/google/appengine/tools/dev_appserver_index.py/SetupIndexes
|
4,637 |
def filter_metadata(self, queryset, value):
try:
value = json.loads(value)
except __HOLE__:
raise GenericAPIException(400, 'metadata must be valid JSON.')
for name, values in value.items():
if not isinstance(values, list):
values = [values]
query = Q()
for v in values:
if v is None:
query = query | ~Q(metadata_set__name=name)
else:
query = query | Q(metadata_set__name=name, metadata_set__value=v)
queryset = queryset.filter(query)
return queryset
|
ValueError
|
dataset/ETHPy150Open mozilla/kitsune/kitsune/questions/api.py/QuestionFilter.filter_metadata
|
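The filter turns a malformed `metadata` parameter into a 400 by catching `ValueError`, which `json.loads` raises on bad input (`json.JSONDecodeError` has been its subclass since Python 3.5). A sketch without the Django machinery; `BadRequest` is a stand-in for the API's 400 response:

```python
import json

class BadRequest(Exception):
    """Stand-in for an HTTP 400 response."""

def parse_metadata(raw):
    try:
        value = json.loads(raw)
    except ValueError:
        raise BadRequest('metadata must be valid JSON.')
    if not isinstance(value, dict):
        raise BadRequest('metadata must be a JSON object.')
    return value

print(parse_metadata('{"tag": ["a", "b"]}'))
```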
4,638 |
@require('numpy')
@not_implemented_for('directed')
def laplacian_matrix(G, nodelist=None, weight='weight'):
"""Return the Laplacian matrix of G.
The graph Laplacian is the matrix L = D - A, where
A is the adjacency matrix and D is the diagonal matrix of node degrees.
Parameters
----------
G : graph
A NetworkX graph
nodelist : list, optional
The rows and columns are ordered according to the nodes in nodelist.
If nodelist is None, then the ordering is produced by G.nodes().
weight : string or None, optional (default='weight')
The edge data key used to compute each value in the matrix.
If None, then each edge has weight 1.
Returns
-------
L : NumPy matrix
The Laplacian matrix of G.
Notes
-----
For MultiGraph/MultiDiGraph, the edge weights are summed.
See to_numpy_matrix for other options.
See Also
--------
to_numpy_matrix
normalized_laplacian_matrix
"""
import numpy as np
if nodelist is None:
nodelist = G.nodes()
if G.is_multigraph():
# this isn't the fastest way to do this...
A = np.asarray(nx.to_numpy_matrix(G,nodelist=nodelist,weight=weight))
I = np.identity(A.shape[0])
D = I*np.sum(A,axis=1)
L = D - A
else:
# Graph or DiGraph, this is faster than above
n = len(nodelist)
index = dict( (n,i) for i,n in enumerate(nodelist) )
L = np.zeros((n,n))
for ui,u in enumerate(nodelist):
totalwt = 0.0
for v,d in G[u].items():
try:
vi = index[v]
except __HOLE__:
continue
wt = d.get(weight,1)
L[ui,vi] = -wt
totalwt += wt
L[ui,ui] = totalwt
return np.asmatrix(L)
|
KeyError
|
dataset/ETHPy150Open gkno/gkno_launcher/src/networkx/linalg/laplacianmatrix.py/laplacian_matrix
|
4,639 |
def init ( self, parent ):
""" Finishes initializing the editor by creating the underlying toolkit
widget.
"""
factory = self.factory
self._editor = editor = PythonEditor( parent,
show_line_numbers = factory.show_line_numbers )
self.control = control = editor.control
# There are a number of events which aren't well documented that look
# to be useful in future implementations, below are a subset of the
# events that look interesting:
# EVT_STC_AUTOCOMP_SELECTION
# EVT_STC_HOTSPOT_CLICK
# EVT_STC_HOTSPOT_DCLICK
# EVT_STC_DOUBLECLICK
# EVT_STC_MARGINCLICK
control.SetSize( wx.Size( 300, 124 ) )
# Clear out the goofy hotkeys for zooming text
control.CmdKeyClear(ord('B'), stc.STC_SCMOD_CTRL)
control.CmdKeyClear(ord('N'), stc.STC_SCMOD_CTRL)
# Set up the events
wx.EVT_KILL_FOCUS( control, self.wx_update_object )
stc.EVT_STC_CALLTIP_CLICK( control, control.GetId(),
self._calltip_clicked )
if factory.auto_scroll and (factory.selected_line != ''):
wx.EVT_SIZE( control, self._update_selected_line )
if factory.auto_set:
editor.on_trait_change( self.update_object, 'changed',
dispatch = 'ui' )
if factory.key_bindings is not None:
editor.on_trait_change( self.key_pressed, 'key_pressed',
dispatch = 'ui' )
if self.readonly:
control.SetReadOnly( True )
# Set up the lexer
control.SetLexer(stc.STC_LEX_CONTAINER)
control.Bind(stc.EVT_STC_STYLENEEDED, self._style_needed)
try:
self.lexer = getattr(stc, 'STC_LEX_' + self.factory.lexer.upper())
except __HOLE__:
self.lexer = stc.STC_LEX_NULL
# Define the markers we use:
control.MarkerDefine( MARK_MARKER, stc.STC_MARK_BACKGROUND,
background = factory.mark_color_ )
control.MarkerDefine( SEARCH_MARKER, stc.STC_MARK_BACKGROUND,
background = factory.search_color_ )
control.MarkerDefine( SELECTED_MARKER, stc.STC_MARK_BACKGROUND,
background = factory.selected_color_ )
# Make sure the editor has been initialized:
self.update_editor()
# Set up any event listeners:
self.sync_value( factory.mark_lines, 'mark_lines', 'from',
is_list = True )
self.sync_value( factory.selected_line, 'selected_line', 'from' )
self.sync_value( factory.selected_text, 'selected_text', 'to' )
self.sync_value( factory.line, 'line' )
self.sync_value( factory.column, 'column' )
self.sync_value( factory.calltip_clicked, 'calltip_clicked')
self.sync_value(factory.dim_lines, 'dim_lines', 'from', is_list=True)
if self.factory.dim_color == '':
self.dim_color = 'dark grey'
else:
self.sync_value(factory.dim_color, 'dim_color', 'from')
self.sync_value(factory.squiggle_lines, 'squiggle_lines', 'from',
is_list=True)
if factory.squiggle_color == '':
self.squiggle_color = 'red'
else:
self.sync_value(factory.squiggle_color, 'squiggle_color', 'from')
# Check if we need to monitor the line or column position being changed:
if (factory.line != '') or (factory.column != '') or \
(factory.selected_text != ''):
stc.EVT_STC_UPDATEUI( control, control.GetId(),
self._position_changed )
self.set_tooltip()
#---------------------------------------------------------------------------
# Handles the user entering input data in the edit control:
#---------------------------------------------------------------------------
|
AttributeError
|
dataset/ETHPy150Open enthought/traitsui/traitsui/wx/code_editor.py/SourceEditor.init
|
4,640 |
def expr(self, model, data, ** kwargs):
"""
Returns a theano expression for the cost function.
Returns a symbolic expression for a cost function applied to the
minibatch of data.
Optionally, may return None. This represents that the cost function
is intractable but may be optimized via the get_gradients method.
Parameters
----------
model : a pylearn2 Model instance
data : a batch in cost.get_data_specs() form
kwargs : dict
Optional extra arguments. Not used by the base class.
"""
# Fall back to cost_per_example implementation if possible
try:
per_example = self.cost_per_example(model, data, **kwargs)
except __HOLE__:
raise NotImplementedError(str(type(self)) + " does not implement "
"expr.")
# Handle explicitly undefined costs
if per_example is None:
return None
assert per_example.ndim == 1
return per_example.mean()
|
NotImplementedError
|
dataset/ETHPy150Open lisa-lab/pylearn2/pylearn2/costs/cost.py/Cost.expr
|
4,641 |
def get_gradients(self, model, data, ** kwargs):
"""
Provides the gradients of the cost function with respect to the model
parameters.
These are not necessarily those obtained by theano.tensor.grad
--you may wish to use approximate or even intentionally incorrect
gradients in some cases.
Parameters
----------
model : a pylearn2 Model instance
data : a batch in cost.get_data_specs() form
kwargs : dict
Optional extra arguments, not used by the base class.
Returns
-------
gradients : OrderedDict
a dictionary mapping from the model's parameters
to their gradients
The default implementation is to compute the gradients
using T.grad applied to the value returned by expr.
However, subclasses may return other values for the gradient.
For example, an intractable cost may return a sampling-based
approximation to its gradient.
updates : OrderedDict
a dictionary mapping shared variables to updates that must
be applied to them each time these gradients are computed.
This is to facilitate computation of sampling-based approximate
gradients.
The parameters should never appear in the updates dictionary.
This would imply that computing their gradient changes
their value, thus making the gradient value outdated.
"""
try:
cost = self.expr(model=model, data=data, **kwargs)
except __HOLE__:
# If anybody knows how to add type(self) to the exception message
# but still preserve the stack trace, please do so
# The current code does neither
message = "Error while calling " + str(type(self)) + ".expr"
reraise_as(TypeError(message))
if cost is None:
raise NotImplementedError(str(type(self)) +
" represents an intractable cost and "
"does not provide a gradient "
"approximation scheme.")
params = list(model.get_params())
grads = T.grad(cost, params, disconnected_inputs='ignore')
gradients = OrderedDict(izip(params, grads))
updates = OrderedDict()
return gradients, updates
|
TypeError
|
dataset/ETHPy150Open lisa-lab/pylearn2/pylearn2/costs/cost.py/Cost.get_gradients
|
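The default gradient path documented above is just `T.grad` over `expr`, applied to each model parameter. A standalone sketch of that path outside pylearn2 (assumes the legacy Theano library is installed; the toy cost and parameter are illustrative):

```python
from collections import OrderedDict

import theano
import theano.tensor as T

# Toy model: one shared parameter w, cost = (w*x - 3)^2.
w = theano.shared(1.0, name='w')
x = T.scalar('x')
cost = (w * x - 3.0) ** 2

params = [w]
grads = T.grad(cost, params, disconnected_inputs='ignore')
gradients = OrderedDict(zip(params, grads))
updates = OrderedDict()  # no sampling-based state to update here

f = theano.function([x], gradients[w])
print(f(2.0))  # d(cost)/dw = 2*(w*x - 3)*x = -4.0 at w=1, x=2
```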
4,642 |
@functools.wraps(Cost.get_monitoring_channels)
def get_monitoring_channels(self, model, data, ** kwargs):
self.get_data_specs(model)[0].validate(data)
rval = OrderedDict()
composite_specs, mapping = self.get_composite_specs_and_mapping(model)
nested_data = mapping.nest(data)
for i, cost in enumerate(self.costs):
cost_data = nested_data[i]
try:
channels = cost.get_monitoring_channels(model, cost_data,
**kwargs)
rval.update(channels)
except __HOLE__:
reraise_as(Exception('SumOfCosts.get_monitoring_channels '
'encountered TypeError while calling {0}'
'.get_monitoring_channels'.format(
type(cost))))
value = cost.expr(model, cost_data, ** kwargs)
if value is not None:
name = ''
if hasattr(value, 'name') and value.name is not None:
name = '_' + value.name
rval['term_' + str(i) + name] = value
return rval
|
TypeError
|
dataset/ETHPy150Open lisa-lab/pylearn2/pylearn2/costs/cost.py/SumOfCosts.get_monitoring_channels
|
4,643 |
def safe_repr(value):
"""Hopefully pretty robust repr equivalent."""
# this is pretty horrible but should always return *something*
try:
return pydoc.text.repr(value)
except KeyboardInterrupt:
raise
except:
try:
return repr(value)
except KeyboardInterrupt:
raise
except:
try:
# all still in an except block so we catch
# getattr raising
name = getattr(value, '__name__', None)
if name:
# ick, recursion
return safe_repr(name)
klass = getattr(value, '__class__', None)
if klass:
return '%s instance' % safe_repr(klass)
except __HOLE__:
raise
except:
return 'UNRECOVERABLE REPR FAILURE'
|
KeyboardInterrupt
|
dataset/ETHPy150Open scikit-learn/scikit-learn/sklearn/externals/joblib/format_stack.py/safe_repr
|
4,644 |
def _fixed_getframes(etb, context=1, tb_offset=0):
LNUM_POS, LINES_POS, INDEX_POS = 2, 4, 5
records = fix_frame_records_filenames(inspect.getinnerframes(etb, context))
# If the error is at the console, don't build any context, since it would
# otherwise produce 5 blank lines printed out (there is no file at the
# console)
rec_check = records[tb_offset:]
try:
rname = rec_check[0][1]
if rname == '<ipython console>' or rname.endswith('<string>'):
return rec_check
except __HOLE__:
pass
aux = traceback.extract_tb(etb)
assert len(records) == len(aux)
for i, (file, lnum, _, _) in enumerate(aux):
maybeStart = lnum - 1 - context // 2
start = max(maybeStart, 0)
end = start + context
lines = linecache.getlines(file)[start:end]
# pad with empty lines if necessary
if maybeStart < 0:
lines = (['\n'] * -maybeStart) + lines
if len(lines) < context:
lines += ['\n'] * (context - len(lines))
buf = list(records[i])
buf[LNUM_POS] = lnum
buf[INDEX_POS] = lnum - 1 - start
buf[LINES_POS] = lines
records[i] = tuple(buf)
return records[tb_offset:]
|
IndexError
|
dataset/ETHPy150Open scikit-learn/scikit-learn/sklearn/externals/joblib/format_stack.py/_fixed_getframes
|
4,645 |
def format_records(records): # , print_globals=False):
# Loop over all records printing context and info
frames = []
abspath = os.path.abspath
for frame, file, lnum, func, lines, index in records:
try:
file = file and abspath(file) or '?'
except OSError:
# if file is '<console>' or something not in the filesystem,
# the abspath call will throw an OSError. Just ignore it and
# keep the original file string.
pass
if file.endswith('.pyc'):
file = file[:-4] + '.py'
link = file
args, varargs, varkw, locals = inspect.getargvalues(frame)
if func == '?':
call = ''
else:
# Decide whether to include variable details or not
try:
call = 'in %s%s' % (func, inspect.formatargvalues(args,
varargs, varkw, locals,
formatvalue=eq_repr))
except KeyError:
# Very odd crash from inspect.formatargvalues(). The
# scenario under which it appeared was a call to
# view(array,scale) in NumTut.view.view(), where scale had
# been defined as a scalar (it should be a tuple). Somehow
# inspect messes up resolving the argument list of view()
# and barfs out. At some point I should dig into this one
# and file a bug report about it.
print("\nJoblib's exception reporting continues...\n")
call = 'in %s(***failed resolving arguments***)' % func
# Initialize a list of names on the current line, which the
# tokenizer below will populate.
names = []
def tokeneater(token_type, token, start, end, line):
"""Stateful tokeneater which builds dotted names.
The list of names it appends to (from the enclosing scope) can
contain repeated composite names. This is unavoidable, since
there is no way to disambiguate partial dotted structures until
the full list is known. The caller is responsible for pruning
the final list of duplicates before using it."""
# build composite names
if token == '.':
try:
names[-1] += '.'
# store state so the next token is added for x.y.z names
tokeneater.name_cont = True
return
except IndexError:
pass
if token_type == tokenize.NAME and token not in keyword.kwlist:
if tokeneater.name_cont:
# Dotted names
names[-1] += token
tokeneater.name_cont = False
else:
# Regular new names. We append everything, the caller
# will be responsible for pruning the list later. It's
# very tricky to try to prune as we go, b/c composite
# names can fool us. The pruning at the end is easy
# to do (or the caller can print a list with repeated
# names if so desired.)
names.append(token)
elif token_type == tokenize.NEWLINE:
raise IndexError
# we need to store a bit of state in the tokenizer to build
# dotted names
tokeneater.name_cont = False
def linereader(file=file, lnum=[lnum], getline=linecache.getline):
line = getline(file, lnum[0])
lnum[0] += 1
return line
# Build the list of names on this line of code where the exception
# occurred.
try:
# This builds the names list in-place by capturing it from the
# enclosing scope.
for token in generate_tokens(linereader):
tokeneater(*token)
except (IndexError, __HOLE__):
# signals exit of tokenizer
pass
except tokenize.TokenError as msg:
_m = ("An unexpected error occurred while tokenizing input file %s\n"
"The following traceback may be corrupted or invalid\n"
"The error message is: %s\n" % (file, msg))
print(_m)
# prune names list of duplicates, but keep the right order
unique_names = uniq_stable(names)
# Start loop over vars
lvals = []
for name_full in unique_names:
name_base = name_full.split('.', 1)[0]
if name_base in frame.f_code.co_varnames:
if name_base in locals.keys():
try:
value = safe_repr(eval(name_full, locals))
except:
value = "undefined"
else:
value = "undefined"
name = name_full
lvals.append('%s = %s' % (name, value))
#elif print_globals:
# if frame.f_globals.has_key(name_base):
# try:
# value = safe_repr(eval(name_full,frame.f_globals))
# except:
# value = "undefined"
# else:
# value = "undefined"
# name = 'global %s' % name_full
# lvals.append('%s = %s' % (name,value))
if lvals:
lvals = '%s%s' % (INDENT, ('\n%s' % INDENT).join(lvals))
else:
lvals = ''
level = '%s\n%s %s\n' % (75 * '.', link, call)
if index is None:
frames.append(level)
else:
frames.append('%s%s' % (level, ''.join(
_format_traceback_lines(lnum, index, lines, lvals))))
return frames
###############################################################################
|
UnicodeDecodeError
|
dataset/ETHPy150Open scikit-learn/scikit-learn/sklearn/externals/joblib/format_stack.py/format_records
|
4,646 |
def format_exc(etype, evalue, etb, context=5, tb_offset=0):
""" Return a nice text document describing the traceback.
Parameters
-----------
etype, evalue, etb: as returned by sys.exc_info
context: number of lines of the source file to plot
tb_offset: the number of stack frame not to use (0 = use all)
"""
# some locals
try:
etype = etype.__name__
except __HOLE__:
pass
# Header with the exception type, python version, and date
pyver = 'Python ' + sys.version.split()[0] + ': ' + sys.executable
date = time.ctime(time.time())
pid = 'PID: %i' % os.getpid()
head = '%s%s%s\n%s%s%s' % (
etype, ' ' * (75 - len(str(etype)) - len(date)),
date, pid, ' ' * (75 - len(str(pid)) - len(pyver)),
pyver)
# Drop topmost frames if requested
try:
records = _fixed_getframes(etb, context, tb_offset)
except:
raise
print('\nUnfortunately, your original traceback can not be '
'constructed.\n')
return ''
# Get (safely) a string form of the exception info
try:
etype_str, evalue_str = map(str, (etype, evalue))
except:
# User exception is improperly defined.
etype, evalue = str, sys.exc_info()[:2]
etype_str, evalue_str = map(str, (etype, evalue))
# ... and format it
exception = ['%s: %s' % (etype_str, evalue_str)]
frames = format_records(records)
return '%s\n%s\n%s' % (head, '\n'.join(frames), ''.join(exception[0]))
###############################################################################
|
AttributeError
|
dataset/ETHPy150Open scikit-learn/scikit-learn/sklearn/externals/joblib/format_stack.py/format_exc
|
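A hedged usage sketch of the formatter above; the import path matches the vintage of scikit-learn this snippet was vendored into and may differ in other versions:

```python
import sys
from sklearn.externals.joblib.format_stack import format_exc

try:
    1 / 0
except ZeroDivisionError:
    etype, evalue, etb = sys.exc_info()
    # Three lines of source context per frame, keep all frames.
    print(format_exc(etype, evalue, etb, context=3, tb_offset=0))
```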
4,647 |
def __getattr__(self, name):
try:
return self[name]
except __HOLE__:
raise AttributeError(name)
|
KeyError
|
dataset/ETHPy150Open arokem/python-matlab-bridge/tools/gh_api.py/Obj.__getattr__
|
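This is the usual dict-to-attribute bridge: a missing key is re-raised as AttributeError so that getattr()/hasattr() keep their normal semantics. A minimal self-contained sketch of the same pattern (the field names are made up):

```python
class Obj(dict):
    """Dictionary whose keys are also readable as attributes."""
    def __getattr__(self, name):
        try:
            return self[name]
        except KeyError:
            # Translate so hasattr()/getattr() defaults behave correctly.
            raise AttributeError(name)

o = Obj(login="octocat", id=1)
print(o.login)                     # -> octocat
print(getattr(o, "missing", "-"))  # -> - (AttributeError, not KeyError)
```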
4,648 |
def read_manifest(self):
"""Read the manifest file (named by 'self.manifest') and use it to
fill in 'self.filelist', the list of files to include in the source
distribution.
"""
log.info("reading manifest file '%s'", self.manifest)
manifest = open(self.manifest, 'rbU')
for line in manifest:
# The manifest must contain UTF-8. See #303.
if sys.version_info >= (3,):
try:
line = line.decode('UTF-8')
except __HOLE__:
log.warn("%r not UTF-8 decodable -- skipping" % line)
continue
# ignore comments and blank lines
line = line.strip()
if line.startswith('#') or not line:
continue
self.filelist.append(line)
manifest.close()
|
UnicodeDecodeError
|
dataset/ETHPy150Open RoseOu/flasky/venv/lib/python2.7/site-packages/setuptools/command/sdist.py/sdist.read_manifest
|
4,649 |
def test_openFDs(self):
"""
File descriptors returned by L{_listOpenFDs} are mostly open.
This test assumes that zero-legth writes fail with EBADF on closed
file descriptors.
"""
for fd in process._listOpenFDs():
try:
fcntl.fcntl(fd, fcntl.F_GETFL)
except __HOLE__, err:
self.assertEquals(
errno.EBADF, err.errno,
"fcntl(%d, F_GETFL) failed with unexpected errno %d" % (
fd, err.errno))
|
IOError
|
dataset/ETHPy150Open kuri65536/python-for-android/python-modules/twisted/twisted/internet/test/test_posixprocess.py/FileDescriptorTests.test_openFDs
|
4,650 |
def removeMicroconsensusDownloadCallback(self, callback):
try:
self._consensus_download_callbacks.remove(callback)
except __HOLE__:
msg = ("MicroconsensusManager got request to remove callback "
"{} but has no reference to this function."
.format(callback))
logging.debug(msg)
|
KeyError
|
dataset/ETHPy150Open nskinkel/oppy/oppy/netstatus/microconsensusmanager.py/MicroconsensusManager.removeMicroconsensusDownloadCallback
|
4,651 |
def null_or_int(value):
try:
return int(value)
except __HOLE__:
return None
|
TypeError
|
dataset/ETHPy150Open kvesteri/wtforms-components/wtforms_components/utils.py/null_or_int
|
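Worth noting: only TypeError is swallowed here, so int(None) becomes None while a non-numeric string still raises. A quick sketch of both behaviours:

```python
def null_or_int(value):
    try:
        return int(value)
    except TypeError:
        return None

assert null_or_int("42") == 42
assert null_or_int(None) is None      # TypeError -> None
try:
    null_or_int("abc")                # ValueError still propagates
except ValueError:
    print("non-numeric strings are rejected, not nulled")
```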
4,652 |
@csrf_exempt
def login(request):
"""
Authenticates the given 'username' and 'password' against a user in the database.
"""
if request.method != 'POST':
r = HttpResponse('Invalid method. Only POST method accepted.', status=405)
r['Allow'] = 'POST'
return r
try:
in_json = json.loads(request.body)
assert "username" in in_json
assert "password" in in_json
except AssertionError:
return HttpResponseBadRequest("argument mismatch")
except __HOLE__ as e:
return HttpResponseBadRequest("invalid JSON")
dbc = db_model.connect()
user = dbc.hbuser.find_one({"username": in_json['username']})
if user is None:
# not returning "user not found" to avoid attackers to guess valid users
return HttpResponse(status=401)
else:
m = hashlib.sha512()
m.update(in_json['password'])
password_hash = m.hexdigest()
if user['password'] == password_hash:
m = hashlib.sha512()
m.update(os.urandom(64))
token_string = m.hexdigest()
from api.models import redis_wrapper
r = redis_wrapper.init_redis()
r.set(token_string, user['username'], settings.TOKEN_TTL) # Store tokens to expire in 1 hour
r = HttpResponse()
r['X-Auth-Token'] = token_string
logger.info("login success for user '%s'" % in_json['username'])
return r
else:
logger.info("login failed for user '%s'" % in_json['username'])
return HttpResponse(status=401)
|
ValueError
|
dataset/ETHPy150Open emccode/heliosburn/heliosburn/django/hbproject/api/views/auth.py/login
|
4,653 |
@method_accepts(TypeError,
compnames=(str, list, tuple),
index=(int, NoneType),
check=bool)
def add(self, compnames, index=None, check=False):
"""
add(self, compnames, index=None, check=False)
Add new component(s) to the end of the workflow by name.
"""
if isinstance(compnames, basestring):
nodes = [compnames]
else:
nodes = compnames
try:
iter(nodes)
except TypeError:
raise TypeError("Components must be added by name to a workflow.")
# workflow deriv graph, etc. must be recalculated
self.config_changed()
for node in nodes:
if isinstance(node, basestring):
if check:
# check whether each node is valid and if not then
# construct a useful error message.
parent = self.parent
name = parent.parent.name
if not name:
name = "the top assembly."
# Components in subassys are never allowed.
if '.' in node:
msg = "Component '%s' is not" % node + \
" in the scope of %s" % name
raise AttributeError(msg)
# Does the component really exist?
try:
target = parent.parent.get(node)
except __HOLE__:
msg = "Component '%s'" % node + \
" does not exist in %s" % name
raise AttributeError(msg)
# Don't add yourself to your own workflow
if target == parent:
msg = "You cannot add a driver to its own workflow"
raise AttributeError(msg)
if index is None:
self._explicit_names.append(node)
else:
self._explicit_names.insert(index, node)
index += 1
else:
msg = "Components must be added by name to a workflow."
raise TypeError(msg)
|
AttributeError
|
dataset/ETHPy150Open OpenMDAO/OpenMDAO-Framework/openmdao.main/src/openmdao/main/workflow.py/Workflow.add
|
4,654 |
def remove(self, compname):
"""Remove a component from the workflow by name. Do not report an
error if the specified component is not found.
"""
if not isinstance(compname, basestring):
msg = "Components must be removed by name from a workflow."
raise TypeError(msg)
try:
self._explicit_names.remove(compname)
except __HOLE__:
pass
self.config_changed()
|
ValueError
|
dataset/ETHPy150Open OpenMDAO/OpenMDAO-Framework/openmdao.main/src/openmdao/main/workflow.py/Workflow.remove
|
4,655 |
def print_extended_help(self, filename=None):
old_suppress_help = {}
for group in self.option_groups:
try:
old_suppress_help[group] = group.suppress_help
group.suppress_help = False
except __HOLE__ as e:
old_suppress_help[group] = None
_LOGGER.debug("Option group does not have option to "
"suppress help; exception is " + e.message)
self.print_help(file=filename)
for group in self.option_groups:
# Restore the suppressed help when applicable
if old_suppress_help[group]:
group.suppress_help = True
|
AttributeError
|
dataset/ETHPy150Open prestodb/presto-admin/prestoadmin/util/parser.py/LoggingOptionParser.print_extended_help
|
4,656 |
def parse(self, format=None, tz_in=None, tz_out=None):
# 'unix'
if format == "unix":
return str(time.time()).split('.')[0]
# anything else
dt = self.date_gen(tz_in, tz_out)
text = self.date_format(dt, format)
# Fix potential unicode/codepage issues
if ST2 and isinstance(text, str):
try:
text = text.decode(locale.getpreferredencoding())
except __HOLE__:
text = text.decode('utf-8')
return text
|
UnicodeDecodeError
|
dataset/ETHPy150Open FichteFoll/InsertDate/format_date/__init__.py/FormatDate.parse
|
4,657 |
def date_gen(self, tz_in=None, tz_out=None):
"""Generates the according datetime object using given parameters"""
# Check parameters and gather tzinfo data (and raise a few exceptions)
if tz_in is None:
tz_in = self.default['tz_in']
if tz_in == "local":
tz_in = self.local_tz
tz_in = self.check_tzparam(tz_in, 'tz_in')
tz_out = self.check_tzparam(tz_out, 'tz_out')
# Get timedata
try:
dt = tz_in.localize(datetime.now())
except AttributeError:
# Fallback for non-pytz timezones ('local')
dt = datetime.now(tz=tz_in)
# Process timedata
# TODO: shift datetime here | split into other function(s)
if not tz_out:
return dt
# Adjust timedata for target timezone
dt = dt.astimezone(tz_out)
try:
return tz_out.normalize(dt)
except __HOLE__:
# Fallback for non-pytz timezones ('local')
return dt
|
AttributeError
|
dataset/ETHPy150Open FichteFoll/InsertDate/format_date/__init__.py/FormatDate.date_gen
|
4,658 |
@staticmethod
def translate_to_python(h):
try:
return PersistentHash(h)
except (__HOLE__, ValueError):
return None
|
TypeError
|
dataset/ETHPy150Open VisTrails/VisTrails/vistrails/packages/persistent_archive/common.py/PersistentHash.translate_to_python
|
4,659 |
def compute(self):
if self.has_input('value') == self.has_input('hash'):
raise ModuleError(self, "Set either 'value' or 'hash'")
if self.has_input('value'):
self._hash = self.get_input('value')._hash
else:
try:
self._set_hash(self.get_input('hash'))
except __HOLE__, e:
raise ModuleError(self, e.message)
|
ValueError
|
dataset/ETHPy150Open VisTrails/VisTrails/vistrails/packages/persistent_archive/common.py/PersistentHash.compute
|
4,660 |
@utils.allow_tableset_proxy
def bins(self, column_name, count=10, start=None, end=None):
"""
Generates (approximately) evenly sized bins for the values in a column.
Bins may not be perfectly even if the spread of the data does not divide
evenly, but all values will always be included in some bin.
The resulting table will have two columns. The first will have
the same name as the specified column, but will be type :class:`.Text`.
The second will be named :code:`count` and will be of type
:class:`.Number`.
:param column_name:
The name of the column to bin. Must be of type :class:`.Number`
:param count:
The number of bins to create. If not specified then each value will
be counted as its own bin.
:param start:
The minimum value to start the bins at. If not specified the
minimum value in the column will be used.
:param end:
The maximum value to end the bins at. If not specified the maximum
value in the column will be used.
:returns:
A new :class:`Table`.
"""
minimum, maximum = utils.round_limits(
Min(column_name).run(self),
Max(column_name).run(self)
)
# Infer bin start/end positions
start = minimum if not start else Decimal(start)
end = maximum if not end else Decimal(end)
# Calculate bin size
spread = abs(end - start)
size = spread / count
breaks = [start]
# Calculate breakpoints
for i in range(1, count + 1):
top = start + (size * i)
breaks.append(top)
# Format bin names
decimal_places = utils.max_precision(breaks)
break_formatter = utils.make_number_formatter(decimal_places)
def name_bin(i, j, first_exclusive=True, last_exclusive=False):
inclusive = format_decimal(i, format=break_formatter)
exclusive = format_decimal(j, format=break_formatter)
output = u'[' if first_exclusive else u'('
output += u'%s - %s' % (inclusive, exclusive)
output += u']' if last_exclusive else u')'
return output
# Generate bins
bin_names = []
for i in range(1, len(breaks)):
last_exclusive = (i == len(breaks) - 1)
if i == 1 and minimum < start:
name = name_bin(minimum, breaks[i], last_exclusive=last_exclusive)
elif i == len(breaks) - 1 and maximum > end:
name = name_bin(breaks[i - 1], maximum, last_exclusive=last_exclusive)
else:
name = name_bin(breaks[i - 1], breaks[i], last_exclusive=last_exclusive)
bin_names.append(name)
bin_names.append(None)
# Lambda method for actually assigning values to bins
def binner(row):
value = row[column_name]
if value is None:
return None
i = 1
try:
while value >= breaks[i]:
i += 1
except __HOLE__:
i -= 1
return bin_names[i - 1]
# Pivot by lambda
table = self.pivot(binner, key_name=column_name)
# Sort by bin order
return table.order_by(lambda r: bin_names.index(r[column_name]))
|
IndexError
|
dataset/ETHPy150Open wireservice/agate/agate/table/bins.py/bins
|
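A hedged usage sketch of the method above (assumes the agate library is installed; the table contents are illustrative):

```python
import agate

rows = [[i] for i in range(100)]
table = agate.Table(rows, ['value'], [agate.Number()])

# Ten roughly equal bins over [0, 100).
binned = table.bins('value', count=10, start=0, end=100)
binned.print_table()  # one row per bin name, e.g. "[0 - 10)", plus its count
```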
4,661 |
def main():
import sys
import getopt
try:
opts, args = getopt.getopt(sys.argv[1:], 'td')
except getopt.error, msg:
sys.stdout = sys.stderr
print msg
print "usage: quopri [-t | -d] [file] ..."
print "-t: quote tabs"
print "-d: decode; default encode"
sys.exit(2)
deco = 0
tabs = 0
for o, a in opts:
if o == '-t': tabs = 1
if o == '-d': deco = 1
if tabs and deco:
sys.stdout = sys.stderr
print "-t and -d are mutually exclusive"
sys.exit(2)
if not args: args = ['-']
sts = 0
for file in args:
if file == '-':
fp = sys.stdin
else:
try:
fp = open(file)
except __HOLE__, msg:
sys.stderr.write("%s: can't open (%s)\n" % (file, msg))
sts = 1
continue
if deco:
decode(fp, sys.stdout)
else:
encode(fp, sys.stdout, tabs)
if fp is not sys.stdin:
fp.close()
if sts:
sys.exit(sts)
|
IOError
|
dataset/ETHPy150Open LarsMichelsen/pmatic/ccu_pkg/python/lib/python2.7/quopri.py/main
|
4,662 |
def parse_post(self, required_keys=None, optional_keys=None):
"""
Clean and validate POSTed JSON data by defining sets of required and
optional keys.
"""
if request.headers.get('content-type') == 'application/json':
data = request.data
elif 'data' not in request.form:
error('Missing correct content-type or missing "data" field.')
else:
data = request.form['data']
if data:
try:
data = json.loads(data)
except __HOLE__:
error('Unable to parse JSON data from request.')
else:
data = {}
required = set(required_keys or ())
optional = set(optional_keys or ())
all_keys = required | optional
keys_present = set(key for key in data if data[key] not in ('', None))
missing = required - keys_present
if missing:
error('Missing required fields: %s' % ', '.join(sorted(missing)))
invalid_keys = keys_present - all_keys
if invalid_keys:
error('Invalid keys: %s' % ', '.join(sorted(invalid_keys)))
return data
|
ValueError
|
dataset/ETHPy150Open coleifer/scout/scout.py/RequestValidator.parse_post
|
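The validation core above is plain set arithmetic over the posted keys; extracted as a standalone sketch (the helper name is made up):

```python
def check_keys(data, required_keys=None, optional_keys=None):
    """Return (missing, invalid) key sets, mirroring the checks above."""
    required = set(required_keys or ())
    optional = set(optional_keys or ())
    present = {k for k in data if data[k] not in ('', None)}
    missing = required - present
    invalid = present - (required | optional)
    return missing, invalid

print(check_keys({'name': 'idx', 'junk': 1},
                 required_keys=['name', 'content']))
# -> ({'content'}, {'junk'})
```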
4,663 |
def __import_vars(self, env_file):
with open(env_file, "r") as f:
for line in f:
try:
key, val = line.strip().split('=', 1)
# note: lstrip('export ') would strip a character set, not the prefix
key = key[len('export '):] if key.startswith('export ') else key
except __HOLE__: # Take care of blank or comment lines
pass
else:
if not callable(val):
if self.verbose_mode:
if key in self.app.config:
print(
" * Overwriting an existing config var:"
" {0}".format(key))
else:
print(
" * Setting an entirely new config var:"
" {0}".format(key))
self.app.config[key] = re.sub(
r"\A[\"']|[\"']\Z", "", val)
|
ValueError
|
dataset/ETHPy150Open grauwoelfchen/flask-dotenv/flask_dotenv.py/DotEnv.__import_vars
|
4,664 |
def eval(self, keys):
"""
Examples:
Specify type literal for key.
>>> env.eval({MAIL_PORT: int})
"""
for k, v in keys.items():
if k in self.app.config:
try:
val = ast.literal_eval(self.app.config[k])
if type(val) == v:
if self.verbose_mode:
print(
" * Casting a specified var as literal:"
" {0} => {1}".format(k, v)
)
self.app.config[k] = val
else:
print(
" ! Does not match with specified type:"
" {0} => {1}".format(k, v))
except (__HOLE__, SyntaxError):
print(" ! Could not evaluate as literal type:"
" {0} => {1}".format(k, v))
|
ValueError
|
dataset/ETHPy150Open grauwoelfchen/flask-dotenv/flask_dotenv.py/DotEnv.eval
|
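The casting trick above is ast.literal_eval plus a type check; a self-contained sketch (isinstance is used here where the original compares types exactly):

```python
import ast

def cast_literal(raw, expected_type):
    """Parse raw as a Python literal if it matches expected_type."""
    try:
        val = ast.literal_eval(raw)
    except (ValueError, SyntaxError):
        return raw  # not a literal at all; keep the string
    return val if isinstance(val, expected_type) else raw

assert cast_literal("587", int) == 587        # "587" -> 587
assert cast_literal("x-y-z", int) == "x-y-z"  # not a literal, unchanged
assert cast_literal("'587'", int) == "'587'"  # a str literal, wrong type
```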
4,665 |
def validate_yes_no(answer):
VALIDATION_TABLE = {'y': True, 'n': False, 'yes': True, 'no': False, 'Y': True, 'N': False}
try:
return VALIDATION_TABLE[answer]
except __HOLE__:
return None
|
KeyError
|
dataset/ETHPy150Open cidadania/ecidadania-ng/src/apps/managecommands/management/commands/resource_addnewapp/_commons.py/validate_yes_no
|
4,666 |
def enable_logging(hass, verbose=False, daemon=False, log_rotate_days=None):
"""Setup the logging."""
if not daemon:
logging.basicConfig(level=logging.INFO)
fmt = ("%(log_color)s%(asctime)s %(levelname)s (%(threadName)s) "
"[%(name)s] %(message)s%(reset)s")
try:
from colorlog import ColoredFormatter
logging.getLogger().handlers[0].setFormatter(ColoredFormatter(
fmt,
datefmt='%y-%m-%d %H:%M:%S',
reset=True,
log_colors={
'DEBUG': 'cyan',
'INFO': 'green',
'WARNING': 'yellow',
'ERROR': 'red',
'CRITICAL': 'red',
}
))
except __HOLE__:
pass
# Log errors to a file if we have write access to file or config dir
err_log_path = hass.config.path(ERROR_LOG_FILENAME)
err_path_exists = os.path.isfile(err_log_path)
# Check if we can write to the error log if it exists or that
# we can create files in the containing directory if not.
if (err_path_exists and os.access(err_log_path, os.W_OK)) or \
(not err_path_exists and os.access(hass.config.config_dir, os.W_OK)):
if log_rotate_days:
err_handler = logging.handlers.TimedRotatingFileHandler(
err_log_path, when='midnight', backupCount=log_rotate_days)
else:
err_handler = logging.FileHandler(
err_log_path, mode='w', delay=True)
err_handler.setLevel(logging.INFO if verbose else logging.WARNING)
err_handler.setFormatter(
logging.Formatter('%(asctime)s %(name)s: %(message)s',
datefmt='%y-%m-%d %H:%M:%S'))
logger = logging.getLogger('')
logger.addHandler(err_handler)
logger.setLevel(logging.INFO)
else:
_LOGGER.error(
'Unable to setup error log %s (access denied)', err_log_path)
|
ImportError
|
dataset/ETHPy150Open home-assistant/home-assistant/homeassistant/bootstrap.py/enable_logging
|
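The error-log branch above is standard-library logging; a minimal standalone sketch of the rotating variant (the file name and retention count are illustrative):

```python
import logging
import logging.handlers

err_handler = logging.handlers.TimedRotatingFileHandler(
    'home-assistant.log', when='midnight', backupCount=3)
err_handler.setLevel(logging.WARNING)
err_handler.setFormatter(logging.Formatter(
    '%(asctime)s %(name)s: %(message)s', datefmt='%y-%m-%d %H:%M:%S'))

logger = logging.getLogger('')
logger.addHandler(err_handler)
logger.warning('this line is written to the rotated error log')
```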
4,667 |
def save_as(self, version):
"""the save action for houdini environment
"""
if not version:
return
from stalker import Version
assert isinstance(version, Version)
# get the current version, and store it as the parent of the new version
current_version = self.get_current_version()
# initialize path variables by using update_paths()
version.update_paths()
# set the extension to hip
if not hou.isApprentice():
version.extension = '.hip'
else:
version.extension = '.hipnc'
# define that this version is created with Houdini
version.created_with = self.name
# create the folder if it doesn't exists
try:
os.makedirs(
os.path.dirname(
version.absolute_full_path
)
)
except __HOLE__:
# dirs exist
pass
# houdini uses / instead of \ under windows
# let's fix it
# set the environment variables
self.set_environment_variables(version)
# set the render file name
self.set_render_filename(version)
# houdini accepts only strings as the file name; no unicode support
# as far as I can see
hou.hipFile.save(file_name=str(version.absolute_full_path))
# set the environment variables again
self.set_environment_variables(version)
# append it to the recent file list
self.append_to_recent_files(
version.absolute_full_path
)
# update the parent info
if current_version:
version.parent = current_version
# update database with new version info
DBSession.commit()
return True
|
OSError
|
dataset/ETHPy150Open eoyilmaz/anima/anima/env/houdini.py/Houdini.save_as
|
4,668 |
@classmethod
def set_environment_variable(cls, var, value):
"""sets environment var
:param str var: The name of the var
:param value: The value of the variable
"""
os.environ[var] = value
try:
hou.allowEnvironmentVariableToOverwriteVariable(var, True)
except __HOLE__:
# should be Houdini 12
hou.allowEnvironmentToOverwriteVariable(var, True)
hscript_command = "set -g %s = '%s'" % (var, value)
hou.hscript(str(hscript_command))
|
AttributeError
|
dataset/ETHPy150Open eoyilmaz/anima/anima/env/houdini.py/Houdini.set_environment_variable
|
4,669 |
def set_render_filename(self, version):
"""sets the render file name
"""
output_filename = \
'{version.absolute_path}/Outputs/`$OS`/' \
'{version.task.project.code}_{version.nice_name}_' \
'v{version.version_number:03d}.$F4.exr'
output_filename = \
output_filename.format(version=version).replace('\\', '/')
# compute a $JOB relative file path
# which is much safer if the file is going to be rendered on multiple OSes
# $HIP = the current asset path
# $JOB = the current sequence path
#hip = self._asset.path
#hip = hou.getenv("HIP")
job = hou.getenv("JOB")
# eliminate environment vars
while "$" in job:
job = os.path.expandvars(job)
job_relative_output_file_path = \
"$JOB/%s" % utils.relpath(job, output_filename, "/", "..")
output_nodes = self.get_output_nodes()
for output_node in output_nodes:
# get only the ifd nodes for now
if output_node.type().name() == 'ifd':
# set the file name
try:
output_node.setParms(
{'vm_picture': str(job_relative_output_file_path)}
)
except hou.PermissionError:
# node is locked
pass
# set the compression to zips (zip, single scanline)
output_node.setParms({"vm_image_exr_compression": "zips"})
# also create the folders
output_file_full_path = output_node.evalParm('vm_picture')
output_file_path = os.path.dirname(output_file_full_path)
flat_output_file_path = output_file_path
while "$" in flat_output_file_path:
flat_output_file_path = os.path.expandvars(
flat_output_file_path
)
try:
os.makedirs(flat_output_file_path)
except __HOLE__:
# dirs exists
pass
|
OSError
|
dataset/ETHPy150Open eoyilmaz/anima/anima/env/houdini.py/Houdini.set_render_filename
|
4,670 |
def _read(self):
"""reads the history file to a buffer
"""
try:
history_file = open(self._history_file_full_path)
except __HOLE__:
self._buffer = []
return
self._buffer = history_file.readlines()
# strip all the lines
self._buffer = [line.strip() for line in self._buffer]
history_file.close()
|
IOError
|
dataset/ETHPy150Open eoyilmaz/anima/anima/env/houdini.py/FileHistory._read
|
4,671 |
def _str2time(day, mon, yr, hr, min, sec, tz):
# translate month name to number
# month numbers start with 1 (January)
try:
mon = MONTHS_LOWER.index(mon.lower())+1
except ValueError:
# maybe it's already a number
try:
imon = int(mon)
except __HOLE__:
return None
if 1 <= imon <= 12:
mon = imon
else:
return None
# make sure clock elements are defined
if hr is None: hr = 0
if min is None: min = 0
if sec is None: sec = 0
yr = int(yr)
day = int(day)
hr = int(hr)
min = int(min)
sec = int(sec)
if yr < 1000:
# find "obvious" year
cur_yr = time.localtime(time.time())[0]
m = cur_yr % 100
tmp = yr
yr = yr + cur_yr - m
m = m - tmp
if abs(m) > 50:
if m > 0: yr = yr + 100
else: yr = yr - 100
# convert UTC time tuple to seconds since epoch (not timezone-adjusted)
t = _timegm((yr, mon, day, hr, min, sec, tz))
if t is not None:
# adjust time using timezone string, to get absolute time since epoch
if tz is None:
tz = "UTC"
tz = tz.upper()
offset = offset_from_tz_string(tz)
if offset is None:
return None
t = t - offset
return t
|
ValueError
|
dataset/ETHPy150Open babble/babble/include/jython/Lib/cookielib.py/_str2time
|
4,672 |
def request_port(request):
host = request.get_host()
i = host.find(':')
if i >= 0:
port = host[i+1:]
try:
int(port)
except __HOLE__:
_debug("nonnumeric port: '%s'", port)
return None
else:
port = DEFAULT_HTTP_PORT
return port
# Characters in addition to A-Z, a-z, 0-9, '_', '.', and '-' that don't
# need to be escaped to form a valid HTTP URL (RFCs 2396 and 1738).
|
ValueError
|
dataset/ETHPy150Open babble/babble/include/jython/Lib/cookielib.py/request_port
|
4,673 |
def set_ok_port(self, cookie, request):
if cookie.port_specified:
req_port = request_port(request)
if req_port is None:
req_port = "80"
else:
req_port = str(req_port)
for p in cookie.port.split(","):
try:
int(p)
except __HOLE__:
_debug(" bad port %s (not numeric)", p)
return False
if p == req_port:
break
else:
_debug(" request port (%s) not found in %s",
req_port, cookie.port)
return False
return True
|
ValueError
|
dataset/ETHPy150Open babble/babble/include/jython/Lib/cookielib.py/DefaultCookiePolicy.set_ok_port
|
4,674 |
def deepvalues(mapping):
"""Iterates over nested mapping, depth-first, in sorted order by key."""
values = vals_sorted_by_key(mapping)
for obj in values:
mapping = False
try:
obj.items
except __HOLE__:
pass
else:
mapping = True
for subobj in deepvalues(obj):
yield subobj
if not mapping:
yield obj
# Used as second parameter to dict.get() method, to distinguish absent
# dict key from one with a None value.
|
AttributeError
|
dataset/ETHPy150Open babble/babble/include/jython/Lib/cookielib.py/deepvalues
|
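A simplified, self-contained sketch of the generator above: anything with an .items attribute is treated as a nested mapping and recursed into, everything else is yielded as a leaf:

```python
def deepvalues(mapping):
    """Depth-first leaf values of nested mappings, sorted by key."""
    for obj in (mapping[k] for k in sorted(mapping)):
        if hasattr(obj, 'items'):   # stands in for the try/except probe
            for sub in deepvalues(obj):
                yield sub
        else:
            yield obj

print(list(deepvalues({'a': {'x': 1, 'y': 2}, 'b': 3})))  # -> [1, 2, 3]
```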
4,675 |
def _normalized_cookie_tuples(self, attrs_set):
"""Return list of tuples containing normalised cookie information.
attrs_set is the list of lists of key,value pairs extracted from
the Set-Cookie or Set-Cookie2 headers.
Tuples are name, value, standard, rest, where name and value are the
cookie name and value, standard is a dictionary containing the standard
cookie-attributes (discard, secure, version, expires or max-age,
domain, path and port) and rest is a dictionary containing the rest of
the cookie-attributes.
"""
cookie_tuples = []
boolean_attrs = "discard", "secure"
value_attrs = ("version",
"expires", "max-age",
"domain", "path", "port",
"comment", "commenturl")
for cookie_attrs in attrs_set:
name, value = cookie_attrs[0]
# Build dictionary of standard cookie-attributes (standard) and
# dictionary of other cookie-attributes (rest).
# Note: expiry time is normalised to seconds since epoch. V0
# cookies should have the Expires cookie-attribute, and V1 cookies
# should have Max-Age, but since V1 includes RFC 2109 cookies (and
# since V0 cookies may be a mish-mash of Netscape and RFC 2109), we
# accept either (but prefer Max-Age).
max_age_set = False
bad_cookie = False
standard = {}
rest = {}
for k, v in cookie_attrs[1:]:
lc = k.lower()
# don't lose case distinction for unknown fields
if lc in value_attrs or lc in boolean_attrs:
k = lc
if k in boolean_attrs and v is None:
# boolean cookie-attribute is present, but has no value
# (like "discard", rather than "port=80")
v = True
if k in standard:
# only first value is significant
continue
if k == "domain":
if v is None:
_debug(" missing value for domain attribute")
bad_cookie = True
break
# RFC 2965 section 3.3.3
v = v.lower()
if k == "expires":
if max_age_set:
# Prefer max-age to expires (like Mozilla)
continue
if v is None:
_debug(" missing or invalid value for expires "
"attribute: treating as session cookie")
continue
if k == "max-age":
max_age_set = True
try:
v = int(v)
except __HOLE__:
_debug(" missing or invalid (non-numeric) value for "
"max-age attribute")
bad_cookie = True
break
# convert RFC 2965 Max-Age to seconds since epoch
# XXX Strictly you're supposed to follow RFC 2616
# age-calculation rules. Remember that zero Max-Age is a
# is a request to discard (old and new) cookie, though.
k = "expires"
v = self._now + v
if (k in value_attrs) or (k in boolean_attrs):
if (v is None and
k not in ("port", "comment", "commenturl")):
_debug(" missing value for %s attribute" % k)
bad_cookie = True
break
standard[k] = v
else:
rest[k] = v
if bad_cookie:
continue
cookie_tuples.append((name, value, standard, rest))
return cookie_tuples
|
ValueError
|
dataset/ETHPy150Open babble/babble/include/jython/Lib/cookielib.py/CookieJar._normalized_cookie_tuples
|
4,676 |
def _cookie_from_cookie_tuple(self, tup, request):
# standard is dict of standard cookie-attributes, rest is dict of the
# rest of them
name, value, standard, rest = tup
domain = standard.get("domain", Absent)
path = standard.get("path", Absent)
port = standard.get("port", Absent)
expires = standard.get("expires", Absent)
# set the easy defaults
version = standard.get("version", None)
if version is not None: version = int(version)
secure = standard.get("secure", False)
# (discard is also set if expires is Absent)
discard = standard.get("discard", False)
comment = standard.get("comment", None)
comment_url = standard.get("commenturl", None)
# set default path
if path is not Absent and path != "":
path_specified = True
path = escape_path(path)
else:
path_specified = False
path = request_path(request)
i = path.rfind("/")
if i != -1:
if version == 0:
# Netscape spec parts company from reality here
path = path[:i]
else:
path = path[:i+1]
if len(path) == 0: path = "/"
# set default domain
domain_specified = domain is not Absent
# but first we have to remember whether it starts with a dot
domain_initial_dot = False
if domain_specified:
domain_initial_dot = bool(domain.startswith("."))
if domain is Absent:
req_host, erhn = eff_request_host(request)
domain = erhn
elif not domain.startswith("."):
domain = "."+domain
# set default port
port_specified = False
if port is not Absent:
if port is None:
# Port attr present, but has no value: default to request port.
# Cookie should then only be sent back on that port.
port = request_port(request)
else:
port_specified = True
port = re.sub(r"\s+", "", port)
else:
# No port attr present. Cookie can be sent back on any port.
port = None
# set default expires and discard
if expires is Absent:
expires = None
discard = True
elif expires <= self._now:
# Expiry date in past is request to delete cookie. This can't be
# in DefaultCookiePolicy, because can't delete cookies there.
try:
self.clear(domain, path, name)
except __HOLE__:
pass
_debug("Expiring cookie, domain='%s', path='%s', name='%s'",
domain, path, name)
return None
return Cookie(version,
name, value,
port, port_specified,
domain, domain_specified, domain_initial_dot,
path, path_specified,
secure,
expires,
discard,
comment,
comment_url,
rest)
|
KeyError
|
dataset/ETHPy150Open babble/babble/include/jython/Lib/cookielib.py/CookieJar._cookie_from_cookie_tuple
|
4,677 |
def revert(self, filename=None,
ignore_discard=False, ignore_expires=False):
"""Clear all cookies and reload cookies from a saved file.
Raises LoadError (or IOError) if reversion is not successful; the
object's state will not be altered if this happens.
"""
if filename is None:
if self.filename is not None: filename = self.filename
else: raise ValueError(MISSING_FILENAME_TEXT)
self._cookies_lock.acquire()
old_state = copy.deepcopy(self._cookies)
self._cookies = {}
try:
self.load(filename, ignore_discard, ignore_expires)
except (LoadError, __HOLE__):
self._cookies = old_state
raise
self._cookies_lock.release()
|
IOError
|
dataset/ETHPy150Open babble/babble/include/jython/Lib/cookielib.py/FileCookieJar.revert
|
4,678 |
def _parseHeaderValue(self, header_value):
"""
Parse out a complex header value (such as Content-Type, with a value
like "text/html; charset=utf-8") into a main value and a dictionary of
extra information (in this case, 'text/html' and {'charset': 'utf8'}).
"""
values = header_value.split(';', 1)
if len(values) == 1:
# There's no extra info -- return the main value and an empty dict
return values[0], {}
main_value, extra_values = values[0], values[1].split(';')
extra_dict = {}
for value_string in extra_values:
try:
key, value = value_string.split('=', 1)
extra_dict[key.strip()] = value.strip()
except __HOLE__:
# Can't unpack it -- must be malformed. Ignore
pass
return main_value, extra_dict
|
ValueError
|
dataset/ETHPy150Open necaris/python3-openid/openid/fetchers.py/Urllib2Fetcher._parseHeaderValue
|
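An equivalent standalone sketch of the header split, for reference:

```python
def parse_header_value(header_value):
    """Split 'main; k=v; ...' into the main value and a params dict."""
    main, _, rest = header_value.partition(';')
    extra = {}
    for item in rest.split(';'):
        key, sep, value = item.partition('=')
        if sep:  # skip malformed fragments with no '='
            extra[key.strip()] = value.strip()
    return main, extra

print(parse_header_value('text/html; charset=utf-8'))
# -> ('text/html', {'charset': 'utf-8'})
```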
4,679 |
def _parseHeaders(self, header_file):
header_file.seek(0)
# Remove the status line from the beginning of the input
unused_http_status_line = header_file.readline().lower()
if unused_http_status_line.startswith(b'http/1.1 100 '):
unused_http_status_line = header_file.readline()
unused_http_status_line = header_file.readline()
lines = [line.decode().strip() for line in header_file]
# and the blank line from the end
empty_line = lines.pop()
if empty_line:
raise HTTPError("No blank line at end of headers: %r" % (empty_line,))
headers = {}
for line in lines:
try:
name, value = line.split(':', 1)
except __HOLE__:
raise HTTPError(
"Malformed HTTP header line in response: %r" % (line,))
value = value.strip()
# HTTP headers are case-insensitive
name = name.lower()
headers[name] = value
return headers
|
ValueError
|
dataset/ETHPy150Open necaris/python3-openid/openid/fetchers.py/CurlHTTPFetcher._parseHeaders
|
4,680 |
def fetch(self, url, body=None, headers=None):
"""Perform an HTTP request
@raises Exception: Any exception that can be raised by httplib2
@see: C{L{HTTPFetcher.fetch}}
"""
if body:
method = 'POST'
else:
method = 'GET'
if headers is None:
headers = {}
# httplib2 doesn't check to make sure that the URL's scheme is
# 'http' so we do it here.
if not (url.startswith('http://') or url.startswith('https://')):
raise ValueError('URL is not a HTTP URL: %r' % (url,))
httplib2_response, content = self.httplib2.request(
url, method, body=body, headers=headers)
# Translate the httplib2 response to our HTTP response abstraction
# When a 400 is returned, there is no "content-location"
# header set. This seems like a bug to me. I can't think of a
# case where we really care about the final URL when it is an
# error response, but being careful about it can't hurt.
try:
final_url = httplib2_response['content-location']
except __HOLE__:
# We're assuming that no redirects occurred
assert not httplib2_response.previous
# And this should never happen for a successful response
assert httplib2_response.status != 200
final_url = url
return HTTPResponse(
body=content.decode(), # TODO Don't assume ASCII
final_url=final_url,
headers=dict(list(httplib2_response.items())),
status=httplib2_response.status,
)
|
KeyError
|
dataset/ETHPy150Open necaris/python3-openid/openid/fetchers.py/HTTPLib2Fetcher.fetch
|
4,681 |
def parse_config_h(fp, g=None):
"""Parse a config.h-style file.
A dictionary containing name/value pairs is returned. If an
optional dictionary is passed in as the second argument, it is
used instead of a new dictionary.
"""
if g is None:
g = {}
define_rx = re.compile("#define ([A-Z][A-Za-z0-9_]+) (.*)\n")
undef_rx = re.compile("/[*] #undef ([A-Z][A-Za-z0-9_]+) [*]/\n")
#
while 1:
line = fp.readline()
if not line:
break
m = define_rx.match(line)
if m:
n, v = m.group(1, 2)
try: v = int(v)
except __HOLE__: pass
g[n] = v
else:
m = undef_rx.match(line)
if m:
g[m.group(1)] = 0
return g
# Regexes needed for parsing Makefile (and similar syntaxes,
# like old-style Setup files).
|
ValueError
|
dataset/ETHPy150Open babble/babble/include/jython/Lib/distutils/sysconfig.py/parse_config_h
|
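A quick demonstration of the two regexes at work on an in-memory config.h fragment:

```python
import io
import re

define_rx = re.compile(r"#define ([A-Z][A-Za-z0-9_]+) (.*)\n")
undef_rx = re.compile(r"/[*] #undef ([A-Z][A-Za-z0-9_]+) [*]/\n")

sample = io.StringIO("#define HAVE_UNISTD_H 1\n"
                     "/* #undef HAVE_FOO */\n")
g = {}
for line in sample:
    m = define_rx.match(line)
    if m:
        n, v = m.group(1, 2)
        try:
            v = int(v)
        except ValueError:
            pass
        g[n] = v
    else:
        m = undef_rx.match(line)
        if m:
            g[m.group(1)] = 0
print(g)  # -> {'HAVE_UNISTD_H': 1, 'HAVE_FOO': 0}
```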
4,682 |
def parse_makefile(fn, g=None):
"""Parse a Makefile-style file.
A dictionary containing name/value pairs is returned. If an
optional dictionary is passed in as the second argument, it is
used instead of a new dictionary.
"""
from distutils.text_file import TextFile
fp = TextFile(fn, strip_comments=1, skip_blanks=1, join_lines=1)
if g is None:
g = {}
done = {}
notdone = {}
while 1:
line = fp.readline()
if line is None: # eof
break
m = _variable_rx.match(line)
if m:
n, v = m.group(1, 2)
v = string.strip(v)
if "$" in v:
notdone[n] = v
else:
try: v = int(v)
except ValueError: pass
done[n] = v
# do variable interpolation here
while notdone:
for name in notdone.keys():
value = notdone[name]
m = _findvar1_rx.search(value) or _findvar2_rx.search(value)
if m:
n = m.group(1)
found = True
if done.has_key(n):
item = str(done[n])
elif notdone.has_key(n):
# get it on a subsequent round
found = False
elif os.environ.has_key(n):
# do it like make: fall back to environment
item = os.environ[n]
else:
done[n] = item = ""
if found:
after = value[m.end():]
value = value[:m.start()] + item + after
if "$" in after:
notdone[name] = value
else:
try: value = int(value)
except __HOLE__:
done[name] = string.strip(value)
else:
done[name] = value
del notdone[name]
else:
# bogus variable reference; just drop it since we can't deal
del notdone[name]
fp.close()
# save the results in the global dictionary
g.update(done)
return g
|
ValueError
|
dataset/ETHPy150Open babble/babble/include/jython/Lib/distutils/sysconfig.py/parse_makefile
|
4,683 |
def _init_posix():
"""Initialize the module as appropriate for POSIX systems."""
g = {}
# load the installed Makefile:
try:
filename = get_makefile_filename()
parse_makefile(filename, g)
except __HOLE__, msg:
my_msg = "invalid Python installation: unable to open %s" % filename
if hasattr(msg, "strerror"):
my_msg = my_msg + " (%s)" % msg.strerror
raise DistutilsPlatformError(my_msg)
# load the installed pyconfig.h:
try:
filename = get_config_h_filename()
parse_config_h(file(filename), g)
except IOError, msg:
my_msg = "invalid Python installation: unable to open %s" % filename
if hasattr(msg, "strerror"):
my_msg = my_msg + " (%s)" % msg.strerror
raise DistutilsPlatformError(my_msg)
# On MacOSX we need to check the setting of the environment variable
# MACOSX_DEPLOYMENT_TARGET: configure bases some choices on it so
# it needs to be compatible.
# If it isn't set we set it to the configure-time value
if sys.platform == 'darwin' and g.has_key('MACOSX_DEPLOYMENT_TARGET'):
cfg_target = g['MACOSX_DEPLOYMENT_TARGET']
cur_target = os.getenv('MACOSX_DEPLOYMENT_TARGET', '')
if cur_target == '':
cur_target = cfg_target
os.putenv('MACOSX_DEPLOYMENT_TARGET', cfg_target)
elif map(int, cfg_target.split('.')) > map(int, cur_target.split('.')):
my_msg = ('$MACOSX_DEPLOYMENT_TARGET mismatch: now "%s" but "%s" during configure'
% (cur_target, cfg_target))
raise DistutilsPlatformError(my_msg)
# On AIX, there are wrong paths to the linker scripts in the Makefile
# -- these paths are relative to the Python source, but when installed
# the scripts are in another directory.
if python_build:
g['LDSHARED'] = g['BLDSHARED']
elif get_python_version() < '2.1':
# The following two branches are for 1.5.2 compatibility.
if sys.platform == 'aix4': # what about AIX 3.x ?
# Linker script is in the config directory, not in Modules as the
# Makefile says.
python_lib = get_python_lib(standard_lib=1)
ld_so_aix = os.path.join(python_lib, 'config', 'ld_so_aix')
python_exp = os.path.join(python_lib, 'config', 'python.exp')
g['LDSHARED'] = "%s %s -bI:%s" % (ld_so_aix, g['CC'], python_exp)
elif sys.platform == 'beos':
# Linker script is in the config directory. In the Makefile it is
# relative to the srcdir, which after installation no longer makes
# sense.
python_lib = get_python_lib(standard_lib=1)
linkerscript_path = string.split(g['LDSHARED'])[0]
linkerscript_name = os.path.basename(linkerscript_path)
linkerscript = os.path.join(python_lib, 'config',
linkerscript_name)
# XXX this isn't the right place to do this: adding the Python
# library to the link, if needed, should be in the "build_ext"
# command. (It's also needed for non-MS compilers on Windows, and
# it's taken care of for them by the 'build_ext.get_libraries()'
# method.)
g['LDSHARED'] = ("%s -L%s/lib -lpython%s" %
(linkerscript, PREFIX, get_python_version()))
global _config_vars
_config_vars = g
|
IOError
|
dataset/ETHPy150Open babble/babble/include/jython/Lib/distutils/sysconfig.py/_init_posix
|
4,684 |
def multiple_action_transactions(self, args):
action = None
# TODO: catch that we don't mix in requisition and stock report keywords in the same multi-action message?
_args = iter(args)
def next():
return _args.next()
found_product_for_action = True
while True:
try:
keyword = next()
except StopIteration:
if not found_product_for_action:
raise SMSError('product expected for action "%s"' % action)
break
old_action = action
_next_action = self.C.action_by_keyword(keyword)
if _next_action:
action = _next_action
if not found_product_for_action:
raise SMSError('product expected for action "%s"' % old_action.keyword)
found_product_for_action = False
continue
try:
product = self.product_from_code(keyword)
found_product_for_action = True
except:
product = None
if product:
if not action:
raise SMSError('need to specify an action before product')
elif action.action == const.StockActions.STOCKOUT:
value = 0
else:
try:
value = int(next())
except (ValueError, __HOLE__):
raise SMSError('quantity expected for product "%s"' % product.code)
yield StockTransactionHelper(
domain=self.domain.name,
location_id=self.location.location_id,
case_id=self.case_id,
product_id=product.get_id,
action=action.action,
subaction=action.subaction,
quantity=value,
)
continue
raise SMSError('do not recognize keyword "%s"' % keyword)
|
StopIteration
|
dataset/ETHPy150Open dimagi/commcare-hq/corehq/apps/commtrack/sms.py/StockReportParser.multiple_action_transactions
|
4,685 |
def looks_like_prod_code(self, code):
try:
int(code)
return False
except __HOLE__:
return True
|
ValueError
|
dataset/ETHPy150Open dimagi/commcare-hq/corehq/apps/commtrack/sms.py/StockReportParser.looks_like_prod_code
|
4,686 |
def looks_like_prod_code(self, code):
"""
Special for EWS, this version doesn't consider "10.20"
as an invalid quantity.
"""
try:
float(code)
return False
except __HOLE__:
return True
|
ValueError
|
dataset/ETHPy150Open dimagi/commcare-hq/corehq/apps/commtrack/sms.py/StockAndReceiptParser.looks_like_prod_code
|
4,687 |
def can_import(name):
"""Attempt to __import__ the specified package/module, returning
True when succeeding, otherwise False"""
try:
__import__(name)
return True
except __HOLE__:
return False
|
ImportError
|
dataset/ETHPy150Open Pylons/pylons/pylons/commands.py/can_import
|
4,688 |
def is_minimal_template(package, fail_fast=False):
"""Determine if the specified Pylons project (package) uses the
Pylons Minimal Template.
fail_fast causes ImportErrors encountered during detection to be
raised.
"""
minimal_template = False
try:
# Check if PACKAGE.lib.base exists
__import__(package + '.lib.base')
except __HOLE__, ie:
if 'No module named lib.base' in str(ie):
minimal_template = True
except:
# PACKAGE.lib.base exists but throws an error
if fail_fast:
raise
return minimal_template
|
ImportError
|
dataset/ETHPy150Open Pylons/pylons/pylons/commands.py/is_minimal_template
|
4,689 |
def command(self):
"""Main command to create a new shell"""
self.verbose = 3
if len(self.args) == 0:
# Assume the .ini file is ./development.ini
config_file = 'development.ini'
if not os.path.isfile(config_file):
raise BadCommand('%sError: CONFIG_FILE not found at: .%s%s\n'
'Please specify a CONFIG_FILE' % \
(self.parser.get_usage(), os.path.sep,
config_file))
else:
config_file = self.args[0]
config_name = 'config:%s' % config_file
here_dir = os.getcwd()
locs = dict(__name__="pylons-admin")
if not self.options.quiet:
# Configure logging from the config file
self.logging_file_config(config_file)
# Load locals and populate with objects for use in shell
sys.path.insert(0, here_dir)
# Load the wsgi app first so that everything is initialized right
wsgiapp = loadapp(config_name, relative_to=here_dir)
test_app = paste.fixture.TestApp(wsgiapp)
# Query the test app to setup the environment
tresponse = test_app.get('/_test_vars')
request_id = int(tresponse.body)
# Disable restoration during test_app requests
test_app.pre_request_hook = lambda self: \
paste.registry.restorer.restoration_end()
test_app.post_request_hook = lambda self: \
paste.registry.restorer.restoration_begin(request_id)
# Restore the state of the Pylons special objects
# (StackedObjectProxies)
paste.registry.restorer.restoration_begin(request_id)
# Determine the package name from the pylons.config object
pkg_name = pylons.config['pylons.package']
# Start the rest of our imports now that the app is loaded
if is_minimal_template(pkg_name, True):
model_module = None
helpers_module = pkg_name + '.helpers'
base_module = pkg_name + '.controllers'
else:
model_module = pkg_name + '.model'
helpers_module = pkg_name + '.lib.helpers'
base_module = pkg_name + '.lib.base'
if model_module and can_import(model_module):
locs['model'] = sys.modules[model_module]
if can_import(helpers_module):
locs['h'] = sys.modules[helpers_module]
exec ('from pylons import app_globals, config, request, response, '
'session, tmpl_context, url') in locs
exec ('from pylons.controllers.util import abort, redirect') in locs
exec 'from pylons.i18n import _, ungettext, N_' in locs
locs.pop('__builtins__', None)
# Import all objects from the base module
__import__(base_module)
base = sys.modules[base_module]
base_public = [__name for __name in dir(base) if not \
__name.startswith('_') or __name == '_']
locs.update((name, getattr(base, name)) for name in base_public)
locs.update(dict(wsgiapp=wsgiapp, app=test_app))
mapper = tresponse.config.get('routes.map')
if mapper:
locs['mapper'] = mapper
banner = " All objects from %s are available\n" % base_module
banner += " Additional Objects:\n"
if mapper:
banner += " %-10s - %s\n" % ('mapper', 'Routes mapper object')
banner += " %-10s - %s\n" % ('wsgiapp',
"This project's WSGI App instance")
banner += " %-10s - %s\n" % ('app',
'paste.fixture wrapped around wsgiapp')
try:
if self.options.disable_ipython:
raise ImportError()
# try to use IPython if possible
try:
try:
# 1.0 <= ipython
from IPython.terminal.embed import InteractiveShellEmbed
except ImportError:
# 0.11 <= ipython < 1.0
from IPython.frontend.terminal.embed import InteractiveShellEmbed
shell = InteractiveShellEmbed(banner2=banner)
except ImportError:
# ipython < 0.11
from IPython.Shell import IPShellEmbed
shell = IPShellEmbed(argv=self.args)
shell.set_banner(shell.IP.BANNER + '\n\n' + banner)
try:
shell(local_ns=locs, global_ns={})
finally:
paste.registry.restorer.restoration_end()
except ImportError:
import code
py_prefix = sys.platform.startswith('java') and 'J' or 'P'
newbanner = "Pylons Interactive Shell\n%sython %s\n\n" % \
(py_prefix, sys.version)
banner = newbanner + banner
shell = code.InteractiveConsole(locals=locs)
try:
import readline
except __HOLE__:
pass
try:
shell.interact(banner)
finally:
paste.registry.restorer.restoration_end()
|
ImportError
|
dataset/ETHPy150Open Pylons/pylons/pylons/commands.py/ShellCommand.command
|
4,690 |
def _get_start_shift(self, shift):
if shift == '':
return 0
time_formats = ['%Y-%m-%d %H:%M:%S',
'%Y-%m-%d %H:%M',
'%H:%M:%S',
'%H:%M']
for time_format in time_formats:
try:
date = datetime.datetime.strptime(shift, time_format)
except ValueError:
continue
except __HOLE__:
self.log.warning('Start time must be string type ("%s"), ignored', time_format[0])
break
today = datetime.date.today()
if today > date.date():
date = datetime.datetime(today.year, today.month, today.day, date.hour, date.minute, date.second)
return time.mktime(date.timetuple()) - self.start_time
else:
self.log.warning('Unrecognized time format: %s ("%s" required), ignored', shift, time_formats[0])
return 0
|
TypeError
|
dataset/ETHPy150Open Blazemeter/taurus/bzt/modules/provisioning.py/Local._get_start_shift
|
4,691 |
def parse_date(string_date):
"""
Parse the given date as one of the following
* Git internal format: timestamp offset
* RFC 2822: Thu, 07 Apr 2005 22:13:13 +0200.
* ISO 8601 2005-04-07T22:13:13
The T can be a space as well
:return: Tuple(int(timestamp_UTC), int(offset)), both in seconds since epoch
:raise ValueError: If the format could not be understood
:note: Date can also be YYYY.MM.DD, MM/DD/YYYY and DD.MM.YYYY.
"""
# git time
try:
if string_date.count(' ') == 1 and string_date.rfind(':') == -1:
timestamp, offset = string_date.split()
timestamp = int(timestamp)
return timestamp, utctz_to_altz(verify_utctz(offset))
else:
offset = "+0000" # local time by default
if string_date[-5] in '-+':
offset = verify_utctz(string_date[-5:])
string_date = string_date[:-6] # skip space as well
# END split timezone info
offset = utctz_to_altz(offset)
# now figure out the date and time portion - split time
date_formats = list()
splitter = -1
if ',' in string_date:
date_formats.append("%a, %d %b %Y")
splitter = string_date.rfind(' ')
else:
# iso plus additional
date_formats.append("%Y-%m-%d")
date_formats.append("%Y.%m.%d")
date_formats.append("%m/%d/%Y")
date_formats.append("%d.%m.%Y")
splitter = string_date.rfind('T')
if splitter == -1:
splitter = string_date.rfind(' ')
# END handle 'T' and ' '
# END handle rfc or iso
assert splitter > -1
# split date and time
time_part = string_date[splitter + 1:] # skip space
date_part = string_date[:splitter]
# parse time
tstruct = time.strptime(time_part, "%H:%M:%S")
for fmt in date_formats:
try:
dtstruct = time.strptime(date_part, fmt)
utctime = calendar.timegm((dtstruct.tm_year, dtstruct.tm_mon, dtstruct.tm_mday,
tstruct.tm_hour, tstruct.tm_min, tstruct.tm_sec,
dtstruct.tm_wday, dtstruct.tm_yday, tstruct.tm_isdst))
return int(utctime), offset
except __HOLE__:
continue
# END exception handling
# END for each fmt
# still here ? fail
raise ValueError("no format matched")
# END handle format
except Exception:
raise ValueError("Unsupported date format: %s" % string_date)
# END handle exceptions
# precompiled regex
|
ValueError
|
dataset/ETHPy150Open gitpython-developers/GitPython/git/objects/util.py/parse_date
|
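Illustrative usage, assuming GitPython is installed and parse_date is imported from git.objects.util as above; only the git-internal case has an asserted value, since the other offsets depend on the altz sign convention:

from git.objects.util import parse_date

ts, offset = parse_date("1404576812 +0000")            # git internal format
assert (ts, offset) == (1404576812, 0)
parse_date("2005-04-07T22:13:13")                      # ISO 8601, 'T' or space
parse_date("Thu, 07 Apr 2005 22:13:13 +0200")          # RFC 2822
try:
    parse_date("yesterday")
except ValueError:
    pass  # anything unrecognized raises ValueError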
4,692 |
def memoize(f):
arg_cache = {}
def _new_f(arg):
cache_it = True
try:
cached = arg_cache.get(arg, None)
if cached is not None:
return cached
except __HOLE__:
cache_it = False
uncached = f(arg)
if cache_it:
arg_cache[arg] = uncached
return uncached
return _new_f
|
TypeError
|
dataset/ETHPy150Open jumoconnect/openjumo/jumodjango/lib/bebop/util.py/memoize
|
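With the masked exception filled in as the labeled TypeError, the decorator caches per-argument results only when the argument is hashable; an unhashable argument raises TypeError inside arg_cache.get, which disables caching for that call. A usage sketch:

@memoize
def total(xs):
    print("computing")
    return sum(xs)

total((1, 2, 3))   # computed, then cached under the tuple key
total((1, 2, 3))   # cache hit: no "computing" printed
total([1, 2, 3])   # lists are unhashable, so this recomputes every call

Note the `is not None` guard also means a function that legitimately returns None is recomputed on every call, even for hashable arguments.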
4,693 |
def test_AptInstaller():
from rosdep2.platforms.debian import AptInstaller
@patch.object(AptInstaller, 'get_packages_to_install')
def test(mock_method):
installer = AptInstaller()
mock_method.return_value = []
assert [] == installer.get_install_command(['fake'])
mock_method.return_value = ['a', 'b']
expected = [['sudo', '-H', 'apt-get', 'install', '-y', 'a'],
['sudo', '-H', 'apt-get', 'install', '-y', 'b']]
val = installer.get_install_command(['whatever'], interactive=False)
print("VAL", val)
assert val == expected, val
expected = [['sudo', '-H', 'apt-get', 'install', 'a'],
['sudo', '-H', 'apt-get', 'install', 'b']]
val = installer.get_install_command(['whatever'], interactive=True)
assert val == expected, val
try:
test()
except __HOLE__:
traceback.print_exc()
raise
|
AssertionError
|
dataset/ETHPy150Open ros-infrastructure/rosdep/test/test_rosdep_debian.py/test_AptInstaller
|
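The patch.object decorator technique in the test above works independently of rosdep; a self-contained sketch (the Greeter class is invented for illustration):

from mock import patch  # or: from unittest.mock import patch

class Greeter(object):
    def name(self):
        return "world"
    def greet(self):
        return "hello %s" % self.name()

@patch.object(Greeter, 'name')
def check(mock_name):
    # The class attribute is replaced by a MagicMock for the duration
    # of the call, so every instance sees the stubbed return value.
    mock_name.return_value = "mock"
    assert Greeter().greet() == "hello mock"

check()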
4,694 |
def handle(self, *args, **options):
username = options.get(self.UserModel.USERNAME_FIELD, None)
interactive = options.get('interactive')
verbosity = int(options.get('verbosity', 1))
database = options.get('database')
# If not provided, create the user with an unusable password
password = None
user_data = {}
# Do quick and dirty validation if --noinput
if not interactive:
try:
if not username:
raise CommandError("You must use --%s with --noinput." %
self.UserModel.USERNAME_FIELD)
username = self.username_field.clean(username, None)
for field_name in self.UserModel.REQUIRED_FIELDS:
if options.get(field_name):
field = self.UserModel._meta.get_field(field_name)
user_data[field_name] = field.clean(options[field_name], None)
else:
raise CommandError("You must use --%s with --noinput." % field_name)
except exceptions.ValidationError as e:
raise CommandError('; '.join(e.messages))
else:
# Prompt for username/password, and any other required fields.
# Enclose this whole thing in a try/except to trap for a
# keyboard interrupt and exit gracefully.
default_username = get_default_username()
try:
# Get a username
verbose_field_name = self.username_field.verbose_name
while username is None:
if not username:
input_msg = capfirst(verbose_field_name)
if default_username:
input_msg = "%s (leave blank to use '%s')" % (
input_msg, default_username)
raw_value = input(force_str('%s: ' % input_msg))
if default_username and raw_value == '':
raw_value = default_username
try:
username = self.username_field.clean(raw_value, None)
except exceptions.ValidationError as e:
self.stderr.write("Error: %s" % '; '.join(e.messages))
username = None
continue
try:
self.UserModel._default_manager.db_manager(database).get_by_natural_key(username)
except self.UserModel.DoesNotExist:
pass
else:
self.stderr.write("Error: That %s is already taken." %
verbose_field_name)
username = None
for field_name in self.UserModel.REQUIRED_FIELDS:
field = self.UserModel._meta.get_field(field_name)
user_data[field_name] = options.get(field_name)
while user_data[field_name] is None:
raw_value = input(force_str('%s: ' % capfirst(field.verbose_name)))
try:
user_data[field_name] = field.clean(raw_value, None)
except exceptions.ValidationError as e:
self.stderr.write("Error: %s" % '; '.join(e.messages))
user_data[field_name] = None
# Get a password
while password is None:
if not password:
password = getpass.getpass()
password2 = getpass.getpass(force_str('Password (again): '))
if password != password2:
self.stderr.write("Error: Your passwords didn't match.")
password = None
continue
if password.strip() == '':
self.stderr.write("Error: Blank passwords aren't allowed.")
password = None
continue
except __HOLE__:
self.stderr.write("\nOperation cancelled.")
sys.exit(1)
user_data[self.UserModel.USERNAME_FIELD] = username
user_data['password'] = password
self.UserModel._default_manager.db_manager(database).create_superuser(**user_data)
if verbosity >= 1:
self.stdout.write("Superuser created successfully.")
|
KeyboardInterrupt
|
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/Django-1.6.10/django/contrib/auth/management/commands/createsuperuser.py/Command.handle
|
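For the --noinput path, the command can also be driven programmatically. An illustrative call for a Django 1.6-era project with the default user model (settings must already be configured; the names here are examples):

from django.core.management import call_command

call_command('createsuperuser', interactive=False,
             username='admin', email='admin@example.com')

# The non-interactive path creates the user with an unusable password,
# so set one afterwards:
from django.contrib.auth import get_user_model
u = get_user_model().objects.get(username='admin')
u.set_password('s3cret')
u.save()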
4,695 |
def __init__(self, *args, **kwargs):
try:
self.base_fields['image_file'].initial = kwargs['instance'].image.pk
except (__HOLE__, KeyError):
self.base_fields['image_file'].initial = None
self.base_fields['image_file'].widget = AdminFileWidget(ManyToOneRel(FilerImageField, Image, 'file_ptr'), site)
super(ImageFormMixin, self).__init__(*args, **kwargs)
|
AttributeError
|
dataset/ETHPy150Open jrief/djangocms-cascade/cmsplugin_cascade/bootstrap3/image.py/ImageFormMixin.__init__
|
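The (AttributeError, KeyError) pair above covers two distinct failure modes at once: no 'instance' kwarg, and an instance whose image relation is unset. A hypothetical standalone version of the same lookup (initial_pk is not djangocms-cascade API):

def initial_pk(kwargs, attr):
    try:
        return getattr(kwargs['instance'], attr).pk
    except (AttributeError, KeyError):
        return None

assert initial_pk({}, 'image') is None   # no instance passed at all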
4,696 |
@classmethod
def get_identifier(cls, obj):
identifier = super(BootstrapImagePlugin, cls).get_identifier(obj)
try:
content = force_text(obj.image)
except __HOLE__:
content = _("No Image")
return format_html('{0}{1}', identifier, content)
|
AttributeError
|
dataset/ETHPy150Open jrief/djangocms-cascade/cmsplugin_cascade/bootstrap3/image.py/BootstrapImagePlugin.get_identifier
|
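Same defensive idea as the previous entry, applied to display text: coerce an attribute to unicode but degrade to a fixed label when the attribute chain breaks. safe_label is an illustrative stand-in for the force_text call, not a Django API:

def safe_label(obj, attr, default=u"(none)"):
    try:
        return unicode(getattr(obj, attr))  # stands in for force_text
    except AttributeError:
        return default

class Plugin(object):
    pass

assert safe_label(Plugin(), 'icon') == u"(none)"   # missing attribute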
4,697 |
def _zerorpc_args(self):
try:
args_spec = self._functor._zerorpc_args()
except AttributeError:
try:
args_spec = inspect.getargspec(self._functor)
except TypeError:
try:
args_spec = inspect.getargspec(self._functor.__call__)
except (__HOLE__, TypeError):
args_spec = None
return args_spec
|
AttributeError
|
dataset/ETHPy150Open 0rpc/zerorpc-python/zerorpc/decorators.py/DecoratorBase._zerorpc_args
|
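The method above falls back through three levels: a _zerorpc_args hook, direct getargspec, then introspection via __call__. A standalone sketch of the inner two fallbacks (argspec_of is a hypothetical helper; inspect.getargspec is the Python 2-era API used here, deprecated on Python 3):

import inspect

def argspec_of(obj):
    try:
        return inspect.getargspec(obj)          # plain functions and methods
    except TypeError:
        try:
            return inspect.getargspec(obj.__call__)  # callable objects
        except (AttributeError, TypeError):
            return None                         # builtins, etc.

def f(a, b=1):
    pass

class C(object):
    def __call__(self, x):
        return x

assert argspec_of(f).args == ['a', 'b']
assert argspec_of(C()).args == ['self', 'x']
assert argspec_of(len) is None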
4,698 |
def tearDown(self):
try:
os.unlink(self.cfg_file)
except __HOLE__:
pass
|
OSError
|
dataset/ETHPy150Open ganeti/ganeti/test/py/ganeti.config_unittest.py/TestConfigRunner.tearDown
|
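The try/unlink/pass idiom makes teardown idempotent: it is safe whether or not the test actually created the file. As a reusable helper (on Python 3 this is equivalent to os.unlink under contextlib.suppress(OSError)):

import os

def unlink_quietly(path):
    try:
        os.unlink(path)
    except OSError:
        pass  # already gone (or never created): nothing to clean up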
4,699 |
def make_log_graph(repos, revs):
"""Generate graph information for the given revisions.
Returns a tuple `(threads, vertices, columns)`, where:
* `threads`: List of paint command lists `[(type, column, line)]`, where
`type` is either 0 for "move to" or 1 for "line to", and `column` and
`line` are coordinates.
* `vertices`: List of `(column, thread_index)` tuples, where the `i`th
item specifies the column in which to draw the dot in line `i` and the
corresponding thread.
* `columns`: Maximum width of the graph.
"""
threads = []
vertices = []
columns = 0
revs = iter(revs)
def add_edge(thread, column, line):
if thread and thread[-1][:2] == [1, column] \
and thread[-2][1] == column:
thread[-1][2] = line
else:
thread.append([1, column, line])
try:
next_rev = revs.next()
line = 0
active = []
active_thread = []
while True:
rev = next_rev
if rev not in active:
# Insert new head
threads.append([[0, len(active), line]])
active_thread.append(threads[-1])
active.append(rev)
columns = max(columns, len(active))
column = active.index(rev)
vertices.append((column, threads.index(active_thread[column])))
next_rev = revs.next() # Raises StopIteration when no more revs
next = active[:]
parents = list(repos.parent_revs(rev))
# Replace current item with parents not already present
new_parents = [p for p in parents if p not in active]
next[column:column + 1] = new_parents
# Add edges to parents
for col, (r, thread) in enumerate(izip(active, active_thread)):
if r in next:
add_edge(thread, next.index(r), line + 1)
elif r == rev:
if new_parents:
parents.remove(new_parents[0])
parents.append(new_parents[0])
for parent in parents:
if parent != parents[0]:
thread.append([0, col, line])
add_edge(thread, next.index(parent), line + 1)
if not new_parents:
del active_thread[column]
else:
base = len(threads)
threads.extend([[0, column + 1 + i, line + 1]]
for i in xrange(len(new_parents) - 1))
active_thread[column + 1:column + 1] = threads[base:]
active = next
line += 1
except __HOLE__:
pass
return threads, vertices, columns
|
StopIteration
|
dataset/ETHPy150Open edgewall/trac/trac/versioncontrol/web_ui/util.py/make_log_graph
|
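make_log_graph only requires repos to expose parent_revs(rev), so a stub over a dict is enough to exercise it; this driver assumes the function above is in scope with its Python 2 imports (izip, xrange). For a linear history the graph collapses to one column:

class FakeRepos(object):
    parents = {'c': ['b'], 'b': ['a'], 'a': []}
    def parent_revs(self, rev):
        return self.parents[rev]

threads, vertices, columns = make_log_graph(FakeRepos(), ['c', 'b', 'a'])
assert columns == 1                            # a single linear history
assert [v[0] for v in vertices] == [0, 0, 0]   # all dots drawn in column 0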