Unnamed: 0 (int64, 0–10k) | function (stringlengths 79–138k) | label (stringclasses, 20 values) | info (stringlengths 42–261)
---|---|---|---|
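Each row pairs a Python function in which one exception type has been masked as `__HOLE__` with a `label` naming the exception class that fills the hole, and an `info` path locating the sample in the ETHPy150 corpus. A minimal sketch of reconstructing a sample, assuming exactly one hole per function (the helper name is illustrative, not part of the dataset):

```python
def fill_hole(function_src: str, label: str) -> str:
    """Substitute the masked exception type back into a sample.

    Assumes each sample contains exactly one __HOLE__ token and that
    the label column names the exception class that belongs there.
    """
    assert function_src.count("__HOLE__") == 1
    return function_src.replace("__HOLE__", label)
```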
4,400 |
def verifyFileOnlyWritableByMunkiAndRoot(file_path):
"""
Check the permissions on a given file path; fail if owner or group
does not match the munki process (default: root/admin) or the group is not
'wheel', or if other users are able to write to the file. This prevents
escalated execution of arbitrary code.
Args:
file_path: str path of file to verify permissions on.
Raises:
VerifyFilePermissionsError: there was an error verifying file permissions.
InsecureFilePermissionsError: file permissions were found to be insecure.
"""
try:
file_stat = os.stat(file_path)
except __HOLE__, err:
raise VerifyFilePermissionsError(
'%s does not exist. \n %s' % (file_path, str(err)))
try:
admin_gid = grp.getgrnam('admin').gr_gid
wheel_gid = grp.getgrnam('wheel').gr_gid
user_gid = os.getegid()
# verify the munki process uid matches the file owner uid.
if os.geteuid() != file_stat.st_uid:
raise InsecureFilePermissionsError(
'owner does not match munki process!')
# verify the munki process gid matches the file owner gid, or the file
# owner gid is wheel or admin gid.
elif file_stat.st_gid not in [admin_gid, wheel_gid, user_gid]:
raise InsecureFilePermissionsError(
'group does not match munki process!')
# verify other users cannot write to the file.
elif file_stat.st_mode & stat.S_IWOTH != 0:
raise InsecureFilePermissionsError('world writable!')
except InsecureFilePermissionsError, err:
raise InsecureFilePermissionsError(
'%s is not secure! %s' % (file_path, err.args[0]))
|
OSError
|
dataset/ETHPy150Open munki/munki/code/client/munkilib/utils.py/verifyFileOnlyWritableByMunkiAndRoot
|
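The hole above masks the exception that `os.stat` raises for a missing path. A minimal Python 3 sketch of the same stat-then-translate pattern, with illustrative names rather than munki's:

```python
import os

class VerifyError(Exception):
    pass

def stat_or_raise(path):
    # os.stat raises OSError (FileNotFoundError is its subclass in
    # Python 3) when the path does not exist; re-raise as a
    # domain-specific error.
    try:
        return os.stat(path)
    except OSError as err:
        raise VerifyError('%s does not exist.\n%s' % (path, err))
```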
4,401 |
def runExternalScript(script, allow_insecure=False, script_args=()):
"""Run a script (e.g. preflight/postflight) and return its exit status.
Args:
script: string path to the script to execute.
allow_insecure: bool skip the permissions check of executable.
script_args: args to pass to the script.
Returns:
Tuple. (integer exit status from script, str stdout, str stderr).
Raises:
ScriptNotFoundError: the script was not found at the given path.
RunExternalScriptError: there was an error running the script.
"""
if not os.path.exists(script):
raise ScriptNotFoundError('script does not exist: %s' % script)
if not allow_insecure:
try:
verifyFileOnlyWritableByMunkiAndRoot(script)
except VerifyFilePermissionsError, err:
msg = ('Skipping execution due to failed file permissions '
'verification: %s\n%s' % (script, str(err)))
raise RunExternalScriptError(msg)
if os.access(script, os.X_OK):
cmd = [script]
if script_args:
cmd.extend(script_args)
proc = None
try:
proc = subprocess.Popen(cmd, shell=False,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
except (__HOLE__, IOError), err:
raise RunExternalScriptError(
'Error %s when attempting to run %s' % (unicode(err), script))
if proc:
(stdout, stderr) = proc.communicate()
return proc.returncode, stdout.decode('UTF-8', 'replace'), \
stderr.decode('UTF-8', 'replace')
else:
raise RunExternalScriptError('%s not executable' % script)
|
OSError
|
dataset/ETHPy150Open munki/munki/code/client/munkilib/utils.py/runExternalScript
|
4,402 |
def getPIDforProcessName(processname):
'''Returns a process ID for processname'''
cmd = ['/bin/ps', '-eo', 'pid=,command=']
try:
proc = subprocess.Popen(cmd, shell=False, bufsize=-1,
stdin=subprocess.PIPE, stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
except OSError:
return 0
while True:
line = proc.stdout.readline().decode('UTF-8')
if not line and (proc.poll() != None):
break
line = line.rstrip('\n')
if line:
try:
(pid, process) = line.split(None, 1)
except __HOLE__:
# funky process line, so we'll skip it
pass
else:
if process.find(processname) != -1:
return str(pid)
return 0
|
ValueError
|
dataset/ETHPy150Open munki/munki/code/client/munkilib/utils.py/getPIDforProcessName
|
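Here the hole masks the unpacking failure when a `ps` output line does not split into exactly two fields. A small sketch of that idiom:

```python
def split_pid_line(line):
    # str.split(None, 1) yields at most two fields; unpacking fewer
    # than two raises ValueError, which signals a malformed line.
    try:
        pid, command = line.split(None, 1)
    except ValueError:
        return None
    return pid, command

split_pid_line('123 /usr/bin/python')  # ('123', '/usr/bin/python')
split_pid_line('garbage')              # None
```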
4,403 |
def _parse_white_list_from_config(self, whitelists):
"""Parse and validate the pci whitelist from the nova config."""
specs = []
for jsonspec in whitelists:
try:
dev_spec = jsonutils.loads(jsonspec)
except __HOLE__:
raise exception.PciConfigInvalidWhitelist(
reason=_("Invalid entry: '%s'") % jsonspec)
if isinstance(dev_spec, dict):
dev_spec = [dev_spec]
elif not isinstance(dev_spec, list):
raise exception.PciConfigInvalidWhitelist(
reason=_("Invalid entry: '%s'; "
"Expecting list or dict") % jsonspec)
for ds in dev_spec:
if not isinstance(ds, dict):
raise exception.PciConfigInvalidWhitelist(
reason=_("Invalid entry: '%s'; "
"Expecting dict") % ds)
spec = devspec.PciDeviceSpec(ds)
specs.append(spec)
return specs
|
ValueError
|
dataset/ETHPy150Open BU-NU-CLOUD-SP16/Trusted-Platform-Module-nova/nova/pci/whitelist.py/Whitelist._parse_white_list_from_config
|
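This hole masks the parse failure from `jsonutils.loads`, OpenStack's thin wrapper over the standard library. A sketch using plain `json`, assuming the same failure mode:

```python
import json

def parse_whitelist_entry(jsonspec):
    # json.loads raises ValueError (json.JSONDecodeError, its
    # subclass since Python 3.5) on malformed input.
    try:
        return json.loads(jsonspec)
    except ValueError:
        raise RuntimeError("Invalid entry: %r" % jsonspec)
```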
4,404 |
def _get_model_from_node(self, node, attr):
"""
Helper to look up a model from a <object model=...> or a <field
rel=... to=...> node.
"""
model_identifier = node.getAttribute(attr)
if not model_identifier:
raise base.DeserializationError(
"<%s> node is missing the required '%s' attribute" \
% (node.nodeName, attr))
try:
Model = models.get_model(*model_identifier.split("."))
except __HOLE__:
Model = None
if Model is None:
raise base.DeserializationError(
"<%s> node has invalid model identifier: '%s'" % \
(node.nodeName, model_identifier))
return Model
|
TypeError
|
dataset/ETHPy150Open dcramer/django-compositepks/django/core/serializers/xml_serializer.py/Deserializer._get_model_from_node
|
4,405 |
def _subjectAltNameString(self):
method = _lib.X509V3_EXT_get(self._extension)
if method == _ffi.NULL:
# TODO: This is untested.
_raise_current_error()
payload = self._extension.value.data
length = self._extension.value.length
payloadptr = _ffi.new("unsigned char**")
payloadptr[0] = payload
if method.it != _ffi.NULL:
ptr = _lib.ASN1_ITEM_ptr(method.it)
data = _lib.ASN1_item_d2i(_ffi.NULL, payloadptr, length, ptr)
names = _ffi.cast("GENERAL_NAMES*", data)
else:
names = _ffi.cast(
"GENERAL_NAMES*",
method.d2i(_ffi.NULL, payloadptr, length))
parts = []
for i in range(_lib.sk_GENERAL_NAME_num(names)):
name = _lib.sk_GENERAL_NAME_value(names, i)
try:
label = self._prefixes[name.type]
except __HOLE__:
bio = _new_mem_buf()
_lib.GENERAL_NAME_print(bio, name)
parts.append(_native(_bio_to_string(bio)))
else:
value = _native(
_ffi.buffer(name.d.ia5.data, name.d.ia5.length)[:])
parts.append(label + ":" + value)
return ", ".join(parts)
|
KeyError
|
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/pyopenssl/OpenSSL/crypto.py/X509Extension._subjectAltNameString
|
4,406 |
def get_tests(app_module):
try:
app_path = app_module.__name__.split('.')[:-1]
test_module = __import__('.'.join(app_path + [TEST_MODULE]), {}, {}, TEST_MODULE)
except __HOLE__, e:
# Couldn't import tests.py. Was it due to a missing file, or
# due to an import error in a tests.py that actually exists?
import os.path
from imp import find_module
try:
mod = find_module(TEST_MODULE, [os.path.dirname(app_module.__file__)])
except ImportError:
# 'tests' module doesn't exist. Move on.
test_module = None
else:
# The module exists, so there must be an import error in the
# test module itself. We don't need the module; so if the
# module was a single file module (i.e., tests.py), close the file
# handle returned by find_module. Otherwise, the test module
# is a directory, and there is nothing to close.
if mod[0]:
mod[0].close()
raise
return test_module
|
ImportError
|
dataset/ETHPy150Open dcramer/django-compositepks/django/test/simple.py/get_tests
|
4,407 |
def build_suite(app_module):
"Create a complete Django test suite for the provided application module"
suite = unittest.TestSuite()
# Load unit and doctests in the models.py module. If module has
# a suite() method, use it. Otherwise build the test suite ourselves.
if hasattr(app_module, 'suite'):
suite.addTest(app_module.suite())
else:
suite.addTest(unittest.defaultTestLoader.loadTestsFromModule(app_module))
try:
suite.addTest(doctest.DocTestSuite(app_module,
checker=doctestOutputChecker,
runner=DocTestRunner))
except ValueError:
# No doc tests in models.py
pass
# Check to see if a separate 'tests' module exists parallel to the
# models module
test_module = get_tests(app_module)
if test_module:
# Load unit and doctests in the tests.py module. If module has
# a suite() method, use it. Otherwise build the test suite ourselves.
if hasattr(test_module, 'suite'):
suite.addTest(test_module.suite())
else:
suite.addTest(unittest.defaultTestLoader.loadTestsFromModule(test_module))
try:
suite.addTest(doctest.DocTestSuite(test_module,
checker=doctestOutputChecker,
runner=DocTestRunner))
except __HOLE__:
# No doc tests in tests.py
pass
return suite
|
ValueError
|
dataset/ETHPy150Open dcramer/django-compositepks/django/test/simple.py/build_suite
|
4,408 |
def build_test(label):
"""Construct a test case a test with the specified label. Label should
be of the form model.TestClass or model.TestClass.test_method. Returns
an instantiated test or test suite corresponding to the label provided.
"""
parts = label.split('.')
if len(parts) < 2 or len(parts) > 3:
raise ValueError("Test label '%s' should be of the form app.TestCase or app.TestCase.test_method" % label)
app_module = get_app(parts[0])
TestClass = getattr(app_module, parts[1], None)
# Couldn't find the test class in models.py; look in tests.py
if TestClass is None:
test_module = get_tests(app_module)
if test_module:
TestClass = getattr(test_module, parts[1], None)
if len(parts) == 2: # label is app.TestClass
try:
return unittest.TestLoader().loadTestsFromTestCase(TestClass)
except __HOLE__:
raise ValueError("Test label '%s' does not refer to a test class" % label)
else: # label is app.TestClass.test_method
return TestClass(parts[2])
|
TypeError
|
dataset/ETHPy150Open dcramer/django-compositepks/django/test/simple.py/build_test
|
4,409 |
def organize_commands(corrected_commands):
"""Yields sorted commands without duplicates.
:type corrected_commands: Iterable[thefuck.types.CorrectedCommand]
:rtype: Iterable[thefuck.types.CorrectedCommand]
"""
try:
first_command = next(corrected_commands)
yield first_command
except __HOLE__:
return
without_duplicates = {
command for command in sorted(
corrected_commands, key=lambda command: command.priority)
if command != first_command}
sorted_commands = sorted(
without_duplicates,
key=lambda corrected_command: corrected_command.priority)
logs.debug('Corrected commands: {}'.format(
', '.join(u'{}'.format(cmd) for cmd in [first_command] + sorted_commands)))
for command in sorted_commands:
yield command
|
StopIteration
|
dataset/ETHPy150Open nvbn/thefuck/thefuck/corrector.py/organize_commands
|
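The hole masks the exhaustion signal from calling `next()` on an empty iterator. A minimal sketch of the same first-then-rest generator pattern, using integers in place of `CorrectedCommand` objects:

```python
def first_then_sorted(commands):
    # next() raises StopIteration when the iterator is empty;
    # returning from a generator simply ends the iteration.
    iterator = iter(commands)
    try:
        first = next(iterator)
        yield first
    except StopIteration:
        return
    for command in sorted(set(iterator)):
        if command != first:
            yield command

list(first_then_sorted([]))            # []
list(first_then_sorted([3, 1, 3, 2]))  # [3, 1, 2]
```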
4,410 |
def __call__(self, environ, start_response):
request_path = environ.get('PATH_INFO', '')
# check if the request is for static files at all
path_parts = request_path.strip('/').split('/', 2)
if len(path_parts) == 3 and path_parts[0] in ['play', 'game-meta']:
slug = path_parts[1]
game = self.game_list.get_by_slug(slug)
if game and game.path.is_set():
asset_path = path_parts[2]
file_asset_path = normpath(join(get_absolute_path(game.path), asset_path))
def build_file_iter(f, block_size):
return StaticFileIter(file_asset_path, normpath(join(slug, asset_path)), f, block_size)
def remove_ranges_start_response(status, headers, exc_info=None):
if status == '200 OK':
headers = [t for t in headers if t[0] != 'Accept-Ranges' and t[0] != 'Content-Range']
return start_response(status, headers, exc_info)
# check if the request is already cached
app = self.cached_apps.get(request_path)
if app:
environ['wsgi.file_wrapper'] = build_file_iter
try:
return app(environ, remove_ranges_start_response)
except __HOLE__ as e:
LOG.error(e)
elif access(file_asset_path, R_OK):
content_type, _ = guess_type(file_asset_path)
if content_type in self.utf8_mimetypes:
content_type += '; charset=utf-8'
app = FileApp(file_asset_path, content_type=content_type)
if asset_path.startswith('staticmax'):
app.cache_control(max_age=self.staticmax_max_age)
else:
app.cache_control(max_age=0)
self.cached_apps[request_path] = app
environ['wsgi.file_wrapper'] = build_file_iter
return app(environ, remove_ranges_start_response)
start_response(
'404 Not Found',
[('Content-Type', 'text/html; charset=UTF-8'),
('Content-Length', '0')]
)
return ['']
return self.app(environ, start_response)
|
OSError
|
dataset/ETHPy150Open turbulenz/turbulenz_local/turbulenz_local/middleware/static_game_files.py/StaticGameFilesMiddleware.__call__
|
4,411 |
def reload_qt():
"""
Reload the Qt bindings.
If the QT_API environment variable has been updated, this will load the
new Qt bindings given by this variable. This should be used instead of
the built-in ``reload`` function because the latter can in some cases
cause issues with the ImportDenier (which prevents users from importing
e.g. PySide if PyQt4 is loaded).
"""
# Clear any forbidden modules
_import_hook._forbidden.clear()
# Quit app if active
global qapp
if qapp is not None:
qapp.quit()
qapp = None
global QtCore
global QtGui
if os.environ.get('QT_API') == QT_API_PYQT5:
loaders = [_load_pyqt5]
elif os.environ.get('QT_API') == QT_API_PYSIDE:
loaders = [_load_pyside, _load_pyqt4]
else:
loaders = [_load_pyqt4, _load_pyside, _load_pyqt5]
msgs = []
# actually do the loading
for loader in loaders:
try:
loader()
# we set this env var, since IPython also looks for it
os.environ['QT_API'] = QT_API
QtCore = sys.modules[__name__ + '.QtCore']
QtGui = sys.modules[__name__ + '.QtGui']
break
except __HOLE__ as e:
msgs.append(str(e))
pass
else:
raise ImportError("Could not find a suitable QT installation."
" Encountered the following errors: %s" %
'\n'.join(msgs))
# We patch this only now, once QtCore and QtGui are defined
if is_pyside() or is_pyqt4():
patch_qcombobox()
# For PySide, we need to create a loadUi function
if is_pyside():
patch_loadui()
|
ImportError
|
dataset/ETHPy150Open glue-viz/glue/glue/external/qt.py/reload_qt
|
4,412 |
def patch_loadui():
# In PySide, loadUi does not exist, so we define it using QUiLoader, and
# then make sure we expose that function. This is based on the solution at
#
# https://gist.github.com/cpbotha/1b42a20c8f3eb9bb7cb8
#
# which was released under the MIT license:
#
# Copyright (c) 2011 Sebastian Wiesner <[email protected]>
# Modifications by Charl Botha <[email protected]>
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
#
# This version includes further changes.
from PySide.QtCore import Slot, QMetaObject
from PySide.QtUiTools import QUiLoader
from PySide.QtGui import QApplication, QMainWindow, QMessageBox
class UiLoader(QUiLoader):
"""
Subclass of :class:`~PySide.QtUiTools.QUiLoader` to create the user
interface in a base instance.
Unlike :class:`~PySide.QtUiTools.QUiLoader` itself this class does not
create a new instance of the top-level widget, but creates the user
interface in an existing instance of the top-level class if needed.
This mimics the behaviour of :func:`PyQt4.uic.loadUi`.
"""
def __init__(self, baseinstance, customWidgets=None):
"""
Create a loader for the given ``baseinstance``.
The user interface is created in ``baseinstance``, which must be an
instance of the top-level class in the user interface to load, or a
subclass thereof.
``customWidgets`` is a dictionary mapping from class name to class
object for custom widgets. Usually, this should be done by calling
registerCustomWidget on the QUiLoader, but with PySide 1.1.2 on
Ubuntu 12.04 x86_64 this causes a segfault.
``parent`` is the parent object of this loader.
"""
QUiLoader.__init__(self, baseinstance)
self.baseinstance = baseinstance
self.customWidgets = customWidgets
def createWidget(self, class_name, parent=None, name=''):
"""
Function that is called for each widget defined in ui file,
overridden here to populate baseinstance instead.
"""
if parent is None and self.baseinstance:
# supposed to create the top-level widget, return the base
# instance instead
return self.baseinstance
else:
# For some reason, Line is not in the list of available
# widgets, but works fine, so we have to special case it here.
if class_name in self.availableWidgets() or class_name == 'Line':
# create a new widget for child widgets
widget = QUiLoader.createWidget(self, class_name, parent, name)
else:
# if not in the list of availableWidgets, must be a custom
# widget this will raise KeyError if the user has not
# supplied the relevant class_name in the dictionary, or
# TypeError, if customWidgets is None
try:
widget = self.customWidgets[class_name](parent)
except (TypeError, __HOLE__) as e:
raise Exception('No custom widget ' + class_name + ' '
'found in customWidgets')
if self.baseinstance:
# set an attribute for the new child widget on the base
# instance, just like PyQt4.uic.loadUi does.
setattr(self.baseinstance, name, widget)
return widget
def loadUi(uifile, baseinstance=None, customWidgets=None,
workingDirectory=None):
"""
Dynamically load a user interface from the given ``uifile``.
``uifile`` is a string containing a file name of the UI file to load.
If ``baseinstance`` is ``None``, a new instance of the top-level
widget will be created. Otherwise, the user interface is created within
the given ``baseinstance``. In this case ``baseinstance`` must be an
instance of the top-level widget class in the UI file to load, or a
subclass thereof. In other words, if you've created a ``QMainWindow``
interface in the designer, ``baseinstance`` must be a ``QMainWindow``
or a subclass thereof, too. You cannot load a ``QMainWindow`` UI file
with a plain :class:`~PySide.QtGui.QWidget` as ``baseinstance``.
``customWidgets`` is a dictionary mapping from class name to class
object for custom widgets. Usually, this should be done by calling
registerCustomWidget on the QUiLoader, but with PySide 1.1.2 on Ubuntu
12.04 x86_64 this causes a segfault.
:method:`~PySide.QtCore.QMetaObject.connectSlotsByName()` is called on
the created user interface, so you can implemented your slots according
to its conventions in your widget class.
Return ``baseinstance``, if ``baseinstance`` is not ``None``. Otherwise
return the newly created instance of the user interface.
"""
loader = UiLoader(baseinstance, customWidgets)
if workingDirectory is not None:
loader.setWorkingDirectory(workingDirectory)
widget = loader.load(uifile)
QMetaObject.connectSlotsByName(widget)
return widget
import PySide
PySide.loadUi = loadUi
# Now load default Qt
|
KeyError
|
dataset/ETHPy150Open glue-viz/glue/glue/external/qt.py/patch_loadui
|
4,413 |
def code_changed():
global _mtimes, _win
filenames = []
for m in list(sys.modules.values()):
try:
filenames.append(m.__file__)
except __HOLE__:
pass
for filename in filenames + _error_files:
if not filename:
continue
if filename.endswith(".pyc") or filename.endswith(".pyo"):
filename = filename[:-1]
if filename.endswith("$py.class"):
filename = filename[:-9] + ".py"
if not os.path.exists(filename):
continue # File might be in an egg, so it can't be reloaded.
stat = os.stat(filename)
mtime = stat.st_mtime
if _win:
mtime -= stat.st_ctime
if filename not in _mtimes:
_mtimes[filename] = mtime
continue
if mtime != _mtimes[filename]:
_mtimes = {}
try:
del _error_files[_error_files.index(filename)]
except ValueError:
pass
return True
return False
|
AttributeError
|
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/Django-1.6.10/django/utils/autoreload.py/code_changed
|
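Here the hole masks attribute access on modules that lack `__file__` (built-ins such as `sys`, for example). A sketch of the same probe:

```python
import sys

def module_files():
    filenames = []
    for module in list(sys.modules.values()):
        # Built-in and dynamically created modules have no __file__
        # attribute, so the access raises AttributeError.
        try:
            filenames.append(module.__file__)
        except AttributeError:
            pass
    return filenames
```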
4,414 |
def check_errors(fn):
def wrapper(*args, **kwargs):
try:
fn(*args, **kwargs)
except (ImportError, IndentationError, __HOLE__, SyntaxError,
TypeError, AttributeError):
et, ev, tb = sys.exc_info()
if getattr(ev, 'filename', None) is None:
# get the filename from the last item in the stack
filename = traceback.extract_tb(tb)[-1][0]
else:
filename = ev.filename
if filename not in _error_files:
_error_files.append(filename)
raise
return wrapper
|
NameError
|
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/Django-1.6.10/django/utils/autoreload.py/check_errors
|
4,415 |
def python_reloader(main_func, args, kwargs):
if os.environ.get("RUN_MAIN") == "true":
thread.start_new_thread(main_func, args, kwargs)
try:
reloader_thread()
except KeyboardInterrupt:
pass
else:
try:
exit_code = restart_with_reloader()
if exit_code < 0:
os.kill(os.getpid(), -exit_code)
else:
sys.exit(exit_code)
except __HOLE__:
pass
|
KeyboardInterrupt
|
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/Django-1.6.10/django/utils/autoreload.py/python_reloader
|
4,416 |
def __init__(self, host='localhost', port=6379, password=None,
default_timeout=300, key_prefix=None):
BaseCache.__init__(self, default_timeout)
if isinstance(host, basestring):
try:
import redis
except __HOLE__:
raise RuntimeError('no redis module found')
self._client = redis.Redis(host=host, port=port, password=password)
else:
self._client = host
self.key_prefix = key_prefix or ''
|
ImportError
|
dataset/ETHPy150Open jojoin/cutout/cutout/cache/rediscache.py/RedisCache.__init__
|
4,417 |
def load_object(self, value):
"""The reversal of :meth:`dump_object`. This might be callde with
None.
"""
if value is None:
return None
if value.startswith('!'):
return pickle.loads(value[1:])
try:
return int(value)
except __HOLE__:
# before 0.8 we did not have serialization. Still support that.
return value
|
ValueError
|
dataset/ETHPy150Open jojoin/cutout/cutout/cache/rediscache.py/RedisCache.load_object
|
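The hole masks the conversion failure when the cached value is not an integer string. A sketch of the int-or-passthrough idiom:

```python
def load_object(value):
    if value is None:
        return None
    # int() raises ValueError for strings that are not integers;
    # fall back to returning the raw value in that case.
    try:
        return int(value)
    except ValueError:
        return value

load_object('42')     # 42
load_object('hello')  # 'hello'
```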
4,418 |
def _watch_workers(self, check_interval=5):
keep_running = True
while keep_running:
self._start_workers()
try:
try:
sleep(check_interval)
self._reap_workers()
except self.Stop:
logger.debug('Waiting for all workers to exit')
keep_running = False
self._reap_workers(True)
except __HOLE__, e:
if e.errno != errno.ECHILD:
logger.exception('Failed to wait for any worker to exit')
else:
logger.debug('No alive workers left')
|
OSError
|
dataset/ETHPy150Open momyc/gevent-fastcgi/gevent_fastcgi/server.py/FastCGIServer._watch_workers
|
4,419 |
def _kill_workers(self, kill_timeout=2):
for pid, sig in self._killing_sequence(kill_timeout):
try:
logger.debug(
'Killing worker {0} with signal {1}'.format(pid, sig))
os.kill(pid, sig)
except __HOLE__, x:
if x.errno == errno.ESRCH:
logger.error('Worker with pid {0} not found'.format(pid))
if pid in self._workers:
self._workers.remove(pid)
elif x.errno == errno.ECHILD:
logger.error('No alive workers left')
self._workers = []
break
else:
logger.exception(
'Failed to kill worker {0} with signal {1}'.format(
pid, sig))
|
OSError
|
dataset/ETHPy150Open momyc/gevent-fastcgi/gevent_fastcgi/server.py/FastCGIServer._kill_workers
|
4,420 |
def _remove_socket_file(self):
socket_file = self.__dict__.pop('_socket_file', None)
if socket_file:
try:
logger.debug('Removing socket-file {0}'.format(socket_file))
os.unlink(socket_file)
except __HOLE__:
logger.exception(
'Failed to remove socket file {0}'
.format(socket_file))
|
OSError
|
dataset/ETHPy150Open momyc/gevent-fastcgi/gevent_fastcgi/server.py/FastCGIServer._remove_socket_file
|
4,421 |
def _align_to_newline(self):
"Aligns the file object's position to the next newline."
fo, bsize = self._file_obj, self._blocksize
cur, total_read = '', 0
cur_pos = fo.tell()
while '\n' not in cur:
cur = fo.read(bsize)
total_read += bsize
try:
newline_offset = cur.index('\n') + total_read - bsize
except __HOLE__:
raise # TODO: seek to end?
fo.seek(cur_pos + newline_offset)
|
ValueError
|
dataset/ETHPy150Open mahmoud/boltons/boltons/jsonutils.py/JSONLIterator._align_to_newline
|
4,422 |
def _main():
import sys
if '-h' in sys.argv or '--help' in sys.argv:
print('loads one or more JSON Line files for basic validation.')
return
verbose = False
if '-v' in sys.argv or '--verbose' in sys.argv:
verbose = True
file_count, obj_count = 0, 0
filenames = sys.argv[1:]
for filename in filenames:
if filename in ('-h', '--help', '-v', '--verbose'):
continue
file_count += 1
with open(filename, 'rb') as file_obj:
iterator = JSONLIterator(file_obj)
cur_obj_count = 0
while 1:
try:
next(iterator)
except __HOLE__:
print('error reading object #%s around byte %s in %s'
% (cur_obj_count + 1, iterator.cur_byte_pos, filename))
return
except StopIteration:
break
obj_count += 1
cur_obj_count += 1
if verbose and obj_count and obj_count % 100 == 0:
sys.stdout.write('.')
if obj_count % 10000 == 0:
sys.stdout.write('%s\n' % obj_count)
if verbose:
print('files checked: %s' % file_count)
print('objects loaded: %s' % obj_count)
return
|
ValueError
|
dataset/ETHPy150Open mahmoud/boltons/boltons/jsonutils.py/_main
|
4,423 |
def most_recent(self):
"""
Returns the most recent copy of the instance available in the history.
"""
if not self.instance:
raise TypeError("Can't use most_recent() without a %s instance." %
self.model._meta.object_name)
tmp = []
for field in self.instance._meta.fields:
if isinstance(field, models.ForeignKey):
tmp.append(field.name + "_id")
else:
tmp.append(field.name)
fields = tuple(tmp)
try:
values = self.get_queryset().values_list(*fields)[0]
except __HOLE__:
raise self.instance.DoesNotExist("%s has no historical record." %
self.instance._meta.object_name)
return self.instance.__class__(*values)
|
IndexError
|
dataset/ETHPy150Open treyhunner/django-simple-history/simple_history/manager.py/HistoryManager.most_recent
|
4,424 |
def as_of(self, date):
"""Get a snapshot as of a specific date.
Returns an instance, or an iterable of the instances, of the
original model with all the attributes set according to what
was present on the object on the date provided.
"""
if not self.instance:
return self._as_of_set(date)
queryset = self.get_queryset().filter(history_date__lte=date)
try:
history_obj = queryset[0]
except __HOLE__:
raise self.instance.DoesNotExist(
"%s had not yet been created." %
self.instance._meta.object_name)
if history_obj.history_type == '-':
raise self.instance.DoesNotExist(
"%s had already been deleted." %
self.instance._meta.object_name)
return history_obj.instance
|
IndexError
|
dataset/ETHPy150Open treyhunner/django-simple-history/simple_history/manager.py/HistoryManager.as_of
|
4,425 |
def codepoints_to_string(xc, p, contextItem, args):
if len(args) != 1: raise XPathContext.FunctionNumArgs()
try:
return ''.join(chr(c) for c in args[0])
except __HOLE__:
XPathContext.FunctionArgType(1,"xs:integer*")
|
TypeError
|
dataset/ETHPy150Open Arelle/Arelle/arelle/FunctionFn.py/codepoints_to_string
|
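Here `chr()` applied across a sequence fails when an element is not an integer. A self-contained sketch (the error translation is illustrative, not Arelle's):

```python
def codepoints_to_string(codepoints):
    # chr() raises TypeError when handed a non-integer element;
    # the error surfaces out of the str.join that drives the genexp.
    try:
        return ''.join(chr(c) for c in codepoints)
    except TypeError:
        raise ValueError('expected a sequence of integers')

codepoints_to_string([72, 105])  # 'Hi'
```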
4,426 |
def substring_functions(xc, args, contains=None, startEnd=None, beforeAfter=None):
if len(args) == 3: raise fnFunctionNotAvailable()
if len(args) != 2: raise XPathContext.FunctionNumArgs()
string = stringArg(xc, args, 0, "xs:string?")
portion = stringArg(xc, args, 1, "xs:string")
if contains == True:
return portion in string
elif startEnd == True:
return string.startswith(portion)
elif startEnd == False:
return string.endswith(portion)
elif beforeAfter is not None:
if portion == '': return ''
try:
if beforeAfter: return string.partition( portion )[0]
else: return string.rpartition( portion )[2]
except __HOLE__:
return ''
raise fnFunctionNotAvailable() # wrong arguments?
|
ValueError
|
dataset/ETHPy150Open Arelle/Arelle/arelle/FunctionFn.py/substring_functions
|
4,427 |
def avg(xc, p, contextItem, args):
if len(args) != 1: raise XPathContext.FunctionNumArgs()
addends = xc.atomize( p, args[0] )
try:
l = len(addends)
if l == 0:
return () # xpath allows empty sequence argument
hasFloat = False
hasDecimal = False
for a in addends:
if math.isnan(a) or math.isinf(a):
return NaN
if isinstance(a, float):
hasFloat = True
elif isinstance(a, Decimal):
hasDecimal = True
if hasFloat and hasDecimal: # promote decimals to float
addends = [float(a) if isinstance(a, Decimal) else a
for a in addends]
return sum( addends ) / len( args[0] )
except __HOLE__:
raise XPathContext.FunctionArgType(1,"sumable values", addends, errCode='err:FORG0001')
|
TypeError
|
dataset/ETHPy150Open Arelle/Arelle/arelle/FunctionFn.py/avg
|
4,428 |
def fn_max(xc, p, contextItem, args):
if len(args) != 1: raise XPathContext.FunctionNumArgs()
comparands = xc.atomize( p, args[0] )
try:
if len(comparands) == 0:
return () # xpath allows empty sequence argument
if any(isinstance(c, float) and math.isnan(c) for c in comparands):
return NaN
return max( comparands )
except __HOLE__:
raise XPathContext.FunctionArgType(1,"comparable values", comparands, errCode='err:FORG0001')
|
TypeError
|
dataset/ETHPy150Open Arelle/Arelle/arelle/FunctionFn.py/fn_max
|
4,429 |
def fn_min(xc, p, contextItem, args):
if len(args) != 1: raise XPathContext.FunctionNumArgs()
comparands = xc.atomize( p, args[0] )
try:
if len(comparands) == 0:
return () # xpath allows empty sequence argument
if any(isinstance(c, float) and math.isnan(c) for c in comparands):
return NaN
return min( comparands )
except __HOLE__:
raise XPathContext.FunctionArgType(1,"comparable values", comparands, errCode='err:FORG0001')
|
TypeError
|
dataset/ETHPy150Open Arelle/Arelle/arelle/FunctionFn.py/fn_min
|
4,430 |
def fn_sum(xc, p, contextItem, args):
if len(args) != 1: raise XPathContext.FunctionNumArgs()
addends = xc.atomize( p, args[0] )
try:
if len(addends) == 0:
return 0 # xpath allows empty sequence argument
hasFloat = False
hasDecimal = False
for a in addends:
if math.isnan(a):
return NaN
if isinstance(a, float):
hasFloat = True
elif isinstance(a, Decimal):
hasDecimal = True
if hasFloat and hasDecimal: # promote decimals to float
addends = [float(a) if isinstance(a, Decimal) else a
for a in addends]
return sum( addends )
except __HOLE__:
raise XPathContext.FunctionArgType(1,"summable sequence", addends, errCode='err:FORG0001')
|
TypeError
|
dataset/ETHPy150Open Arelle/Arelle/arelle/FunctionFn.py/fn_sum
|
4,431 |
def format_number(xc, p, args):
if len(args) != 2: raise XPathContext.FunctionNumArgs()
value = numericArg(xc, p, args, 0, missingArgFallback='NaN', emptyFallback='NaN')
picture = stringArg(xc, args, 1, "xs:string", missingArgFallback='', emptyFallback='')
try:
return format_picture(xc.modelXbrl.locale, value, picture)
except __HOLE__ as err:
raise XPathContext.XPathException(p, 'err:FODF1310', str(err) )
|
ValueError
|
dataset/ETHPy150Open Arelle/Arelle/arelle/FunctionFn.py/format_number
|
4,432 |
def _request(self, endpoint, method, data=None, **kwargs):
"""
Method to handle both GET and POST requests.
:param endpoint: Endpoint of the API.
:param method: Method of HTTP request.
:param data: POST DATA for the request.
:param kwargs: Other keyword arguments.
:return: Response for the request.
"""
final_url = self.url + endpoint
if not self._is_authenticated:
raise LoginRequired
rq = self.session
if method == 'get':
request = rq.get(final_url, **kwargs)
else:
request = rq.post(final_url, data, **kwargs)
request.raise_for_status()
request.encoding = 'utf_8'
if len(request.text) == 0:
data = json.loads('{}')
else:
try:
data = json.loads(request.text)
except __HOLE__:
data = request.text
return data
|
ValueError
|
dataset/ETHPy150Open v1k45/python-qBittorrent/qbittorrent/client.py/Client._request
|
4,433 |
def popd(self):
try:
path = self._dirstack.pop()
except __HOLE__:
return None
else:
os.chdir(path)
return path
|
IndexError
|
dataset/ETHPy150Open kdart/pycopia/QA/pycopia/remote/WindowsServer.py/Win32Agent.popd
|
4,434 |
def poll(self, pid):
"""Poll for async process. Returns exitstatus if done."""
try:
proc = self._procs[pid]
except __HOLE__:
return -errno.ENOENT
if proc.poll() is None:
return -errno.EAGAIN
else:
del self._procs[pid]
return proc.exitstatus
|
KeyError
|
dataset/ETHPy150Open kdart/pycopia/QA/pycopia/remote/WindowsServer.py/Win32Agent.poll
|
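The hole masks a failed dictionary lookup of a process id. A sketch of mapping that lookup failure onto an errno-style return code, as the agent methods above do:

```python
import errno

def lookup_proc(procs, pid):
    # Indexing a dict with an unknown key raises KeyError; the
    # agent converts that into a negative errno return code.
    try:
        return procs[pid]
    except KeyError:
        return -errno.ENOENT
```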
4,435 |
def kill(self, pid):
"""Kills a process that was started by run_async."""
try:
proc = self._procs.pop(pid)
except __HOLE__:
return -errno.ENOENT
else:
proc.kill()
sts = proc.wait()
return sts
|
KeyError
|
dataset/ETHPy150Open kdart/pycopia/QA/pycopia/remote/WindowsServer.py/Win32Agent.kill
|
4,436 |
def _get_home(self):
try: # F&*#!&@ windows
HOME = os.environ['USERPROFILE']
except __HOLE__:
try:
HOME = os.path.join(os.environ["HOMEDRIVE"], os.environ["HOMEPATH"])
except KeyError:
HOME = "C:\\"
return HOME
|
KeyError
|
dataset/ETHPy150Open kdart/pycopia/QA/pycopia/remote/WindowsServer.py/Win32Agent._get_home
|
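Here the hole masks a missing environment variable. A sketch of the same nested fallback chain for locating a Windows home directory:

```python
import os

def get_home():
    # os.environ[...] raises KeyError when the variable is unset;
    # fall back through progressively cruder defaults.
    try:
        return os.environ['USERPROFILE']
    except KeyError:
        try:
            return os.path.join(os.environ['HOMEDRIVE'],
                                os.environ['HOMEPATH'])
        except KeyError:
            return 'C:\\'
```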
4,437 |
def __str__(self):
result = ["GPD Setting"]
if self.setting_id in GPDID.types:
result.append("%s" % GPDID.types[self.setting_id])
else:
result.append(hex(self.setting_id))
if self.content_id > 0 and self.content_id < 6:
try:
if GPDID.types[self.setting_id] == 'Region':
result.append(GamerTagConstants.Region[self.data])
elif GPDID.types[self.setting_id] == 'GamerZone':
result.append(GamerTagConstants.GamerZone[self.data])
else:
result.append(str(self.data))
except __HOLE__:
result.append(str(self.data))
if self.content_id == 7:
result.append(time.ctime(xboxtime.filetime2unixtime(self.data)))
return " ".join(result)
|
KeyError
|
dataset/ETHPy150Open arkem/py360/py360/xdbf.py/Setting.__str__
|
4,438 |
def run(self):
if os.getcwd() != self.cwd:
raise DistutilsSetupError("Must be in package root!")
self._verify_version()
self._verify_tag()
self.execute(os.system, ('git2cl > ChangeLog',))
if not self.skip_tests:
self.run_command('check')
try:
self.run_command('test')
except __HOLE__ as e:
if e.code != 0:
raise DistutilsSetupError("There were test failures!")
self.run_command('sdist')
if self.pypi:
cmd_obj = self.distribution.get_command_obj('upload')
cmd_obj.sign = True
if self.keyid:
cmd_obj.identity = self.keyid
self.run_command('upload')
self._sign()
self._tag()
self._publish()
self.announce("Release complete! Don't forget to:", log.INFO)
self.announce("")
self.announce(" git push && git push --tags", log.INFO)
self.announce("")
|
SystemExit
|
dataset/ETHPy150Open Yubico/python-yubico/release.py/release.run
|
4,439 |
def __init__(self, **kwargs):
for name in dir(self):
value = getattr(self, name)
if isinstance(value, Factory):
setattr(self, name, value())
for name, value in kwargs.iteritems():
try:
attribute = getattr(self, name)
except __HOLE__:
attribute = None
if isinstance(attribute, PersistentList):
attribute.extend(value)
elif isinstance(attribute, (PersistentMapping, OOBTree)):
attribute.update(value)
else:
setattr(self, name, value)
|
AttributeError
|
dataset/ETHPy150Open disqus/playa/playa/ext/zodb.py/Model.__init__
|
4,440 |
def buildClusterSpliceGraph(c, alt5, alt3):
"""use exon/splice start and end positions to build splice graph
for a cluster c. Also finds exons that share same start (but differ
at end: alt5), or share the same end (but differ at start: alt3)."""
start = {}
end = {}
none = []
for e in c.exons:
if e.genomic_start not in start:
start[e.genomic_start] = []
start[e.genomic_start].append(e)
if e.genomic_end not in end:
end[e.genomic_end] = []
end[e.genomic_end].append(e)
for s in c.splices:
try:
exons1 = end[s.ver_gen_start]
except KeyError:
exons1 = none
try:
exons2 = start[s.ver_gen_end]
except __HOLE__:
exons2 = none
for e1 in exons1:
for e2 in exons2:
e1.next[e2] = s # SAVE SPLICE AS EDGE INFO...
s.exons = (e1, e2) # SAVE EXONS DIRECTLY ON THE SPLICE OBJECT
for exons in start.values():
for e1 in exons:
for e2 in exons:
if e1 != e2:
alt5 += e1
alt5 += e2
e1.alt5 += e2
e2.alt5 += e1
for exons in end.values():
for e1 in exons:
for e2 in exons:
if e1 != e2:
alt3 += e1
alt3 += e2
e1.alt3 += e2
e2.alt3 += e1
|
KeyError
|
dataset/ETHPy150Open cjlee112/pygr/pygr/apps/splicegraph.py/buildClusterSpliceGraph
|
4,441 |
def __getattr__(self, attr):
'both parent classes have getattr, so have to call them both...'
try:
return TupleO.__getattr__(self, attr)
except __HOLE__:
return SeqPath.__getattr__(self, attr)
|
AttributeError
|
dataset/ETHPy150Open cjlee112/pygr/pygr/apps/splicegraph.py/ExonForm.__getattr__
|
4,442 |
def loadSpliceGraph(jun03, cluster_t, exon_t, splice_t, genomic_seq_t,
mrna_seq_t=None, protein_seq_t=None, loadAll=True):
"""
Build a splice graph from the specified SQL tables representing gene
clusters, exon forms, and splices. Each table must be specified
as a DB.TABLENAME string.
These tables are loaded into memory.
The splice graph is built based on exact match of exon and splice ends.
In addition, also builds alt5Graph (exons that match at start but differ
at end) and alt3Graph (exons that match at end but differ at start).
Loads all cluster, exon and splice data if loadAll is True.
Returns tuple: clusters, exons, splices, spliceGraph, alt5Graph, alt3Graph
"""
# CREATE OUR GRAPHS
clusterExons = dictGraph()
clusterSplices = dictGraph()
spliceGraph = dictGraph()
alt5 = dictGraph()
alt3 = dictGraph()
class YiGenomicSequence(DNASQLSequence):
def __len__(self):
return self._select('length(seq)') # USE SEQ LENGTH FROM DATABASE
g = jun03[genomic_seq_t]
# Force genomic seq table to use transparent access
g.objclass(YiGenomicSequence)
# Only process this if provided an mRNA table by the user.
if mrna_seq_t is not None:
mrna = jun03[mrna_seq_t]
# Force mRNA seq table to use transparent access.
mrna.objclass(SQLSequence)
else:
mrna = None
# Only process this if provided a protein table by the user.
if protein_seq_t is not None:
class YiProteinSQLSequence(ProteinSQLSequence):
def __len__(self):
return self.protein_length # USE SEQ LENGTH FROM DATABASE
protein = jun03[protein_seq_t]
# Force protein seq table to use transparent access
protein.objclass(YiProteinSQLSequence)
# Alias 'protein_seq' to appear as 'seq'
protein.addAttrAlias(seq='protein_seq')
else:
protein = None
exon_forms = jun03[exon_t]
ExonForm.__class_schema__ = SchemaDict(((spliceGraph, 'next'),
(alt5, 'alt5'), (alt3, 'alt3')))
# Bind this class to container as the one to use as "row objects".
exon_forms.objclass(ExonForm)
if loadAll:
print 'Loading %s...' % exon_forms
exon_forms.load(ExonForm)
clusters = jun03[cluster_t]
class Cluster(TupleO):
__class_schema__ = SchemaDict(((clusterExons, 'exons'),
(clusterSplices, 'splices')))
# Bind this class to container as the one to use as "row objects".
clusters.objclass(Cluster)
if loadAll:
print 'Loading %s...' % clusters
clusters.load(Cluster)
splices = jun03[splice_t]
# Bind this class to container as the one to use as "row objects".
splices.objclass(Splice)
if loadAll:
print 'Loading %s...' % splices
splices.load(Splice)
## print 'Saving alignment of protein to mrna isoforms...'
## mrna_protein=PathMapping2()
## for form_id in protein:
## p=protein[form_id]
## m=mrna[form_id]
## start=3*(p.mRNA_start-1)+int(p.reading_frame)
## end=start+3*p.protein_length
## mrna_protein[p]=m[start:end]
print 'Adding clusters to graph...'
for c in clusters.values(): # ADD CLUSTERS AS NODES TO GRAPH
clusterExons+=c
clusterSplices+=c
print 'Adding exons to graph...'
for e in exon_forms.values():
c=clusters[e.cluster_id]
try:
c.exons+=e
spliceGraph+=e
except __HOLE__:
pass # BAD EXON: EMPTY SEQUENCE INTERVAL... IGNORE IT
print 'Adding splices to graph...'
for s in splices.values():
try:
c=clusters[s.cluster_id]
except KeyError: # WIERD, ONE SPLICE WITH BLANK (NOT NULL) VALUE!
pass
else:
c.splices+=s
print 'Building splice graph...'
for c in clusters.values():
buildClusterSpliceGraph(c, alt5, alt3)
return clusters, exon_forms, splices, g, spliceGraph, alt5, alt3, mrna,\
protein, clusterExons, clusterSplices
|
IndexError
|
dataset/ETHPy150Open cjlee112/pygr/pygr/apps/splicegraph.py/loadSpliceGraph
|
4,443 |
def emit(self, record):
if record.levelno <= logging.ERROR and self.can_record(record):
request = None
exc_info = None
for frame_info in getouterframes(currentframe()):
frame = frame_info[0]
if not request:
request = frame.f_locals.get('request', None)
if not request:
view = frame.f_locals.get('self', None)
try:
request = getattr(view, 'request', None)
except RuntimeError:
request = None
if not exc_info:
exc_info = frame.f_locals.get('exc_info', None)
if not hasattr(exc_info, '__getitem__'):
exc_info = None
if request and exc_info:
break
if exc_info:
record.exc_info = exc_info
record.stack = \
iter_stack_frames(getinnerframes(exc_info[2]))
if request:
try:
body_pos = request.stdin.tell()
request.stdin.seek(0)
body = request.stdin.read()
request.stdin.seek(body_pos)
http = dict(headers=request.environ,
url=request.getURL(),
method=request.method,
host=request.environ.get('REMOTE_ADDR',
''), data=body)
if 'HTTP_USER_AGENT' in http['headers']:
if 'User-Agent' not in http['headers']:
http['headers']['User-Agent'] = \
http['headers']['HTTP_USER_AGENT']
if 'QUERY_STRING' in http['headers']:
http['query_string'] = http['headers']['QUERY_STRING']
setattr(record, 'request', http)
user = request.get('AUTHENTICATED_USER', None)
if user is not None and user != nobody:
user_dict = {
'id': user.getId(),
'email': user.getProperty('email') or '',
}
else:
user_dict = {}
setattr(record, 'user', user_dict)
except (AttributeError, __HOLE__):
logger.warning('Could not extract data from request', exc_info=True)
return super(ZopeSentryHandler, self).emit(record)
|
KeyError
|
dataset/ETHPy150Open getsentry/raven-python/raven/contrib/zope/__init__.py/ZopeSentryHandler.emit
|
4,444 |
def parse(self, extensions=None, keywords=None, compressed=False):
"""Returns the FITS file header(s) in a readable format.
Parameters
----------
extensions : list of int or str, optional
Format only specific HDU(s), identified by number or name.
The name can be composed of the "EXTNAME" or "EXTNAME,EXTVER"
keywords.
keywords : list of str, optional
Keywords for which the value(s) should be returned.
If not specified, then the entire header is returned.
compressed : boolean, optional
If True, shows the header describing the compression, rather than
the header obtained after decompression. (Affects FITS files
containing `CompImageHDU` extensions only.)
Returns
-------
formatted_header : str or astropy.table.Table
Traditional 80-char wide format in the case of `HeaderFormatter`;
an Astropy Table object in the case of `TableHeaderFormatter`.
"""
# `hdukeys` will hold the keys of the HDUList items to display
if extensions is None:
hdukeys = range(len(self._hdulist)) # Display all by default
else:
hdukeys = []
for ext in extensions:
try:
# HDU may be specified by number
hdukeys.append(int(ext))
except __HOLE__:
# The user can specify "EXTNAME" or "EXTNAME,EXTVER"
parts = ext.split(',')
if len(parts) > 1:
extname = ','.join(parts[0:-1])
extver = int(parts[-1])
hdukeys.append((extname, extver))
else:
hdukeys.append(ext)
# Having established which HDUs the user wants, we now format these:
return self._parse_internal(hdukeys, keywords, compressed)
|
ValueError
|
dataset/ETHPy150Open spacetelescope/PyFITS/pyfits/scripts/fitsheader.py/HeaderFormatter.parse
|
4,445 |
def _get_cards(self, hdukey, keywords, compressed):
"""Returns a list of `pyfits.card.Card` objects.
This function will return the desired header cards, taking into
account the user's preference to see the compressed or uncompressed
version.
Parameters
----------
hdukey : int or str
Key of a single HDU in the HDUList.
keywords : list of str, optional
Keywords for which the cards should be returned.
compressed : boolean, optional
If True, shows the header describing the compression.
Raises
------
ExtensionNotFoundException
If the hdukey does not correspond to an extension.
"""
# First we obtain the desired header
try:
if compressed:
# In the case of a compressed image, return the header before
# decompression (not the default behavior)
header = self._hdulist[hdukey]._header
else:
header = self._hdulist[hdukey].header
except (__HOLE__, KeyError):
message = '{0}: Extension {1} not found.'.format(self.filename,
hdukey)
log.warning(message)
raise ExtensionNotFoundException(message)
if not keywords: # return all cards
cards = header.cards
else: # specific keywords are requested
cards = []
for kw in keywords:
try:
crd = header.cards[kw]
if isinstance(crd, pyfits.card.Card): # Single card
cards.append(crd)
else: # Allow for wildcard access
cards.extend(crd)
except KeyError as e: # Keyword does not exist
log.warning('{filename} (HDU {hdukey}): '
'Keyword {kw} not found.'.format(
filename=self.filename,
hdukey=hdukey,
kw=kw))
return cards
|
IndexError
|
dataset/ETHPy150Open spacetelescope/PyFITS/pyfits/scripts/fitsheader.py/HeaderFormatter._get_cards
|
4,446 |
def print_headers_traditional(args):
"""Prints FITS header(s) using the traditional 80-char format.
Parameters
----------
args : argparse.Namespace
Arguments passed from the command-line as defined below.
"""
for idx, filename in enumerate(args.filename): # support wildcards
if idx > 0 and not args.keywords:
print() # print a newline between different files
try:
formatter = HeaderFormatter(filename)
print(formatter.parse(args.extensions,
args.keywords,
args.compressed), end='')
except __HOLE__ as e:
log.error(str(e))
|
IOError
|
dataset/ETHPy150Open spacetelescope/PyFITS/pyfits/scripts/fitsheader.py/print_headers_traditional
|
4,447 |
def main(args=None):
"""This is the main function called by the `fitsheader` script."""
parser = argparse.ArgumentParser(
description=('Print the header(s) of a FITS file. '
'Optional arguments allow the desired extension(s), '
'keyword(s), and output format to be specified. '
'Note that in the case of a compressed image, '
'the decompressed header is shown by default.'))
parser.add_argument('-e', '--ext', metavar='HDU',
action='append', dest='extensions',
help='specify the extension by name or number; '
'this argument can be repeated '
'to select multiple extensions')
parser.add_argument('-k', '--keyword', metavar='KEYWORD',
action='append', dest='keywords',
help='specify a keyword; this argument can be '
'repeated to select multiple keywords; '
'also supports wildcards')
parser.add_argument('-c', '--compressed', action='store_true',
help='for compressed image data, '
'show the true header which describes '
'the compression rather than the data')
parser.add_argument('filename', nargs='+',
help='path to one or more files; '
'wildcards are supported')
args = parser.parse_args(args)
# Now print the desired headers
try:
print_headers_traditional(args)
except __HOLE__ as e:
# A 'Broken pipe' IOError may occur when stdout is closed prematurely,
# eg. when calling `fitsheader file.fits | head`. We let this pass.
pass
|
IOError
|
dataset/ETHPy150Open spacetelescope/PyFITS/pyfits/scripts/fitsheader.py/main
|
4,448 |
def encode_str(s):
try:
s = s.encode('utf-8')
except __HOLE__:
s = s.decode('iso-8859-1').encode('utf-8')
return s
|
UnicodeDecodeError
|
dataset/ETHPy150Open gwu-libraries/launchpad/lp/ui/templatetags/launchpad_extras.py/encode_str
|
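The hole masks the decode failure on bytes that are not valid UTF-8. A Python 3 rendering of the fallback (Latin-1 decoding never fails, so it is a safe last resort):

```python
def to_text(b):
    # bytes.decode raises UnicodeDecodeError on invalid UTF-8;
    # re-interpret such input as ISO-8859-1, which accepts any byte.
    try:
        return b.decode('utf-8')
    except UnicodeDecodeError:
        return b.decode('iso-8859-1')

to_text(b'caf\xc3\xa9')  # 'café' (valid UTF-8)
to_text(b'caf\xe9')      # 'café' (Latin-1 fallback)
```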
4,449 |
def get_filter_args(argstring, keywords=(), intargs=(), boolargs=(), stripquotes=False):
"""Convert a string formatted list of arguments into a kwargs dictionary.
Automatically converts all keywords in intargs to integers.
If keywords is not empty, then enforces that only those keywords are returned.
Also handles args, which are just elements without an equal sign
ex:
in: get_filter_kwargs('length=10,format=medium', ('length'))
out: (), {'length' : 10, 'format' : 'medium'}
"""
args = []
kwargs = {}
if argstring:
work = [x.strip() for x in argstring.split(',')]
work = [x for x in work if x != '']
for elt in work:
parts = elt.split('=', 1)
if len(parts) == 1:
if stripquotes:
elt=_stripquotes(elt)
args.append(elt)
else:
key, val = parts
val = val.strip()
if stripquotes and val:
val=_stripquotes(val)
key = key.strip()
if not key: continue
key = key.lower().encode('ascii')
if not keywords or key in keywords:
if key in intargs:
try:
val = int(val)
except __HOLE__:
raise ValueError('Could not convert value "%s" to integer for keyword "%s"' % (val, key))
if key in boolargs:
val = val.lower()
val = val in (1, 't', 'true', 'yes', 'y', 'on')
kwargs[key] = val
return args, kwargs
|
ValueError
|
dataset/ETHPy150Open dokterbob/satchmo/satchmo/apps/satchmo_utils/templatetags/__init__.py/get_filter_args
|
4,450 |
def collect_report(self):
indexes = []
for app in settings.INSTALLED_APPS:
try:
mod = importlib.import_module(app)
except __HOLE__:
warnings.warn('Installed app %s is not an importable Python module and will be ignored' % app)
continue
try:
report_module = importlib.import_module("%s.reports" % app)
except ImportError:
if module_has_submodule(mod, 'reports'):
raise
continue
for item_name, item in inspect.getmembers(report_module,
inspect.isclass):
if getattr(item, 'get_queryset', None) and item_name != "Mining":
indexes.append(item().get_queryset())
elif getattr(item, 'model', None) and item_name != "Mining":
indexes.append(item().model.objects.all())
return indexes
|
ImportError
|
dataset/ETHPy150Open mining/django-report/report/management/commands/update_report.py/Command.collect_report
|
4,451 |
def __init__(self, *args, **kwargs):
super(YearMonthForm, self).__init__(*args, **kwargs)
now = datetime.datetime.now()
this_year = now.year
this_month = now.month
try:
first_entry = Entry.no_join.values('end_time')\
.order_by('end_time')[0]
except __HOLE__:
first_year = this_year
else:
first_year = first_entry['end_time'].year
years = [(year, year) for year in range(first_year, this_year + 1)]
self.fields['year'].choices = years
initial = kwargs.get('initial')
if initial:
this_year = initial.get('year', this_year)
this_month = initial.get('month', this_month)
self.fields['year'].initial = this_year
self.fields['month'].initial = this_month
|
IndexError
|
dataset/ETHPy150Open caktus/django-timepiece/timepiece/forms.py/YearMonthForm.__init__
|
4,452 |
def _ensure_element(tup, elem):
"""
Create a tuple containing all elements of tup, plus elem.
Returns the new tuple and the index of elem in the new tuple.
"""
try:
return tup, tup.index(elem)
except __HOLE__:
return tuple(chain(tup, (elem,))), len(tup)
|
ValueError
|
dataset/ETHPy150Open quantopian/zipline/zipline/pipeline/expression.py/_ensure_element
|
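Here `tuple.index` signals a missing element. A runnable sketch of the append-if-absent helper:

```python
from itertools import chain

def ensure_element(tup, elem):
    # tuple.index raises ValueError when elem is absent; in that
    # case append elem and report its new position.
    try:
        return tup, tup.index(elem)
    except ValueError:
        return tuple(chain(tup, (elem,))), len(tup)

ensure_element((1, 2), 2)  # ((1, 2), 1)
ensure_element((1, 2), 3)  # ((1, 2, 3), 2)
```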
4,453 |
def _start_server(self, callback, workers):
"""Run a given service.
:param callback: callback that will start the required service
:param workers: number of service workers
:returns: list of spawned workers' pids
"""
self.workers = workers
# Fork a new process in which server will be started
pid = os.fork()
if pid == 0:
status = 0
try:
callback(workers)
except __HOLE__ as exc:
status = exc.code
except BaseException:
traceback.print_exc()
status = 2
# Really exit
os._exit(status)
self.service_pid = pid
# If number of workers is 1 it is assumed that we run
# a service in the current process.
if self.workers > 1:
# Wait at most 10 seconds to spawn workers
condition = lambda: self.workers == len(self._get_workers())
utils.wait_until_true(
condition, timeout=10, sleep=0.1,
exception=RuntimeError(
"Failed to start %d workers." % self.workers))
workers = self._get_workers()
self.assertEqual(len(workers), self.workers)
return workers
# Wait for a service to start.
utils.wait_until_true(self.health_checker, timeout=10, sleep=0.1,
exception=RuntimeError(
"Failed to start service."))
return [self.service_pid]
|
SystemExit
|
dataset/ETHPy150Open openstack/neutron/neutron/tests/functional/test_server.py/TestNeutronServer._start_server
|
4,454 |
def load(self):
try:
self.file = open(self.filename, self.read_mode)
self.empty_file = False
except __HOLE__:
if self.binary:
self.file = BytesIO()
else:
self.file = StringIO()
self.empty_file = True
|
IOError
|
dataset/ETHPy150Open wq/wq.io/loaders.py/FileLoader.load
|
4,455 |
@decorator.decorator
def except_500_and_return(fn, *args, **kwargs):
def error_as_resp(args, e):
"""
Always return the error wrapped in a response object
"""
resp = ErrorResponse(e.response)
for arg in args:
if isinstance(arg, Document):
self.recorder.failure(arg, resp)
break
return resp
try:
return fn(*args, **kwargs)
except AvalaraServerException as e:
self = args[0] # the first arg is self
logged = False
try:
# but don't log the doc status error as an exception
error_as_json = json.loads(e.full_request_as_string)
if 'DocStatus' in error_as_json:
if error_as_json['DocStatus'] == 'DocStatus is invalid for this operation.':
self.logger.warning(e.full_request_as_string) # this case is not an error, just log a warning
logged = True
if not logged:
self.logger.exception(e.full_request_as_string)
return error_as_resp(args, e)
except __HOLE__: # json failed to parse
self.logger.exception(e.full_request_as_string)
return error_as_resp(args, e)
|
ValueError
|
dataset/ETHPy150Open activefrequency/pyavatax/pyavatax/api.py/except_500_and_return
|
4,456 |
@except_500_and_return
def get_tax(self, lat, lng, doc, sale_amount=None):
"""Performs a HTTP GET to tax/get/"""
if doc is not None:
if isinstance(doc, dict):
doc = Document.from_data(doc)
elif not isinstance(doc, Document) and sale_amount == None:
raise AvalaraTypeException(AvalaraException.CODE_BAD_DOC, 'Please pass a document or a dictionary to create a Document')
elif sale_amount is None:
raise AvalaraException(AvalaraException.CODE_BAD_ARGS, 'Please pass a doc argument, or sale_amount kwarg')
try:
stem = '/'.join([self.VERSION, 'tax', '%.6f,%.6f' % (lat, lng), 'get'])
except __HOLE__:
raise AvalaraTypeException(AvalaraException.CODE_LATLNG, 'Please pass lat and lng as floats, or Decimal')
data = {'saleamount': sale_amount} if sale_amount else {'saleamount': doc.total}
resp = self._get(stem, data)
self.logger.info('"GET" %s%s with: %s' % (self.url, stem, data))
self.recorder.success(doc)
return GetTaxResponse(resp)
|
TypeError
|
dataset/ETHPy150Open activefrequency/pyavatax/pyavatax/api.py/API.get_tax
|
4,457 |
@property
def _details(self):
try:
return [{m.RefersTo: m.Summary} for m in self.CancelTaxResult.Messages]
except __HOLE__: # doesn't have RefersTo
return [{m.Source: m.Summary} for m in self.CancelTaxResult.Messages]
|
AttributeError
|
dataset/ETHPy150Open activefrequency/pyavatax/pyavatax/api.py/CancelTaxResponse._details
|
4,458 |
@property
def is_success(self):
"""Returns whether or not the response was successful.
Avalara bungled this response, it is formatted differently than every other response"""
try:
return True if self.CancelTaxResult.ResultCode == BaseResponse.SUCCESS else False
except __HOLE__:
raise AvalaraException('error not applicable for this response')
|
AttributeError
|
dataset/ETHPy150Open activefrequency/pyavatax/pyavatax/api.py/CancelTaxResponse.is_success
|
4,459 |
@property
def error(self):
"""Returns a list of tuples. The first position in the tuple is
either the offending field that threw an error, or the class in
the Avalara system that threw it. The second position is a
human-readable message from Avalara.
Avalara bungled this response, it is formatted differently than every other response"""
cond = False
try:
cond = self.CancelTaxResult.ResultCode == BaseResponse.ERROR
except __HOLE__:
raise AvalaraException('error not applicable for this response')
return self._details if cond else False
|
AttributeError
|
dataset/ETHPy150Open activefrequency/pyavatax/pyavatax/api.py/CancelTaxResponse.error
|
4,460 |
def render(self, context):
"""
For date values that are tomorrow, today or yesterday compared to
present day returns representing string. Otherwise, returns an empty string.
"""
value = self.value.resolve(context)
try:
value = date(value.year, value.month, value.day)
except AttributeError:
# Passed value wasn't a date object
ret = ''
except __HOLE__:
# Date arguments out of range
ret = ''
delta = value - date.today()
if delta.days == 0:
ret = 'today'
elif delta.days == 1:
ret = 'tomorrow'
elif delta.days == -1:
ret = 'yesterday'
else:
ret = ''
context[self.var_name] = ret
return ''
|
ValueError
|
dataset/ETHPy150Open zorna/zorna/zorna/templatetags/zorna_tags.py/natural_date.render
|
4,461 |
def render(self, context):
if self.url:
context[self.var_name] = feedparser.parse(self.url)
else:
try:
context[self.var_name] = feedparser.parse(
context[self.url_var_name])
except __HOLE__:
raise template.TemplateSyntaxError, "the variable \"%s\" can't be found in the context" % self.url_var_name
for entry in context[self.var_name]['entries']:
date_published = entry.get('published', entry.get('updated'))
date_published = dateutil.parser.parse(date_published)
entry['date_published'] = date_published
return ''
|
KeyError
|
dataset/ETHPy150Open zorna/zorna/zorna/templatetags/zorna_tags.py/RssParserNode.render
|
4,462 |
@register.tag(name="get_rss")
def get_rss(parser, token):
# This version uses a regular expression to parse tag contents.
try:
# Splitting by None == splitting by spaces.
tag_name, arg = token.contents.split(None, 1)
except __HOLE__:
raise template.TemplateSyntaxError, "%r tag requires arguments" % token.contents.split()[
0]
m = re.search(r'(.*?) as (\w+)', arg)
if not m:
raise template.TemplateSyntaxError, "%r tag had invalid arguments" % tag_name
url, var_name = m.groups()
if url[0] == url[-1] and url[0] in ('"', "'"):
return RssParserNode(var_name, url=url[1:-1])
else:
return RssParserNode(var_name, url_var_name=url)
|
ValueError
|
dataset/ETHPy150Open zorna/zorna/zorna/templatetags/zorna_tags.py/get_rss
|
4,463 |
def load_backend(path):
i = path.rfind('.')
module, attr = path[:i], path[i+1:]
try:
mod = import_module(module)
except ImportError, e:
raise ImproperlyConfigured('Error importing authentication backend %s: "%s"' % (module, e))
except ValueError, e:
raise ImproperlyConfigured('Error importing authentication backends. Is AUTHENTICATION_BACKENDS a correctly defined list or tuple?')
try:
cls = getattr(mod, attr)
except AttributeError:
raise ImproperlyConfigured('Module "%s" does not define a "%s" authentication backend' % (module, attr))
try:
getattr(cls, 'supports_object_permissions')
except __HOLE__:
warn("Authentication backends without a `supports_object_permissions` attribute are deprecated. Please define it in %s." % cls,
PendingDeprecationWarning)
cls.supports_object_permissions = False
try:
getattr(cls, 'supports_anonymous_user')
except AttributeError:
warn("Authentication backends without a `supports_anonymous_user` attribute are deprecated. Please define it in %s." % cls,
PendingDeprecationWarning)
cls.supports_anonymous_user = False
return cls()
|
AttributeError
|
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-1.2/django/contrib/auth/__init__.py/load_backend
|
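`load_backend` above probes backends for two class attributes and emits a deprecation warning when they are missing; a minimal backend that passes both checks could look like the sketch below (the class name and method bodies are illustrative, only the attribute names come from the code above):

```python
class MyBackend(object):
    # The two attributes load_backend() looks up via getattr().
    supports_object_permissions = False
    supports_anonymous_user = False

    def authenticate(self, username=None, password=None):
        return None  # return a User instance on success

    def get_user(self, user_id):
        return None
```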
4,464 |
def authenticate(**credentials):
"""
If the given credentials are valid, return a User object.
"""
for backend in get_backends():
try:
user = backend.authenticate(**credentials)
except __HOLE__:
# This backend doesn't accept these credentials as arguments. Try the next one.
continue
if user is None:
continue
# Annotate the user object with the path of the backend.
user.backend = "%s.%s" % (backend.__module__, backend.__class__.__name__)
return user
|
TypeError
|
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-1.2/django/contrib/auth/__init__.py/authenticate
|
4,465 |
def get_user(request):
from django.contrib.auth.models import AnonymousUser
try:
user_id = request.session[SESSION_KEY]
backend_path = request.session[BACKEND_SESSION_KEY]
backend = load_backend(backend_path)
user = backend.get_user(user_id) or AnonymousUser()
except __HOLE__:
user = AnonymousUser()
return user
|
KeyError
|
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-1.2/django/contrib/auth/__init__.py/get_user
|
4,466 |
def setup(self):
class Double(Compound):
field_schema = [Integer.named(u'x'), Integer.named(u'y')]
def compose(self):
ex, ey = self.get(u'x'), self.get(u'y')
ux, uy = ex.u, ey.u
if ex.u and ey.u:
string = u"%sx%s" % (ex.u, ey.u)
else:
string = u''
if ex.value is not None and ey.value is not None:
value = (ex.value, ey.value)
else:
value = None
return string, value
def explode(self, value):
if value == u'boom':
raise AttributeError('boom')
if value == u'return-none':
return
try:
x, y = value
except (__HOLE__, ValueError):
return False
self[u'x'].set(x)
self[u'y'].set(y)
return True
self.Double = Double
|
TypeError
|
dataset/ETHPy150Open jek/flatland/tests/schema/test_compound.py/TestDoubleField.setup
|
4,467 |
def nth(n, seq):
try:
return seq[n]
except __HOLE__:
return None
except TypeError:
return next(islice(seq, n, None), None)
|
IndexError
|
dataset/ETHPy150Open Suor/funcy/funcy/seqs.py/nth
|
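Filling the `__HOLE__` with the labeled `IndexError` makes `nth` runnable; a quick demo of both code paths (indexable sequence vs. plain iterator):

```python
from itertools import islice

def nth(n, seq):
    try:
        return seq[n]
    except IndexError:  # per the label above
        return None
    except TypeError:   # not indexable: fall back to islice
        return next(islice(seq, n, None), None)

assert nth(1, [10, 20, 30]) == 20        # direct indexing
assert nth(1, iter([10, 20, 30])) == 20  # TypeError path
assert nth(5, [10, 20, 30]) is None      # IndexError path
```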
4,468 |
def last(seq):
try:
return seq[-1]
except __HOLE__:
return None
except TypeError:
item = None
for item in seq:
pass
return item
|
IndexError
|
dataset/ETHPy150Open Suor/funcy/funcy/seqs.py/last
|
4,469 |
def butlast(seq):
it = iter(seq)
try:
prev = next(it)
except __HOLE__:
pass
else:
for item in it:
yield prev
prev = item
|
StopIteration
|
dataset/ETHPy150Open Suor/funcy/funcy/seqs.py/butlast
|
4,470 |
def byte_FOR_ITER(self, jump):
iterobj = self.top()
try:
v = next(iterobj)
self.push(v)
except __HOLE__:
self.pop()
self.jump(jump)
|
StopIteration
|
dataset/ETHPy150Open nedbat/byterun/byterun/pyvm2.py/VirtualMachine.byte_FOR_ITER
|
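`byte_FOR_ITER` is the VM's rendering of Python's own iteration protocol; the same shape in plain Python:

```python
it = iter([1, 2, 3])
try:
    while True:
        v = next(it)  # FOR_ITER's push(v): the loop body runs with v on the stack
except StopIteration:
    pass              # FOR_ITER's pop() + jump(jump): fall out past the loop body
```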
4,471 |
def byte_YIELD_FROM(self):
u = self.pop()
x = self.top()
try:
if not isinstance(x, Generator) or u is None:
# Call next on iterators.
retval = next(x)
else:
retval = x.send(u)
self.return_value = retval
except __HOLE__ as e:
self.pop()
self.push(e.value)
else:
# YIELD_FROM decrements f_lasti, so that it will be called
# repeatedly until a StopIteration is raised.
self.jump(self.frame.f_lasti - 1)
# Returning "yield" prevents the block stack cleanup code
# from executing, suspending the frame in its current state.
return "yield"
## Importing
|
StopIteration
|
dataset/ETHPy150Open nedbat/byterun/byterun/pyvm2.py/VirtualMachine.byte_YIELD_FROM
|
4,472 |
def _inotify_init(self):
try:
fd = inotify_syscalls.inotify_init()
except __HOLE__, err:
self._last_errno = err.errno
return -1
return fd
|
IOError
|
dataset/ETHPy150Open seb-m/pyinotify/python2/pyinotify.py/_INotifySyscallsWrapper._inotify_init
|
4,473 |
def _inotify_add_watch(self, fd, pathname, mask):
try:
wd = inotify_syscalls.inotify_add_watch(fd, pathname, mask)
except __HOLE__, err:
self._last_errno = err.errno
return -1
return wd
|
IOError
|
dataset/ETHPy150Open seb-m/pyinotify/python2/pyinotify.py/_INotifySyscallsWrapper._inotify_add_watch
|
4,474 |
def _inotify_rm_watch(self, fd, wd):
try:
ret = inotify_syscalls.inotify_rm_watch(fd, wd)
except __HOLE__, err:
self._last_errno = err.errno
return -1
return ret
|
IOError
|
dataset/ETHPy150Open seb-m/pyinotify/python2/pyinotify.py/_INotifySyscallsWrapper._inotify_rm_watch
|
4,475 |
def init(self):
assert ctypes
try_libc_name = 'c'
if sys.platform.startswith('freebsd'):
try_libc_name = 'inotify'
libc_name = None
try:
libc_name = ctypes.util.find_library(try_libc_name)
except (OSError, IOError):
pass # Will attempt to load it with None anyway.
if sys.version_info >= (2, 6):
self._libc = ctypes.CDLL(libc_name, use_errno=True)
self._get_errno_func = ctypes.get_errno
else:
self._libc = ctypes.CDLL(libc_name)
try:
location = self._libc.__errno_location
location.restype = ctypes.POINTER(ctypes.c_int)
self._get_errno_func = lambda: location().contents.value
except __HOLE__:
pass
# Finally, check that libc has the needed inotify bindings.
if (not hasattr(self._libc, 'inotify_init') or
not hasattr(self._libc, 'inotify_add_watch') or
not hasattr(self._libc, 'inotify_rm_watch')):
return False
self._libc.inotify_init.argtypes = []
self._libc.inotify_init.restype = ctypes.c_int
self._libc.inotify_add_watch.argtypes = [ctypes.c_int, ctypes.c_char_p,
ctypes.c_uint32]
self._libc.inotify_add_watch.restype = ctypes.c_int
self._libc.inotify_rm_watch.argtypes = [ctypes.c_int, ctypes.c_int]
self._libc.inotify_rm_watch.restype = ctypes.c_int
return True
|
AttributeError
|
dataset/ETHPy150Open seb-m/pyinotify/python2/pyinotify.py/_CtypesLibcINotifyWrapper.init
|
4,476 |
def __init__(self, raw):
"""
Concretely, this is the raw event plus inferred information.
"""
_Event.__init__(self, raw)
self.maskname = EventsCodes.maskname(self.mask)
if COMPATIBILITY_MODE:
self.event_name = self.maskname
try:
if self.name:
self.pathname = os.path.abspath(os.path.join(self.path,
self.name))
else:
self.pathname = os.path.abspath(self.path)
except __HOLE__, err:
# Usually this is not an error: some events are perfectly valid
# despite the lack of these attributes.
log.debug(err)
|
AttributeError
|
dataset/ETHPy150Open seb-m/pyinotify/python2/pyinotify.py/Event.__init__
|
4,477 |
def process_IN_CREATE(self, raw_event):
"""
If the event affects a directory and the auto_add flag of the
targeted watch is set to True, a new watch is added on this
new directory, with the same attribute values as those of
this watch.
"""
if raw_event.mask & IN_ISDIR:
watch_ = self._watch_manager.get_watch(raw_event.wd)
created_dir = os.path.join(watch_.path, raw_event.name)
if watch_.auto_add and not watch_.exclude_filter(created_dir):
addw = self._watch_manager.add_watch
# The newly monitored directory inherits attributes from its
# parent directory.
addw_ret = addw(created_dir, watch_.mask,
proc_fun=watch_.proc_fun,
rec=False, auto_add=watch_.auto_add,
exclude_filter=watch_.exclude_filter)
# Trick to handle mkdir -p /d1/d2/t3 where d1 is watched and
# d2 and t3 (directory or file) are created.
# Since the directory d2 is new, then everything inside it must
# also be new.
created_dir_wd = addw_ret.get(created_dir)
if ((created_dir_wd is not None) and (created_dir_wd > 0) and
os.path.isdir(created_dir)):
try:
for name in os.listdir(created_dir):
inner = os.path.join(created_dir, name)
if self._watch_manager.get_wd(inner) is not None:
continue
# Generate (simulate) creation events for sub-
# directories and files.
if os.path.isfile(inner):
# symlinks are handled as files.
flags = IN_CREATE
elif os.path.isdir(inner):
flags = IN_CREATE | IN_ISDIR
else:
# This path should not be taken.
continue
rawevent = _RawEvent(created_dir_wd, flags, 0, name)
self._notifier.append_event(rawevent)
except __HOLE__, err:
msg = "process_IN_CREATE, invalid directory %s: %s"
log.debug(msg % (created_dir, str(err)))
return self.process_default(raw_event)
|
OSError
|
dataset/ETHPy150Open seb-m/pyinotify/python2/pyinotify.py/_SysProcessEvent.process_IN_CREATE
|
4,478 |
def loop(self, callback=None, daemonize=False, **args):
"""
Events are read at most once every min(read_freq, timeout)
seconds, and only if the size to read is >= threshold.
After this method returns it must not be called again for the same
instance.
@param callback: Functor called after each event processing iteration.
Expects to receive the notifier object (self) as first
parameter. If this function returns True the loop is
immediately terminated otherwise the loop method keeps
looping.
@type callback: callable object or function
@param daemonize: This thread is daemonized if set to True.
@type daemonize: boolean
@param args: Optional and relevant only if daemonize is True. Remaining
keyword arguments are passed directly to daemonize; see the
__daemonize() method. If pid_file=None or is set to a
pathname the caller must ensure the file does not exist
before this method is called otherwise an exception
pyinotify.NotifierError will be raised. If pid_file=False
it is still daemonized but the pid is not written in any
file.
@type args: various
"""
if daemonize:
self.__daemonize(**args)
# Read and process events forever
while 1:
try:
self.process_events()
if (callback is not None) and (callback(self) is True):
break
ref_time = time.time()
# check_events is blocking
if self.check_events():
self._sleep(ref_time)
self.read_events()
except __HOLE__:
# Stop monitoring if sigint is caught (Control-C).
log.debug('Pyinotify stops monitoring.')
break
# Close internals
self.stop()
|
KeyboardInterrupt
|
dataset/ETHPy150Open seb-m/pyinotify/python2/pyinotify.py/Notifier.loop
|
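A minimal usage sketch for the `loop()` API documented above, assuming pyinotify's standard `WatchManager`/`Notifier` setup (the watched path and event mask are illustrative):

```python
import pyinotify

wm = pyinotify.WatchManager()
notifier = pyinotify.Notifier(wm)
wm.add_watch('/tmp', pyinotify.IN_CREATE | pyinotify.IN_DELETE, rec=True)

def stop_check(notifier):
    return False  # returning True would terminate the loop

# Blocks until stop_check returns True or Ctrl-C (KeyboardInterrupt) is caught.
notifier.loop(callback=stop_check)
```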
4,479 |
def del_watch(self, wd):
"""
Remove watch entry associated to watch descriptor wd.
@param wd: Watch descriptor.
@type wd: int
"""
try:
del self._wmd[wd]
except __HOLE__, err:
log.error('Cannot delete unknown watch descriptor %s' % str(err))
|
KeyError
|
dataset/ETHPy150Open seb-m/pyinotify/python2/pyinotify.py/WatchManager.del_watch
|
4,480 |
def exists(self, name):
try:
self._zip_file.getinfo(name)
return True
except __HOLE__:
return False
|
KeyError
|
dataset/ETHPy150Open mwilliamson/python-mammoth/mammoth/zips.py/_Zip.exists
|
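The `KeyError` here comes straight from the standard library: `ZipFile.getinfo()` raises `KeyError` for unknown archive members, which is exactly what `exists()` converts into a boolean:

```python
import io
import zipfile

buf = io.BytesIO()
with zipfile.ZipFile(buf, 'w') as z:
    z.writestr('a.txt', 'hi')

with zipfile.ZipFile(buf) as z:
    z.getinfo('a.txt')        # ZipInfo instance -> exists() would return True
    try:
        z.getinfo('missing')  # raises KeyError -> exists() would return False
    except KeyError:
        pass
```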
4,481 |
def verify(self, **kwargs):
super(AccessTokenResponse, self).verify(**kwargs)
if "id_token" in self:
# Try to decode the JWT, checks the signature
args = {}
for arg in ["key", "keyjar", "algs", "sender"]:
try:
args[arg] = kwargs[arg]
except __HOLE__:
pass
idt = IdToken().from_jwt(str(self["id_token"]), **args)
if not idt.verify(**kwargs):
return False
# replace the JWT with the IdToken instance
self["id_token"] = idt
return True
|
KeyError
|
dataset/ETHPy150Open rohe/pyoidc/src/oic/oic/message.py/AccessTokenResponse.verify
|
4,482 |
def verify(self, **kwargs):
super(AuthorizationResponse, self).verify(**kwargs)
if "aud" in self:
if "client_id" in kwargs:
# check that it's for me
if kwargs["client_id"] not in self["aud"]:
return False
if "id_token" in self:
# Try to decode the JWT, checks the signature
args = {}
for arg in ["key", "keyjar", "algs", "sender"]:
try:
args[arg] = kwargs[arg]
except KeyError:
pass
idt = IdToken().from_jwt(str(self["id_token"]), **args)
if not idt.verify(**kwargs):
raise VerificationError("Could not verify id_token", idt)
_alg = idt.jws_header["alg"]
# What if _alg == 'none'
hfunc = "HS" + _alg[-3:]
if "access_token" in self:
try:
assert "at_hash" in idt
except __HOLE__:
raise MissingRequiredAttribute("Missing at_hash property",
idt)
try:
assert idt["at_hash"] == jws.left_hash(
self["access_token"], hfunc)
except AssertionError:
raise AtHashError(
"Failed to verify access_token hash", idt)
if "code" in self:
try:
assert "c_hash" in idt
except AssertionError:
raise MissingRequiredAttribute("Missing c_hash property",
idt)
try:
assert idt["c_hash"] == jws.left_hash(self["code"], hfunc)
except AssertionError:
raise CHashError("Failed to verify code hash", idt)
self["id_token"] = idt
return True
|
AssertionError
|
dataset/ETHPy150Open rohe/pyoidc/src/oic/oic/message.py/AuthorizationResponse.verify
|
4,483 |
def verify(self, **kwargs):
"""Authorization Request parameters that are OPTIONAL in the OAuth 2.0
specification MAY be included in the OpenID Request Object without also
passing them as OAuth 2.0 Authorization Request parameters, with one
exception: The scope parameter MUST always be present in OAuth 2.0
Authorization Request parameters.
All parameter values that are present both in the OAuth 2.0
Authorization Request and in the OpenID Request Object MUST exactly
match."""
super(AuthorizationRequest, self).verify(**kwargs)
args = {}
for arg in ["key", "keyjar", "opponent_id", "sender"]:
try:
args[arg] = kwargs[arg]
except __HOLE__:
pass
if "opponent_id" not in kwargs:
args["opponent_id"] = self["client_id"]
if "request" in self:
if isinstance(self["request"], six.string_types):
# Try to decode the JWT, checks the signature
oidr = OpenIDRequest().from_jwt(str(self["request"]), **args)
# verify that nothing is change in the original message
for key, val in oidr.items():
if key in self:
assert self[key] == val
# replace the JWT with the parsed and verified instance
self["request"] = oidr
if "id_token_hint" in self:
if isinstance(self["id_token_hint"], six.string_types):
idt = IdToken().from_jwt(str(self["id_token_hint"]), **args)
self["id_token_hint"] = idt
if "response_type" not in self:
raise MissingRequiredAttribute("response_type missing", self)
_rt = self["response_type"]
if "token" in _rt or "id_token" in _rt:
if "nonce" not in self:
raise MissingRequiredAttribute("Nonce missing", self)
if "openid" not in self.get("scope", []):
raise MissingRequiredValue("openid not in scope", self)
if "offline_access" in self.get("scope", []):
if "prompt" not in self or "consent" not in self["prompt"]:
raise MissingRequiredValue("consent in prompt", self)
if "prompt" in self:
if "none" in self["prompt"] and len(self["prompt"]) > 1:
raise InvalidRequest("prompt none combined with other value",
self)
return True
|
KeyError
|
dataset/ETHPy150Open rohe/pyoidc/src/oic/oic/message.py/AuthorizationRequest.verify
|
4,484 |
def verify(self, **kwargs):
super(OpenIDSchema, self).verify(**kwargs)
if "birthdate" in self:
# Either YYYY-MM-DD or just YYYY or 0000-MM-DD
try:
_ = time.strptime(self["birthdate"], "%Y-%m-%d")
except __HOLE__:
try:
_ = time.strptime(self["birthdate"], "%Y")
except ValueError:
try:
_ = time.strptime(self["birthdate"], "0000-%m-%d")
except ValueError:
raise VerificationError("Birthdate format error", self)
return True
|
ValueError
|
dataset/ETHPy150Open rohe/pyoidc/src/oic/oic/message.py/OpenIDSchema.verify
|
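The three birthdate layouts the fallback cascade above accepts, written out as direct `strptime` calls (the values are illustrative):

```python
import time

time.strptime('1987-10-16', '%Y-%m-%d')    # full date
time.strptime('1987', '%Y')                # year only
time.strptime('0000-10-16', '0000-%m-%d')  # month/day with the year zeroed out
```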
4,485 |
def verify(self, **kwargs):
super(IdToken, self).verify(**kwargs)
if "aud" in self:
if "client_id" in kwargs:
# check that I'm among the recipients
if kwargs["client_id"] not in self["aud"]:
raise NotForMe("", self)
# Then azp has to be present and be one of the aud values
if len(self["aud"]) > 1:
try:
assert "azp" in self
except AssertionError:
raise VerificationError("azp missing", self)
else:
try:
assert self["azp"] in self["aud"]
except AssertionError:
raise VerificationError(
"Mismatch between azp and aud claims", self)
if "azp" in self:
if "client_id" in kwargs:
if kwargs["client_id"] != self["azp"]:
raise NotForMe("", self)
_now = time_util.utc_time_sans_frac()
try:
_skew = kwargs['skew']
except KeyError:
_skew = 0
try:
_exp = self['exp']
except __HOLE__:
raise MissingRequiredAttribute('exp')
else:
if (_now - _skew) > _exp:
raise EXPError('Invalid expiration time')
try:
_storage_time = kwargs['nonce_storage_time']
except KeyError:
_storage_time = NONCE_STORAGE_TIME
try:
_iat = self['iat']
except KeyError:
raise MissingRequiredAttribute('iat')
else:
if (_iat + _storage_time) < (_now - _skew):
raise IATError('Issued too long ago')
return True
|
KeyError
|
dataset/ETHPy150Open rohe/pyoidc/src/oic/oic/message.py/IdToken.verify
|
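The exp/iat timing rules near the end of `verify()` reduce to two comparisons; a hedged standalone sketch (the function name and the `storage_time` default are mine; the library reads its own `NONCE_STORAGE_TIME` constant and raises `EXPError`/`IATError`):

```python
import time

def check_token_times(exp, iat, skew=0, storage_time=4 * 3600):
    now = int(time.time())
    if (now - skew) > exp:
        raise ValueError('Invalid expiration time')  # EXPError in the library
    if (iat + storage_time) < (now - skew):
        raise ValueError('Issued too long ago')      # IATError in the library
```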
4,486 |
def verify(self, **kwargs):
super(ProviderConfigurationResponse, self).verify(**kwargs)
if "scopes_supported" in self:
assert "openid" in self["scopes_supported"]
for scope in self["scopes_supported"]:
check_char_set(scope, SCOPE_CHARSET)
parts = urlparse(self["issuer"])
try:
assert parts.scheme == "https"
except __HOLE__:
raise SchemeError("Not HTTPS")
assert not parts.query and not parts.fragment
return True
|
AssertionError
|
dataset/ETHPy150Open rohe/pyoidc/src/oic/oic/message.py/ProviderConfigurationResponse.verify
|
4,487 |
def factory(msgtype):
for name, obj in inspect.getmembers(sys.modules[__name__]):
if inspect.isclass(obj) and issubclass(obj, Message):
try:
if obj.__name__ == msgtype:
return obj
except __HOLE__:
pass
# Fall back to basic OAuth2 messages
return message.factory(msgtype)
|
AttributeError
|
dataset/ETHPy150Open rohe/pyoidc/src/oic/oic/message.py/factory
|
4,488 |
def current_engine(self):
try:
return settings.DATABASES[self.db]['ENGINE']
except __HOLE__:
return settings.DATABASE_ENGINE
|
AttributeError
|
dataset/ETHPy150Open celery/django-celery/djcelery/managers.py/ExtendedManager.current_engine
|
4,489 |
def __init__(self, *args, **kwargs):
self._data = list(Row(arg) for arg in args)
self.__headers = None
# ('title', index) tuples
self._separators = []
# (column, callback) tuples
self._formatters = []
try:
self.headers = kwargs['headers']
except KeyError:
self.headers = None
try:
self.title = kwargs['title']
except __HOLE__:
self.title = None
self._register_formats()
|
KeyError
|
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/tablib-0.10.0/tablib/core.py/Dataset.__init__
|
4,490 |
def __repr__(self):
try:
return '<%s dataset>' % (self.title.lower())
except __HOLE__:
return '<dataset object>'
|
AttributeError
|
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/tablib-0.10.0/tablib/core.py/Dataset.__repr__
|
4,491 |
@classmethod
def _register_formats(cls):
"""Adds format properties."""
for fmt in formats.available:
try:
try:
setattr(cls, fmt.title, property(fmt.export_set, fmt.import_set))
except __HOLE__:
setattr(cls, fmt.title, property(fmt.export_set))
except AttributeError:
pass
|
AttributeError
|
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/tablib-0.10.0/tablib/core.py/Dataset._register_formats
|
4,492 |
def _package(self, dicts=True, ordered=True):
"""Packages Dataset into lists of dictionaries for transmission."""
# TODO: Dicts default to false?
_data = list(self._data)
if ordered:
dict_pack = OrderedDict
else:
dict_pack = dict
# Execute formatters
if self._formatters:
for row_i, row in enumerate(_data):
for col, callback in self._formatters:
try:
if col is None:
for j, c in enumerate(row):
_data[row_i][j] = callback(c)
else:
_data[row_i][col] = callback(row[col])
except __HOLE__:
raise InvalidDatasetIndex
if self.headers:
if dicts:
data = [dict_pack(list(zip(self.headers, data_row))) for data_row in _data]
else:
data = [list(self.headers)] + list(_data)
else:
data = [list(row) for row in _data]
return data
|
IndexError
|
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/tablib-0.10.0/tablib/core.py/Dataset._package
|
4,493 |
def _set_headers(self, collection):
"""Validating headers setter."""
self._validate(collection)
if collection:
try:
self.__headers = list(collection)
except __HOLE__:
raise TypeError
else:
self.__headers = None
|
TypeError
|
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/tablib-0.10.0/tablib/core.py/Dataset._set_headers
|
4,494 |
@property
def width(self):
"""The number of columns currently in the :class:`Dataset`.
Cannot be directly modified.
"""
try:
return len(self._data[0])
except __HOLE__:
try:
return len(self.headers)
except TypeError:
return 0
# -------
# Formats
# -------
|
IndexError
|
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/tablib-0.10.0/tablib/core.py/Dataset.width
|
4,495 |
def stack_cols(self, other):
"""Stack two :class:`Dataset` instances together by
joining at the column level, and return a new
combined ``Dataset`` instance. If either ``Dataset``
has headers set, then the other must as well."""
if not isinstance(other, Dataset):
return
if self.headers or other.headers:
if not self.headers or not other.headers:
raise HeadersNeeded
if self.height != other.height:
raise InvalidDimensions
try:
new_headers = self.headers + other.headers
except __HOLE__:
new_headers = None
_dset = Dataset()
for column in self.headers:
_dset.append_col(col=self[column])
for column in other.headers:
_dset.append_col(col=other[column])
_dset.headers = new_headers
return _dset
|
TypeError
|
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/tablib-0.10.0/tablib/core.py/Dataset.stack_cols
|
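Usage of `stack_cols()` follows directly from the checks above: both datasets need headers and equal height (the column names and rows here are illustrative):

```python
import tablib

left = tablib.Dataset(headers=['id'])
right = tablib.Dataset(headers=['name'])
left.append((1,))
right.append(('ada',))

combined = left.stack_cols(right)
print(combined.headers)  # ['id', 'name']
```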
4,496 |
def __repr__(self):
try:
return '<%s databook>' % (self.title.lower())
except __HOLE__:
return '<databook object>'
|
AttributeError
|
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/tablib-0.10.0/tablib/core.py/Databook.__repr__
|
4,497 |
@classmethod
def _register_formats(cls):
"""Adds format properties."""
for fmt in formats.available:
try:
try:
setattr(cls, fmt.title, property(fmt.export_book, fmt.import_book))
except __HOLE__:
setattr(cls, fmt.title, property(fmt.export_book))
except AttributeError:
pass
|
AttributeError
|
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/tablib-0.10.0/tablib/core.py/Databook._register_formats
|
4,498 |
def detect(stream):
"""Return (format, stream) of given stream."""
for fmt in formats.available:
try:
if fmt.detect(stream):
return (fmt, stream)
except __HOLE__:
pass
return (None, stream)
|
AttributeError
|
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/tablib-0.10.0/tablib/core.py/detect
|
4,499 |
def import_set(stream):
"""Return dataset of given stream."""
(format, stream) = detect(stream)
try:
data = Dataset()
format.import_set(data, stream)
return data
except __HOLE__:
return None
|
AttributeError
|
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/tablib-0.10.0/tablib/core.py/import_set
|