Schema (each record below has four fields, in this order):

  Unnamed: 0   int64    values 0 to 10k; the row index of the sample
  function     string   lengths 79 to 138k characters; Python source with one
                        exception type masked by the placeholder token __HOLE__
  label        string   one of 20 classes; the exception type that fills __HOLE__
  info         string   lengths 42 to 261 characters; provenance path of the sample
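Each label names the exception class that was masked out of its function, so substituting it back in for __HOLE__ reconstructs the original source. Below is a minimal sketch of that reconstruction, assuming the split has been exported to a CSV file named holes.csv with the four columns above (the file name is an assumption; the listing does not name one):

import pandas as pd

# Assumed export of this split; the schema above does not specify a file name.
df = pd.read_csv("holes.csv")

# Substitute the labeled exception class back into the masked source.
restored = df.apply(
    lambda row: row["function"].replace("__HOLE__", row["label"]),
    axis=1,
)

print(restored.iloc[0][:120])  # first 120 characters of the first restored function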
5,400
def set_picture(self, new_file):
    uid = self.target.username
    self.picture.error = None
    error = None
    # No value, exit
    if new_file == '':
        return None
    try:
        validator.validate_file(new_file, self.assets_manager.max_size,
                                _(u'File must be less than %d KB'))
    except __HOLE__, e:
        error = e.message
    if imghdr.what(new_file.file) is None:
        error = _(u'Invalid image file')
    if error:
        self.picture.error = error
        return None
    # Remove old value
    if self.target.picture:
        self.assets_manager.delete(uid)
    # Save new value
    self.assets_manager.save(new_file.file.read(), file_id=uid,
                             metadata={'filename': new_file.filename,
                                       'content-type': new_file.type},
                             THUMB_SIZE=(100, 100))
    self.picture(self.assets_manager.get_image_url(uid, size='thumb'))
ValueError
dataset/ETHPy150Open Net-ng/kansha/kansha/user/user_profile.py/UserForm.set_picture
5,401
def test_validates_local_url(self):
    try:
        self.validator('/')
    except __HOLE__:
        self.fail('ExtendedURLValidator raised ValidationError'
                  'unexpectedly!')
ValidationError
dataset/ETHPy150Open django-oscar/django-oscar/tests/integration/core/validator_tests.py/TestExtendedURLValidatorWithVerifications.test_validates_local_url
5,402
def test_validates_local_url_with_query_strings(self):
    try:
        self.validator('/?q=test')  # Query strings shouldn't affect validation
    except __HOLE__:
        self.fail('ExtendedURLValidator raised ValidationError'
                  'unexpectedly!')
ValidationError
dataset/ETHPy150Open django-oscar/django-oscar/tests/integration/core/validator_tests.py/TestExtendedURLValidatorWithVerifications.test_validates_local_url_with_query_strings
5,403
def test_validates_urls_missing_preceding_slash(self):
    try:
        self.validator('catalogue/')
    except __HOLE__:
        self.fail('ExtendedURLValidator raised ValidationError'
                  'unexpectedly!')
ValidationError
dataset/ETHPy150Open django-oscar/django-oscar/tests/integration/core/validator_tests.py/TestExtendedURLValidatorWithVerifications.test_validates_urls_missing_preceding_slash
5,404
def test_validates_flatpages_urls(self):
    FlatPage.objects.create(title='test page', url='/test/page/')
    try:
        self.validator('/test/page/')
    except __HOLE__:
        self.fail('ExtendedURLValidator raises ValidationError'
                  'unexpectedly!')
ValidationError
dataset/ETHPy150Open django-oscar/django-oscar/tests/integration/core/validator_tests.py/TestExtendedURLValidatorWithVerifications.test_validates_flatpages_urls
5,405
def returner(ret):
    '''
    Log outcome to sentry. The returner tries to identify errors and report
    them as such. All other messages will be reported at info level.
    '''
    def connect_sentry(message, result):
        '''
        Connect to the Sentry server
        '''
        pillar_data = __salt__['pillar.raw']()
        grains = __salt__['grains.items']()
        sentry_data = {
            'result': result,
            'pillar': pillar_data,
            'grains': grains
        }
        data = {
            'platform': 'python',
            'culprit': ret['fun'],
            'level': 'error'
        }
        tags = {}
        if 'tags' in pillar_data['raven']:
            for tag in pillar_data['raven']['tags']:
                tags[tag] = grains[tag]
        if ret['return']:
            data['level'] = 'info'
        servers = []
        try:
            for server in pillar_data['raven']['servers']:
                servers.append(server + '/api/store/')
            client = Client(
                servers=servers,
                public_key=pillar_data['raven']['public_key'],
                secret_key=pillar_data['raven']['secret_key'],
                project=pillar_data['raven']['project'],
            )
        except __HOLE__ as missing_key:
            logger.error(
                'Sentry returner need config \'{0}\' in pillar'.format(
                    missing_key
                )
            )
        else:
            try:
                client.capture(
                    'raven.events.Message',
                    message=message,
                    data=data,
                    extra=sentry_data,
                    tags=tags
                )
            except Exception as err:
                logger.error(
                    'Can\'t send message to sentry: {0}'.format(err),
                    exc_info=True
                )

    try:
        connect_sentry(ret['fun'], ret)
    except Exception as err:
        logger.error(
            'Can\'t run connect_sentry: {0}'.format(err),
            exc_info=True
        )
KeyError
dataset/ETHPy150Open saltstack/salt/salt/returners/sentry_return.py/returner
5,406
def urlopen(self, path, method='GET', post='', env=None):
    result = {'code':0, 'status':'error', 'header':{}, 'body':tob('')}

    def start_response(status, header):
        result['code'] = int(status.split()[0])
        result['status'] = status.split(None, 1)[-1]
        for name, value in header:
            name = name.title()
            if name in result['header']:
                result['header'][name] += ', ' + value
            else:
                result['header'][name] = value

    env = env if env else {}
    wsgiref.util.setup_testing_defaults(env)
    env['REQUEST_METHOD'] = wsgistr(method.upper().strip())
    env['PATH_INFO'] = wsgistr(path)
    env['QUERY_STRING'] = wsgistr('')
    if post:
        env['REQUEST_METHOD'] = 'POST'
        env['CONTENT_LENGTH'] = str(len(tob(post)))
        env['wsgi.input'].write(tob(post))
        env['wsgi.input'].seek(0)
    response = self.wsgiapp(env, start_response)
    for part in response:
        try:
            result['body'] += part
        except __HOLE__:
            raise TypeError('WSGI app yielded non-byte object %s', type(part))
    if hasattr(response, 'close'):
        response.close()
    del response
    return result
TypeError
dataset/ETHPy150Open bottlepy/bottle/test/tools.py/ServerTestBase.urlopen
5,407
@register.filter(name='truncatechars')
@stringfilter
def truncatechars(value, arg):
    """
    Truncates a string after a certain number of chars.
    Argument: Number of chars to truncate after.
    """
    try:
        length = int(arg)
    except __HOLE__:
        # Invalid literal for int().
        return value  # Fail silently.
    if len(value) > length:
        return value[:length] + '...'
    return value
ValueError
dataset/ETHPy150Open disqus/overseer/overseer/templatetags/overseer_helpers.py/truncatechars
5,408
def butler(table_data):
    try:
        with open(CONFIG_PATH) as config:
            movie_path = config.read()
    except IOError:
        print(Fore.RED, "\n\nRun `$moviemon PATH` to "
              "index your movies directory.\n\n")
        quit()
    else:
        table = AsciiTable(table_data)
        try:
            with open(movie_path + ".json") as inp:
                data = json.load(inp)
            return data, table
        except __HOLE__:
            print(Fore.YELLOW, "\n\nRun `$moviemon PATH` to "
                  "index your movies directory.\n\n")
            quit()
IOError
dataset/ETHPy150Open iCHAIT/moviemon/moviemon/moviemon.py/butler
5,409
def execute(cmd, process_input=None, check_exit_code=True, cwd=None,
            shell=False, env_overrides=None, stdout_fh=subprocess.PIPE,
            stderr_fh=subprocess.PIPE):
    """Helper method to execute a command through subprocess.

    :param cmd: Command passed to subprocess.Popen.
    :param process_input: Input send to opened process.
    :param check_exit_code: Specifies whether to check process return code.
        If return code is other then `0` - exception will be raised.
    :param cwd: The child's current directory will be changed to `cwd`
        before it is executed.
    :param shell: Specifies whether to use the shell as the program
        to execute.
    :param env_overrides: Process environment parameters to override.
    :param stdout_fh: Stdout file handler.
    :param stderr_fh: Stderr file handler.
    :returns: A tuple, (stdout, stderr) from the spawned process.
    :raises: :class:`exceptions.ProcessExecutionError` when process ends
        with other then `0` return code.
    """
    # Ensure all string args (i.e. for those that send ints, etc.).
    cmd = map(str, cmd)
    # NOTE(skudriashev): If shell is True, it is recommended to pass args as a
    # string rather than as a sequence.
    str_cmd = subprocess.list2cmdline(cmd)
    if shell:
        cmd = str_cmd
        LOG.debug('Running shell cmd: %r' % cmd)
    else:
        LOG.debug('Running cmd: %r' % cmd)
    if process_input is not None:
        process_input = str(process_input)
        LOG.debug('Process input: %s' % process_input)
    if cwd:
        LOG.debug('Process working directory: %r' % cwd)
    # Override process environment in needed.
    process_env = None
    if env_overrides and len(env_overrides):
        process_env = env.get()
        for k, v in env_overrides.items():
            LOG.debug("Using environment override '%s' => '%s'", k, v)
            process_env[k] = str(v)
    # Run command process.
    exec_kwargs = {
        'stdin': subprocess.PIPE,
        'stdout': stdout_fh,
        'stderr': stderr_fh,
        'close_fds': True,
        'shell': shell,
        'cwd': cwd,
        'env': process_env,
    }
    result = ("", "")
    try:
        obj = subprocess.Popen(cmd, **exec_kwargs)
        result = obj.communicate(process_input)
    except __HOLE__ as e:
        raise excp.ProcessExecutionError(
            str_cmd,
            exec_kwargs=exec_kwargs,
            description="%s: [%s, %s]" % (e, e.errno, e.strerror)
        )
    else:
        rc = obj.returncode
    # Handle process exit code.
    stdout = result[0] or ""
    stderr = result[1] or ""
    if rc != 0 and check_exit_code:
        # Raise exception if return code is not `0`.
        e = excp.ProcessExecutionError(str_cmd,
                                       exec_kwargs=exec_kwargs,
                                       stdout=stdout,
                                       stderr=stderr,
                                       exit_code=rc,
                                       where_output="debug log")
        LOG.debug("Stdout: %s", e.stdout)
        LOG.debug("Stderr: %s", e.stderr)
        raise e
    return stdout, stderr
OSError
dataset/ETHPy150Open openstack/anvil/anvil/shell.py/execute
5,410
def execute_save_output(cmd, file_name, **kwargs):
    """Helper method to execute a command through subprocess and save stdout
    and stderr into a file.
    """
    kwargs = kwargs.copy()
    mkdirslist(dirname(file_name))
    try:
        with open(file_name, 'wb') as fh:
            return execute(cmd, stdout_fh=fh, stderr_fh=fh, **kwargs)
    except excp.ProcessExecutionError:
        with excp.reraise():
            try:
                with open(file_name, 'rb') as fh:
                    lines = collections.deque(fh, maxlen=_TRUNCATED_OUTPUT_LINES)
                    content = "".join(lines)
            except __HOLE__:
                pass
            else:
                LOG.debug('Last lines from %s:\n%s', file_name, content)
IOError
dataset/ETHPy150Open openstack/anvil/anvil/shell.py/execute_save_output
5,411
def rmdir(path, quiet=True):
    if not isdir(path):
        return
    try:
        LOG.debug("Deleting directory %r with the cavet that we will fail if it's not empty." % (path))
        os.rmdir(path)
        LOG.debug("Deleted directory %r" % (path))
    except __HOLE__:
        if not quiet:
            raise
        else:
            pass
OSError
dataset/ETHPy150Open openstack/anvil/anvil/shell.py/rmdir
5,412
def unlink(path, ignore_errors=True):
    LOG.debug("Unlinking (removing) %r" % (path))
    try:
        os.unlink(path)
    except __HOLE__:
        if not ignore_errors:
            raise
        else:
            pass
OSError
dataset/ETHPy150Open openstack/anvil/anvil/shell.py/unlink
5,413
@contextmanager
def exception_to_errormsg():
    try:
        yield
    except exceptions.PyUniteWarning as e:
        warn(str(e))
    except exceptions.PyUniteError as e:
        error(str(e))
    except __HOLE__ as e:
        # It's better to provide a stack trace than nothing
        if not str(e):
            raise
        warn(str(e))
AssertionError
dataset/ETHPy150Open azure-satellite/pyunite/pyunite/ui.py/exception_to_errormsg
5,414
def test_bad_sysname(self):
    group = Group()
    try:
        group.add('0', ExecComp('y=x*2.0'), promotes=['x'])
    except __HOLE__ as err:
        self.assertEqual(str(err),
                         ": '0' is not a valid system name.")
    try:
        group.add('foo:bar', ExecComp('y=x*2.0'), promotes=['x'])
    except NameError as err:
        self.assertEqual(str(err),
                         ": 'foo:bar' is not a valid system name.")
NameError
dataset/ETHPy150Open OpenMDAO/OpenMDAO/openmdao/core/test/test_group.py/TestGroup.test_bad_sysname
5,415
def test_layout_getter_fixed(self):
    tr = Layout()
    tr.fixed = True
    try:
        tr.Test.Path
        raise AssertionError
    except __HOLE__ as e:
        self.assertEqual(str(e), self.fixed_error)
AttributeError
dataset/ETHPy150Open ioam/holoviews/tests/testcollector.py/LayoutTest.test_layout_getter_fixed
5,416
def test_layout_setter_fixed(self):
    tr = Layout()
    tr.fixed = True
    try:
        tr.Test.Path = 42
        raise AssertionError
    except __HOLE__ as e:
        self.assertEqual(str(e), self.fixed_error)
AttributeError
dataset/ETHPy150Open ioam/holoviews/tests/testcollector.py/LayoutTest.test_layout_setter_fixed
5,417
def test_layout_shallow_fixed_setter(self):
    tr = Layout()
    tr.fixed = True
    try:
        tr.Test = 42
        raise AssertionError
    except __HOLE__ as e:
        self.assertEqual(str(e), self.fixed_error)
AttributeError
dataset/ETHPy150Open ioam/holoviews/tests/testcollector.py/LayoutTest.test_layout_shallow_fixed_setter
5,418
def test_layout_toggle_fixed(self):
    tr = Layout()
    tr.fixed = True
    try:
        tr.Test = 42
        raise AssertionError
    except __HOLE__ as e:
        self.assertEqual(str(e), self.fixed_error)
    tr.fixed = False
    tr.Test = 42
AttributeError
dataset/ETHPy150Open ioam/holoviews/tests/testcollector.py/LayoutTest.test_layout_toggle_fixed
5,419
def unregister(self, name):
    try:
        content_type = self.name_to_type[name]
        self._decoders.pop(content_type, None)
        self._encoders.pop(name, None)
        self.type_to_name.pop(content_type, None)
        self.name_to_type.pop(name, None)
    except __HOLE__:
        raise SerializerNotInstalled(
            'No encoder/decoder installed for {0}'.format(name))
KeyError
dataset/ETHPy150Open celery/kombu/kombu/serialization.py/SerializerRegistry.unregister
5,420
def _set_default_serializer(self, name):
    """
    Set the default serialization method used by this library.

    :param name: The name of the registered serialization method.
        For example, `json` (default), `pickle`, `yaml`, `msgpack`,
        or any custom methods registered using :meth:`register`.

    :raises SerializerNotInstalled: If the serialization method
        requested is not available.
    """
    try:
        (self._default_content_type,
         self._default_content_encoding,
         self._default_encode) = self._encoders[name]
    except __HOLE__:
        raise SerializerNotInstalled(
            'No encoder installed for {0}'.format(name))
KeyError
dataset/ETHPy150Open celery/kombu/kombu/serialization.py/SerializerRegistry._set_default_serializer
5,421
def register_yaml():
    """Register a encoder/decoder for YAML serialization.

    It is slower than JSON, but allows for more data types to be serialized.
    Useful if you need to send data such as dates"""
    try:
        import yaml
        registry.register('yaml', yaml.safe_dump, yaml.safe_load,
                          content_type='application/x-yaml',
                          content_encoding='utf-8')
    except __HOLE__:
        def not_available(*args, **kwargs):
            """In case a client receives a yaml message, but yaml
            isn't installed."""
            raise SerializerNotInstalled(
                'No decoder installed for YAML. Install the PyYAML library')
        registry.register('yaml', None, not_available, 'application/x-yaml')
ImportError
dataset/ETHPy150Open celery/kombu/kombu/serialization.py/register_yaml
5,422
def register_msgpack():
    """See http://msgpack.sourceforge.net/"""
    pack = unpack = None
    try:
        import msgpack
        if msgpack.version >= (0, 4):
            from msgpack import packb, unpackb

            def pack(s):
                return packb(s, use_bin_type=True)

            def unpack(s):
                return unpackb(s, encoding='utf-8')
        else:
            def version_mismatch(*args, **kwargs):
                raise SerializerNotInstalled(
                    'msgpack requires msgpack-python >= 0.4.0')
            pack = unpack = version_mismatch
    except (ImportError, __HOLE__):
        def not_available(*args, **kwargs):
            raise SerializerNotInstalled(
                'No decoder installed for msgpack. '
                'Please install the msgpack-python library')
        pack = unpack = not_available
    registry.register(
        'msgpack', pack, unpack,
        content_type='application/x-msgpack',
        content_encoding='binary',
    )


# Register the base serialization methods.
ValueError
dataset/ETHPy150Open celery/kombu/kombu/serialization.py/register_msgpack
5,423
def enable_insecure_serializers(choices=['pickle', 'yaml', 'msgpack']):
    """Enable serializers that are considered to be unsafe.

    Will enable ``pickle``, ``yaml`` and ``msgpack`` by default,
    but you can also specify a list of serializers (by name or content type)
    to enable.
    """
    for choice in choices:
        try:
            registry.enable(choice)
        except __HOLE__:
            pass
KeyError
dataset/ETHPy150Open celery/kombu/kombu/serialization.py/enable_insecure_serializers
5,424
def getcolor(self, color):
    # experimental: given an rgb tuple, allocate palette entry
    if self.rawmode:
        raise ValueError("palette contains raw palette data")
    if Image.isTupleType(color):
        try:
            return self.colors[color]
        except __HOLE__:
            # allocate new color slot
            if Image.isStringType(self.palette):
                self.palette = map(int, self.palette)
            index = len(self.colors)
            if index >= 256:
                raise ValueError("cannot allocate more than 256 colors")
            self.colors[color] = index
            self.palette[index] = color[0]
            self.palette[index+256] = color[1]
            self.palette[index+512] = color[2]
            self.dirty = 1
            return index
    else:
        raise ValueError("unknown color specifier: %r" % color)
KeyError
dataset/ETHPy150Open kleientertainment/ds_mod_tools/pkg/win32/Python27/Lib/site-packages/PIL/ImagePalette.py/ImagePalette.getcolor
5,425
def load(filename):
    # FIXME: supports GIMP gradients only
    fp = open(filename, "rb")
    lut = None
    if not lut:
        try:
            import GimpPaletteFile
            fp.seek(0)
            p = GimpPaletteFile.GimpPaletteFile(fp)
            lut = p.getpalette()
        except (SyntaxError, ValueError):
            pass
    if not lut:
        try:
            import GimpGradientFile
            fp.seek(0)
            p = GimpGradientFile.GimpGradientFile(fp)
            lut = p.getpalette()
        except (SyntaxError, __HOLE__):
            pass
    if not lut:
        try:
            import PaletteFile
            fp.seek(0)
            p = PaletteFile.PaletteFile(fp)
            lut = p.getpalette()
        except (SyntaxError, ValueError):
            pass
    if not lut:
        raise IOError, "cannot load palette"
    return lut  # data, rawmode


# add some psuedocolour palettes as well
ValueError
dataset/ETHPy150Open kleientertainment/ds_mod_tools/pkg/win32/Python27/Lib/site-packages/PIL/ImagePalette.py/load
5,426
def test_power_representation():
    tests = [(1729, 3, 2), (234, 2, 4), (2, 1, 2), (3, 1, 3), (5, 2, 2),
             (12352, 2, 4), (32760, 2, 3)]
    for test in tests:
        n, p, k = test
        f = power_representation(n, p, k)
        while True:
            try:
                l = next(f)
                assert len(l) == k
                chk_sum = 0
                for l_i in l:
                    chk_sum = chk_sum + l_i**p
                assert chk_sum == n
            except __HOLE__:
                break
    assert list(power_representation(20, 2, 4, True)) == \
        [(1, 1, 3, 3), (0, 0, 2, 4)]
    raises(ValueError, lambda: list(power_representation(1.2, 2, 2)))
    raises(ValueError, lambda: list(power_representation(2, 0, 2)))
    raises(ValueError, lambda: list(power_representation(2, 2, 0)))
    assert list(power_representation(-1, 2, 2)) == []
    assert list(power_representation(1, 1, 1)) == [(1,)]
    assert list(power_representation(3, 2, 1)) == []
    assert list(power_representation(4, 2, 1)) == [(2,)]
    assert list(power_representation(3**4, 4, 6, zeros=True)) == \
        [(1, 2, 2, 2, 2, 2), (0, 0, 0, 0, 0, 3)]
    assert list(power_representation(3**4, 4, 5, zeros=False)) == []
    assert list(power_representation(-2, 3, 2)) == [(-1, -1)]
    assert list(power_representation(-2, 4, 2)) == []
    assert list(power_representation(0, 3, 2, True)) == [(0, 0)]
    assert list(power_representation(0, 3, 2, False)) == []
    # when we are dealing with squares, do feasibility checks
    assert len(list(power_representation(4**10*(8*10 + 7), 2, 3))) == 0
    # there will be a recursion error if these aren't recognized
    big = 2**30
    for i in [13, 10, 7, 5, 4, 2, 1]:
        assert list(sum_of_powers(big, 2, big - i)) == []
StopIteration
dataset/ETHPy150Open sympy/sympy/sympy/solvers/tests/test_diophantine.py/test_power_representation
5,427
def handle_noargs(self, migrate_all=False, **options):
    # Import the 'management' module within each installed app, to register
    # dispatcher events.
    # This is copied from Django, to fix bug #511.
    try:
        from django.utils.importlib import import_module
    except ImportError:
        pass  # TODO: Remove, only for Django1.0
    else:
        for app_name in settings.INSTALLED_APPS:
            try:
                import_module('.management', app_name)
            except __HOLE__ as exc:
                msg = exc.args[0]
                if not msg.startswith('No module named') or 'management' not in msg:
                    raise
    # Work out what uses migrations and so doesn't need syncing
    apps_needing_sync = []
    apps_migrated = []
    for app in models.get_apps():
        app_label = get_app_label(app)
        if migrate_all:
            apps_needing_sync.append(app_label)
        else:
            try:
                migrations = migration.Migrations(app_label)
            except NoMigrations:
                # It needs syncing
                apps_needing_sync.append(app_label)
            else:
                # This is a migrated app, leave it
                apps_migrated.append(app_label)
    verbosity = int(options.get('verbosity', 0))
    # Run syncdb on only the ones needed
    if verbosity:
        print("Syncing...")
    old_installed, settings.INSTALLED_APPS = settings.INSTALLED_APPS, apps_needing_sync
    old_app_store, cache.app_store = cache.app_store, SortedDict([
        (k, v) for (k, v) in cache.app_store.items()
        if get_app_label(k) in apps_needing_sync
    ])
    # This will allow the setting of the MySQL storage engine, for example.
    for db in dbs.values():
        db.connection_init()
    # OK, run the actual syncdb
    syncdb.Command().execute(**options)
    settings.INSTALLED_APPS = old_installed
    cache.app_store = old_app_store
    # Migrate if needed
    if options.get('migrate', True):
        if verbosity:
            print("Migrating...")
        # convert from store_true to store_false
        options['no_initial_data'] = not options.get('load_initial_data', True)
        management.call_command('migrate', **options)
    # Be obvious about what we did
    if verbosity:
        print("\nSynced:\n > %s" % "\n > ".join(apps_needing_sync))
    if options.get('migrate', True):
        if verbosity:
            print("\nMigrated:\n - %s" % "\n - ".join(apps_migrated))
    else:
        if verbosity:
            print("\nNot synced (use migrations):\n - %s" % "\n - ".join(apps_migrated))
            print("(use ./manage.py migrate to migrate these)")
ImportError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/South-1.0.2/south/management/commands/syncdb.py/Command.handle_noargs
5,428
def _safe_log(log_func, msg, msg_data):
    """Sanitizes the msg_data field before logging."""
    SANITIZE = {'set_admin_password': [('args', 'new_pass')],
                'run_instance': [('args', 'admin_password')],
                'route_message': [('args', 'message', 'args', 'method_info',
                                   'method_kwargs', 'password'),
                                  ('args', 'message', 'args', 'method_info',
                                   'method_kwargs', 'admin_password')]}
    has_method = 'method' in msg_data and msg_data['method'] in SANITIZE
    has_context_token = '_context_auth_token' in msg_data
    has_token = 'auth_token' in msg_data
    if not any([has_method, has_context_token, has_token]):
        return log_func(msg, msg_data)
    msg_data = copy.deepcopy(msg_data)
    if has_method:
        for arg in SANITIZE.get(msg_data['method'], []):
            try:
                d = msg_data
                for elem in arg[:-1]:
                    d = d[elem]
                d[arg[-1]] = '<SANITIZED>'
            except __HOLE__ as e:
                LOG.info(_('Failed to sanitize %(item)s. Key error %(err)s'),
                         {'item': arg, 'err': e})
    if has_context_token:
        msg_data['_context_auth_token'] = '<SANITIZED>'
    if has_token:
        msg_data['auth_token'] = '<SANITIZED>'
    return log_func(msg, msg_data)
KeyError
dataset/ETHPy150Open Havate/havate-openstack/proto-build/gui/horizon/Horizon_GUI/openstack_dashboard/openstack/common/rpc/common.py/_safe_log
5,429
def deserialize_remote_exception(conf, data):
    failure = jsonutils.loads(str(data))
    trace = failure.get('tb', [])
    message = failure.get('message', "") + "\n" + "\n".join(trace)
    name = failure.get('class')
    module = failure.get('module')
    # NOTE(ameade): We DO NOT want to allow just any module to be imported, in
    # order to prevent arbitrary code execution.
    if module not in conf.allowed_rpc_exception_modules:
        return RemoteError(name, failure.get('message'), trace)
    try:
        mod = importutils.import_module(module)
        klass = getattr(mod, name)
        if not issubclass(klass, Exception):
            raise TypeError("Can only deserialize Exceptions")
        failure = klass(*failure.get('args', []), **failure.get('kwargs', {}))
    except (__HOLE__, TypeError, ImportError):
        return RemoteError(name, failure.get('message'), trace)
    ex_type = type(failure)
    str_override = lambda self: message
    new_ex_type = type(ex_type.__name__ + "_Remote", (ex_type,),
                       {'__str__': str_override, '__unicode__': str_override})
    try:
        # NOTE(ameade): Dynamically create a new exception type and swap it in
        # as the new type for the exception. This only works on user defined
        # Exceptions and not core python exceptions. This is important because
        # we cannot necessarily change an exception message so we must override
        # the __str__ method.
        failure.__class__ = new_ex_type
    except TypeError:
        # NOTE(ameade): If a core exception then just add the traceback to the
        # first exception argument.
        failure.args = (message,) + failure.args[1:]
    return failure
AttributeError
dataset/ETHPy150Open Havate/havate-openstack/proto-build/gui/horizon/Horizon_GUI/openstack_dashboard/openstack/common/rpc/common.py/deserialize_remote_exception
5,430
def __getattr__(self, key):
    try:
        return self.values[key]
    except __HOLE__:
        raise AttributeError(key)
KeyError
dataset/ETHPy150Open Havate/havate-openstack/proto-build/gui/horizon/Horizon_GUI/openstack_dashboard/openstack/common/rpc/common.py/CommonRpcContext.__getattr__
5,431
def _get_tag(config):
    """
    Fetch the current deploy file from the repo on the deployment server
    and return the current tag associated with it.

    :param config: Config hash as fetched from get_config.
    :type config: hash
    :rtype: str
    """
    # Fetch the .deploy file from the server and get the current tag
    deployfile = config['url'] + '/.git/deploy/deploy'
    try:
        f = urllib.urlopen(deployfile)
        deployinfo = f.read()
    except __HOLE__:
        return None
    try:
        deployinfo = json.loads(deployinfo)
        tag = deployinfo['tag']
    except (KeyError, ValueError):
        return None
    # tags are user-input and are used in shell commands, ensure they are
    # only passing alphanumeric, dashes, or /.
    if re.search(r'[^a-zA-Z0-9_\-/]', tag):
        return None
    return tag
IOError
dataset/ETHPy150Open trebuchet-deploy/trebuchet/modules/deploy.py/_get_tag
5,432
def testGeoIP(self):
    try:
        import GeoIP
    except __HOLE__:
        print >> sys.stderr, "GeoIP Python package not available - skipping geoip unittest."
        return
    output = [(geocode, ip, line) for geocode, ip, line in geoip(fh=self.fh, **self.options)]
    self.assertEquals(len(output), 2, "Output size was different than expected: %s" % str(len(output)))
ImportError
dataset/ETHPy150Open adamhadani/logtools/logtools/test/test_logtools.py/GeoIPTestCase.testGeoIP
5,433
def testFilter(self):
    """Test GeoIP filtering functionality"""
    try:
        import GeoIP
    except __HOLE__:
        print >> sys.stderr, "GeoIP Python package not available - skipping geoip unittest."
        return
    # Check positive filter
    self.options['filter'] = 'United States'
    output = [(geocode, ip, line) for geocode, ip, line in geoip(fh=self.fh, **self.options)]
    self.assertEquals(len(output), 2, "Output size was different than expected: %s" % str(len(output)))
    # Check negative filter
    self.options['filter'] = 'India'
    output = [(geocode, ip, line) for geocode, ip, line in geoip(fh=self.fh, **self.options)]
    self.assertEquals(len(output), 0, "Output size was different than expected: %s" % str(len(output)))
ImportError
dataset/ETHPy150Open adamhadani/logtools/logtools/test/test_logtools.py/GeoIPTestCase.testFilter
5,434
def testGChart(self):
    try:
        import pygooglechart
    except __HOLE__:
        print >> sys.stderr, "pygooglechart Python package not available - skipping logplot gchart unittest."
        return
    options = AttrDict({
        'backend': 'gchart',
        'output': False,
        'limit': 10,
        'field': 1,
        'delimiter': ' ',
        'legend': True,
        'width': 600,
        'height': 300
    })
    chart = None
    for plot_type in ('pie', 'line'):
        self.fh.seek(0)
        options['type'] = plot_type
        chart = logplot(options, None, self.fh)
        self.assertNotEquals(chart, None, "logplot returned None. Expected a Plot object")
    # Should raise ValueError here due to fh being at EOF
    self.assertRaises(ValueError, logplot, options, None, self.fh)
    tmp_fh, tmp_fname = mkstemp()
    chart.download(tmp_fname)
    os.remove(tmp_fname)
ImportError
dataset/ETHPy150Open adamhadani/logtools/logtools/test/test_logtools.py/PlotTestCase.testGChart
5,435
def absent(name):
    '''
    Ensures that the user group does not exist, eventually delete user group.

    .. versionadded:: 2016.3.0

    :param name: name of the user group
    :param _connection_user: Optional - zabbix user (can also be set in opts or pillar, see module's docstring)
    :param _connection_password: Optional - zabbix password (can also be set in opts or pillar, see module's docstring)
    :param _connection_url: Optional - url of zabbix frontend (can also be set in opts, pillar, see module's docstring)

    .. code-block:: yaml

        delete_thai_monks_usrgrp:
            zabbix_usergroup.absent:
                - name: 'Thai monks'
    '''
    ret = {'name': name, 'changes': {}, 'result': False, 'comment': ''}

    # Comment and change messages
    comment_usergroup_deleted = 'User group {0} deleted.'.format(name)
    comment_usergroup_notdeleted = 'Unable to delete user group: {0}. '.format(name)
    comment_usergroup_notexists = 'User group {0} does not exist.'.format(name)
    changes_usergroup_deleted = {name: {'old': 'User group {0} exists.'.format(name),
                                        'new': 'User group {0} deleted.'.format(name),
                                        }
                                 }
    usergroup_exists = __salt__['zabbix.usergroup_exists'](name)

    # Dry run, test=true mode
    if __opts__['test']:
        if not usergroup_exists:
            ret['result'] = True
            ret['comment'] = comment_usergroup_notexists
        else:
            ret['result'] = None
            ret['comment'] = comment_usergroup_deleted
        return ret

    usergroup_get = __salt__['zabbix.usergroup_get'](name)

    if not usergroup_get:
        ret['result'] = True
        ret['comment'] = comment_usergroup_notexists
    else:
        try:
            usrgrpid = usergroup_get[0]['usrgrpid']
            usergroup_delete = __salt__['zabbix.usergroup_delete'](usrgrpid)
        except __HOLE__:
            usergroup_delete = False

        if usergroup_delete and 'error' not in usergroup_delete:
            ret['result'] = True
            ret['comment'] = comment_usergroup_deleted
            ret['changes'] = changes_usergroup_deleted
        else:
            ret['result'] = False
            ret['comment'] = comment_usergroup_notdeleted + str(usergroup_delete['error'])

    return ret
KeyError
dataset/ETHPy150Open saltstack/salt/salt/states/zabbix_usergroup.py/absent
5,436
def render(self, context):
    try:
        request = template.resolve_variable(self.request, context)
        obj = template.resolve_variable(self.obj, context)
        field = getattr(obj, self.field_name)
    except (template.VariableDoesNotExist, __HOLE__):
        return ''
    try:
        vote = field.get_rating_for_user(request.user, request.META['REMOTE_ADDR'], request.COOKIES)
        context[self.context_var] = vote
    except ObjectDoesNotExist:
        context[self.context_var] = 0
    return ''
AttributeError
dataset/ETHPy150Open dcramer/django-ratings/djangoratings/templatetags/ratings.py/RatingByRequestNode.render
5,437
def render(self, context):
    try:
        user = template.resolve_variable(self.request, context)
        obj = template.resolve_variable(self.obj, context)
        field = getattr(obj, self.field_name)
    except template.VariableDoesNotExist:
        return ''
    try:
        vote = field.get_rating_for_user(user)
        context[self.context_var] = vote
    except __HOLE__:
        context[self.context_var] = 0
    return ''
ObjectDoesNotExist
dataset/ETHPy150Open dcramer/django-ratings/djangoratings/templatetags/ratings.py/RatingByUserNode.render
5,438
def checkAuth(ip, port, title, version):
    """
    """
    if title == TINTERFACES.MAN:
        url = "http://{0}:{1}/manager/html".format(ip, port)
        # check with given auth
        if state.usr_auth:
            (usr, pswd) = state.usr_auth.split(":")
            return _auth(usr, pswd, url)
        # else try default credentials
        for (usr, pswd) in default_credentials:
            cook = _auth(usr, pswd, url)
            if cook:
                return cook
        # if we're still here, check if they supplied a wordlist
        if state.bf_wordlist and not state.hasbf:
            state.hasbf = True
            wordlist = []
            with open(state.bf_wordlist, "r") as f:
                wordlist = [x.decode("ascii", "ignore").rstrip() for x in f.readlines()]
            utility.Msg("Brute forcing %s account with %d passwords..." %
                        (state.bf_user, len(wordlist)), LOG.DEBUG)
            try:
                for (idx, word) in enumerate(wordlist):
                    stdout.flush()
                    stdout.write("\r\033[32m [%s] Brute forcing password for %s [%d/%d]\033[0m"
                                 % (utility.timestamp(), state.bf_user, idx+1, len(wordlist)))
                    cook = _auth(state.bf_user, word, url)
                    if cook:
                        print ''
                        # lets insert these credentials to the default list so we
                        # don't need to bruteforce it each time
                        if not (state.bf_user, word) in default_credentials:
                            default_credentials.insert(0, (state.bf_user, word))
                        utility.Msg("Successful login %s:%s" % (state.bf_user, word), LOG.SUCCESS)
                        return cook
                print ''
            except __HOLE__:
                pass
KeyboardInterrupt
dataset/ETHPy150Open hatRiot/clusterd/src/platform/tomcat/authenticate.py/checkAuth
5,439
def __init__(self, workflow):
    super(Step, self).__init__()
    self.workflow = workflow
    cls = self.__class__.__name__
    if not (self.action_class and issubclass(self.action_class, Action)):
        raise AttributeError("You must specify an action for %s." % cls)
    self.slug = self.action_class.slug
    self.name = self.action_class.name
    self.permissions = self.action_class.permissions
    self.has_errors = False
    self._handlers = {}
    if self.connections is None:
        # We want a dict, but don't want to declare a mutable type on the
        # class directly.
        self.connections = {}
    # Gather our connection handlers and make sure they exist.
    for key, handlers in self.connections.items():
        self._handlers[key] = []
        # TODO(gabriel): This is a poor substitute for broader handling
        if not isinstance(handlers, (list, tuple)):
            raise TypeError("The connection handlers for %s must be a "
                            "list or tuple." % cls)
        for possible_handler in handlers:
            if callable(possible_handler):
                # If it's callable we know the function exists and is valid
                self._handlers[key].append(possible_handler)
                continue
            elif not isinstance(possible_handler, basestring):
                return TypeError("Connection handlers must be either "
                                 "callables or strings.")
            bits = possible_handler.split(".")
            if bits[0] == "self":
                root = self
                for bit in bits[1:]:
                    try:
                        root = getattr(root, bit)
                    except AttributeError:
                        raise AttributeError("The connection handler %s "
                                             "could not be found on %s."
                                             % (possible_handler, cls))
                handler = root
            elif len(bits) == 1:
                # Import by name from local module not supported
                raise ValueError("Importing a local function as a string "
                                 "is not supported for the connection "
                                 "handler %s on %s."
                                 % (possible_handler, cls))
            else:
                # Try a general import
                module_name = ".".join(bits[:-1])
                try:
                    mod = import_module(module_name)
                    handler = getattr(mod, bits[-1])
                except __HOLE__:
                    raise ImportError("Could not import %s from the "
                                      "module %s as a connection "
                                      "handler on %s."
                                      % (bits[-1], module_name, cls))
                except AttributeError:
                    raise AttributeError("Could not import %s from the "
                                         "module %s as a connection "
                                         "handler on %s."
                                         % (bits[-1], module_name, cls))
            self._handlers[key].append(handler)
ImportError
dataset/ETHPy150Open Havate/havate-openstack/proto-build/gui/horizon/Horizon_GUI/horizon/workflows/base.py/Step.__init__
5,440
def _order_steps(self):
    steps = list(copy.copy(self.default_steps))
    additional = self._registry.keys()
    for step in additional:
        try:
            min_pos = steps.index(step.after)
        except __HOLE__:
            min_pos = 0
        try:
            max_pos = steps.index(step.before)
        except ValueError:
            max_pos = len(steps)
        if min_pos > max_pos:
            raise exceptions.WorkflowError("The step %(new)s can't be "
                                           "placed between the steps "
                                           "%(after)s and %(before)s; the "
                                           "step %(before)s comes before "
                                           "%(after)s."
                                           % {"new": additional,
                                              "after": step.after,
                                              "before": step.before})
        steps.insert(max_pos, step)
    return steps
ValueError
dataset/ETHPy150Open Havate/havate-openstack/proto-build/gui/horizon/Horizon_GUI/horizon/workflows/base.py/Workflow._order_steps
5,441
@classmethod
def unregister(cls, step_class):
    """Unregisters a :class:`~horizon.workflows.Step` from the workflow.
    """
    try:
        cls._cls_registry.remove(step_class)
    except __HOLE__:
        raise base.NotRegistered('%s is not registered' % cls)
    return cls._unregister(step_class)
KeyError
dataset/ETHPy150Open Havate/havate-openstack/proto-build/gui/horizon/Horizon_GUI/horizon/workflows/base.py/Workflow.unregister
5,442
def add_resource_path_alias(self, alias_resource_path, existing_resource_path):
    """Add resource path alias.

    Once added, request to alias_resource_path would be handled by
    handler registered for existing_resource_path.

    Args:
        alias_resource_path: alias resource path
        existing_resource_path: existing resource path
    """
    try:
        handler_suite = self._handler_suite_map[existing_resource_path]
        self._handler_suite_map[alias_resource_path] = handler_suite
    except __HOLE__:
        raise DispatchException('No handler for: %r' % existing_resource_path)
KeyError
dataset/ETHPy150Open google/pywebsocket/mod_pywebsocket/dispatch.py/Dispatcher.add_resource_path_alias
5,443
@property
def json(self):
    """
    Return deserialized JSON body, if one included in the output
    and is parseable.
    """
    if not hasattr(self, '_json'):
        self._json = None
        # De-serialize JSON body if possible.
        if COLOR in self:
            # Colorized output cannot be parsed.
            pass
        elif self.strip().startswith('{'):
            # Looks like JSON body.
            self._json = json.loads(self)
        elif (self.count('Content-Type:') == 1
              and 'application/json' in self):
            # Looks like a whole JSON HTTP message,
            # try to extract its body.
            try:
                j = self.strip()[self.strip().rindex('\r\n\r\n'):]
            except ValueError:
                pass
            else:
                try:
                    self._json = json.loads(j)
                except __HOLE__:
                    pass
    return self._json
ValueError
dataset/ETHPy150Open jkbrzt/httpie/tests/utils.py/StrCLIResponse.json
5,444
def http(*args, **kwargs):
    # noinspection PyUnresolvedReferences
    """
    Run HTTPie and capture stderr/out and exit status.

    Invoke `httpie.core.main()` with `args` and `kwargs`,
    and return a `CLIResponse` subclass instance.

    The return value is either a `StrCLIResponse`, or `BytesCLIResponse`
    if unable to decode the output.

    The response has the following attributes:

        `stdout` is represented by the instance itself (print r)
        `stderr`: text written to stderr
        `exit_status`: the exit status
        `json`: decoded JSON (if possible) or `None`

    Exceptions are propagated.

    If you pass ``error_exit_ok=True``, then error exit statuses
    won't result into an exception.

    Example:

    $ http --auth=user:password GET httpbin.org/basic-auth/user/password

        >>> httpbin = getfixture('httpbin')
        >>> r = http('-a', 'user:pw', httpbin.url + '/basic-auth/user/pw')
        >>> type(r) == StrCLIResponse
        True
        >>> r.exit_status
        0
        >>> r.stderr
        ''
        >>> 'HTTP/1.1 200 OK' in r
        True
        >>> r.json == {'authenticated': True, 'user': 'user'}
        True

    """
    error_exit_ok = kwargs.pop('error_exit_ok', False)
    env = kwargs.get('env')
    if not env:
        env = kwargs['env'] = TestEnvironment()
    stdout = env.stdout
    stderr = env.stderr
    args = list(args)
    args_with_config_defaults = args + env.config.default_options
    add_to_args = []
    if '--debug' not in args_with_config_defaults:
        if '--traceback' not in args_with_config_defaults:
            add_to_args.append('--traceback')
        if not any('--timeout' in arg for arg in args_with_config_defaults):
            add_to_args.append('--timeout=3')
    args = add_to_args + args

    def dump_stderr():
        stderr.seek(0)
        sys.stderr.write(stderr.read())

    try:
        try:
            exit_status = main(args=args, **kwargs)
            if '--download' in args:
                # Let the progress reporter thread finish.
                time.sleep(.5)
        except __HOLE__:
            if error_exit_ok:
                exit_status = ExitStatus.ERROR
            else:
                dump_stderr()
                raise
        except Exception:
            stderr.seek(0)
            sys.stderr.write(stderr.read())
            raise
        else:
            if not error_exit_ok and exit_status != ExitStatus.OK:
                dump_stderr()
                raise ExitStatusError(
                    'httpie.core.main() unexpectedly returned'
                    ' a non-zero exit status: {0} ({1})'.format(
                        exit_status,
                        EXIT_STATUS_LABELS[exit_status]
                    )
                )
        stdout.seek(0)
        stderr.seek(0)
        output = stdout.read()
        try:
            output = output.decode('utf8')
        except UnicodeDecodeError:
            # noinspection PyArgumentList
            r = BytesCLIResponse(output)
        else:
            # noinspection PyArgumentList
            r = StrCLIResponse(output)
        r.stderr = stderr.read()
        r.exit_status = exit_status
        if r.exit_status != ExitStatus.OK:
            sys.stderr.write(r.stderr)
        return r
    finally:
        stdout.close()
        stderr.close()
        env.cleanup()
SystemExit
dataset/ETHPy150Open jkbrzt/httpie/tests/utils.py/http
5,445
def parse(self, img_info_output):
    """Returns dictionary based on human-readable output from `qemu-img info`

    Known problem: breaks if path contains opening parenthesis `(` or
    colon `:`"""
    result = {}
    for l in img_info_output.split('\n'):
        if not l.strip():
            continue
        try:
            name, value = l.split(':', 1)
        except __HOLE__:
            continue
        name = name.strip()
        if name == 'backing file':
            file_end = value.find('(')
            if file_end == -1:
                file_end = len(value)
            result['backing-filename'] = value[:file_end].strip()
        elif name == 'file format':
            result['format'] = value.strip()
    return result
ValueError
dataset/ETHPy150Open MirantisWorkloadMobility/CloudFerry/cloudferry/lib/utils/qemu_img.py/TextQemuImgInfoParser.parse
5,446
def parse(self, img_info_output):
    try:
        return json.loads(img_info_output)
    except __HOLE__:
        LOG.debug('Unable to convert json data: %s', img_info_output)
        return {}
TypeError
dataset/ETHPy150Open MirantisWorkloadMobility/CloudFerry/cloudferry/lib/utils/qemu_img.py/JsonQemuImgInfoParser.parse
5,447
def get_auth_url():
    '''
    Try and get the URL from the config, else return localhost
    '''
    try:
        return __opts__['keystone.auth_url']
    except __HOLE__:
        return 'http://localhost:35357/v2.0'
KeyError
dataset/ETHPy150Open saltstack/salt/salt/auth/keystone.py/get_auth_url
5,448
def add(self, read):
    if self.tag in ['LENGTH', 'LEN']:
        val = len(read.seq)
    elif self.tag == 'MAPQ':
        val = read.mapq
    elif self.tag == 'MISMATCH':
        val = read_calc_mismatches(read)
    else:
        try:
            val = read.opt(self.tag)
        except __HOLE__:
            self.missing += 1
            return
    if not val in self.bins:
        self.bins[val] = 0
        self._keys.append(val)
    self.bins[val] += 1
    if not self._min or self._min > val:
        self._min = val
    if not self._max or self._max < val:
        self._max = val
KeyError
dataset/ETHPy150Open ngsutils/ngsutils/ngsutils/bam/stats.py/FeatureBin.add
5,449
def __init__(self, bamfile, gtf=None, region=None, delim=None, tags=[], show_all=False):
    regiontagger = None
    flag_counts = FlagCounts()
    ref = None
    start = None
    end = None
    if gtf:
        regiontagger = RegionTagger(gtf, bamfile.references, only_first_fragment=True)
    if region:
        ref, startend = region.rsplit(':', 1)
        if '-' in startend:
            start, end = [int(x) for x in startend.split('-')]
            start = start - 1
            sys.stderr.write('Region: %s:%s-%s\n' % (ref, start + 1, end))
        else:
            start = int(startend) - 1
            end = int(startend)
            sys.stderr.write('Region: %s:%s\n' % (ref, start + 1))
    total = 0
    mapped = 0
    unmapped = 0
    tlen_counts = {}
    names = set()
    refs = {}
    tagbins = {}
    for tag in tags:
        tagbins[tag] = FeatureBin(tag)
    for rname in bamfile.references:
        if delim:
            refs[rname.split(delim)[0]] = 0
        else:
            refs[rname] = 0

    # setup region or whole-file readers
    def _foo1():
        for read in bamfile.fetch(ref, start, end):
            yield read

    def _foo2():
        for read in bam_iter(bamfile):
            yield read

    if region:
        read_gen = _foo1
    else:
        read_gen = _foo2
    has_ih = True
    has_nh = True
    try:
        for read in read_gen():
            if not show_all and read.is_paired and not read.is_read1:
                # only operate on the first fragment
                continue
            try:
                if has_ih and read.opt('IH') > 1:
                    if read.qname in names:
                        # reads only count once for this...
                        continue
                    names.add(read.qname)
            except KeyError:
                if not read.is_unmapped:
                    has_ih = False
                # missing IH tag - ignore
                pass
            try:
                if has_nh and read.opt('NH') > 1:
                    if read.qname in names:
                        # reads only count once for this...
                        continue
                    names.add(read.qname)
            except __HOLE__:
                if not read.is_unmapped:
                    has_nh = False
                # missing NH tag - ignore
                pass
            flag_counts.add(read.flag)
            total += 1
            if read.is_unmapped:
                unmapped += 1
                continue
            mapped += 1
            if read.is_proper_pair and read.tid == read.mrnm:
                # we don't care about reads that don't map to the same reference
                # note: this doesn't work for RNA mapped to a reference genome...
                # for RNA, you'd need to map to a transcript library (refseq) to get
                # an accurate template length
                #
                # just skipping 'N' cigar values won't cut it either... since the pairs
                # will likely silently span a gap.
                if read.is_reverse:
                    k = -read.tlen
                else:
                    k = read.tlen
                if not k in tlen_counts:
                    tlen_counts[k] = 1
                else:
                    tlen_counts[k] += 1
            if delim:
                refs[bamfile.getrname(read.rname).split(delim)[0]] += 1
            else:
                refs[bamfile.getrname(read.rname)] += 1
            if regiontagger:
                regiontagger.add_read(read, bamfile.getrname(read.rname))
            for tag in tagbins:
                tagbins[tag].add(read)
    except KeyboardInterrupt:
        sys.stderr.write('*** Interrupted - displaying stats up to this point! ***\n\n')
    self.total = total
    self.mapped = mapped
    self.unmapped = unmapped
    self.flag_counts = flag_counts
    self.tagbins = tagbins
    self.refs = refs
    self.regiontagger = regiontagger
    self.tlen_counts = tlen_counts
KeyError
dataset/ETHPy150Open ngsutils/ngsutils/ngsutils/bam/stats.py/BamStats.__init__
5,450
def bam_stats(infiles, gtf_file=None, region=None, delim=None, tags=[], show_all=False, fillin_stats=True):
    if gtf_file:
        gtf = GTF(gtf_file)
    else:
        gtf = None
    sys.stderr.write('Calculating Read stats...\n')
    stats = [BamStats(bam_open(x), gtf, region, delim, tags, show_all=show_all) for x in infiles]
    sys.stdout.write('\t')
    for fname, stat in zip(infiles, stats):
        sys.stdout.write('%s\t\t' % fname)
    sys.stdout.write('\n')
    sys.stdout.write('Reads:\t')
    for stat in stats:
        sys.stdout.write('%s\t\t' % stat.total)
    sys.stdout.write('\n')
    sys.stdout.write('Mapped:\t')
    for stat in stats:
        sys.stdout.write('%s\t\t' % stat.mapped)
    sys.stdout.write('\n')
    sys.stdout.write('Unmapped:\t')
    for stat in stats:
        sys.stdout.write('%s\t\t' % stat.unmapped)
    sys.stdout.write('\n')
    sys.stdout.write('\nFlag distribution\n')
    validflags = set()
    maxsize = 0
    for flag in flag_descriptions:
        for stat in stats:
            if stat.flag_counts.counts[flag] > 0:
                validflags.add(flag)
                maxsize = max(maxsize, len(flag_descriptions[flag]))
    for flag in sorted(validflags):
        sys.stdout.write("[0x%03x] %-*s" % (flag, maxsize, flag_descriptions[flag]))
        for stat in stats:
            sys.stdout.write('\t%s\t%0.2f%%' % (stat.flag_counts.counts[flag], (float(stat.flag_counts.counts[flag]) * 100 / stat.total)))
        sys.stdout.write('\n')
    sys.stdout.write('\n')
    if stats[0].tlen_counts:
        sys.stdout.write('Template length:')
        for stat in stats:
            mean, stdev = counts_mean_stdev(stat.tlen_counts)
            sys.stdout.write('\t%0.2f\t+/- %0.2f' % (mean, stdev))
        sys.stdout.write('\n')
    sys.stdout.write('\n')
    stat_tags = {}
    for tag in stats[0].tagbins:
        stat_tags[tag] = []
        for stat in stats:
            stat_tags[tag].append(stat.tagbins[tag])
    for tag in stat_tags:
        asc = stats[0].tagbins[tag].asc
        sys.stdout.write("Ave %s:" % tag)
        for i, tagbin in enumerate(stat_tags[tag]):
            sys.stdout.write('\t%s' % tagbin.mean)
            if i != len(stats):
                sys.stdout.write('\t')
        sys.stdout.write('\n')
        sys.stdout.write("Max %s:" % tag)
        for i, tagbin in enumerate(stat_tags[tag]):
            sys.stdout.write('\t%s' % tagbin.max)
            if i != len(stats):
                sys.stdout.write('\t')
        sys.stdout.write('\n')
        sys.stdout.write('%s distribution:\n' % tag)
        gens = []
        gen_vals = []
        last_pcts = []
        for stat in stats:
            gens.append(stat.distribution_gen(tag))
            gen_vals.append(None)
            last_pcts.append(0.0)
        good = True
        last = None
        while good:
            good = False
            for i, stat in enumerate(stats):
                if not gen_vals[i]:
                    try:
                        gen_vals[i] = gens[i].next()
                    except __HOLE__:
                        pass
            vals = [tup[0] for tup in gen_vals if tup]
            if not vals:
                continue
            if asc:
                minval = min(vals)
            else:
                minval = max(vals)
            if last and type(last) == int and fillin_stats:
                if asc:
                    last += 1
                    # fill in missing values
                    while last < minval:
                        sys.stdout.write('%s' % last)
                        for i, stat in enumerate(stats):
                            sys.stdout.write('\t0\t%s' % last_pcts[i])
                        sys.stdout.write('\n')
                        last += 1
                else:
                    last -= 1
                    # fill in missing values
                    while last > minval:
                        sys.stdout.write('%s' % last)
                        for i, stat in enumerate(stats):
                            sys.stdout.write('\t0\t%s' % last_pcts[i])
                        sys.stdout.write('\n')
                        last -= 1
            last = minval
            sys.stdout.write(str(minval))
            for i, tup in enumerate(gen_vals):
                if tup and tup[0] == minval:
                    sys.stdout.write('\t%s\t%s' % (tup[1], tup[2]))
                    last_pcts[i] = tup[2]
                    gen_vals[i] = None
                    good = True
                else:
                    sys.stdout.write('\t0\t%s' % (last_pcts[i]))
            sys.stdout.write('\n')
        sys.stdout.write('\n')
    sys.stdout.write('Reference counts')
    for stat in stats:
        sys.stdout.write('\tcount\t')
    sys.stdout.write('\n')
    for k in sorted([x for x in stats[0].refs]):
        sys.stdout.write('%s' % k)
        for stat in stats:
            sys.stdout.write('\t%s\t' % stat.refs[k])
        sys.stdout.write('\n')
    if gtf_file:
        sys.stdout.write('Mapping regions')
        for stat in stats:
            sys.stdout.write('\tcount\tCPM')
        sys.stdout.write('\n')
        sorted_keys = [x for x in stats[0].regiontagger.counts]
        sorted_keys.sort()
        for k in sorted_keys:
            sys.stdout.write('%s' % k)
            for stat in stats:
                sys.stdout.write('\t%s\t%s' % (stat.regiontagger.counts[k], float(stat.regiontagger.counts[k]) / stat.mapped / 1000000))
            sys.stdout.write('\n')
StopIteration
dataset/ETHPy150Open ngsutils/ngsutils/ngsutils/bam/stats.py/bam_stats
5,451
def mkdir(path):
    try:
        os.mkdir(path)
    except __HOLE__:
        pass
OSError
dataset/ETHPy150Open lisa-lab/pylearn2/doc/scripts/docgen.py/mkdir
5,452
def poll_results_check(self):
    """Check the polling results by checking to see if the stats queue is
    empty. If it is not, try and collect stats. If it is set a timer to
    call ourselves in _POLL_RESULTS_INTERVAL.
    """
    LOGGER.debug('Checking for poll results')
    while True:
        try:
            stats = self.stats_queue.get(False)
        except queue.Empty:
            break
        try:
            self.poll_data['processes'].remove(stats['name'])
        except __HOLE__:
            pass
        self.collect_results(stats)
    if self.poll_data['processes']:
        LOGGER.warning('Did not receive results from %r',
                       self.poll_data['processes'])
ValueError
dataset/ETHPy150Open gmr/rejected/rejected/mcp.py/MasterControlProgram.poll_results_check
5,453
def remove_consumer_process(self, consumer, name):
    """Remove all details for the specified consumer and process name.

    :param str consumer: The consumer name
    :param str name: The process name
    """
    my_pid = os.getpid()
    for conn in self.consumers[consumer].connections:
        if name in self.consumers[consumer].connections[conn]:
            self.consumers[consumer].connections[conn].remove(name)
    if name in self.consumers[consumer].processes:
        child = self.consumers[consumer].processes[name]
        if child.is_alive():
            if child.pid != my_pid:
                try:
                    child.terminate()
                except __HOLE__:
                    pass
            else:
                LOGGER.debug('Child has my pid? %r, %r', my_pid, child.pid)
        del self.consumers[consumer].processes[name]
OSError
dataset/ETHPy150Open gmr/rejected/rejected/mcp.py/MasterControlProgram.remove_consumer_process
5,454
def stop_processes(self):
    """Iterate through all of the consumer processes shutting them down."""
    self.set_state(self.STATE_SHUTTING_DOWN)
    LOGGER.info('Stopping consumer processes')
    signal.signal(signal.SIGABRT, signal.SIG_IGN)
    signal.signal(signal.SIGALRM, signal.SIG_IGN)
    signal.signal(signal.SIGCHLD, signal.SIG_IGN)
    signal.signal(signal.SIGPROF, signal.SIG_IGN)
    signal.setitimer(signal.ITIMER_REAL, 0, 0)
    # Send SIGABRT
    LOGGER.info('Sending SIGABRT to active children')
    for proc in multiprocessing.active_children():
        if int(proc.pid) != os.getpid():
            os.kill(int(proc.pid), signal.SIGABRT)
    # Wait for them to finish up to MAX_SHUTDOWN_WAIT
    iterations = 0
    processes = self.total_process_count
    while processes:
        LOGGER.info('Waiting on %i active processes to shut down', processes)
        try:
            time.sleep(0.5)
        except __HOLE__:
            LOGGER.info('Caught CTRL-C, Killing Children')
            self.kill_processes()
            self.set_state(self.STATE_STOPPED)
            return
        iterations += 1
        if iterations == self.MAX_SHUTDOWN_WAIT:
            self.kill_processes()
            break
        processes = self.total_process_count
    LOGGER.debug('All consumer processes stopped')
    self.set_state(self.STATE_STOPPED)
KeyboardInterrupt
dataset/ETHPy150Open gmr/rejected/rejected/mcp.py/MasterControlProgram.stop_processes
5,455
@staticmethod
def parse(value):
    """
    Parse ``Accept-*`` style header.

    Return iterator of ``(value, quality)`` pairs.
    ``quality`` defaults to 1.
    """
    for match in part_re.finditer(','+value):
        name = match.group(1)
        if name == 'q':
            continue
        quality = match.group(2) or ''
        if quality:
            try:
                quality = max(min(float(quality), 1), 0)
                yield (name, quality)
                continue
            except __HOLE__:
                pass
        yield (name, 1)
ValueError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/webob-1.1.1/webob/acceptparse.py/Accept.parse
5,456
@staticmethod
def parse(value):
    for mask, q in Accept.parse(value):
        try:
            mask_major, mask_minor = mask.split('/')
        except __HOLE__:
            continue
        if mask_major == '*' and mask_minor != '*':
            continue
        yield (mask, q)
ValueError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/webob-1.1.1/webob/acceptparse.py/MIMEAccept.parse
5,457
def collect_error_snapshots():
    """Scheduled task to collect error snapshots from files and push into
    Error Snapshot table"""
    if frappe.conf.disable_error_snapshot:
        return
    try:
        path = get_error_snapshot_path()
        if not os.path.exists(path):
            return
        for fname in os.listdir(path):
            fullpath = os.path.join(path, fname)
            try:
                with open(fullpath, 'rb') as filedata:
                    data = json.load(filedata)
            except __HOLE__:
                # empty file
                os.remove(fullpath)
                continue
            for field in ['locals', 'exception', 'frames']:
                data[field] = frappe.as_json(data[field])
            doc = frappe.new_doc('Error Snapshot')
            doc.update(data)
            doc.save()
            frappe.db.commit()
            os.remove(fullpath)
        clear_old_snapshots()
    except Exception as e:
        make_error_snapshot(e)
        # prevent creation of unlimited error snapshots
        raise
ValueError
dataset/ETHPy150Open frappe/frappe/frappe/utils/error.py/collect_error_snapshots
5,458
@responses.activate
def test_retrier_does_not_catch_unwanted_exception(self):
    # Prepare
    client = HubstorageClient(auth=self.auth, endpoint=self.endpoint, max_retries=2, max_retry_time=1)
    job_metadata = {'project': self.projectid, 'spider': self.spidername, 'state': 'pending'}
    callback, attempts_count = self.make_request_callback(3, job_metadata, http_error_status=403)
    self.mock_api(callback=callback)
    # Act
    job, metadata, err = None, None, None
    try:
        job = client.get_job('%s/%s/%s' % (self.projectid, self.spiderid, 42))
        metadata = dict(job.metadata)
    except __HOLE__ as e:
        err = e
    # Assert
    self.assertIsNone(metadata)
    self.assertIsNotNone(err)
    self.assertEqual(err.response.status_code, 403)
    self.assertEqual(attempts_count[0], 1)
HTTPError
dataset/ETHPy150Open scrapinghub/python-hubstorage/tests/test_retry.py/RetryTest.test_retrier_does_not_catch_unwanted_exception
5,459
@responses.activate
def test_api_delete_can_be_set_to_non_idempotent(self):
    # Prepare
    client = HubstorageClient(auth=self.auth, endpoint=self.endpoint, max_retries=3, max_retry_time=1)
    job_metadata = {'project': self.projectid, 'spider': self.spidername, 'state': 'pending'}
    callback_delete, attempts_count_delete = self.make_request_callback(2, job_metadata)
    self.mock_api(method=DELETE, callback=callback_delete)
    # Act
    job = client.get_job('%s/%s/%s' % (self.projectid, self.spiderid, 42))
    err = None
    try:
        job.metadata.apidelete('/my/non/idempotent/delete/', is_idempotent=False)
    except __HOLE__ as e:
        err = e
    # Assert
    self.assertEqual(attempts_count_delete[0], 1)
    self.assertIsNotNone(err)
HTTPError
dataset/ETHPy150Open scrapinghub/python-hubstorage/tests/test_retry.py/RetryTest.test_api_delete_can_be_set_to_non_idempotent
5,460
@responses.activate
def test_push_job_does_not_retry(self):
    # Prepare
    client = HubstorageClient(auth=self.auth, endpoint=self.endpoint, max_retries=3)
    callback, attempts_count = self.make_request_callback(2, {'key': '1/2/3'})
    self.mock_api(POST, callback=callback)
    # Act
    job, err = None, None
    try:
        job = client.push_job(self.projectid, self.spidername)
    except __HOLE__ as e:
        err = e
    # Assert
    self.assertIsNone(job)
    self.assertIsNotNone(err)
    self.assertEqual(err.response.status_code, 504)
    self.assertEqual(attempts_count[0], 1)
HTTPError
dataset/ETHPy150Open scrapinghub/python-hubstorage/tests/test_retry.py/RetryTest.test_push_job_does_not_retry
5,461
@responses.activate
def test_get_job_does_fails_if_no_retries(self):
    # Prepare
    client = HubstorageClient(auth=self.auth, endpoint=self.endpoint, max_retries=0)
    job_metadata = {'project': self.projectid, 'spider': self.spidername, 'state': 'pending'}
    callback, attempts_count = self.make_request_callback(2, job_metadata)
    self.mock_api(callback=callback)
    # Act
    job, metadata, err = None, None, None
    try:
        job = client.get_job('%s/%s/%s' % (self.projectid, self.spiderid, 42))
        metadata = dict(job.metadata)
    except __HOLE__ as e:
        err = e
    # Assert
    self.assertIsNone(metadata)
    self.assertIsNotNone(err)
    self.assertEqual(err.response.status_code, 504)
    self.assertEqual(attempts_count[0], 1)
HTTPError
dataset/ETHPy150Open scrapinghub/python-hubstorage/tests/test_retry.py/RetryTest.test_get_job_does_fails_if_no_retries
5,462
@responses.activate
def test_get_job_does_fails_on_too_many_retries(self):
    # Prepare
    client = HubstorageClient(auth=self.auth, endpoint=self.endpoint, max_retries=2, max_retry_time=1)
    job_metadata = {'project': self.projectid, 'spider': self.spidername, 'state': 'pending'}
    callback, attempts_count = self.make_request_callback(3, job_metadata)
    self.mock_api(callback=callback)
    # Act
    job, metadata, err = None, None, None
    try:
        job = client.get_job('%s/%s/%s' % (self.projectid, self.spiderid, 42))
        metadata = dict(job.metadata)
    except __HOLE__ as e:
        err = e
    # Assert
    self.assertIsNone(metadata)
    self.assertIsNotNone(err)
    self.assertEqual(err.response.status_code, 504)
    self.assertEqual(attempts_count[0], 3)
HTTPError
dataset/ETHPy150Open scrapinghub/python-hubstorage/tests/test_retry.py/RetryTest.test_get_job_does_fails_on_too_many_retries
5,463
def import_module(name, deprecated=False):
    """Import and return the module to be tested, raising SkipTest if
    it is not available.

    If deprecated is True, any module or package deprecation messages
    will be suppressed."""
    with _ignore_deprecated_imports(deprecated):
        try:
            return importlib.import_module(name)
        except __HOLE__ as msg:
            raise unittest.SkipTest(str(msg))
ImportError
dataset/ETHPy150Open OpenCobolIDE/OpenCobolIDE/open_cobol_ide/extlibs/future/backports/test/support.py/import_module
5,464
def _save_and_block_module(name, orig_modules):
    """Helper function to save and block a module in sys.modules

    Return True if the module was in sys.modules, False otherwise.
    """
    saved = True
    try:
        orig_modules[name] = sys.modules[name]
    except __HOLE__:
        saved = False
    sys.modules[name] = None
    return saved
KeyError
dataset/ETHPy150Open OpenCobolIDE/OpenCobolIDE/open_cobol_ide/extlibs/future/backports/test/support.py/_save_and_block_module
5,465
def import_fresh_module(name, fresh=(), blocked=(), deprecated=False):
    """Import and return a module, deliberately bypassing sys.modules.

    This function imports and returns a fresh copy of the named Python module
    by removing the named module from sys.modules before doing the import.
    Note that unlike reload, the original module is not affected by
    this operation.

    *fresh* is an iterable of additional module names that are also removed
    from the sys.modules cache before doing the import.

    *blocked* is an iterable of module names that are replaced with None
    in the module cache during the import to ensure that attempts to import
    them raise ImportError.

    The named module and any modules named in the *fresh* and *blocked*
    parameters are saved before starting the import and then reinserted into
    sys.modules when the fresh import is complete.

    Module and package deprecation messages are suppressed during this import
    if *deprecated* is True.

    This function will raise ImportError if the named module cannot be
    imported.

    If deprecated is True, any module or package deprecation messages
    will be suppressed.
    """
    # NOTE: test_heapq, test_json and test_warnings include extra sanity checks
    # to make sure that this utility function is working as expected
    with _ignore_deprecated_imports(deprecated):
        # Keep track of modules saved for later restoration as well
        # as those which just need a blocking entry removed
        orig_modules = {}
        names_to_remove = []
        _save_and_remove_module(name, orig_modules)
        try:
            for fresh_name in fresh:
                _save_and_remove_module(fresh_name, orig_modules)
            for blocked_name in blocked:
                if not _save_and_block_module(blocked_name, orig_modules):
                    names_to_remove.append(blocked_name)
            fresh_module = importlib.import_module(name)
        except __HOLE__:
            fresh_module = None
        finally:
            for orig_name, module in orig_modules.items():
                sys.modules[orig_name] = module
            for name_to_remove in names_to_remove:
                del sys.modules[name_to_remove]
        return fresh_module
ImportError
dataset/ETHPy150Open OpenCobolIDE/OpenCobolIDE/open_cobol_ide/extlibs/future/backports/test/support.py/import_fresh_module
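A hedged usage sketch: the canonical pattern in CPython's test suite uses this helper to obtain the pure-Python implementation of a module whose C accelerator would normally shadow it; heapq/_heapq is the classic pairing.

# pure-Python heapq, with the C accelerator blocked
py_heapq = import_fresh_module('heapq', blocked=['_heapq'])
# accelerated heapq, with _heapq freshly re-imported
c_heapq = import_fresh_module('heapq', fresh=['_heapq'])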
5,466
def get_attribute(obj, name):
    """Get an attribute, raising SkipTest if AttributeError is raised."""
    try:
        attribute = getattr(obj, name)
    except __HOLE__:
        raise unittest.SkipTest("object %r has no attribute %r" % (obj, name))
    else:
        return attribute
AttributeError
dataset/ETHPy150Open OpenCobolIDE/OpenCobolIDE/open_cobol_ide/extlibs/future/backports/test/support.py/get_attribute
5,467
def unload(name):
    try:
        del sys.modules[name]
    except __HOLE__:
        pass
KeyError
dataset/ETHPy150Open OpenCobolIDE/OpenCobolIDE/open_cobol_ide/extlibs/future/backports/test/support.py/unload
5,468
def unlink(filename):
    try:
        _unlink(filename)
    except __HOLE__ as error:
        # The filename need not exist.
        if error.errno not in (errno.ENOENT, errno.ENOTDIR):
            raise
OSError
dataset/ETHPy150Open OpenCobolIDE/OpenCobolIDE/open_cobol_ide/extlibs/future/backports/test/support.py/unlink
5,469
def rmdir(dirname):
    try:
        _rmdir(dirname)
    except __HOLE__ as error:
        # The directory need not exist.
        if error.errno != errno.ENOENT:
            raise
OSError
dataset/ETHPy150Open OpenCobolIDE/OpenCobolIDE/open_cobol_ide/extlibs/future/backports/test/support.py/rmdir
5,470
def rmtree(path):
    try:
        _rmtree(path)
    except __HOLE__ as error:
        if error.errno != errno.ENOENT:
            raise
OSError
dataset/ETHPy150Open OpenCobolIDE/OpenCobolIDE/open_cobol_ide/extlibs/future/backports/test/support.py/rmtree
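Taken together, unlink/rmdir/rmtree make test cleanup idempotent: removing something that is already gone is silently tolerated, while any other OS error still propagates. A trivial usage sketch:

unlink('scratch.txt')   # fine even if the file was never created
rmdir('scratch-dir')    # fine even if the directory is absent
rmtree('scratch-tree')  # likewise for whole directory trees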
5,471
def _requires_unix_version(sysname, min_version):
    """Decorator raising SkipTest if the OS is `sysname` and the version is
    less than `min_version`.

    For example, @_requires_unix_version('FreeBSD', (7, 2)) raises SkipTest
    if the FreeBSD version is less than 7.2.
    """
    def decorator(func):
        @functools.wraps(func)
        def wrapper(*args, **kw):
            if platform.system() == sysname:
                version_txt = platform.release().split('-', 1)[0]
                try:
                    version = tuple(map(int, version_txt.split('.')))
                except __HOLE__:
                    pass
                else:
                    if version < min_version:
                        min_version_txt = '.'.join(map(str, min_version))
                        raise unittest.SkipTest(
                            "%s version %s or higher required, not %s"
                            % (sysname, min_version_txt, version_txt))
            return func(*args, **kw)
        wrapper.min_version = min_version
        return wrapper
    return decorator
ValueError
dataset/ETHPy150Open OpenCobolIDE/OpenCobolIDE/open_cobol_ide/extlibs/future/backports/test/support.py/_requires_unix_version
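How the decorator reads at a use site (the FreeBSD example comes from the docstring above; the test name is hypothetical):

@_requires_unix_version('FreeBSD', (7, 2))
def test_sendfile(self):
    ...  # runs everywhere except FreeBSD < 7.2, where it is skipped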
5,472
def requires_mac_ver(*min_version):
    """Decorator raising SkipTest if the OS is Mac OS X and the OS X version
    is less than min_version.

    For example, @requires_mac_ver(10, 5) raises SkipTest if the OS X version
    is less than 10.5.
    """
    def decorator(func):
        @functools.wraps(func)
        def wrapper(*args, **kw):
            if sys.platform == 'darwin':
                version_txt = platform.mac_ver()[0]
                try:
                    version = tuple(map(int, version_txt.split('.')))
                except __HOLE__:
                    pass
                else:
                    if version < min_version:
                        min_version_txt = '.'.join(map(str, min_version))
                        raise unittest.SkipTest(
                            "Mac OS X %s or higher required, not %s"
                            % (min_version_txt, version_txt))
            return func(*args, **kw)
        wrapper.min_version = min_version
        return wrapper
    return decorator

# Don't use "localhost", since resolving it uses the DNS under recent
# Windows versions (see issue #18792).
ValueError
dataset/ETHPy150Open OpenCobolIDE/OpenCobolIDE/open_cobol_ide/extlibs/future/backports/test/support.py/requires_mac_ver
5,473
@contextlib.contextmanager
def temp_cwd(name='tempcwd', quiet=False, path=None):
    """
    Context manager that temporarily changes the CWD.

    An existing path may be provided as *path*, in which case this
    function makes no changes to the file system.

    Otherwise, the new CWD is created in the current directory and it's
    named *name*. If *quiet* is False (default) and it's not possible to
    create or change the CWD, an error is raised.  If it's True, only a
    warning is raised and the original CWD is used.
    """
    saved_dir = os.getcwd()
    is_temporary = False
    if path is None:
        path = name
        try:
            os.mkdir(name)
            is_temporary = True
        except __HOLE__:
            if not quiet:
                raise
            warnings.warn('tests may fail, unable to create temp CWD ' + name,
                          RuntimeWarning, stacklevel=3)
    try:
        os.chdir(path)
    except OSError:
        if not quiet:
            raise
        warnings.warn('tests may fail, unable to change the CWD to ' + path,
                      RuntimeWarning, stacklevel=3)
    try:
        yield os.getcwd()
    finally:
        os.chdir(saved_dir)
        if is_temporary:
            rmtree(name)
OSError
dataset/ETHPy150Open OpenCobolIDE/OpenCobolIDE/open_cobol_ide/extlibs/future/backports/test/support.py/temp_cwd
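A minimal usage sketch: run a block of test code inside a throwaway working directory that is removed again on exit.

with temp_cwd('scratch') as cwd:
    with open('output.txt', 'w') as f:  # created under the temporary CWD
        f.write('data')
# back in the original directory here; 'scratch' has been deleted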
5,474
@contextlib.contextmanager
def transient_internet(resource_name, timeout=30.0, errnos=()):
    """Return a context manager that raises ResourceDenied when various issues
    with the Internet connection manifest themselves as exceptions."""
    default_errnos = [
        ('ECONNREFUSED', 111),
        ('ECONNRESET', 104),
        ('EHOSTUNREACH', 113),
        ('ENETUNREACH', 101),
        ('ETIMEDOUT', 110),
    ]
    default_gai_errnos = [
        ('EAI_AGAIN', -3),
        ('EAI_FAIL', -4),
        ('EAI_NONAME', -2),
        ('EAI_NODATA', -5),
        # Encountered when trying to resolve IPv6-only hostnames
        ('WSANO_DATA', 11004),
    ]

    denied = ResourceDenied("Resource %r is not available" % resource_name)
    captured_errnos = errnos
    gai_errnos = []
    if not captured_errnos:
        captured_errnos = [getattr(errno, name, num)
                           for (name, num) in default_errnos]
        gai_errnos = [getattr(socket, name, num)
                      for (name, num) in default_gai_errnos]

    def filter_error(err):
        n = getattr(err, 'errno', None)
        if (isinstance(err, socket.timeout) or
                (isinstance(err, socket.gaierror) and n in gai_errnos) or
                n in captured_errnos):
            if not verbose:
                sys.stderr.write(denied.args[0] + "\n")
            # Was: raise denied from err
            # For Python-Future:
            exc = denied
            exc.__cause__ = err
            raise exc

    old_timeout = socket.getdefaulttimeout()
    try:
        if timeout is not None:
            socket.setdefaulttimeout(timeout)
        yield
    except __HOLE__ as err:
        # urllib can wrap original socket errors multiple times (!), we must
        # unwrap to get at the original error.
        while True:
            a = err.args
            if len(a) >= 1 and isinstance(a[0], IOError):
                err = a[0]
            # The error can also be wrapped as args[1]:
            #     except socket.error as msg:
            #         raise IOError('socket error', msg).with_traceback(sys.exc_info()[2])
            elif len(a) >= 2 and isinstance(a[1], IOError):
                err = a[1]
            else:
                break
        filter_error(err)
        raise
    # XXX should we catch generic exceptions and look for their
    # __cause__ or __context__?
    finally:
        socket.setdefaulttimeout(old_timeout)
IOError
dataset/ETHPy150Open OpenCobolIDE/OpenCobolIDE/open_cobol_ide/extlibs/future/backports/test/support.py/transient_internet
5,475
def run_with_locale(catstr, *locales):
    def decorator(func):
        def inner(*args, **kwds):
            try:
                import locale
                category = getattr(locale, catstr)
                orig_locale = locale.setlocale(category)
            except __HOLE__:
                # if the test author gives us an invalid category string
                raise
            except:
                # cannot retrieve original locale, so do nothing
                locale = orig_locale = None
            else:
                for loc in locales:
                    try:
                        locale.setlocale(category, loc)
                        break
                    except:
                        pass

            # now run the function, resetting the locale on exceptions
            try:
                return func(*args, **kwds)
            finally:
                if locale and orig_locale:
                    locale.setlocale(category, orig_locale)
        inner.__name__ = func.__name__
        inner.__doc__ = func.__doc__
        return inner
    return decorator

#=======================================================================
# Decorator for running a function in a specific timezone, correctly
# resetting it afterwards.
AttributeError
dataset/ETHPy150Open OpenCobolIDE/OpenCobolIDE/open_cobol_ide/extlibs/future/backports/test/support.py/run_with_locale
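Usage sketch: the decorator tries the listed locales in order, runs the test under the first one that can be set, and restores the original locale afterwards (the test name is hypothetical):

@run_with_locale('LC_NUMERIC', 'fr_FR.UTF-8', 'fr_FR', '')
def test_float_formatting(self):
    ...  # locale-dependent assertions go here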
5,476
def run_with_tz(tz):
    def decorator(func):
        def inner(*args, **kwds):
            try:
                tzset = time.tzset
            except __HOLE__:
                raise unittest.SkipTest("tzset required")
            if 'TZ' in os.environ:
                orig_tz = os.environ['TZ']
            else:
                orig_tz = None
            os.environ['TZ'] = tz
            tzset()

            # now run the function, resetting the tz on exceptions
            try:
                return func(*args, **kwds)
            finally:
                if orig_tz is None:
                    del os.environ['TZ']
                else:
                    os.environ['TZ'] = orig_tz
                time.tzset()
        inner.__name__ = func.__name__
        inner.__doc__ = func.__doc__
        return inner
    return decorator

#=======================================================================
# Big-memory-test support. Separate from 'resources' because memory use
# should be configurable.

# Some handy shorthands. Note that these are used for byte-limits as well
# as size-limits, in the various bigmem tests
AttributeError
dataset/ETHPy150Open OpenCobolIDE/OpenCobolIDE/open_cobol_ide/extlibs/future/backports/test/support.py/run_with_tz
5,477
def start(self):
    try:
        f = open(self.procfile, 'r')
    except __HOLE__ as e:
        warnings.warn('/proc not available for stats: {0}'.format(e),
                      RuntimeWarning)
        sys.stderr.flush()
        return

    watchdog_script = findfile("memory_watchdog.py")
    self.mem_watchdog = subprocess.Popen([sys.executable, watchdog_script],
                                         stdin=f,
                                         stderr=subprocess.DEVNULL)
    f.close()
    self.started = True
OSError
dataset/ETHPy150Open OpenCobolIDE/OpenCobolIDE/open_cobol_ide/extlibs/future/backports/test/support.py/_MemoryWatchdog.start
5,478
def can_symlink():
    global _can_symlink
    if _can_symlink is not None:
        return _can_symlink
    symlink_path = TESTFN + "can_symlink"
    try:
        os.symlink(TESTFN, symlink_path)
        can = True
    except (__HOLE__, NotImplementedError, AttributeError):
        can = False
    else:
        os.remove(symlink_path)
    _can_symlink = can
    return can
OSError
dataset/ETHPy150Open OpenCobolIDE/OpenCobolIDE/open_cobol_ide/extlibs/future/backports/test/support.py/can_symlink
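A guard one might build on this probe (test.support itself wraps it in a skip decorator; this re-statement is a sketch, not the exact API):

import unittest

def check_symlink_support():
    if not can_symlink():
        raise unittest.SkipTest('cannot create symlinks on this platform')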
5,479
def can_xattr():
    global _can_xattr
    if _can_xattr is not None:
        return _can_xattr
    if not hasattr(os, "setxattr"):
        can = False
    else:
        tmp_fp, tmp_name = tempfile.mkstemp()
        try:
            with open(TESTFN, "wb") as fp:
                try:
                    # TESTFN & tempfile may use different file systems with
                    # different capabilities
                    os.setxattr(tmp_fp, b"user.test", b"")
                    os.setxattr(fp.fileno(), b"user.test", b"")
                    # Kernels < 2.6.39 don't respect setxattr flags.
                    kernel_version = platform.release()
                    m = re.match("2.6.(\d{1,2})", kernel_version)
                    can = m is None or int(m.group(1)) >= 39
                except __HOLE__:
                    can = False
        finally:
            unlink(TESTFN)
            unlink(tmp_name)
    _can_xattr = can
    return can
OSError
dataset/ETHPy150Open OpenCobolIDE/OpenCobolIDE/open_cobol_ide/extlibs/future/backports/test/support.py/can_xattr
5,480
def patch(test_instance, object_to_patch, attr_name, new_value):
    """Override 'object_to_patch'.'attr_name' with 'new_value'.

    Also, add a cleanup procedure to 'test_instance' to restore
    'object_to_patch' value for 'attr_name'.
    The 'attr_name' should be a valid attribute for 'object_to_patch'.
    """
    # check that 'attr_name' is a real attribute for 'object_to_patch'
    # will raise AttributeError if it does not exist
    getattr(object_to_patch, attr_name)

    # keep a copy of the old value
    attr_is_local = False
    try:
        old_value = object_to_patch.__dict__[attr_name]
    except (AttributeError, __HOLE__):
        old_value = getattr(object_to_patch, attr_name, None)
    else:
        attr_is_local = True

    # restore the value when the test is done
    def cleanup():
        if attr_is_local:
            setattr(object_to_patch, attr_name, old_value)
        else:
            delattr(object_to_patch, attr_name)

    test_instance.addCleanup(cleanup)

    # actually override the attribute
    setattr(object_to_patch, attr_name, new_value)
KeyError
dataset/ETHPy150Open OpenCobolIDE/OpenCobolIDE/open_cobol_ide/extlibs/future/backports/test/support.py/patch
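Usage sketch inside a TestCase: override an attribute for one test and let the registered cleanup undo it; `some_module` and `VERBOSE` are hypothetical targets.

import unittest

class ExampleTest(unittest.TestCase):
    def test_quiet_mode(self):
        patch(self, some_module, 'VERBOSE', False)  # hypothetical target
        # ... exercise code that reads some_module.VERBOSE ...
        # the addCleanup() hook restores (or deletes) the attribute afterwards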
5,481
def _IndexedScan(self, i, max_records=None):
    """Scan records starting with index i."""
    if not self._index:
        self._ReadIndex()

    # The record number that we will read next.
    idx = 0
    # The timestamp that we will start reading from.
    start_ts = 0
    if i >= self._max_indexed:
        start_ts = max(
            (0, 0),
            (self._index[self._max_indexed][0],
             self._index[self._max_indexed][1] - 1))
        idx = self._max_indexed
    else:
        try:
            possible_idx = i - i % self.INDEX_SPACING
            start_ts = (max(0, self._index[possible_idx][0]),
                        self._index[possible_idx][1] - 1)
            idx = possible_idx
        except __HOLE__:
            pass

    if max_records is not None:
        max_records += i - idx

    for (ts, value) in self.Scan(after_timestamp=start_ts,
                                 max_records=max_records,
                                 include_suffix=True):
        self._MaybeWriteIndex(idx, ts)
        if idx >= i:
            yield (idx, ts, value)
        idx += 1
KeyError
dataset/ETHPy150Open google/grr/grr/lib/aff4_objects/sequential_collection.py/IndexedSequentialCollection._IndexedScan
5,482
def decode(self, payload):
    try:
        self.reason = ord(payload[0])
        if self.reason == 1:
            self.data = ord(payload[1])
        elif self.reason == 2:
            self.data = struct.unpack(">L", payload[1:])[0]
        elif self.reason == 3:
            self.data = ord(payload[1])
        else:
            log.warning("unknown error reason value {0}".format(self.reason))
    except (__HOLE__, struct.error):
        raise DecodeError("non matching error reason and data")
TypeError
dataset/ETHPy150Open javgh/greenaddress-pos-tools/nfc/ndef/handover.py/HandoverError.decode
5,483
def encode(self):
    try:
        payload = chr(self.reason)
    except __HOLE__:
        raise EncodeError("error reason out of limits")
    try:
        if self.reason == 1:
            payload += chr(self.data)
        elif self.reason == 2:
            payload += struct.pack(">L", self.data)
        elif self.reason == 3:
            payload += chr(self.data)
        else:
            raise EncodeError("reserved error reason %d" % self.reason)
    except (TypeError, struct.error):
        raise EncodeError("invalid data for error reason %d" % self.reason)
    return payload

#---------------------------------------------------------------------- Version
ValueError
dataset/ETHPy150Open javgh/greenaddress-pos-tools/nfc/ndef/handover.py/HandoverError.encode
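A round-trip check under the record's own rules, using Python 2 byte-string semantics to match the snippet above (reason code 2 carries a 4-byte big-endian unsigned integer after the 1-byte reason):

import struct

reason, data = 2, 1000  # e.g. "unavailable for N milliseconds"
payload = chr(reason) + struct.pack(">L", data)  # Python 2: str concatenation
assert ord(payload[0]) == 2
assert struct.unpack(">L", payload[1:])[0] == 1000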
5,484
def connect(self, receiver, sender=None, weak=True, dispatch_uid=None):
    """
    Connect receiver to sender for signal.

    Arguments:

        receiver
            A function or an instance method which is to receive signals.
            Receivers must be hashable objects.

            If weak is True, then receiver must be weak-referencable (more
            precisely saferef.safeRef() must be able to create a reference
            to the receiver).

            Receivers must be able to accept keyword arguments.

            If receivers have a dispatch_uid attribute, the receiver will
            not be added if another receiver already exists with that
            dispatch_uid.

        sender
            The sender to which the receiver should respond. Must either be
            of type Signal, or None to receive events from any sender.

        weak
            Whether to use weak references to the receiver. By default, the
            module will attempt to use weak references to the receiver
            objects. If this parameter is false, then strong references will
            be used.

        dispatch_uid
            An identifier used to uniquely identify a particular instance of
            a receiver. This will usually be a string, though it may be
            anything hashable.
    """
    from django.conf import settings

    # If DEBUG is on, check that we got a good receiver
    if settings.DEBUG:
        import inspect
        assert callable(receiver), "Signal receivers must be callable."

        # Check for **kwargs
        # Not all callables are inspectable with getargspec, so we'll
        # try a couple different ways but in the end fall back on assuming
        # it is -- we don't want to prevent registration of valid but weird
        # callables.
        try:
            argspec = inspect.getargspec(receiver)
        except __HOLE__:
            try:
                argspec = inspect.getargspec(receiver.__call__)
            except (TypeError, AttributeError):
                argspec = None
        if argspec:
            assert argspec[2] is not None, \
                "Signal receivers must accept keyword arguments (**kwargs)."

    if dispatch_uid:
        lookup_key = (dispatch_uid, _make_id(sender))
    else:
        lookup_key = (_make_id(receiver), _make_id(sender))

    if weak:
        receiver = saferef.safeRef(receiver, onDelete=self._remove_receiver)

    self.lock.acquire()
    try:
        for r_key, _ in self.receivers:
            if r_key == lookup_key:
                break
        else:
            self.receivers.append((lookup_key, receiver))
    finally:
        self.lock.release()
TypeError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-1.2/django/dispatch/dispatcher.py/Signal.connect
5,485
@property
def tables_involved(self):
    """A really rather rudimentary way to work out tables involved in a query.
    TODO: Can probably parse the SQL using sqlparse etc and pull out table
    info that way?"""
    components = [x.strip() for x in self.query.split()]
    tables = []
    for idx, component in enumerate(components):
        # TODO: If django uses aliases on column names they will be falsely
        # identified as tables...
        if component.lower() == 'from' or component.lower() == 'join' \
                or component.lower() == 'as':
            try:
                _next = components[idx + 1]
                if not _next.startswith('('):  # Subquery
                    stripped = _next.strip().strip(',')
                    if stripped:
                        tables.append(stripped)
            except __HOLE__:
                # Reached the end
                pass
    return tables
IndexError
dataset/ETHPy150Open django-silk/silk/silk/models.py/SQLQuery.tables_involved
5,486
def _get_system_paths(executable):
    """Return lists of standard lib and site paths for executable.
    """
    # We want to get a list of the site packages, which is not easy.
    # The canonical way to do this is to use
    # distutils.sysconfig.get_python_lib(), but that only returns a
    # single path, which does not reflect reality for many system
    # Pythons, which have multiple additions.  Instead, we start Python
    # with -S, which does not import site.py and set up the extra paths
    # like site-packages or (Ubuntu/Debian) dist-packages and
    # python-support.  We then compare that sys.path with the normal one
    # (minus user packages if this is Python 2.6, because we don't
    # support those (yet?).  The set of the normal one minus the set of
    # the ones in ``python -S`` is the set of packages that are
    # effectively site-packages.
    #
    # The given executable might not be the current executable, so it is
    # appropriate to do another subprocess to figure out what the
    # additional site-package paths are.  Moreover, even if this
    # executable *is* the current executable, this code might be run in
    # the context of code that has manipulated the sys.path--for
    # instance, to add local zc.buildout or setuptools eggs.
    def get_sys_path(*args, **kwargs):
        cmd = [executable]
        cmd.extend(args)
        cmd.extend([
            "-c", "import sys, os;"
            "print repr([os.path.normpath(p) for p in sys.path if p])"])
        # Windows needs some (as yet to be determined) part of the real env.
        env = os.environ.copy()
        # We need to make sure that PYTHONPATH, which will often be set
        # to include a custom buildout-generated site.py, is not set, or
        # else we will not get an accurate sys.path for the executable.
        env.pop('PYTHONPATH', None)
        env.update(kwargs)
        _proc = subprocess.Popen(
            cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=env)
        stdout, stderr = _proc.communicate()
        if _proc.returncode:
            raise RuntimeError(
                'error trying to get system packages:\n%s' % (stderr,))
        res = eval(stdout.strip())
        try:
            res.remove('.')
        except __HOLE__:
            pass
        return res

    stdlib = get_sys_path('-S')  # stdlib only
    no_user_paths = get_sys_path(PYTHONNOUSERSITE='x')
    site_paths = [p for p in no_user_paths if p not in stdlib]
    return (stdlib, site_paths)
ValueError
dataset/ETHPy150Open moraes/tipfy/manage/easy_install.py/_get_system_paths
5,487
def _get_version(executable):
    try:
        return _versions[executable]
    except __HOLE__:
        cmd = _safe_arg(executable) + ' -V'
        p = subprocess.Popen(cmd,
                             shell=True,
                             stdin=subprocess.PIPE,
                             stdout=subprocess.PIPE,
                             stderr=subprocess.STDOUT,
                             close_fds=not is_win32)
        i, o = (p.stdin, p.stdout)
        i.close()
        version = o.read().strip()
        o.close()
        pystring, version = version.split()
        assert pystring == 'Python'
        version = re.match('(\d[.]\d)([.].*\d)?$', version).group(1)
        _versions[executable] = version
        return version
KeyError
dataset/ETHPy150Open moraes/tipfy/manage/easy_install.py/_get_version
5,488
def _write_script(full_name, contents, logged_type):
    """Write contents of script in full_name, logging the action.

    The only tricky bit in this function is that it supports Windows by
    creating exe files using a pkg_resources helper.
    """
    generated = []
    script_name = full_name
    if is_win32:
        script_name += '-script.py'
        # Generate exe file and give the script a magic name.
        exe = full_name + '.exe'
        new_data = pkg_resources.resource_string('setuptools', 'cli.exe')
        if not os.path.exists(exe) or (open(exe, 'rb').read() != new_data):
            # Only write it if it's different.
            open(exe, 'wb').write(new_data)
        generated.append(exe)
    changed = not (os.path.exists(script_name)
                   and open(script_name).read() == contents)
    if changed:
        open(script_name, 'w').write(contents)
        try:
            os.chmod(script_name, 0755)
        except (__HOLE__, os.error):
            pass
        logger.info("Generated %s %r.", logged_type, full_name)
    generated.append(script_name)
    return generated
AttributeError
dataset/ETHPy150Open moraes/tipfy/manage/easy_install.py/_write_script
5,489
def render(self, context):
    if not include_is_allowed(self.filepath):
        if settings.DEBUG:
            return "[Didn't have permission to include file]"
        else:
            return ''  # Fail silently for invalid includes.
    try:
        fp = open(self.filepath, 'r')
        output = fp.read()
        fp.close()
    except __HOLE__:
        output = ''
    if self.parsed:
        try:
            t = Template(output, name=self.filepath)
            return t.render(context)
        except TemplateSyntaxError, e:
            if settings.DEBUG:
                return "[Included template had syntax error: %s]" % e
            else:
                return ''  # Fail silently for invalid included templates.
    return output
IOError
dataset/ETHPy150Open dcramer/django-compositepks/django/template/defaulttags.py/SsiNode.render
5,490
def render(self, context):
    try:
        value = self.val_expr.resolve(context)
        maxvalue = self.max_expr.resolve(context)
    except VariableDoesNotExist:
        return ''
    try:
        value = float(value)
        maxvalue = float(maxvalue)
        ratio = (value / maxvalue) * int(self.max_width)
    except (__HOLE__, ZeroDivisionError):
        return ''
    return str(int(round(ratio)))
ValueError
dataset/ETHPy150Open dcramer/django-compositepks/django/template/defaulttags.py/WidthRatioNode.render
5,491
def do_if(parser, token):
    """
    The ``{% if %}`` tag evaluates a variable, and if that variable is "true"
    (i.e., exists, is not empty, and is not a false boolean value), the
    contents of the block are output::

        {% if athlete_list %}
            Number of athletes: {{ athlete_list|count }}
        {% else %}
            No athletes.
        {% endif %}

    In the above, if ``athlete_list`` is not empty, the number of athletes
    will be displayed by the ``{{ athlete_list|count }}`` variable.

    As you can see, the ``if`` tag can take an optional ``{% else %}`` clause
    that will be displayed if the test fails.

    ``if`` tags may use ``or``, ``and`` or ``not`` to test a number of
    variables or to negate a given variable::

        {% if not athlete_list %}
            There are no athletes.
        {% endif %}

        {% if athlete_list or coach_list %}
            There are some athletes or some coaches.
        {% endif %}

        {% if athlete_list and coach_list %}
            Both athletes and coaches are available.
        {% endif %}

        {% if not athlete_list or coach_list %}
            There are no athletes, or there are some coaches.
        {% endif %}

        {% if athlete_list and not coach_list %}
            There are some athletes and absolutely no coaches.
        {% endif %}

    ``if`` tags do not allow ``and`` and ``or`` clauses with the same tag,
    because the order of logic would be ambiguous. For example, this is
    invalid::

        {% if athlete_list and coach_list or cheerleader_list %}

    If you need to combine ``and`` and ``or`` to do advanced logic, just use
    nested if tags. For example::

        {% if athlete_list %}
            {% if coach_list or cheerleader_list %}
                We have athletes, and either coaches or cheerleaders!
            {% endif %}
        {% endif %}
    """
    bits = token.contents.split()
    del bits[0]
    if not bits:
        raise TemplateSyntaxError("'if' statement requires at least one argument")
    # Bits now looks something like this: ['a', 'or', 'not', 'b', 'or', 'c.d']
    bitstr = ' '.join(bits)
    boolpairs = bitstr.split(' and ')
    boolvars = []
    if len(boolpairs) == 1:
        link_type = IfNode.LinkTypes.or_
        boolpairs = bitstr.split(' or ')
    else:
        link_type = IfNode.LinkTypes.and_
        if ' or ' in bitstr:
            raise TemplateSyntaxError, "'if' tags can't mix 'and' and 'or'"
    for boolpair in boolpairs:
        if ' ' in boolpair:
            try:
                not_, boolvar = boolpair.split()
            except __HOLE__:
                raise TemplateSyntaxError, "'if' statement improperly formatted"
            if not_ != 'not':
                raise TemplateSyntaxError, "Expected 'not' in if statement"
            boolvars.append((True, parser.compile_filter(boolvar)))
        else:
            boolvars.append((False, parser.compile_filter(boolpair)))
    nodelist_true = parser.parse(('else', 'endif'))
    token = parser.next_token()
    if token.contents == 'else':
        nodelist_false = parser.parse(('endif',))
        parser.delete_first_token()
    else:
        nodelist_false = NodeList()
    return IfNode(boolvars, nodelist_true, nodelist_false, link_type)
ValueError
dataset/ETHPy150Open dcramer/django-compositepks/django/template/defaulttags.py/do_if
5,492
def widthratio(parser, token):
    """
    For creating bar charts and such, this tag calculates the ratio of a given
    value to a maximum value, and then applies that ratio to a constant.

    For example::

        <img src='bar.gif' height='10' width='{% widthratio this_value max_value 100 %}' />

    Above, if ``this_value`` is 175 and ``max_value`` is 200, the image in
    the above example will be 88 pixels wide (because 175/200 = .875;
    .875 * 100 = 87.5 which is rounded up to 88).
    """
    bits = token.contents.split()
    if len(bits) != 4:
        raise TemplateSyntaxError("widthratio takes three arguments")
    tag, this_value_expr, max_value_expr, max_width = bits
    try:
        max_width = int(max_width)
    except __HOLE__:
        raise TemplateSyntaxError("widthratio final argument must be an integer")
    return WidthRatioNode(parser.compile_filter(this_value_expr),
                          parser.compile_filter(max_value_expr),
                          max_width)
ValueError
dataset/ETHPy150Open dcramer/django-compositepks/django/template/defaulttags.py/widthratio
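The docstring's arithmetic, checked directly without any Django machinery:

value, maxvalue, max_width = 175.0, 200.0, 100
ratio = (value / maxvalue) * max_width  # 175/200 * 100 = 87.5
print(int(round(ratio)))                # -> 88, the rendered pixel width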
5,493
@classmethod
def get_object(self, desc, value):
    klass = desc['klass']
    attr = desc['attr']
    try:
        if desc['prefetch']:
            # build up relation cache
            if klass not in self.relation_cache:
                self.buildup_relation_cache(klass, attr, value,
                                            desc['assign_by_id'])
            # get the instance out of relation cache
            inst = self.relation_cache[klass].get(value, None)
            if inst:
                return inst
            raise ObjectDoesNotExist(
                "%s matching query (%s=%s) does not exist in relation cache."
                % (klass.__name__, attr, value))
        else:
            # get the related object out of the DB
            return klass.objects.get(**{attr: value})
    except __HOLE__ as e:
        if desc['skip_missing']:
            return None
        else:
            raise
ObjectDoesNotExist
dataset/ETHPy150Open pboehm/django-data-migration/data_migration/migration.py/Migration.get_object
5,494
@classmethod
def import_all(self, excludes=[]):
    """This does a `from X import *` for all existing migration specs."""
    for app in self.possible_existing_migrations():
        matches = [ex for ex in excludes if ex in app]
        if len(matches) > 0:
            continue
        try:
            m = __import__(app)
            try:
                attrlist = m.__all__
            except __HOLE__:
                attrlist = dir(m)
            for attr in attrlist:
                globals()[attr] = getattr(m, attr)
        except ImportError as e:
            pass
AttributeError
dataset/ETHPy150Open pboehm/django-data-migration/data_migration/migration.py/Importer.import_all
5,495
def _run_job_in_hadoop(self):
    for step_num in range(self._num_steps()):
        step_args = self._args_for_step(step_num)

        # log this *after* _args_for_step(), which can start a search
        # for the Hadoop streaming jar
        log.info('Running step %d of %d...' %
                 (step_num + 1, self._num_steps()))
        log.debug('> %s' % cmd_line(step_args))

        log_interpretation = {}
        self._log_interpretations.append(log_interpretation)

        # try to use a PTY if it's available
        try:
            pid, master_fd = pty.fork()
        except (AttributeError, __HOLE__):
            # no PTYs, just use Popen

            # user won't get much feedback for a while, so tell them
            # Hadoop is running
            log.debug('No PTY available, using Popen() to invoke Hadoop')

            step_proc = Popen(step_args, stdout=PIPE, stderr=PIPE)

            step_interpretation = _interpret_hadoop_jar_command_stderr(
                step_proc.stderr,
                record_callback=_log_record_from_hadoop)

            # there shouldn't be much output to STDOUT
            for line in step_proc.stdout:
                _log_line_from_hadoop(to_string(line).strip('\r\n'))

            step_proc.stdout.close()
            step_proc.stderr.close()

            returncode = step_proc.wait()
        else:
            # we have PTYs
            if pid == 0:
                # we are the child process
                os.execvp(step_args[0], step_args)
            else:
                log.debug('Invoking Hadoop via PTY')

                with os.fdopen(master_fd, 'rb') as master:
                    # reading from master gives us the subprocess's
                    # stderr and stdout (it's a fake terminal)
                    step_interpretation = (
                        _interpret_hadoop_jar_command_stderr(
                            master,
                            record_callback=_log_record_from_hadoop))
                    _, returncode = os.waitpid(pid, 0)

        # make sure output_dir is filled
        if 'output_dir' not in step_interpretation:
            step_interpretation['output_dir'] = (
                self._hdfs_step_output_dir(step_num))

        log_interpretation['step'] = step_interpretation

        counters = self._pick_counters(log_interpretation)
        if counters:
            log.info(_format_counters(counters))
        else:
            log.warning('No counters found')

        if returncode:
            error = self._pick_error(log_interpretation)
            if error:
                log.error('Probable cause of failure:\n\n%s\n' %
                          _format_error(error))

            # use CalledProcessError's well-known message format
            reason = str(CalledProcessError(returncode, step_args))
            raise StepFailedException(
                reason=reason, step_num=step_num,
                num_steps=self._num_steps())
OSError
dataset/ETHPy150Open Yelp/mrjob/mrjob/hadoop.py/HadoopJobRunner._run_job_in_hadoop
5,496
def get_tokens_unprocessed(self, text, stack=('root',)):
    """
    Split ``text`` into (tokentype, text) pairs.

    Monkeypatched to store the final stack on the object itself.
    """
    pos = 0
    tokendefs = self._tokens
    if hasattr(self, '_saved_state_stack'):
        statestack = list(self._saved_state_stack)
    else:
        statestack = list(stack)
    statetokens = tokendefs[statestack[-1]]
    while 1:
        for rexmatch, action, new_state in statetokens:
            m = rexmatch(text, pos)
            if m:
                if action is not None:
                    if type(action) is _TokenType:
                        yield pos, action, m.group()
                    else:
                        for item in action(self, m):
                            yield item
                pos = m.end()
                if new_state is not None:
                    # state transition
                    if isinstance(new_state, tuple):
                        for state in new_state:
                            if state == '#pop':
                                statestack.pop()
                            elif state == '#push':
                                statestack.append(statestack[-1])
                            else:
                                statestack.append(state)
                    elif isinstance(new_state, int):
                        # pop
                        del statestack[new_state:]
                    elif new_state == '#push':
                        statestack.append(statestack[-1])
                    else:
                        assert False, "wrong state def: %r" % new_state
                    statetokens = tokendefs[statestack[-1]]
                break
        else:
            try:
                if text[pos] == '\n':
                    # at EOL, reset state to "root"
                    pos += 1
                    statestack = ['root']
                    statetokens = tokendefs['root']
                    yield pos, Text, '\n'
                    continue
                yield pos, Error, text[pos]
                pos += 1
            except __HOLE__:
                break
    self._saved_state_stack = list(statestack)

# Monkeypatch!
IndexError
dataset/ETHPy150Open OpenCobolIDE/OpenCobolIDE/open_cobol_ide/extlibs/pyqode/core/modes/pygments_sh.py/get_tokens_unprocessed
5,497
def set_mime_type(self, mime_type):
    """
    Update the highlighter lexer based on a mime type.

    :param mime_type: mime type of the new lexer to setup.
    """
    try:
        self.set_lexer_from_mime_type(mime_type)
    except ClassNotFound:
        _logger().exception('failed to get lexer from mimetype')
        self._lexer = TextLexer()
        return False
    except __HOLE__:
        # import error while loading some pygments plugins, the editor
        # should not crash
        _logger().warning('failed to get lexer from mimetype (%s)' %
                          mime_type)
        self._lexer = TextLexer()
        return False
    else:
        return True
ImportError
dataset/ETHPy150Open OpenCobolIDE/OpenCobolIDE/open_cobol_ide/extlibs/pyqode/core/modes/pygments_sh.py/PygmentsSH.set_mime_type
5,498
def set_lexer_from_filename(self, filename):
    """
    Change the lexer based on the filename (actually only the extension is
    needed)

    :param filename: Filename or extension
    """
    self._lexer = None
    if filename.endswith("~"):
        filename = filename[0:len(filename) - 1]
    try:
        self._lexer = get_lexer_for_filename(filename)
    except (ClassNotFound, ImportError):
        print('class not found for url', filename)
        try:
            m = mimetypes.guess_type(filename)
            print(m)
            self._lexer = get_lexer_for_mimetype(m[0])
        except (ClassNotFound, __HOLE__, ImportError):
            self._lexer = get_lexer_for_mimetype('text/plain')
    if self._lexer is None:
        _logger().warning('failed to get lexer from filename: %s, using '
                          'plain text instead...', filename)
        self._lexer = TextLexer()
IndexError
dataset/ETHPy150Open OpenCobolIDE/OpenCobolIDE/open_cobol_ide/extlibs/pyqode/core/modes/pygments_sh.py/PygmentsSH.set_lexer_from_filename
5,499
def _get_format_from_style(self, token, style):
    """Returns a QTextCharFormat for token by reading a Pygments style."""
    result = QtGui.QTextCharFormat()
    try:
        style = style.style_for_token(token)
    except __HOLE__:
        # fallback to plain text
        style = style.style_for_token(Text)
    for key, value in list(style.items()):
        if value:
            if key == 'color':
                result.setForeground(self._get_brush(value))
            elif key == 'bgcolor':
                result.setBackground(self._get_brush(value))
            elif key == 'bold':
                result.setFontWeight(QtGui.QFont.Bold)
            elif key == 'italic':
                result.setFontItalic(True)
            elif key == 'underline':
                result.setUnderlineStyle(
                    QtGui.QTextCharFormat.SingleUnderline)
            elif key == 'sans':
                result.setFontStyleHint(QtGui.QFont.SansSerif)
            elif key == 'roman':
                result.setFontStyleHint(QtGui.QFont.Times)
            elif key == 'mono':
                result.setFontStyleHint(QtGui.QFont.TypeWriter)
    return result
KeyError
dataset/ETHPy150Open OpenCobolIDE/OpenCobolIDE/open_cobol_ide/extlibs/pyqode/core/modes/pygments_sh.py/PygmentsSH._get_format_from_style