Unnamed: 0: int64 (0 to 10k)
function: string (lengths 79 to 138k)
label: string (20 classes)
info: string (lengths 42 to 261)
6,700
def handle(self, app_name=None, target=None, **options): self.validate_name(app_name, "app") # Check that the app_name cannot be imported. try: import_module(app_name) except __HOLE__: pass else: raise CommandError("%r conflicts with the name of an existing " "Python module and cannot be used as an app " "name. Please try another name." % app_name) super(Command, self).handle('app', app_name, target, **options)
ImportError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/Django-1.6.10/django/core/management/commands/startapp.py/Command.handle
6,701
def _httpfilename(response): """ Python2/3 compatibility function. Returns the filename stored in the `Content-Disposition` HTTP header of given `response`, or `None` if that header is absent. """ try: # Py3 return response.info().get_filename() except __HOLE__: # Py2 import cgi _, params = cgi.parse_header(response.headers.get('Content-Disposition', '')) return params.get('filename', None)
AttributeError
dataset/ETHPy150Open lucasb-eyer/DeepFried2/DeepFried2/zoo/download.py/_httpfilename
6,702
def _getheader(response, header, default=None): """ Python2/3 compatibility function. Returns a HTTP `header` from given HTTP `response`, or `default` if that header is absent. """ try: # Py3 return response.getheader(header, default) except __HOLE__: # Py2 return response.info().getheader(header, default)
AttributeError
dataset/ETHPy150Open lucasb-eyer/DeepFried2/DeepFried2/zoo/download.py/_getheader
6,703
def parseXRDS(text): """Parse the given text as an XRDS document. @return: ElementTree containing an XRDS document @raises XRDSError: When there is a parse error or the document does not contain an XRDS. """ try: element = SafeElementTree.XML(text) except (SystemExit, MemoryError, AssertionError, __HOLE__): raise except Exception as why: exc = XRDSError('Error parsing document as XML') exc.reason = why raise exc else: tree = ElementTree.ElementTree(element) if not isXRDS(tree): raise XRDSError('Not an XRDS document') return tree
ImportError
dataset/ETHPy150Open necaris/python3-openid/openid/yadis/etxrd.py/parseXRDS
6,704
def getCanonicalID(iname, xrd_tree): """Return the CanonicalID from this XRDS document. @param iname: the XRI being resolved. @type iname: unicode @param xrd_tree: The XRDS output from the resolver. @type xrd_tree: ElementTree @returns: The XRI CanonicalID or None. @returntype: unicode or None """ xrd_list = xrd_tree.findall(xrd_tag) xrd_list.reverse() try: canonicalID = xri.XRI(xrd_list[0].findall(canonicalID_tag)[0].text) except __HOLE__: return None childID = canonicalID.lower() for xrd in xrd_list[1:]: parent_sought = childID.rsplit("!", 1)[0] parent = xri.XRI(xrd.findtext(canonicalID_tag)) if parent_sought != parent.lower(): raise XRDSFraud("%r can not come from %s" % (childID, parent)) childID = parent_sought root = xri.rootAuthority(iname) if not xri.providerIsAuthoritative(root, childID): raise XRDSFraud("%r can not come from root %r" % (childID, root)) return canonicalID
IndexError
dataset/ETHPy150Open necaris/python3-openid/openid/yadis/etxrd.py/getCanonicalID
6,705
def getPriority(element): """Get the priority of this element Returns Max if no priority is specified or the priority value is invalid. """ try: return getPriorityStrict(element) except __HOLE__: return Max
ValueError
dataset/ETHPy150Open necaris/python3-openid/openid/yadis/etxrd.py/getPriority
6,706
def paginate(request, queryset_or_list, per_page=25, endless=True): if endless: paginator_class = EndlessPaginator else: paginator_class = BetterPaginator paginator = paginator_class(queryset_or_list, per_page) query_dict = request.GET.copy() if 'p' in query_dict: del query_dict['p'] try: page = int(request.GET.get('p', 1)) except (ValueError, __HOLE__): page = 1 if page < 1: page = 1 context = { 'query_string': query_dict.urlencode(), 'paginator': paginator.get_context(page), } return context
TypeError
dataset/ETHPy150Open dcramer/django-paging/paging/helpers.py/paginate
6,707
def teardown(state): from django.conf import settings try: # Removing the temporary TEMP_DIR. Ensure we pass in unicode # so that it will successfully remove temp trees containing # non-ASCII filenames on Windows. (We're assuming the temp dir # name itself does not contain non-ASCII characters.) shutil.rmtree(six.text_type(TEMP_DIR)) except __HOLE__: print('Failed to remove temp directory: %s' % TEMP_DIR) # Restore the old settings. for key, value in state.items(): setattr(settings, key, value)
OSError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/Django-1.6.10/tests/runtests.py/teardown
6,708
def bisect_tests(bisection_label, options, test_labels): state = setup(int(options.verbosity), test_labels) test_labels = test_labels or get_installed() print('***** Bisecting test suite: %s' % ' '.join(test_labels)) # Make sure the bisection point isn't in the test list # Also remove tests that need to be run in specific combinations for label in [bisection_label, 'model_inheritance_same_model_name']: try: test_labels.remove(label) except __HOLE__: pass subprocess_args = [ sys.executable, upath(__file__), '--settings=%s' % options.settings] if options.failfast: subprocess_args.append('--failfast') if options.verbosity: subprocess_args.append('--verbosity=%s' % options.verbosity) if not options.interactive: subprocess_args.append('--noinput') iteration = 1 while len(test_labels) > 1: midpoint = len(test_labels)/2 test_labels_a = test_labels[:midpoint] + [bisection_label] test_labels_b = test_labels[midpoint:] + [bisection_label] print('***** Pass %da: Running the first half of the test suite' % iteration) print('***** Test labels: %s' % ' '.join(test_labels_a)) failures_a = subprocess.call(subprocess_args + test_labels_a) print('***** Pass %db: Running the second half of the test suite' % iteration) print('***** Test labels: %s' % ' '.join(test_labels_b)) print('') failures_b = subprocess.call(subprocess_args + test_labels_b) if failures_a and not failures_b: print("***** Problem found in first half. Bisecting again...") iteration = iteration + 1 test_labels = test_labels_a[:-1] elif failures_b and not failures_a: print("***** Problem found in second half. Bisecting again...") iteration = iteration + 1 test_labels = test_labels_b[:-1] elif failures_a and failures_b: print("***** Multiple sources of failure found") break else: print("***** No source of failure found... try pair execution (--pair)") break if len(test_labels) == 1: print("***** Source of error: %s" % test_labels[0]) teardown(state)
ValueError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/Django-1.6.10/tests/runtests.py/bisect_tests
6,709
def paired_tests(paired_test, options, test_labels): state = setup(int(options.verbosity), test_labels) test_labels = test_labels or get_installed() print('***** Trying paired execution') # Make sure the constant member of the pair isn't in the test list # Also remove tests that need to be run in specific combinations for label in [paired_test, 'model_inheritance_same_model_name']: try: test_labels.remove(label) except __HOLE__: pass subprocess_args = [ sys.executable, upath(__file__), '--settings=%s' % options.settings] if options.failfast: subprocess_args.append('--failfast') if options.verbosity: subprocess_args.append('--verbosity=%s' % options.verbosity) if not options.interactive: subprocess_args.append('--noinput') for i, label in enumerate(test_labels): print('***** %d of %d: Check test pairing with %s' % ( i + 1, len(test_labels), label)) failures = subprocess.call(subprocess_args + [label, paired_test]) if failures: print('***** Found problem pair with %s' % label) return print('***** No problem pair found') teardown(state)
ValueError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/Django-1.6.10/tests/runtests.py/paired_tests
6,710
def ImportPackage(packagepath, curs): """ Imports package data from the receipt at packagepath into our internal package database. """ bompath = os.path.join(packagepath, 'Contents/Archive.bom') infopath = os.path.join(packagepath, 'Contents/Info.plist') pkgname = os.path.basename(packagepath) if not os.path.exists(packagepath): munkicommon.display_error("%s not found.", packagepath) return if not os.path.isdir(packagepath): # Every machine I've seen has a bogus BSD.pkg, # so we won't print a warning for that specific one. if pkgname != "BSD.pkg": munkicommon.display_warning( "%s is not a valid receipt. Skipping.", packagepath) return if not os.path.exists(bompath): # look in receipt's Resources directory bomname = os.path.splitext(pkgname)[0] + '.bom' bompath = os.path.join( packagepath, "Contents/Resources", bomname) if not os.path.exists(bompath): munkicommon.display_warning( "%s has no BOM file. Skipping.", packagepath) return if not os.path.exists(infopath): munkicommon.display_warning( "%s has no Info.plist. Skipping.", packagepath) return timestamp = os.stat(packagepath).st_mtime owner = 0 plist = FoundationPlist.readPlist(infopath) if "CFBundleIdentifier" in plist: pkgid = plist["CFBundleIdentifier"] elif "Bundle identifier" in plist: # special case for JAMF Composer generated packages. WTF? pkgid = plist["Bundle identifier"] else: pkgid = pkgname if "CFBundleShortVersionString" in plist: vers = plist["CFBundleShortVersionString"] elif "Bundle versions string, short" in plist: # another special case for JAMF Composer-generated packages. Wow. vers = plist["Bundle versions string, short"] else: vers = "1.0" if "IFPkgRelocatedPath" in plist: ppath = plist["IFPkgRelocatedPath"] ppath = ppath.lstrip('./').rstrip('/') else: ppath = "" values_t = (timestamp, owner, pkgid, vers, ppath, pkgname) curs.execute( '''INSERT INTO pkgs (timestamp, owner, pkgid, vers, ppath, pkgname) values (?, ?, ?, ?, ?, ?)''', values_t) pkgkey = curs.lastrowid cmd = ["/usr/bin/lsbom", bompath] proc = subprocess.Popen(cmd, shell=False, bufsize=1, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) while True: line = proc.stdout.readline().decode('UTF-8') if not line and (proc.poll() != None): break try: item = line.rstrip("\n").split("\t") path = item[0] perms = item[1] uidgid = item[2].split("/") uid = uidgid[0] gid = uidgid[1] except __HOLE__: # we really only care about the path perms = "0000" uid = "0" gid = "0" try: if path != ".": # special case for MS Office 2008 installers if ppath == "tmp/com.microsoft.updater/office_location": ppath = "Applications" # prepend the ppath so the paths match the actual install # locations path = path.lstrip("./") if ppath: path = ppath + "/" + path values_t = (path, ) row = curs.execute( 'SELECT path_key from paths where path = ?', values_t).fetchone() if not row: curs.execute( 'INSERT INTO paths (path) values (?)', values_t) pathkey = curs.lastrowid else: pathkey = row[0] values_t = (pkgkey, pathkey, uid, gid, perms) curs.execute( 'INSERT INTO pkgs_paths (pkg_key, path_key, uid, gid, ' 'perms) values (?, ?, ?, ?, ?)', values_t) except sqlite3.DatabaseError: pass
IndexError
dataset/ETHPy150Open munki/munki/code/client/munkilib/removepackages.py/ImportPackage
6,711
def ImportBom(bompath, curs): """ Imports package data into our internal package database using a combination of the bom file and data in Apple's package database into our internal package database. """ # If we completely trusted the accuracy of Apple's database, we wouldn't # need the bom files, but in my environment at least, the bom files are # a better indicator of what flat packages have actually been installed # on the current machine. # We still need to consult Apple's package database # because the bom files are missing metadata about the package. pkgname = os.path.basename(bompath) timestamp = os.stat(bompath).st_mtime owner = 0 pkgid = os.path.splitext(pkgname)[0] vers = "1.0" ppath = "" # try to get metadata from applepkgdb proc = subprocess.Popen(["/usr/sbin/pkgutil", "--pkg-info-plist", pkgid], bufsize=1, stdout=subprocess.PIPE, stderr=subprocess.PIPE) (pliststr, dummy_err) = proc.communicate() if pliststr: plist = FoundationPlist.readPlistFromString(pliststr) if "install-location" in plist: ppath = plist["install-location"] ppath = ppath.lstrip('./').rstrip('/') if "pkg-version" in plist: vers = plist["pkg-version"] if "install-time" in plist: timestamp = plist["install-time"] values_t = (timestamp, owner, pkgid, vers, ppath, pkgname) curs.execute( '''INSERT INTO pkgs (timestamp, owner, pkgid, vers, ppath, pkgname) values (?, ?, ?, ?, ?, ?)''', values_t) pkgkey = curs.lastrowid cmd = ["/usr/bin/lsbom", bompath] proc = subprocess.Popen(cmd, shell=False, bufsize=1, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) while True: line = proc.stdout.readline().decode('UTF-8') if not line and (proc.poll() != None): break try: item = line.rstrip("\n").split("\t") path = item[0] perms = item[1] uidgid = item[2].split("/") uid = uidgid[0] gid = uidgid[1] except __HOLE__: # we really only care about the path perms = "0000" uid = "0" gid = "0" if path != ".": # special case for MS Office 2008 installers if ppath == "tmp/com.microsoft.updater/office_location": ppath = "Applications" #prepend the ppath so the paths match the actual install locations path = path.lstrip("./") if ppath: path = ppath + "/" + path values_t = (path, ) row = curs.execute( 'SELECT path_key from paths where path = ?', values_t).fetchone() if not row: curs.execute( 'INSERT INTO paths (path) values (?)', values_t) pathkey = curs.lastrowid else: pathkey = row[0] values_t = (pkgkey, pathkey, uid, gid, perms) curs.execute( 'INSERT INTO pkgs_paths (pkg_key, path_key, uid, gid, perms) ' 'values (?, ?, ?, ?, ?)', values_t)
IndexError
dataset/ETHPy150Open munki/munki/code/client/munkilib/removepackages.py/ImportBom
6,712
def initDatabase(forcerebuild=False): """ Builds or rebuilds our internal package database. """ if not shouldRebuildDB(packagedb) and not forcerebuild: return True munkicommon.display_status_minor( 'Gathering information on installed packages') if os.path.exists(packagedb): try: os.remove(packagedb) except (OSError, __HOLE__): munkicommon.display_error( "Could not remove out-of-date receipt database.") return False os_version = munkicommon.getOsVersion(as_tuple=True) pkgcount = 0 receiptsdir = "/Library/Receipts" bomsdir = "/Library/Receipts/boms" if os.path.exists(receiptsdir): receiptlist = munkicommon.listdir(receiptsdir) for item in receiptlist: if item.endswith(".pkg"): pkgcount += 1 if os.path.exists(bomsdir): bomslist = munkicommon.listdir(bomsdir) for item in bomslist: if item.endswith(".bom"): pkgcount += 1 if os_version >= (10, 6): # Snow Leopard or later pkglist = [] cmd = ['/usr/sbin/pkgutil', '--pkgs'] proc = subprocess.Popen(cmd, shell=False, bufsize=1, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE) while True: line = proc.stdout.readline() if not line and (proc.poll() != None): break pkglist.append(line.rstrip('\n')) pkgcount += 1 conn = sqlite3.connect(packagedb) conn.text_factory = str curs = conn.cursor() CreateTables(curs) currentpkgindex = 0 munkicommon.display_percent_done(0, pkgcount) if os.path.exists(receiptsdir): receiptlist = munkicommon.listdir(receiptsdir) for item in receiptlist: if munkicommon.stopRequested(): curs.close() conn.close() #our package db isn't valid, so we should delete it os.remove(packagedb) return False if item.endswith(".pkg"): receiptpath = os.path.join(receiptsdir, item) munkicommon.display_detail("Importing %s...", receiptpath) ImportPackage(receiptpath, curs) currentpkgindex += 1 munkicommon.display_percent_done(currentpkgindex, pkgcount) if os.path.exists(bomsdir): bomslist = munkicommon.listdir(bomsdir) for item in bomslist: if munkicommon.stopRequested(): curs.close() conn.close() #our package db isn't valid, so we should delete it os.remove(packagedb) return False if item.endswith(".bom"): bompath = os.path.join(bomsdir, item) munkicommon.display_detail("Importing %s...", bompath) ImportBom(bompath, curs) currentpkgindex += 1 munkicommon.display_percent_done(currentpkgindex, pkgcount) if os_version >= (10, 6): # Snow Leopard or later for pkg in pkglist: if munkicommon.stopRequested(): curs.close() conn.close() #our package db isn't valid, so we should delete it os.remove(packagedb) return False munkicommon.display_detail("Importing %s...", pkg) ImportFromPkgutil(pkg, curs) currentpkgindex += 1 munkicommon.display_percent_done(currentpkgindex, pkgcount) # in case we didn't quite get to 100% for some reason if currentpkgindex < pkgcount: munkicommon.display_percent_done(pkgcount, pkgcount) # commit and close the db when we're done. conn.commit() curs.close() conn.close() return True
IOError
dataset/ETHPy150Open munki/munki/code/client/munkilib/removepackages.py/initDatabase
6,713
def removeFilesystemItems(removalpaths, forcedeletebundles): """ Attempts to remove all the paths in the array removalpaths """ # we sort in reverse because we can delete from the bottom up, # clearing a directory before we try to remove the directory itself removalpaths.sort(reverse=True) removalerrors = "" removalcount = len(removalpaths) munkicommon.display_status_minor( 'Removing %s filesystem items' % removalcount) itemcount = len(removalpaths) itemindex = 0 munkicommon.display_percent_done(itemindex, itemcount) for item in removalpaths: itemindex += 1 pathtoremove = "/" + item # use os.path.lexists so broken links return true # so we can remove them if os.path.lexists(pathtoremove): munkicommon.display_detail("Removing: " + pathtoremove) if (os.path.isdir(pathtoremove) and not os.path.islink(pathtoremove)): diritems = munkicommon.listdir(pathtoremove) if diritems == ['.DS_Store']: # If there's only a .DS_Store file # we'll consider it empty ds_storepath = pathtoremove + "/.DS_Store" try: os.remove(ds_storepath) except (OSError, IOError): pass diritems = munkicommon.listdir(pathtoremove) if diritems == []: # directory is empty try: os.rmdir(pathtoremove) except (OSError, IOError), err: msg = "Couldn't remove directory %s - %s" % ( pathtoremove, err) munkicommon.display_error(msg) removalerrors = removalerrors + "\n" + msg else: # the directory is marked for deletion but isn't empty. # if so directed, if it's a bundle (like .app), we should # remove it anyway - no use having a broken bundle hanging # around if forcedeletebundles and isBundle(pathtoremove): munkicommon.display_warning( "Removing non-empty bundle: %s", pathtoremove) retcode = subprocess.call(['/bin/rm', '-r', pathtoremove]) if retcode: msg = "Couldn't remove bundle %s" % pathtoremove munkicommon.display_error(msg) removalerrors = removalerrors + "\n" + msg else: # if this path is inside a bundle, and we've been # directed to force remove bundles, # we don't need to warn because it's going to be # removed with the bundle. # Otherwise, we should warn about non-empty # directories. if not insideBundle(pathtoremove) or \ not forcedeletebundles: msg = \ "Did not remove %s because it is not empty." % \ pathtoremove munkicommon.display_error(msg) removalerrors = removalerrors + "\n" + msg else: # not a directory, just unlink it # I was using rm instead of Python because I don't trust # handling of resource forks with Python #retcode = subprocess.call(['/bin/rm', pathtoremove]) # but man that's slow. # I think there's a lot of overhead with the # subprocess call. I'm going to use os.remove. # I hope I don't regret it. retcode = '' try: os.remove(pathtoremove) except (__HOLE__, IOError), err: msg = "Couldn't remove item %s: %s" % (pathtoremove, err) munkicommon.display_error(msg) removalerrors = removalerrors + "\n" + msg munkicommon.display_percent_done(itemindex, itemcount) if removalerrors: munkicommon.display_info( "---------------------------------------------------") munkicommon.display_info( "There were problems removing some filesystem items.") munkicommon.display_info( "---------------------------------------------------") munkicommon.display_info(removalerrors)
OSError
dataset/ETHPy150Open munki/munki/code/client/munkilib/removepackages.py/removeFilesystemItems
6,714
def test_binary_operators(self): data1 = np.random.randn(20) data2 = np.random.randn(20) data1[::2] = np.nan data2[::3] = np.nan arr1 = SparseArray(data1) arr2 = SparseArray(data2) data1[::2] = 3 data2[::3] = 3 farr1 = SparseArray(data1, fill_value=3) farr2 = SparseArray(data2, fill_value=3) def _check_op(op, first, second): res = op(first, second) exp = SparseArray(op(first.values, second.values), fill_value=first.fill_value) tm.assertIsInstance(res, SparseArray) assert_almost_equal(res.values, exp.values) res2 = op(first, second.values) tm.assertIsInstance(res2, SparseArray) tm.assert_sp_array_equal(res, res2) res3 = op(first.values, second) tm.assertIsInstance(res3, SparseArray) tm.assert_sp_array_equal(res, res3) res4 = op(first, 4) tm.assertIsInstance(res4, SparseArray) # ignore this if the actual op raises (e.g. pow) try: exp = op(first.values, 4) exp_fv = op(first.fill_value, 4) assert_almost_equal(res4.fill_value, exp_fv) assert_almost_equal(res4.values, exp) except __HOLE__: pass def _check_inplace_op(op): tmp = arr1.copy() self.assertRaises(NotImplementedError, op, tmp, arr2) bin_ops = [operator.add, operator.sub, operator.mul, operator.truediv, operator.floordiv, operator.pow] for op in bin_ops: _check_op(op, arr1, arr2) _check_op(op, farr1, farr2) inplace_ops = ['iadd', 'isub', 'imul', 'itruediv', 'ifloordiv', 'ipow'] for op in inplace_ops: _check_inplace_op(getattr(operator, op))
ValueError
dataset/ETHPy150Open pydata/pandas/pandas/sparse/tests/test_array.py/TestSparseArray.test_binary_operators
6,715
def validate(self, value): for t in self.types: try: return wtypes.validate_value(t, value) except (ValueError, __HOLE__): pass else: raise ValueError( _("Wrong type. Expected '%(type)s', got '%(value)s'") % {'type': self.types, 'value': type(value)})
TypeError
dataset/ETHPy150Open openstack/solum/solum/api/controllers/v1/datamodel/types.py/MultiType.validate
6,716
def save(self, force_insert=False, force_update=False, using=None): """ Calculate position (max+1) for new records """ if not self.position: max = self.__class__.objects.filter().aggregate(models.Max('position')) try: self.position = max['position__max'] + 1 except __HOLE__: self.position = 1 return super(OrderableModel, self).save(force_insert=force_insert, force_update=force_update, using=using)
TypeError
dataset/ETHPy150Open baskoopmans/djcommon/djcommon/models.py/OrderableModel.save
6,717
def get_action_args(self, args): """Parse dictionary created by routes library.""" try: del args['controller'] except __HOLE__: pass try: del args['format'] except KeyError: pass return args
KeyError
dataset/ETHPy150Open openstack/ooi/ooi/wsgi/__init__.py/Resource.get_action_args
6,718
def __call__(self, request, args): """Control the method dispatch.""" action_args = self.get_action_args(args) action = action_args.pop('action', None) try: accept = request.get_best_match_content_type() content_type = request.get_content_type() except exception.InvalidContentType as e: msg = e.format_message() return Fault(webob.exc.HTTPNotAcceptable(explanation=msg)) body = request.body # Get the implementing method try: method = self.get_method(request, action, content_type, body) except (AttributeError, __HOLE__): return Fault(webob.exc.HTTPNotFound()) except KeyError as ex: msg = "There is no such action: %s" % ex.args[0] return Fault(webob.exc.HTTPBadRequest(explanation=msg)) contents = {} if request.should_have_body(): # allow empty body with PUT and POST if request.content_length == 0: contents = {'body': None} else: contents["body"] = body action_args.update(contents) response = None try: with ResourceExceptionHandler(): action_result = self.dispatch(method, request, action_args) except Fault as ex: response = ex # No exceptions, so create a response # NOTE(aloga): if the middleware returns None, the pipeline will # continue, but we do not want to do so, so we convert the action # result to a ResponseObject. if not response: if isinstance(action_result, ResponseObject): resp_obj = action_result else: resp_obj = ResponseObject(action_result) response = resp_obj.serialize(request, accept, self.default_serializers) return response
TypeError
dataset/ETHPy150Open openstack/ooi/ooi/wsgi/__init__.py/Resource.__call__
6,719
def get_serializer(self, content_type, default_serializers=None): """Returns the serializer for the wrapped object. Returns the serializer for the wrapped object subject to the indicated content type. If no serializer matching the content type is attached, an appropriate serializer drawn from the default serializers will be used. If no appropriate serializer is available, raises InvalidContentType. """ default_serializers = default_serializers or {} try: if content_type is None: content_type = "text/plain" mtype = serializers.get_media_map().get(content_type, content_type) if mtype in self.serializers: return mtype, self.serializers[mtype] else: return mtype, default_serializers[mtype] except (KeyError, __HOLE__): raise exception.InvalidContentType(content_type=content_type)
TypeError
dataset/ETHPy150Open openstack/ooi/ooi/wsgi/__init__.py/ResponseObject.get_serializer
6,720
def authorize_same_account(account_to_match): def auth_callback_same_account(req): try: _ver, acc, _rest = req.split_path(2, 3, True) except __HOLE__: return HTTPUnauthorized(request=req) if acc == account_to_match: return None else: return HTTPUnauthorized(request=req) return auth_callback_same_account
ValueError
dataset/ETHPy150Open openstack/swift/swift/common/middleware/tempurl.py/authorize_same_account
6,721
def authorize_same_container(account_to_match, container_to_match): def auth_callback_same_container(req): try: _ver, acc, con, _rest = req.split_path(3, 4, True) except __HOLE__: return HTTPUnauthorized(request=req) if acc == account_to_match and con == container_to_match: return None else: return HTTPUnauthorized(request=req) return auth_callback_same_container
ValueError
dataset/ETHPy150Open openstack/swift/swift/common/middleware/tempurl.py/authorize_same_container
6,722
def _get_account_and_container(self, env): """ Returns just the account and container for the request, if it's an object request and one of the configured methods; otherwise, None is returned. :param env: The WSGI environment for the request. :returns: (Account str, container str) or (None, None). """ if env['REQUEST_METHOD'] in self.conf['methods']: try: ver, acc, cont, obj = split_path(env['PATH_INFO'], 4, 4, True) except __HOLE__: return (None, None) if ver == 'v1' and obj.strip('/'): return (acc, cont) return (None, None)
ValueError
dataset/ETHPy150Open openstack/swift/swift/common/middleware/tempurl.py/TempURL._get_account_and_container
6,723
def _get_temp_url_info(self, env): """ Returns the provided temporary URL parameters (sig, expires), if given and syntactically valid. Either sig or expires could be None if not provided. If provided, expires is also converted to an int if possible or 0 if not, and checked for expiration (returns 0 if expired). :param env: The WSGI environment for the request. :returns: (sig, expires, filename, inline) as described above. """ temp_url_sig = temp_url_expires = filename = inline = None qs = parse_qs(env.get('QUERY_STRING', ''), keep_blank_values=True) if 'temp_url_sig' in qs: temp_url_sig = qs['temp_url_sig'][0] if 'temp_url_expires' in qs: try: temp_url_expires = int(qs['temp_url_expires'][0]) except __HOLE__: temp_url_expires = 0 if temp_url_expires < time(): temp_url_expires = 0 if 'filename' in qs: filename = qs['filename'][0] if 'inline' in qs: inline = True return temp_url_sig, temp_url_expires, filename, inline
ValueError
dataset/ETHPy150Open openstack/swift/swift/common/middleware/tempurl.py/TempURL._get_temp_url_info
6,724
@classmethod def deserialize(cls, data): message = openxc_pb2.VehicleMessage() try: message.ParseFromString(data) except google.protobuf.message.DecodeError as e: pass except __HOLE__ as e: LOG.warn("Unable to parse protobuf: %s", e) else: return cls._protobuf_to_dict(message)
UnicodeDecodeError
dataset/ETHPy150Open openxc/openxc-python/openxc/formats/binary.py/ProtobufFormatter.deserialize
6,725
@staticmethod def get_sync_data(): try: requestor_ids = os.listdir(cfg.CONF.pd_confs) except __HOLE__: return [] sync_data = [] requestors = (r.split(':') for r in requestor_ids if r.count(':') == 2) for router_id, subnet_id, ri_ifname in requestors: pd_info = pd.PDInfo() pd_info.router_id = router_id pd_info.subnet_id = subnet_id pd_info.ri_ifname = ri_ifname pd_info.driver = PDDibbler(router_id, subnet_id, ri_ifname) pd_info.client_started = ( pd_info.driver._is_dibbler_client_running()) pd_info.prefix = pd_info.driver.get_prefix() sync_data.append(pd_info) return sync_data
OSError
dataset/ETHPy150Open openstack/neutron/neutron/agent/linux/dibbler.py/PDDibbler.get_sync_data
6,726
def run(self): """Run the rejected Application""" self.setup() self._mcp = self._master_control_program() try: self._mcp.run() except __HOLE__: LOGGER.info('Caught CTRL-C, shutting down') if self.is_running: self.stop()
KeyboardInterrupt
dataset/ETHPy150Open gmr/rejected/rejected/controller.py/Controller.run
6,727
def nextToken(self): # skip whitespace while not self.isEOF() and self.is_whitespace(): self.next_ch() if self.isEOF(): return Tok(type = EOF) # first, try to match token with 2 or more chars t = self.match_pattern() if t: return t # second, we want 1-char tokens te = self.curr_ch() try: ty = single_char_lookup[te] except __HOLE__: raise TokenStreamException( "Unexpected char %r in column %u." % (self.curr_ch(), self._pos)) self.next_ch() return Tok(type=ty, text=te, col=self._pos)
KeyError
dataset/ETHPy150Open kennethreitz/tablib/tablib/packages/xlwt3/ExcelFormulaLexer.py/Lexer.nextToken
6,728
def prepare_subprocess(): # don't create core file try: setrlimit(RLIMIT_CORE, (0, 0)) except (__HOLE__, resource_error): pass
ValueError
dataset/ETHPy150Open amrdraz/kodr/app/brython/www/src/Lib/test/test_faulthandler.py/prepare_subprocess
6,729
def mkdir(path): """ mkdir(path) Creates the directory including parent directories """ try: os.makedirs(path) except __HOLE__: pass
OSError
dataset/ETHPy150Open facebook/IT-CPE/code/lib/modules/fs_tools.py/mkdir
6,730
def append(self, member, tarobj): header = '' for field in self.header_fields: value = getattr(member, field) if field == 'type': field = 'typeflag' elif field == 'name': if member.isdir() and not value.endswith('/'): value += '/' header += '{0}{1}'.format(field, value) h = None try: if member.size > 0: f = tarobj.extractfile(member) h = sha256_file(f, header) else: h = sha256_string(header) except __HOLE__: h = sha256_string(header) self.hashes.append(h)
KeyError
dataset/ETHPy150Open docker/docker-registry/docker_registry/lib/checksums.py/TarSum.append
6,731
def fetchone(self): # PEP 249 self._wait_to_finish() if not self.has_result_set: raise ProgrammingError("Tried to fetch but no results.") log.debug('Fetching a single row') try: return next(self) except __HOLE__: return None
StopIteration
dataset/ETHPy150Open cloudera/impyla/impala/hiveserver2.py/HiveServer2Cursor.fetchone
6,732
def fetchmany(self, size=None): # PEP 249 self._wait_to_finish() if not self.has_result_set: raise ProgrammingError("Tried to fetch but no results.") if size is None: size = self.arraysize log.debug('Fetching up to %s result rows', size) local_buffer = [] i = 0 while i < size: try: local_buffer.append(next(self)) i += 1 except __HOLE__: break return local_buffer
StopIteration
dataset/ETHPy150Open cloudera/impyla/impala/hiveserver2.py/HiveServer2Cursor.fetchmany
6,733
def fetchall(self): # PEP 249 self._wait_to_finish() log.debug('Fetching all result rows') try: return list(self) except __HOLE__: return []
StopIteration
dataset/ETHPy150Open cloudera/impyla/impala/hiveserver2.py/HiveServer2Cursor.fetchall
6,734
def connect(host, port, timeout=None, use_ssl=False, ca_cert=None, user=None, password=None, kerberos_service_name='impala', auth_mechanism=None): log.debug('Connecting to HiveServer2 %s:%s with %s authentication ' 'mechanism', host, port, auth_mechanism) sock = get_socket(host, port, use_ssl, ca_cert) if timeout is not None: timeout = timeout * 1000. # TSocket expects millis if six.PY2: sock.setTimeout(timeout) elif six.PY3: try: # thriftpy has a release where set_timeout is missing sock.set_timeout(timeout) except __HOLE__: sock.socket_timeout = timeout sock.connect_timeout = timeout transport = get_transport(sock, host, kerberos_service_name, auth_mechanism, user, password) transport.open() protocol = TBinaryProtocol(transport) if six.PY2: # ThriftClient == ImpalaHiveServer2Service.Client service = ThriftClient(protocol) elif six.PY3: # ThriftClient == TClient service = ThriftClient(ImpalaHiveServer2Service, protocol) log.debug('sock=%s transport=%s protocol=%s service=%s', sock, transport, protocol, service) return HS2Service(service)
AttributeError
dataset/ETHPy150Open cloudera/impyla/impala/hiveserver2.py/connect
6,735
def build_summary_table(summary, idx, is_fragment_root, indent_level, output): """Direct translation of Coordinator::PrintExecSummary() to recursively build a list of rows of summary statistics, one per exec node summary: the TExecSummary object that contains all the summary data idx: the index of the node to print is_fragment_root: true if the node to print is the root of a fragment (and therefore feeds into an exchange) indent_level: the number of spaces to print before writing the node's label, to give the appearance of a tree. The 0th child of a node has the same indent_level as its parent. All other children have an indent_level of one greater than their parent. output: the list of rows into which to append the rows produced for this node and its children. Returns the index of the next exec node in summary.exec_nodes that should be processed, used internally to this method only. """ # pylint: disable=too-many-locals attrs = ["latency_ns", "cpu_time_ns", "cardinality", "memory_used"] # Initialise aggregate and maximum stats agg_stats, max_stats = TExecStats(), TExecStats() for attr in attrs: setattr(agg_stats, attr, 0) setattr(max_stats, attr, 0) node = summary.nodes[idx] for stats in node.exec_stats: for attr in attrs: val = getattr(stats, attr) if val is not None: setattr(agg_stats, attr, getattr(agg_stats, attr) + val) setattr(max_stats, attr, max(getattr(max_stats, attr), val)) if len(node.exec_stats) > 0: avg_time = agg_stats.latency_ns / len(node.exec_stats) else: avg_time = 0 # If the node is a broadcast-receiving exchange node, the cardinality of # rows produced is the max over all instances (which should all have # received the same number of rows). Otherwise, the cardinality is the sum # over all instances which process disjoint partitions. if node.is_broadcast and is_fragment_root: cardinality = max_stats.cardinality else: cardinality = agg_stats.cardinality est_stats = node.estimated_stats label_prefix = "" if indent_level > 0: label_prefix = "|" if is_fragment_root: label_prefix += " " * indent_level else: label_prefix += "--" * indent_level def prettyprint(val, units, divisor): for unit in units: if val < divisor: if unit == units[0]: return "%d%s" % (val, unit) else: return "%3.2f%s" % (val, unit) val /= divisor def prettyprint_bytes(byte_val): return prettyprint( byte_val, [' B', ' KB', ' MB', ' GB', ' TB'], 1024.0) def prettyprint_units(unit_val): return prettyprint(unit_val, ["", "K", "M", "B"], 1000.0) def prettyprint_time(time_val): return prettyprint(time_val, ["ns", "us", "ms", "s"], 1000.0) row = [label_prefix + node.label, len(node.exec_stats), prettyprint_time(avg_time), prettyprint_time(max_stats.latency_ns), prettyprint_units(cardinality), prettyprint_units(est_stats.cardinality), prettyprint_bytes(max_stats.memory_used), prettyprint_bytes(est_stats.memory_used), node.label_detail] output.append(row) try: sender_idx = summary.exch_to_sender_map[idx] # This is an exchange node, so the sender is a fragment root, and # should be printed next. build_summary_table(summary, sender_idx, True, indent_level, output) except (__HOLE__, TypeError): # Fall through if idx not in map, or if exch_to_sender_map itself is # not set pass idx += 1 if node.num_children > 0: first_child_output = [] idx = build_summary_table(summary, idx, False, indent_level, first_child_output) # pylint: disable=unused-variable # TODO: is child_idx supposed to be unused? See #120 for child_idx in range(1, node.num_children): # All other children are indented (we only have 0, 1 or 2 children # for every exec node at the moment) idx = build_summary_table(summary, idx, False, indent_level + 1, output) output += first_child_output return idx
KeyError
dataset/ETHPy150Open cloudera/impyla/impala/hiveserver2.py/build_summary_table
6,736
def _get(self, call, url=None, **kwargs): """ Sends an HTTP GET request to the specified URL, and returns the JSON object received (if any), or whatever answer it got otherwise. """ if not url: url = self.get_api_url() response = self.session.get(url + call, **kwargs) try: return response.json() except __HOLE__: return response.text
ValueError
dataset/ETHPy150Open Doist/todoist-python/todoist/api.py/TodoistAPI._get
6,737
def _post(self, call, url=None, **kwargs): """ Sends an HTTP POST request to the specified URL, and returns the JSON object received (if any), or whatever answer it got otherwise. """ if not url: url = self.get_api_url() response = self.session.post(url + call, **kwargs) try: return response.json() except __HOLE__: return response.text
ValueError
dataset/ETHPy150Open Doist/todoist-python/todoist/api.py/TodoistAPI._post
6,738
def readchar(fd): """ Read a character from the PTY at `fd`, or nothing if no data to read. """ while True: ready = wait(fd) if len(ready) == 0: return six.binary_type() else: for s in ready: try: return os.read(s, 1) except __HOLE__ as ex: if ex.errno == errno.EIO: # exec ends with: # OSError: [Errno 5] Input/output error # no idea why return "" raise
OSError
dataset/ETHPy150Open d11wtq/dockerpty/tests/util.py/readchar
6,739
def proc_f32(filename): '''Load an f32 file that has already been processed by the official matlab file converter. That matlab data is saved to an m-file, which is then converted to a numpy '.npz' file. This numpy file is the file actually loaded. This function converts it to a neo block and returns the block. This block can be compared to the block produced by BrainwareF32IO to make sure BrainwareF32IO is working properly block = proc_f32(filename) filename: The file name of the numpy file to load. It should end with '*_f32_py?.npz'. This will be converted to a neo 'file_origin' property with the value '*.f32', so the filename to compare should fit that pattern. 'py?' should be 'py2' for the python 2 version of the numpy file or 'py3' for the python 3 version of the numpy file. example: filename = 'file1_f32_py2.npz' f32 file name = 'file1.f32' ''' filenameorig = os.path.basename(filename[:-12]+'.f32') # create the objects to store other objects block = Block(file_origin=filenameorig) rcg = RecordingChannelGroup(file_origin=filenameorig) rcg.channel_indexes = np.array([], dtype=np.int) rcg.channel_names = np.array([], dtype='S') unit = Unit(file_origin=filenameorig) # load objects into their containers block.recordingchannelgroups.append(rcg) rcg.units.append(unit) try: with np.load(filename) as f32obj: f32file = f32obj.items()[0][1].flatten() except __HOLE__ as exc: if 'as a pickle' in exc.message: block.create_many_to_one_relationship() return block else: raise sweeplengths = [res[0, 0].tolist() for res in f32file['sweeplength']] stims = [res.flatten().tolist() for res in f32file['stim']] sweeps = [res['spikes'].flatten() for res in f32file['sweep'] if res.size] fullf32 = zip(sweeplengths, stims, sweeps) for sweeplength, stim, sweep in fullf32: for trainpts in sweep: if trainpts.size: trainpts = trainpts.flatten().astype('float32') else: trainpts = [] paramnames = ['Param%s' % i for i in range(len(stim))] params = dict(zip(paramnames, stim)) train = SpikeTrain(trainpts, units=pq.ms, t_start=0, t_stop=sweeplength, file_origin=filenameorig) segment = Segment(file_origin=filenameorig, **params) segment.spiketrains = [train] unit.spiketrains.append(train) block.segments.append(segment) block.create_many_to_one_relationship() return block
IOError
dataset/ETHPy150Open NeuralEnsemble/python-neo/neo/test/iotest/test_brainwaref32io.py/proc_f32
6,740
def grep(query): matcher = re.compile(re.escape(query)) t = time() - 1 result = [] for r in get_projects(): for name, path, root, top, fullpath in get_files(r): if time() - t >= 1: redraw() print fullpath t = time() try: if os.stat(fullpath).st_size > MAX_FILESIZE: continue with open(fullpath) as f: source = f.read() matches = matcher.finditer(source) lines = source.splitlines() except __HOLE__: continue for m in matches: start = m.start() line = source.count('\n', 0, start) + 1 offset = start - source.rfind('\n', 0, start) text = lines[line - 1] if len(text) > 100: offstart = max(0, offset - 30) text = text[offstart:offstart+60] + '...' if offstart: text = '...' + text result.append({ 'bufnr': '', 'filename': fullpath, 'pattern': '', 'valid': 1, 'nr': -1, 'lnum': line, 'vcol': 0, 'col': offset, 'text': text.replace('\x00', ' '), 'type': '' }) vfunc.setqflist(result) if result: vim.command('cw') redraw() print '{} matches found'.format(len(result))
OSError
dataset/ETHPy150Open baverman/vial/vial/plugins/grep/plugin.py/grep
6,741
def __init__(self, ourParent, ourMsg, ourIcon=None, *args, **kwargs): Popup.__init__(self, ourParent, *args, **kwargs) self.callback_block_clicked_add(lambda obj: self.delete()) # Add a table to hold dialog image and text to Popup tb = Table(self, size_hint_weight=EXPAND_BOTH) self.part_content_set("default", tb) tb.show() # Add dialog-error Image to table need_ethumb() icon = Icon(self, thumb='True') icon.standard_set(ourIcon) # Using gksudo or sudo fails to load Image here # unless options specify using preserving their existing environment. # may also fail to load other icons but does not raise an exception # in that situation. # Works fine using eSudo as a gksudo alternative, # other alternatives not tested try: dialogImage = Image(self, size_hint_weight=EXPAND_HORIZ, size_hint_align=FILL_BOTH, file=icon.file_get()) tb.pack(dialogImage, 0, 0, 1, 1) dialogImage.show() except __HOLE__: # An error message is displayed for this same error # when aboutWin is initialized so no need to redisplay. pass # Add dialog text to table dialogLabel = Label(self, line_wrap=ELM_WRAP_WORD, size_hint_weight=EXPAND_HORIZ, size_hint_align=FILL_BOTH) dialogLabel.text = ourMsg tb.pack(dialogLabel, 1, 0, 1, 1) dialogLabel.show() # Ok Button ok_btt = Button(self) ok_btt.text = "Ok" ok_btt.callback_clicked_add(lambda obj: self.delete()) ok_btt.show() # add button to popup self.part_content_set("button3", ok_btt)
RuntimeError
dataset/ETHPy150Open JeffHoogland/python-elm-extensions/elmextensions/StandardPopup.py/StandardPopup.__init__
6,742
@contextlib.contextmanager def open_spinner(message): # Interactive spinner goes directly to sys.stdout rather than being routed # through the logging system, but it acts like it has level INFO, # i.e. it's only displayed if we're at level INFO or better. # Non-interactive spinner goes through the logging system, so it is always # in sync with logging configuration. if sys.stdout.isatty() and logger.getEffectiveLevel() <= logging.INFO: spinner = InteractiveSpinner(message) else: spinner = NonInteractiveSpinner(message) try: with hidden_cursor(sys.stdout): yield spinner except __HOLE__: spinner.finish("canceled") raise except Exception: spinner.finish("error") raise else: spinner.finish("done")
KeyboardInterrupt
dataset/ETHPy150Open pypa/pip/pip/utils/ui.py/open_spinner
6,743
def loadScript(script_name): # call other script prefix, suffix = os.path.splitext(script_name) dirname = os.path.relpath(os.path.dirname(script_name)) basename = os.path.basename(script_name)[:-3] if os.path.exists(prefix + ".pyc"): try: os.remove(prefix + ".pyc") except OSError: pass modulename = ".".join((re.sub("/", ".", dirname), basename)) try: module = importlib.import_module(modulename) except __HOLE__, msg: sys.stderr.write('could not import %s - skipped: %s\n' % (modulename, msg)) module = None return module, modulename
ImportError
dataset/ETHPy150Open CGATOxford/cgat/tests/test_commandline.py/loadScript
6,744
def test_cmdline(): '''test style of scripts ''' # start script in order to build the command line parser global ORIGINAL_START if ORIGINAL_START is None: ORIGINAL_START = E.Start # read the first two columns map_option2action = IOTools.readMap( IOTools.openFile(FILENAME_OPTIONLIST), columns=(0, 1), has_header=True) files = [] for label, expression in EXPRESSIONS: f = glob.glob(expression) files.extend(sorted(f)) files = filterFiles(files) # make sure to use the current working directory as # primary lookup. sys.path.insert(0, ".") # files = [ # 'scripts/check_db.py', # 'scripts/cgat_build_report_page.py'] for f in files: if os.path.isdir(f): continue if os.path.basename(f) in EXCLUDE: continue script_name = os.path.abspath(f) pyxfile = (os.path.join(os.path.dirname(f), "_") + os.path.basename(f) + "x") failTest.description = script_name # check if script contains getopt with IOTools.openFile(script_name) as inf: if "getopt" in inf.read(): yield (failTest, "script uses getopt directly: %s" % script_name) continue module, modulename = loadScript(script_name) if module is None: yield (failTest, "module could not be imported: %s\n" % script_name) continue E.Start = LocalStart try: module.main(argv=["--help"]) except AttributeError: yield (failTest, "no main method in %s\n" % script_name) ok_(False, "no main method in %s" % script_name) except __HOLE__: yield (failTest, "script does not use E.Start() %s\n" % script_name) except DummyError: pass for option in PARSER.option_list: # ignore options added by optparse if option.dest is None: continue optstring = option.get_opt_string() if optstring.startswith("--"): optstring = optstring[2:] check_option.description = script_name + ":" + optstring yield(check_option, optstring, os.path.abspath(f), map_option2action) # clear up del sys.modules[modulename] # scripts with pyximport need special handling. # # Multiple imports of pyximport seems to create # some confusion - here, clear up sys.meta_path after # each script if os.path.exists(pyxfile): sys.meta_path = []
SystemExit
dataset/ETHPy150Open CGATOxford/cgat/tests/test_commandline.py/test_cmdline
6,745
def _solve_1_slack_qp(self, constraints, n_samples): C = np.float(self.C) * n_samples # this is how libsvm/svmstruct do it joint_features = [c[0] for c in constraints] losses = [c[1] for c in constraints] joint_feature_matrix = np.vstack(joint_features) n_constraints = len(joint_features) P = cvxopt.matrix(np.dot(joint_feature_matrix, joint_feature_matrix.T)) # q contains loss from margin-rescaling q = cvxopt.matrix(-np.array(losses, dtype=np.float)) # constraints: all alpha must be >zero idy = np.identity(n_constraints) tmp1 = np.zeros(n_constraints) # positivity constraints: if self.negativity_constraint is None: #empty constraints zero_constr = np.zeros(0) joint_features_constr = np.zeros((0, n_constraints)) else: joint_features_constr = joint_feature_matrix.T[self.negativity_constraint] zero_constr = np.zeros(len(self.negativity_constraint)) # put together G = cvxopt.sparse(cvxopt.matrix(np.vstack((-idy, joint_features_constr)))) h = cvxopt.matrix(np.hstack((tmp1, zero_constr))) # equality constraint: sum of all alpha must be = C A = cvxopt.matrix(np.ones((1, n_constraints))) b = cvxopt.matrix([C]) # solve QP model cvxopt.solvers.options['feastol'] = 1e-5 try: solution = cvxopt.solvers.qp(P, q, G, h, A, b) except __HOLE__: solution = {'status': 'error'} if solution['status'] != "optimal": print("regularizing QP!") P = cvxopt.matrix(np.dot(joint_feature_matrix, joint_feature_matrix.T) + 1e-8 * np.eye(joint_feature_matrix.shape[0])) solution = cvxopt.solvers.qp(P, q, G, h, A, b) if solution['status'] != "optimal": raise ValueError("QP solver failed. Try regularizing your QP.") # Lagrange multipliers a = np.ravel(solution['x']) self.old_solution = solution self.prune_constraints(constraints, a) # Support vectors have non zero lagrange multipliers sv = a > self.inactive_threshold * C if self.verbose > 1: print("%d support vectors out of %d points" % (np.sum(sv), n_constraints)) self.w = np.dot(a, joint_feature_matrix) # we needed to flip the sign to make the dual into a minimization # model return -solution['primal objective']
ValueError
dataset/ETHPy150Open pystruct/pystruct/pystruct/learners/one_slack_ssvm.py/OneSlackSSVM._solve_1_slack_qp
6,746
def fit(self, X, Y, constraints=None, warm_start=False, initialize=True): """Learn parameters using cutting plane method. Parameters ---------- X : iterable Traing instances. Contains the structured input objects. No requirement on the particular form of entries of X is made. Y : iterable Training labels. Contains the strctured labels for inputs in X. Needs to have the same length as X. contraints : ignored warm_start : bool, default=False Whether we are warmstarting from a previous fit. initialize : boolean, default=True Whether to initialize the model for the data. Leave this true except if you really know what you are doing. """ if self.verbose: print("Training 1-slack dual structural SVM") cvxopt.solvers.options['show_progress'] = self.verbose > 3 if initialize: self.model.initialize(X, Y) # parse cache_tol parameter if self.cache_tol is None or self.cache_tol == 'auto': self.cache_tol_ = self.tol else: self.cache_tol_ = self.cache_tol if not warm_start: self.w = np.zeros(self.model.size_joint_feature) constraints = [] self.objective_curve_, self.primal_objective_curve_ = [], [] self.cached_constraint_ = [] self.alphas = [] # dual solutions # append constraint given by ground truth to make our life easier constraints.append((np.zeros(self.model.size_joint_feature), 0)) self.alphas.append([self.C]) self.inference_cache_ = None self.timestamps_ = [time()] elif warm_start == "soft": self.w = np.zeros(self.model.size_joint_feature) constraints = [] self.alphas = [] # dual solutions # append constraint given by ground truth to make our life easier constraints.append((np.zeros(self.model.size_joint_feature), 0)) self.alphas.append([self.C]) else: constraints = self.constraints_ self.last_slack_ = -1 # get the joint_feature of the ground truth if getattr(self.model, 'rescale_C', False): joint_feature_gt = self.model.batch_joint_feature(X, Y, Y) else: joint_feature_gt = self.model.batch_joint_feature(X, Y) try: # catch ctrl+c to stop training for iteration in range(self.max_iter): # main loop cached_constraint = False if self.verbose > 0: print("iteration %d" % iteration) if self.verbose > 2: print(self) try: Y_hat, djoint_feature, loss_mean = self._constraint_from_cache( X, Y, joint_feature_gt, constraints) cached_constraint = True except NoConstraint: try: Y_hat, djoint_feature, loss_mean = self._find_new_constraint( X, Y, joint_feature_gt, constraints) self._update_cache(X, Y, Y_hat) except NoConstraint: if self.verbose: print("no additional constraints") if (self.switch_to is not None and self.model.inference_method != self.switch_to): if self.verbose: print("Switching to %s inference" % str(self.switch_to)) self.model.inference_method_ = \ self.model.inference_method self.model.inference_method = self.switch_to continue else: break self.timestamps_.append(time() - self.timestamps_[0]) self._compute_training_loss(X, Y, iteration) constraints.append((djoint_feature, loss_mean)) # compute primal objective last_slack = -np.dot(self.w, djoint_feature) + loss_mean primal_objective = (self.C * len(X) * max(last_slack, 0) + np.sum(self.w ** 2) / 2) self.primal_objective_curve_.append(primal_objective) self.cached_constraint_.append(cached_constraint) objective = self._solve_1_slack_qp(constraints, n_samples=len(X)) # update cache tolerance if cache_tol is auto: if self.cache_tol == "auto" and not cached_constraint: self.cache_tol_ = (primal_objective - objective) / 4 self.last_slack_ = np.max([(-np.dot(self.w, djoint_feature) + loss_mean) for djoint_feature, loss_mean in constraints]) self.last_slack_ = max(self.last_slack_, 0) if self.verbose > 0: # the cutting plane objective can also be computed as # self.C * len(X) * self.last_slack_ + np.sum(self.w**2)/2 print("cutting plane objective: %f, primal objective %f" % (objective, primal_objective)) # we only do this here because we didn't add the gt to the # constraints, which makes the dual behave a bit oddly self.objective_curve_.append(objective) self.constraints_ = constraints if self.logger is not None: self.logger(self, iteration) if self.verbose > 5: print(self.w) except __HOLE__: pass if self.verbose and self.n_jobs == 1: print("calls to inference: %d" % self.model.inference_calls) # compute final objective: self.timestamps_.append(time() - self.timestamps_[0]) primal_objective = self._objective(X, Y) self.primal_objective_curve_.append(primal_objective) self.objective_curve_.append(objective) self.cached_constraint_.append(False) if self.logger is not None: self.logger(self, 'final') if self.verbose > 0: print("final primal objective: %f gap: %f" % (primal_objective, primal_objective - objective)) return self
KeyboardInterrupt
dataset/ETHPy150Open pystruct/pystruct/pystruct/learners/one_slack_ssvm.py/OneSlackSSVM.fit
6,747
def get(self, key, default=None): try: return self.__getitem__(key) except __HOLE__: return default
KeyError
dataset/ETHPy150Open graphql-python/graphene/graphene/utils/proxy_snake_dict.py/ProxySnakeDict.get
6,748
def _activate(locale): # XXX TODO: When it comes time to load .mo files on the fly and merge # them, this is the place to do it. We'll also need to implement our own # caching since the _translations stuff is built on a per locale basis, # not per locale + some key locale = django_trans.to_locale(locale) # Django caches the translation objects here t = django_trans._translations.get(locale, None) if t is not None: return t # Django's activate() simply calls translation() and adds it to a global. # We'll do the same here, first calling django's translation() so it can # do everything it needs to do, and then calling gettext directly to # load the rest. We make a deepcopy because Django will return the en-US # catalog if it doesn't have a locale (but we do). We don't want to merge # our foreign catalog into en-US. Since Django stuck the en-US catalog # into its cache for this locale, we have to update that too. t = copy.deepcopy(django_trans.translation(locale)) t.set_language(locale) try: # When trying to load css, js, and images through the Django server # gettext() throws an exception saying it can't find the .mo files. I # suspect this has something to do with Django trying not to load # extra stuff for requests that won't need it. I do know that I don't # want to try to debug it. This is what Django does in their function # also. # # We check for SETTINGS_MODULE here because if it's not here, then # it's possible we're in a test using override_settings and we don't # want to flip out. settings_module = getattr(settings, 'SETTINGS_MODULE', None) if settings_module: # If you've got extra .mo files to load, this is the place. path = import_module(settings_module).path domain = getattr(settings, 'TEXT_DOMAIN', 'messages') bonus = gettext.translation(domain, path('locale'), [locale], django_trans.DjangoTranslation) t.merge(bonus) # Overwrite t (defaults to en-US) with our real locale's plural form t.plural = bonus.plural except __HOLE__: pass django_trans._translations[locale] = t return t
IOError
dataset/ETHPy150Open clouserw/tower/tower/__init__.py/_activate
6,749
def build_path(repo, path, entries=None, root=None): """ Builds out a tree path, starting with the leaf node, and updating all trees up the parent chain, resulting in (potentially) a new OID for the root tree. If ``entries`` is provided, those entries are inserted (or updated) in the tree for the given path. If ``root`` is provided, the path will be built based off of that tree. Otherwise, it is built off of an empty tree. Accepts an OID or a pygit2.Tree object. The root tree OID is returned, so that it can be included in a commit or stage. """ path = path.strip(os.path.sep) if path is not None and path != '': parent, name = os.path.split(path) else: parent, name = None, None if root is None: # use an empty tree root_id = repo.TreeBuilder().write() root = repo[root_id] if isinstance(root, (basestring, pygit2.Oid)): root = repo[root] if parent is None: # we're at the root tree tb_args = (root.oid,) else: # see if current path exists try: tree = root[path] except __HOLE__: tb_args = () else: tb_args = (tree.oid,) # build tree tb = repo.TreeBuilder(*tb_args) for entry in entries: tb.insert(*entry) oid = tb.write() if parent is None: # we're at the root tree return oid entry = (name, oid, pygit2.GIT_FILEMODE_TREE) if parent == '': # parent is the root tree return build_path(repo, '', (entry,), root) return build_path(repo, parent, (entry,), root)
KeyError
dataset/ETHPy150Open bendavis78/python-gitmodel/gitmodel/utils/path.py/build_path
6,750
def glob1(repo, tree, dirname, pattern): if not dirname: dirname = os.curdir if isinstance(pattern, unicode) and not isinstance(dirname, unicode): dirname = unicode(dirname, sys.getfilesystemencoding() or sys.getdefaultencoding()) if dirname != os.curdir: try: tree = repo[tree[dirname].oid] except __HOLE__: return [] names = [e.name for e in tree] if pattern[0] != '.': names = filter(lambda n: n[0] != '.', names) return fnmatch.filter(names, pattern)
KeyError
dataset/ETHPy150Open bendavis78/python-gitmodel/gitmodel/utils/path.py/glob1
6,751
def path_exists(tree, path): try: tree[path] except __HOLE__: return False return True
KeyError
dataset/ETHPy150Open bendavis78/python-gitmodel/gitmodel/utils/path.py/path_exists
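The three gitmodel helpers above (build_path, glob1, path_exists) share one idiom: subscript a pygit2 tree and treat KeyError as "path not found". A generic sketch of that idiom, with a plain mapping standing in for the tree:

def exists(mapping, key):
    # Mirrors path_exists() above: subscript, convert KeyError to False.
    try:
        mapping[key]
    except KeyError:
        return False
    return True

tree = {'README': 'blob-oid-1', 'src': 'tree-oid-2'}  # stand-in for a tree
print(exists(tree, 'README'))   # True
print(exists(tree, 'missing'))  # False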
6,752
def format_int(val):
    try:
        # for python 2.7 and up
        return '{:,}'.format(val)
    except __HOLE__:   # pragma nocover
        return _format_int(val)
ValueError
dataset/ETHPy150Open lsbardel/python-stdnet/stdnet/utils/__init__.py/format_int
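For reference, the thousands-separator format spec that format_int relies on (standard since Python 2.7):

# '{:,}' groups digits with commas; interpreters older than 2.7 raise
# ValueError on the ',' option, which is what the fallback above catches.
print('{:,}'.format(1234567))  # -> 1,234,567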
6,753
def process_response(self, request, response):
    try:
        if request.stop_editing and not request.continue_editing:
            request.session["editing"] = None
    except __HOLE__:
        pass
    return response
AttributeError
dataset/ETHPy150Open lsaffre/lino/lino/utils/editing.py/EditingMiddleware.process_response
6,754
def __getattribute__(self, key): try: return object.__getattribute__(self, key) except __HOLE__: pass subject, buffer = [object.__getattribute__(self, x) for x in ('_subject', '_buffer')] try: result = type(subject).__getattribute__(subject, key) except AttributeError: buffer.append(ReplayableSession.NoAttribute) raise else: if type(result) not in ReplayableSession.Natives: buffer.append(ReplayableSession.Callable) return type(self)(buffer, result) else: buffer.append(result) return result
AttributeError
dataset/ETHPy150Open zzzeek/sqlalchemy/lib/sqlalchemy/testing/replay_fixture.py/ReplayableSession.Recorder.__getattribute__
6,755
def __getattribute__(self, key): try: return object.__getattribute__(self, key) except __HOLE__: pass buffer = object.__getattribute__(self, '_buffer') result = buffer.popleft() if result is ReplayableSession.Callable: return self elif result is ReplayableSession.NoAttribute: raise AttributeError(key) else: return result
AttributeError
dataset/ETHPy150Open zzzeek/sqlalchemy/lib/sqlalchemy/testing/replay_fixture.py/ReplayableSession.Player.__getattribute__
6,756
def _toint(self, it): try: return int(it) except __HOLE__: return it
ValueError
dataset/ETHPy150Open kdart/pycopia/core/pycopia/OS/SunOS/procfs.py/ProcStat._toint
6,757
def read(self, pid=None): if pid is not None: self.pid = int(pid) if self.pid is not None: try: self.stats = self._get_psinfo(pid) except __HOLE__: # no such process self.pid = None self.stats = None else: self.stats = None return self
IOError
dataset/ETHPy150Open kdart/pycopia/core/pycopia/OS/SunOS/procfs.py/ProcStat.read
6,758
def get_stat(self, name): if not self.stats: raise ValueError, "no stats - run read(pid)" try: return self.stats[self._STATINDEX[name]] except __HOLE__: raise ValueError, "no attribute %s" % name
KeyError
dataset/ETHPy150Open kdart/pycopia/core/pycopia/OS/SunOS/procfs.py/ProcStat.get_stat
6,759
def __getattr__(self, name): try: return self.get_stat(name) except __HOLE__, err: raise AttributeError, err
ValueError
dataset/ETHPy150Open kdart/pycopia/core/pycopia/OS/SunOS/procfs.py/ProcStat.__getattr__
6,760
def __getitem__(self, name): try: return getattr(self, name) except __HOLE__, err: raise KeyError, err
AttributeError
dataset/ETHPy150Open kdart/pycopia/core/pycopia/OS/SunOS/procfs.py/ProcStat.__getitem__
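The ProcStat accessors above form a delegation chain: __getitem__ -> __getattr__ -> get_stat, translating the exception type at each hop. A self-contained sketch of that chain with a hypothetical two-entry stat index:

class StatProxy(object):
    _STATINDEX = {'pid': 0, 'ppid': 1}  # hypothetical index map

    def __init__(self, stats):
        self.stats = stats

    def get_stat(self, name):
        try:
            return self.stats[self._STATINDEX[name]]
        except KeyError:
            raise ValueError('no attribute %s' % name)

    def __getattr__(self, name):
        try:
            return self.get_stat(name)
        except ValueError as err:
            raise AttributeError(err)  # keeps getattr()/hasattr() honest

    def __getitem__(self, name):
        try:
            return getattr(self, name)
        except AttributeError as err:
            raise KeyError(err)  # enables dict-style access

p = StatProxy([1234, 1])
print(p.pid, p['ppid'])  # 1234 1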
6,761
def read(self): rv = self._ptable = {} for pfile in os.listdir("/proc"): try: pid = int(pfile) # filter out non-numeric entries in /proc except __HOLE__: continue rv[pid] = ProcStat(pid)
ValueError
dataset/ETHPy150Open kdart/pycopia/core/pycopia/OS/SunOS/procfs.py/ProcStatTable.read
6,762
def tree(self): class _pholder: pass self.read() if not self._ptable.has_key(0): p0 = self._ptable[0] = _pholder() p0.pid = p0.ppid = 0 p0.cmdline = "<kernel>" for p in self._ptable.values(): try: self._ptable[p.ppid]._children.append(p.pid) except __HOLE__: # no child list yet self._ptable[p.ppid]._children = sortedlist([p.pid]) pslist = self._tree_helper(self._ptable[0], 0, []) return "\n".join(pslist) # recursive helper to indent according to child depth
AttributeError
dataset/ETHPy150Open kdart/pycopia/core/pycopia/OS/SunOS/procfs.py/ProcStatTable.tree
6,763
def _unwind(self): if hasattr(self, '_forms'): return related_form = self.related_form if related_form is None: raise FormError('Related form not specified') prefix = '' if related_form.prefix: prefix = '%s_' % related_form.prefix prefix = '%s%s_' % (prefix, self.name) self.prefix = prefix errors = self._errors = {} forms = self._forms = [] is_bound = self.is_bound nf = '%s%s' % (self.prefix, self.NUMBER_OF_FORMS_CODE) instances = [] try: if is_bound: if nf not in related_form.rawdata: raise ValidationError( 'Could not find number of "%s" forms' % self.name) num_forms = int(related_form.rawdata[nf]) else: related = related_form.instance num_forms = 0 if related is not None and related.id: if self.instances_from_related: instances = self.instances_from_related(related) else: instances = self.mapper.filter( **{self.related_name: related}) instances = list(instances) num_forms = self.extra_length + len(instances) num_forms = max(num_forms, self.initial_length) self.num_forms = HiddenInput(name=nf, value=num_forms) for idx, instance in zip_longest(range(num_forms), instances): f = self.get_form(self.prefix, idx, instance) if f is not None: forms.append(f) errors.update(f.errors) except __HOLE__ as err: self.related_form.add_error_message(err) errors['form'] = err else: if is_bound and not errors and self.clean: try: self.clean(self) except ValidationError as err: self.form.add_error(err)
ValidationError
dataset/ETHPy150Open quantmind/lux/lux/forms/formsets.py/FormSet._unwind
6,764
def scrape(self, url): """Scrapes a url by passing it through youtube-dl""" if not is_youtube(url): return # FIXME: Sometimes youtube-dl takes a *long* time to run. This # needs to give indication of progress. try: output = subprocess.check_output( ['youtube-dl', '-j', url], stderr=subprocess.STDOUT ) except subprocess.CalledProcessError as cpe: raise ScraperError('youtube-dl said "{0}".'.format(cpe.output)) except __HOLE__: raise ScraperError('youtube-dl not installed or not on PATH.') # Each line is a single JSON object. items = [] for line in output.splitlines(): items.append(json.loads(line)) items = [self.transform_item(item) for item in items] return items
OSError
dataset/ETHPy150Open pyvideo/steve/steve/scrapers.py/YoutubeScraper.scrape
6,765
@app.route('/cookies') def view_cookies(request, hide_env=True): """Returns cookie data.""" cookies = dict(request.cookies.items()) if hide_env and ('show_env' not in request.args): for key in ENV_COOKIES: try: del cookies[key] except __HOLE__: pass return jsonify(cookies=cookies)
KeyError
dataset/ETHPy150Open mozillazg/bustard/tests/httpbin/core.py/view_cookies
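The del-inside-try pattern in view_cookies is equivalent to dict.pop() with a default; a minimal sketch:

cookies = {'session': 'abc', '_gauges_unique': '1'}
for key in ('_gauges_unique', '_ga'):  # stand-in for ENV_COOKIES
    cookies.pop(key, None)  # removes the key if present, never raises KeyError
print(cookies)  # {'session': 'abc'}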
6,766
def find_template_source(name, dirs=None): # Calculate template_source_loaders the first time the function is executed # because putting this logic in the module-level namespace may cause # circular import errors. See Django ticket #1292. global template_source_loaders if template_source_loaders is None: loaders = [] for path in settings.TEMPLATE_LOADERS: i = path.rfind('.') module, attr = path[:i], path[i+1:] try: mod = import_module(module) except __HOLE__, e: raise ImproperlyConfigured, 'Error importing template source loader %s: "%s"' % (module, e) try: func = getattr(mod, attr) except AttributeError: raise ImproperlyConfigured, 'Module "%s" does not define a "%s" callable template source loader' % (module, attr) if not func.is_usable: import warnings warnings.warn("Your TEMPLATE_LOADERS setting includes %r, but your Python installation doesn't support that type of template loading. Consider removing that line from TEMPLATE_LOADERS." % path) else: loaders.append(func) template_source_loaders = tuple(loaders) for loader in template_source_loaders: try: source, display_name = loader(name, dirs) return (source, make_origin(display_name, loader, name, dirs)) except TemplateDoesNotExist: pass raise TemplateDoesNotExist, name
ImportError
dataset/ETHPy150Open CollabQ/CollabQ/vendor/django/template/loader.py/find_template_source
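The loader-resolution step in find_template_source — split a dotted path, import the module, fetch the attribute — reduces to this standalone sketch (the dotted path shown is illustrative):

from importlib import import_module

def load_callable(dotted_path):
    # 'os.path.join' -> the join function; same module/attr split as above.
    module_path, _, attr = dotted_path.rpartition('.')
    module = import_module(module_path)  # raises ImportError if missing
    return getattr(module, attr)         # raises AttributeError if missing

join = load_callable('os.path.join')
print(join('a', 'b'))  # a/b on POSIX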
6,767
def get_project_id(): """Retrieves Cloud Storage project id from user or file. Returns: The string project id. """ project_file = None project_id = None try: project_file = open(PROJECT_FILE, 'r') project_id = project_file.read() except __HOLE__: project_file = open(PROJECT_FILE, 'w') project_id = raw_input( 'Enter your Cloud Storage project id (found in the API console): ') project_file.write(project_id) project_file.close() return project_id
IOError
dataset/ETHPy150Open googlearchive/storage-getting-started-python/main.py/get_project_id
6,768
def main(argv):
    """Main application control."""
    try:
        argv = FLAGS(argv)
    except gflags.FlagsError, e:
        logging.error('%s\nUsage: %s ARGS\n%s', e, argv[0], FLAGS)
        sys.exit(1)

    # Set the logging according to the command-line flag
    numeric_level = getattr(logging, FLAGS.logging_level.upper())
    if not isinstance(numeric_level, int):
        logging.error('Invalid log level: %s', FLAGS.logging_level)
    logging.basicConfig(level=numeric_level)
    if FLAGS.logging_level == 'DEBUG':
        httplib2.debuglevel = 1

    auth_http = get_auth_http()

    project_id = get_project_id()

    gcs_client = init_client(auth_http, project_id)

    commands = [
        gcs_commands.GetBucketsCommand('Get all buckets', gcs_client),
        gcs_commands.GetBucketCommand('Get a bucket', gcs_client),
        gcs_commands.GetBucketCorsCommand('Get bucket CORS', gcs_client),
        gcs_commands.GetBucketLocationCommand('Get bucket location', gcs_client),
        gcs_commands.InsertBucketCommand('Create a bucket', gcs_client),
        gcs_commands.SetBucketCorsCommand('Set bucket CORS', gcs_client),
        gcs_commands.DeleteBucketCommand('Delete a bucket', gcs_client),
        gcs_commands.GetObjectCommand('Download an object', gcs_client),
        gcs_commands.GetObjectAclsCommand('Get object ACLs', gcs_client),
        gcs_commands.GetObjectMetadataCommand('Get object metadata', gcs_client),
        gcs_commands.InsertObjectCommand('Upload an object', gcs_client),
        gcs_commands.CopyObjectCommand('Copy an object', gcs_client),
        gcs_commands.DeleteObjectCommand('Delete an object', gcs_client),
    ]

    while True:
        print 'What would you like to do? Enter the number.'
        for i in range(len(commands)):
            print '%d: %s' % (i, commands[i].description)
        print '%d: Quit' % len(commands)

        selection = raw_input('Enter your selection: ')
        try:
            selection = int(selection)
        except __HOLE__, e:
            logging.error('Enter a number.')
            continue
        if selection > len(commands) or selection < 0:
            logging.error('Selection not recognized.')
            continue
        if selection == len(commands):
            break
        try:
            commands[selection].run_command()
        except Exception, e:
            logging.error('Error running command. Please try again.')
            logging.error(e)
ValueError
dataset/ETHPy150Open googlearchive/storage-getting-started-python/main.py/main
6,769
def server_forever(self): try: logger.debug("Start Listening on %s:%s ...." % (self.address, self.port)) self.server = self.create_server(self.address, self.port) self.server.start() tornado.ioloop.IOLoop.instance().start() except __HOLE__: tornado.ioloop.IOLoop.instance().stop() return 0
KeyboardInterrupt
dataset/ETHPy150Open whiteclover/Fukei/fukei/server.py/FukeiSocksServer.server_forever
6,770
def __getattribute__(self, name): get = super(HandlerProxy, self).__getattribute__ try: return get(name) except AttributeError: pass handlers = get('_handlers') path = get('path') try: handler = getattr(handlers, path) except __HOLE__: handler = import_string(path)() setattr(handlers, path, handler) return getattr(handler, name)
AttributeError
dataset/ETHPy150Open charettes/django-mutant/mutant/state/utils.py/HandlerProxy.__getattribute__
6,771
def render(self, name, value, attrs=None): try: year_val, month_val, day_val = value.year, value.month, value.day except AttributeError: year_val = month_val = day_val = None if isinstance(value, six.string_types): if settings.USE_L10N: try: input_format = get_format('DATE_INPUT_FORMATS')[0] v = datetime.datetime.strptime(value, input_format) year_val, month_val, day_val = v.year, v.month, v.day except __HOLE__: pass else: match = RE_DATE.match(value) if match: year_val, month_val, day_val = [int(v) for v in match.groups()] choices = [(i, i) for i in self.years] year_html = self.create_select(name, self.year_field, value, year_val, choices) choices = list(six.iteritems(MONTHS)) month_html = self.create_select(name, self.month_field, value, month_val, choices) choices = [(i, i) for i in range(1, 32)] day_html = self.create_select(name, self.day_field, value, day_val, choices) output = [] for field in _parse_date_fmt(): if field == 'year': output.append(year_html) elif field == 'month': output.append(month_html) elif field == 'day': output.append(day_html) return mark_safe('\n'.join(output))
ValueError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-1.5/django/forms/extras/widgets.py/SelectDateWidget.render
6,772
def value_from_datadict(self, data, files, name): y = data.get(self.year_field % name) m = data.get(self.month_field % name) d = data.get(self.day_field % name) if y == m == d == "0": return None if y and m and d: if settings.USE_L10N: input_format = get_format('DATE_INPUT_FORMATS')[0] try: date_value = datetime.date(int(y), int(m), int(d)) except __HOLE__: return '%s-%s-%s' % (y, m, d) else: date_value = datetime_safe.new_date(date_value) return date_value.strftime(input_format) else: return '%s-%s-%s' % (y, m, d) return data.get(name, None)
ValueError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-1.5/django/forms/extras/widgets.py/SelectDateWidget.value_from_datadict
6,773
def _has_changed(self, initial, data): try: input_format = get_format('DATE_INPUT_FORMATS')[0] data = datetime_safe.datetime.strptime(data, input_format).date() except (TypeError, __HOLE__): pass return super(SelectDateWidget, self)._has_changed(initial, data)
ValueError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-1.5/django/forms/extras/widgets.py/SelectDateWidget._has_changed
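Both SelectDateWidget methods above parse against a locale input format and fall back on ValueError. A simplified sketch with a hard-coded format list standing in for Django's get_format():

import datetime

DATE_INPUT_FORMATS = ['%Y-%m-%d', '%d/%m/%Y']  # stand-in for get_format()

def parse_date(value):
    for fmt in DATE_INPUT_FORMATS:
        try:
            return datetime.datetime.strptime(value, fmt).date()
        except ValueError:
            continue  # try the next accepted format
    return None  # unparseable; the widget keeps the raw string instead

print(parse_date('2024-01-31'))  # 2024-01-31
print(parse_date('31/01/2024'))  # 2024-01-31
print(parse_date('bogus'))       # None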
6,774
def ready(self): """ Load modules returned by `get_load_modules_when_ready` by default when app is ready. """ super().ready() package = self.module.__name__ for module in self.get_load_modules_when_ready(): try: import_module('{}.{}'.format(package, module)) except __HOLE__: pass
ImportError
dataset/ETHPy150Open allegro/ralph/src/ralph/apps.py/RalphAppConfig.ready
6,775
def strip_tag(version):
    """
    Strip trailing non-numeric components from a version leaving the Teradata
    't' on the final version component if it's present.

    ['1', '2', 'THREE'] -> (1, 2)
    ['1', 'TWO', '3'] -> raises a ValueError
    ['0', '115t', 'SNAPSHOT'] -> (0, '115t')
    ['ZERO', '123t'] -> raises a ValueError

    This checks the components of the version from least to most significant.
    Tags are only allowed at the least significant place in a version number,
    i.e. as the right-most component. Anything that can't be parsed as an
    integer that isn't in the right-most position is considered an error.

    :param version: something that can be sliced
    :return: a tuple containing only integer components, except for possibly
    the last one, which will be a string iff it's an integer followed by the
    letter 't'
    """
    is_teradata = False
    is_ancient = False
    result = list(version[:])
    while True:
        try:
            rightmost = result[-1]
            int(rightmost)
            # Once we find the right-most/least significant component that
            # can be represented as an int (doesn't raise a ValueError), break
            # out of the loop.
            break
        except ValueError:
            # Ancient tagged versions had the tag delimited by a - rather than
            # a .; split on - and take the left-most token. The pattern
            # ensures that the component consists of numbers followed by a tag.
            # Once we've matched the pattern, we know the left-most token can
            # be converted by the int() function, and we're done removing
            # components.
            if ANCIENT_TAGGED_VERSION.match(rightmost):
                is_ancient = True
                result[-1] = rightmost.split('-')[0]

            # Do this second, and get the right-most component by index to get
            # the updated value for an ancient tagged version. If the pattern
            # matches, we know that except for the trailing t, the remainder of
            # the last component is a number int can parse.
            if TD_VERSION.match(result[-1]):
                is_teradata = True
                break

            # Non-teradata ancient tag. See above. We know this component is
            # numeric and we should break out of the loop and check the
            # components to the left of it.
            if is_ancient:
                break
            result = result[:-1]
        except __HOLE__:
            # If every component of the version has been removed because it's
            # non-numeric, we'll try to slice [][-1], and get an IndexError.
            # In that case, we've started with something that wasn't a version.
            raise ValueError(
                '%s does not contain any numeric version information' %
                (version,))

    # Verify that every component left of the right-most int() parseable
    # component is parseable by int(). For Teradata versions, preserve the
    # Teradata 't' on the final component.
    if is_teradata:
        result = [int(x) for x in result[:-1]] + [result[-1]]
    else:
        result = [int(x) for x in result]
    return tuple(result)
IndexError
dataset/ETHPy150Open prestodb/presto-admin/prestoadmin/util/version_util.py/strip_tag
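Usage sketch for strip_tag, assuming the function above is in scope; the expected results are taken directly from its docstring:

print(strip_tag(['1', '2', 'THREE']))        # (1, 2)
print(strip_tag(['0', '115t', 'SNAPSHOT']))  # (0, '115t') - Teradata tag kept
try:
    strip_tag(['1', 'TWO', '3'])  # tag not in the least significant place
except ValueError as exc:
    print(exc)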
6,776
def _read_swift_topology(): LOG.debug("Reading Swift nodes topology from {config}".format( config=CONF.swift_topology_file)) topology = {} try: with open(CONF.swift_topology_file) as f: for line in f: line = line.strip() if not line: continue (host, path) = line.split() topology[host] = path except __HOLE__: LOG.warning(_LW("Unable to read Swift nodes topology from {config}") .format(config=CONF.swift_topology_file)) return {} return topology
IOError
dataset/ETHPy150Open openstack/sahara/sahara/topology/topology_helper.py/_read_swift_topology
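Both topology readers above consume the same two-column "host path" file format. A minimal parsing sketch over hypothetical contents:

import io

# Hypothetical contents: one "host path" pair per line, blanks ignored.
sample = io.StringIO(u'10.0.0.1 /rack1\n\n10.0.0.2 /rack2\n')

topology = {}
for line in sample:
    line = line.strip()
    if not line:
        continue  # skip blank lines, as both readers above do
    host, path = line.split()
    topology[host] = path
print(topology)  # each host mapped to its rack path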
6,777
def _read_compute_topology():
    LOG.debug("Reading compute nodes topology from {config}".format(
        config=CONF.compute_topology_file))
    ctx = context.ctx()
    tenant_id = str(ctx.tenant_id)
    topology = {}
    try:
        with open(CONF.compute_topology_file) as f:
            for line in f:
                line = line.strip()
                if not line:
                    continue
                (host, path) = line.split()
                # Calculating host id based on tenant id and host
                # using the same algorithm as in nova
                # see nova/api/openstack/compute/views/servers.py
                # def _get_host_id(instance):
                sha_hash = hashlib.sha224(tenant_id + host)
                topology[sha_hash.hexdigest()] = path
    except __HOLE__:
        raise ex.NotFoundException(
            CONF.compute_topology_file,
            _("Unable to find file %s with compute topology"))
    return topology
IOError
dataset/ETHPy150Open openstack/sahara/sahara/topology/topology_helper.py/_read_compute_topology
6,778
def __init__(self, fd, hostname=None, port=None, use_ssl=False):
    """Create a request. fd should be either a socket descriptor
       or a string. In both cases, it should contain a full request.
       To generate a request from a URL, see c()"""
    if isinstance(fd, basestring):
        fd = StringIO(fd)
    try:
        banner = read_banner(fd)
        # ASSUMPTION: The request line contains three elements separated by
        # a space.
        self.method, url, self.http_version = banner
    except __HOLE__:
        raise NotConnected(' '.join(banner))
    if self.method.upper() == "CONNECT":
        # ASSUMPTION: CONNECT method needs a hostname and port
        self.hostname, self.port = url.rsplit(":", 1)
        self.port = int(self.port)
        self.url = ""
        self.use_ssl = False
    elif hostname:
        self.hostname = hostname
        self.port = port if port else 80
        self.use_ssl = use_ssl
        self.url = url
    else:
        p_url = urlparse.urlparse(url)
        self.url = urlparse.urlunparse(("", "") + p_url[2:])
        if p_url.scheme:
            self.hostname = p_url.hostname
            if not self.hostname:
                raise BurstException("No hostname: " + str(url))
            if p_url.scheme == 'https':
                self.use_ssl = True
                self.port = int(p_url.port) if p_url.port else 443
            else:
                self.port = int(p_url.port) if p_url.port else 80
                self.use_ssl = use_ssl
    self.raw_headers = read_headers(fd)
    if not hasattr(self, "hostname"):
        # Last chance, try the Host header
        hosts = self.get_header('Host')
        if not hosts:
            raise BurstException("Unable to find the host for the request")
        else:
            host = hosts[0]
            self.hostname = host.split(":")[0]
            self.port = int(host.split(":")[1]) if ":" in host else 80
            self.use_ssl = False
    self.raw_content = read_content(fd, parse_headers(self.raw_headers),
                                    method=self.method)
    if self.raw_content:
        self.content = _clear_content(parse_headers(self.raw_headers),
                                      self.raw_content)
    else:
        self.content = ""
    self.response = None
ValueError
dataset/ETHPy150Open tweksteen/burst/burst/http.py/Request.__init__
6,779
def __init__(self, fd, request, chunk_func=None):
    if isinstance(fd, basestring):
        fd = StringIO(fd)
    try:
        banner = read_banner(fd)
        # ASSUMPTION: A response status line contains at least two elements
        # separated by a space
        self.http_version, self.status = banner[:2]
        self.reason = banner[2] if len(banner) == 3 else ""
    except __HOLE__:
        raise BadStatusLine(banner)
    self.raw_headers = read_headers(fd)
    self.request = request
    if request.method == "HEAD":
        self.raw_content = self.content = ""
    else:
        self.raw_content = read_content(fd, parse_headers(self.raw_headers),
                                        self.status, chunk_func=chunk_func)
        if self.raw_content:
            self.content = _clear_content(parse_headers(self.raw_headers),
                                          self.raw_content)
        else:
            self.content = ""
ValueError
dataset/ETHPy150Open tweksteen/burst/burst/http.py/Response.__init__
6,780
@property def content_type(self): try: return self.get_header("Content-Type")[0] except __HOLE__: return None
IndexError
dataset/ETHPy150Open tweksteen/burst/burst/http.py/Response.content_type
6,781
def parallel(self, threads=4, verbose=True, **kw):
    stop = threading.Event()
    indices = range(len(self.reqs))
    jobs = []
    for ics in chunks(indices, threads):
        mkw = kw.copy()
        mkw.update({"indices":ics, "stop_event":stop, "verbose":False})
        t = threading.Thread(target=self.__call__, kwargs=mkw)
        jobs.append(t)
        t.start()
    try:
        for j in jobs:
            while j.is_alive():
                j.join(1)
                if verbose:
                    done = len(self.filter(lambda x: x.response))
                    print "Running {} requests... {:.2f}%".format(len(self), done * 100. / len(self)),
                    clear_line()
    except __HOLE__:
        stop.set()
    if verbose:
        ## the two extra spaces at the end erase the leftover "00%" from "100%"
        print "Running {} requests... done.  ".format(len(self))
KeyboardInterrupt
dataset/ETHPy150Open tweksteen/burst/burst/http.py/RequestSet.parallel
6,782
def _http_connect(hostname, port, use_ssl): p_url = urlparse.urlparse(conf.proxy) p_hostname = p_url.hostname p_port = p_url.port p_use_ssl = True if p_url.scheme[-1] == 's' else False try: sock = socket.create_connection((p_hostname, p_port)) except socket.error: raise ProxyError("Unable to connect to the proxy") if p_use_ssl: try: # No check is made to verify proxy certificate sock = ssl.wrap_socket(sock, ssl_version=conf._ssl_version) except socket.error: raise ProxyError("Unable to use SSL with the proxy") if use_ssl: f = sock.makefile("rwb", 0) f.write("CONNECT {}:{} HTTP/1.1\r\n\r\n".format(hostname, port)) try: v, s, m = read_banner(f) except __HOLE__: raise BadStatusLine() if s != "200": raise ProxyError("Bad status " + s + " " + m) _ = read_headers(f) sock = _wrap_socket(sock) return sock
ValueError
dataset/ETHPy150Open tweksteen/burst/burst/http.py/_http_connect
6,783
def buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('sightings_count', node) if value is not None and 'sightings_count' not in already_processed: already_processed.add('sightings_count') try: self.sightings_count = int(value) except __HOLE__, exp: raise_parse_error(node, 'Bad integer attribute: %s' % exp)
ValueError
dataset/ETHPy150Open STIXProject/python-stix/stix/bindings/indicator.py/SightingsType.buildAttributes
6,784
def buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('timestamp', node) if value is not None and 'timestamp' not in already_processed: already_processed.add('timestamp') try: self.timestamp = self.gds_parse_datetime(value, node, 'timestamp') except __HOLE__, exp: raise ValueError('Bad date-time attribute (timestamp): %s' % exp) value = find_attr_value_('timestamp_precision', node) if value is not None and 'timestamp_precision' not in already_processed: already_processed.add('timestamp_precision') self.timestamp_precision = value
ValueError
dataset/ETHPy150Open STIXProject/python-stix/stix/bindings/indicator.py/SightingType.buildAttributes
6,785
def motion(self, *args): tvColId = self.treeView.identify_column(args[0].x) tvRowId = self.treeView.identify_row(args[0].y) if tvColId != self.toolTipColId or tvRowId != self.toolTipRowId: self.toolTipColId = tvColId self.toolTipRowId = tvRowId newValue = self.getToolTip(tvRowId, tvColId) if newValue is None and tvRowId and len(tvRowId) > 0: try: col = int(tvColId[1:]) if col == 0: newValue = self.treeView.item(tvRowId,"text") else: values = self.treeView.item(tvRowId,"values") if col <= len(values): newValue = values[col - 1] except __HOLE__: pass self.setToolTip(newValue, tvColId)
ValueError
dataset/ETHPy150Open Arelle/Arelle/arelle/ViewWinTree.py/ViewTree.motion
6,786
def contextMenu(self): try: return self.menu except __HOLE__: try: self.menu = Menu( self.viewFrame, tearoff = 0 ) self.treeView.bind( self.modelXbrl.modelManager.cntlr.contextMenuClick, self.popUpMenu, '+' ) return self.menu except Exception as ex: # tkinter menu problem maybe self.modelXbrl.info("arelle:internalException", _("Exception creating context menu in %(title)s: %(error)s"), modelObject=self.modelXbrl.modelDocument, title=self.tabTitle, error=str(ex)) self.menu = None return None
AttributeError
dataset/ETHPy150Open Arelle/Arelle/arelle/ViewWinTree.py/ViewTree.contextMenu
6,787
def render_GET(self, request): msg = base64.urlsafe_b64decode(request.args["t"][0]) tmppub, boxed0 = decrypt_list_request_1(msg) if tmppub in self.old_requests: request.setResponseCode(http.BAD_REQUEST, "Replay") return "Replay" ts, RT = decrypt_list_request_2(tmppub, boxed0, self.retrieval_privkey) now = time.time() if ts < now-self.CLOCK_WINDOW or ts > now+self.CLOCK_WINDOW: request.setResponseCode(http.BAD_REQUEST, "too much clock skew") return "Too much clock skew" try: tid, symkey = self.check_RT(RT) except __HOLE__: request.setResponseCode(http.NOT_FOUND, "no such RT") return "no such RT" # If check_RT() didn't throw KeyError, this is a new request, for a # known RT. It's worth preventing a replay. self.old_requests[tmppub] = ts all_messages = self.prepare_message_list(tid, symkey, tmppub) groups = [all_messages[i:i+self.MAX_MESSAGES_PER_ENTRY] for i in range(0, len(all_messages), self.MAX_MESSAGES_PER_ENTRY)] entries = [" ".join([base64.b64encode(e) for e in group]) for group in groups] if ("text/event-stream" in (request.getHeader("accept") or "") and self.ENABLE_EVENTSOURCE): # EventSource protocol if tid in self.subscribers: # close the EventsProtocol when a new GET occurs (since # that will reset the tokens anyways) self.subscribers[tid][0].stop() request.setHeader("content-type", "text/event-stream") p = EventsProtocol(request) p.sendComment("beginning Message List event stream") for e in entries: p.sendEvent(e) self.subscribers[tid] = (p, symkey, tmppub) # unsubscribe when the EventsProtocol is closed def _done(_): if tid in self.subscribers and self.subscribers[tid][0] is p: del self.subscribers[tid] request.notifyFinish().addErrback(_done) return server.NOT_DONE_YET for e in entries: request.write("data: %s\n\n" % e) return ""
KeyError
dataset/ETHPy150Open warner/petmail/petmail/mailbox/server.py/RetrievalListResource.render_GET
6,788
def shutdown(sups): global SHOULD_STOP SHOULD_STOP = True LOG.warn("Supervisor shutting down!") for pid in CHILD_PIDS: try: os.kill(pid, signal.SIGINT) except __HOLE__: pass LOG.warn("Waiting for children to exit for %d seconds..." % WAIT_FOR_DEATH) t = time.time() still_alive = False while time.time() < t + WAIT_FOR_DEATH: still_alive = False for sup in sups: sup.join(0.2) still_alive = still_alive or sup.isAlive() if not still_alive: break if still_alive: LOG.warn("Children have not exited after %d seconds. Killing them with SIGKILL." % WAIT_FOR_DEATH) for pid in CHILD_PIDS: try: os.kill(pid, signal.SIGKILL) except OSError: pass sys.exit(1)
OSError
dataset/ETHPy150Open cloudera/hue/desktop/core/src/desktop/supervisor.py/shutdown
6,789
def drop_privileges():
    """Drop root privileges down to the specified SETUID_USER.

    N.B. DO NOT USE THE logging MODULE FROM WITHIN THIS FUNCTION.
    This function is run in forked processes right before it calls
    exec, but the fork may have occurred while a different thread
    had locked the log. Since it's a forked process, the log will
    be locked forever in the subprocess and thus a logging.X may
    block forever.
    """
    we_are_root = os.getuid() == 0
    if not we_are_root:
        print >>sys.stdout, "[INFO] Not running as root, skipping privilege drop"
        return

    try:
        pw = pwd.getpwnam(SETUID_USER)
    except KeyError:
        print >>sys.stderr, "[ERROR] Couldn't get user information for user " + SETUID_USER
        raise

    try:
        gr = grp.getgrnam(SETGID_GROUP)
    except __HOLE__:
        print >>sys.stderr, "[ERROR] Couldn't get group information for group " + SETGID_GROUP
        raise

    # gid has to be set first
    os.setgid(gr.gr_gid)
    os.setuid(pw.pw_uid)
KeyError
dataset/ETHPy150Open cloudera/hue/desktop/core/src/desktop/supervisor.py/drop_privileges
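The gid-before-uid ordering in drop_privileges matters: once setuid() drops to a non-root user, the process can no longer call setgid(). A minimal sketch of the same drop; the user and group names are placeholders, and it only takes effect when run as root:

import grp
import os
import pwd

def drop_to(user, group):
    gid = grp.getgrnam(group).gr_gid  # KeyError if the group is unknown
    uid = pwd.getpwnam(user).pw_uid   # KeyError if the user is unknown
    os.setgid(gid)  # must come first: setuid() forfeits the right to setgid()
    os.setuid(uid)

# drop_to('nobody', 'nogroup')  # placeholder names; requires root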
6,790
def load_known_hosts(self, filename=None): """Load host keys from an openssh :file:`known_hosts`-style file. Can be called multiple times. If *filename* is not specified, looks in the default locations i.e. :file:`~/.ssh/known_hosts` and :file:`~/ssh/known_hosts` for Windows. """ if filename is None: filename = os.path.expanduser('~/.ssh/known_hosts') try: self._host_keys.load(filename) except IOError: # for windows filename = os.path.expanduser('~/ssh/known_hosts') try: self._host_keys.load(filename) except __HOLE__: pass else: self._host_keys.load(filename)
IOError
dataset/ETHPy150Open osrg/ryu/ryu/contrib/ncclient/transport/ssh.py/SSHSession.load_known_hosts
6,791
def imitate_pydoc(string):
    """
    It's not possible to get the pydoc help text without starting the annoying
    pager stuff.
    """
    # str needed because of possible unicode stuff in py2k (pydoc doesn't work
    # with unicode strings)
    string = str(string)
    h = pydoc.help
    with common.ignored(KeyError):
        # try to access symbols
        string = h.symbols[string]
        string, _, related = string.partition(' ')

    get_target = lambda s: h.topics.get(s, h.keywords.get(s))
    while isinstance(string, str):
        string = get_target(string)

    try:
        # is a tuple now
        label, related = string
    except TypeError:
        return ''

    try:
        return pydoc_topics.topics[label] if pydoc_topics else ''
    except __HOLE__:
        return ''
KeyError
dataset/ETHPy150Open JulianEberius/SublimePythonIDE/server/lib/python_all/jedi/api/keywords.py/imitate_pydoc
6,792
def __new__(cls, name, bases, attrs):
    try:
        parents = [b for b in bases if issubclass(b, DocumentForm)]
    except __HOLE__:
        # We are defining DocumentForm itself.
        parents = None
    declared_fields = get_declared_fields(bases, attrs, False)
    new_class = super(DocumentFormMetaClass, cls).__new__(cls, name, bases,
            attrs)
    if not parents:
        return new_class

    if 'media' not in attrs:
        new_class.media = media_property(new_class)
    opts = new_class._meta = DocumentFormOptions(getattr(new_class, 'Meta', None))
    if opts.document:
        # If a document is defined, extract form fields from it.
        fields = fields_for_document(opts.document, opts.properties,
                                     opts.exclude)
        # Override default document fields with any custom declared ones
        # (plus, include all the other declared fields).
        fields.update(declared_fields)
    else:
        fields = declared_fields
    new_class.declared_fields = declared_fields
    new_class.base_fields = fields
    return new_class
NameError
dataset/ETHPy150Open benoitc/couchdbkit/couchdbkit/ext/django/forms.py/DocumentFormMetaClass.__new__
6,793
def __delitem__(self, name): field_name = self._convert_name(name) try: del self._fields[field_name] except __HOLE__: raise KeyError(name)
KeyError
dataset/ETHPy150Open anzev/hedwig/build/pip/pip/_vendor/distlib/metadata.py/LegacyMetadata.__delitem__
6,794
def __init__(self, path=None, fileobj=None, mapping=None, scheme='default'): if [path, fileobj, mapping].count(None) < 2: raise TypeError('path, fileobj and mapping are exclusive') self._legacy = None self._data = None self.scheme = scheme #import pdb; pdb.set_trace() if mapping is not None: try: self._validate_mapping(mapping, scheme) self._data = mapping except MetadataUnrecognizedVersionError: self._legacy = LegacyMetadata(mapping=mapping, scheme=scheme) self.validate() else: data = None if path: with open(path, 'rb') as f: data = f.read() elif fileobj: data = fileobj.read() if data is None: # Initialised with no args - to be added self._data = { 'metadata_version': self.METADATA_VERSION, 'generator': self.GENERATOR, } else: if not isinstance(data, text_type): data = data.decode('utf-8') try: self._data = json.loads(data) self._validate_mapping(self._data, scheme) except __HOLE__: # Note: MetadataUnrecognizedVersionError does not # inherit from ValueError (it's a DistlibException, # which should not inherit from ValueError). # The ValueError comes from the json.load - if that # succeeds and we get a validation error, we want # that to propagate self._legacy = LegacyMetadata(fileobj=StringIO(data), scheme=scheme) self.validate()
ValueError
dataset/ETHPy150Open anzev/hedwig/build/pip/pip/_vendor/distlib/metadata.py/Metadata.__init__
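The try-JSON-then-legacy dispatch in Metadata.__init__ reduces to this shape; the legacy parser here is a toy placeholder for LegacyMetadata:

import json

def load_metadata(text):
    try:
        # JSON first; json.loads raises ValueError on non-JSON input
        # (JSONDecodeError is a ValueError subclass on Python 3).
        return ('json', json.loads(text))
    except ValueError:
        # Fall through to a toy 'Key: value' parser standing in for
        # LegacyMetadata.
        return ('legacy', dict(line.split(': ', 1)
                               for line in text.splitlines() if ': ' in line))

print(load_metadata('{"name": "pkg"}'))
print(load_metadata('Name: pkg\nVersion: 1.0'))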
6,795
def is_subclass(o, bases): """ Similar to the ``issubclass`` builtin, but does not raise a ``TypeError`` if either ``o`` or ``bases`` is not an instance of ``type``. Example:: >>> is_subclass(IOError, Exception) True >>> is_subclass(Exception, None) False >>> is_subclass(None, Exception) False >>> is_subclass(IOError, (None, Exception)) True >>> is_subclass(Exception, (None, 42)) False """ try: return _issubclass(o, bases) except __HOLE__: pass if not isinstance(o, type): return False if not isinstance(bases, tuple): return False bases = tuple(b for b in bases if isinstance(b, type)) return _issubclass(o, bases)
TypeError
dataset/ETHPy150Open shazow/unstdlib.py/unstdlib/standard/type_.py/is_subclass
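The TypeError that is_subclass suppresses is easy to reproduce directly:

try:
    issubclass(None, Exception)  # arg 1 must be a class
except TypeError as exc:
    print(exc)

try:
    issubclass(IOError, (None, Exception))  # non-class entry in bases
except TypeError as exc:
    print(exc)

print(is_subclass(None, Exception))             # False, assuming is_subclass above
print(is_subclass(IOError, (None, Exception)))  # True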
6,796
def do_execute(self, code, silent, store_history=True, user_expressions=None, allow_stdin=False): code = code.strip() reply = { 'status': 'ok', 'user_expressions': {}, } if code == 'start': child = Popen(['bash', '-i', '-c', 'sleep 30'], stderr=PIPE) self.children.append(child) reply['user_expressions']['pid'] = self.children[-1].pid elif code == 'check': reply['user_expressions']['poll'] = [ child.poll() for child in self.children ] elif code == 'sleep': try: time.sleep(10) except __HOLE__: reply['user_expressions']['interrupted'] = True else: reply['user_expressions']['interrupted'] = False else: reply['status'] = 'error' reply['ename'] = 'Error' reply['evalue'] = code reply['traceback'] = ['no such command: %s' % code] return reply
KeyboardInterrupt
dataset/ETHPy150Open jupyter/jupyter_client/jupyter_client/tests/signalkernel.py/SignalTestKernel.do_execute
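The interruptible-sleep pattern behind the 'sleep' command above, shown standalone (press Ctrl-C or deliver SIGINT during the sleep to hit the except branch):

import time

def interruptible_sleep(seconds):
    try:
        time.sleep(seconds)
    except KeyboardInterrupt:
        # SIGINT delivered mid-sleep surfaces here instead of killing
        # the process, matching the kernel's 'sleep' command above.
        return True   # interrupted
    return False      # slept the full duration

print(interruptible_sleep(1))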
6,797
def update(self, key, isselected): logger.debug("Begin generating plots") if not isselected: try: self.colorList.append(self._seriesInfos[key].color) del self._seriesInfos[key] except __HOLE__: self.resetDates() else: #results = self.taskserver.getCompletedTasks() #self.memDB.setConnection(results["InitEditValues"]) self._seriesInfos[key] = self.getSeriesInfo(key) self.getUpdatedData(key)
KeyError
dataset/ETHPy150Open ODM2/ODMToolsPython/odmtools/controller/logicPlotOptions.py/SeriesPlotInfo.update
6,798
def test_errors(self): self.top.vehicle.ratio1 = 3.54 self.top.vehicle.ratio2 = 3.54 self.top.vehicle.ratio3 = 3.54 self.top.vehicle.ratio4 = 3.54 self.top.vehicle.ratio5 = 3.54 try: self.top.run() #sim_EPA_city.run() except __HOLE__, err: msg = "sim_EPA_city: Transmission gearing cannot " \ "achieve acceleration and speed required by EPA " \ "test." self.assertEqual(str(err), msg) else: self.fail('RuntimeError expected.') self.top.sim_acc.end_speed = 12.0 self.top.vehicle.ratio1 = 18.0 self.top.vehicle.ratio2 = 18.0 self.top.vehicle.ratio3 = 18.0 self.top.vehicle.ratio4 = 18.0 self.top.vehicle.ratio5 = 18.0 try: self.top.sim_acc.run() except RuntimeError, err: msg = "sim_acc: Gearing problem in Accel test." self.assertEqual(str(err), msg) else: self.fail('RuntimeError expected.') self.top.vehicle.ratio1 = 1.0 self.top.vehicle.ratio2 = 1.0 self.top.vehicle.ratio3 = 1.0 self.top.vehicle.ratio4 = 1.0 self.top.vehicle.ratio5 = 1.0 try: self.top.sim_EPA_city.run() except RuntimeError, err: msg = "sim_EPA_city: Vehicle is unable to achieve " \ "acceleration required to match EPA driving profile." self.assertEqual(str(err), msg) else: self.fail('RuntimeError expected.')
RuntimeError
dataset/ETHPy150Open OpenMDAO/OpenMDAO-Framework/contrib/enginedesign/openmdao.examples.enginedesign/openmdao/examples/enginedesign/test/test_driving_sim.py/VehicleTestCase.test_errors
6,799
def prepare_prompt_env_variables(self, raw_prompt=None, module_prompt=None):
    if raw_prompt:
        os.environ["RSF_RAW_PROMPT"] = raw_prompt
    else:
        try:
            del os.environ["RSF_RAW_PROMPT"]
        except __HOLE__:
            pass

    if module_prompt:
        os.environ["RSF_MODULE_PROMPT"] = module_prompt
    else:
        try:
            del os.environ["RSF_MODULE_PROMPT"]
        except KeyError:
            pass
    getattr(self.interpreter, '_{}__parse_prompt'.format(self.interpreter.__class__.__name__))()
KeyError
dataset/ETHPy150Open reverse-shell/routersploit/routersploit/test/test_interpreter.py/RoutersploitInterpreterTest.prepare_prompt_env_variables