Columns:
  Unnamed: 0: int64, values 0 to 10k
  function: string, lengths 79 to 138k
  label: string, 20 classes
  info: string, lengths 42 to 261
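
For reference, a minimal sketch of loading and inspecting a dump with this schema, assuming it was exported to a pandas-readable file; the filename exceptions.csv is hypothetical:

import pandas as pd

# Hypothetical export of this dump; substitute the real path.
df = pd.read_csv("exceptions.csv")

print(df.columns.tolist())    # ['Unnamed: 0', 'function', 'label', 'info']
print(df["label"].nunique())  # expected: 20 exception classes
print(df.loc[0, "info"])      # provenance path of the first record
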
6,200
def main(): try: signal.signal(signal.SIGTSTP, signal.SIG_IGN) # ignore CTRL+Z signal.signal(signal.SIGINT, signal_handler) # custom CTRL+C handler except AttributeError: # not all signals are supported on all platforms pass if len(sys.argv) == 1: usage() dnsrecord = 'A' count = 30 timeout = 1 quiet = False dnsserver = dns.resolver.get_default_resolver().nameservers[0] dest_port = 53 hops = 0 as_lookup = False should_resolve = True try: opts, args = getopt.getopt(sys.argv[1:], "aqhc:s:t:w:p:n", ["help", "count=", "server=", "quiet", "type=", "wait=", "asn", "port"]) except getopt.GetoptError as err: # print help information and exit: print(err) # will print something like "option -a not recognized" usage() if args and len(args) == 1: hostname = args[0] else: usage() for o, a in opts: if o in ("-h", "--help"): usage() elif o in ("-c", "--count"): count = int(a) elif o in ("-s", "--server"): dnsserver = a elif o in ("-q", "--quiet"): quiet = True elif o in ("-w", "--wait"): timeout = int(a) elif o in ("-t", "--type"): dnsrecord = a elif o in ("-p", "--port"): dest_port = int(a) elif o in ("-n"): should_resolve = False elif o in ("-a", "--asn"): if has_whois: as_lookup = True else: print('Warning: cymruwhois module cannot be loaded. AS Lookup disabled.') else: usage() resolver = dns.resolver.Resolver() resolver.nameservers = [dnsserver] resolver.timeout = timeout resolver.lifetime = timeout resolver.retry_servfail = 0 icmp = socket.getprotobyname('icmp') ttl = 1 reached = False if not quiet: print("%s DNS: %s:%d, hostname: %s, rdatatype: %s" % (__PROGNAME__, dnsserver, dest_port, hostname, dnsrecord)) while True: if should_stop: break # some platforms permit opening a DGRAM socket for ICMP without root permission # if not available, we will fall back to RAW which explicitly requires root permission try: icmp_socket = socket.socket(socket.AF_INET, socket.SOCK_RAW, icmp) except OSError: try: icmp_socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, icmp) except OSError: print("Error: Unable to create ICMP socket with unprivileged user. Please run as root.") exit(1) icmp_socket.bind(("", dest_port)) icmp_socket.settimeout(timeout) try: # send DNS request stime = time.time() resolver.query(hostname, dnsrecord, ipttl=ttl) except dns.resolver.NoNameservers as e: if not quiet: print("no or bad response:", e) exit(1) except dns.resolver.NXDOMAIN as e: if not quiet: print("Invalid hostname:", e) exit(1) except dns.resolver.Timeout: pass except dns.resolver.NoAnswer: if not quiet: print("invalid answer") pass except SystemExit: pass except: print("unexpected error: ", sys.exc_info()[0]) exit(1) else: reached = True curr_addr = None curr_host = None try: # expect ICMP response _, curr_addr = icmp_socket.recvfrom(512) curr_addr = curr_addr[0] except socket.error: pass finally: icmp_socket.close() etime = time.time() elapsed = (etime - stime) * 1000 # convert to milliseconds if reached: curr_addr = dnsserver elapsed -= timeout * 1000 if should_resolve: try: if curr_addr: curr_name = socket.gethostbyaddr(curr_addr)[0] except socket.error: curr_name = curr_addr except SystemExit: pass except: print("unexpected error: ", sys.exc_info()[0]) else: curr_name = curr_addr if curr_addr: as_name = "" if has_whois and as_lookup: ASN = whoisrecord(curr_addr) as_name = '' try: if ASN and ASN.asn != "NA": as_name = "[%s %s] " % (ASN.asn, ASN.owner) except __HOLE__: if should_stop: exit(0) pass print("%d\t%s (%s) %s%d ms" % (ttl, curr_name, curr_addr, as_name, elapsed)) else: print("%d\t *" % ttl) ttl += 1 hops += 1 if (hops >= count) or (curr_addr == dnsserver) or reached: break
AttributeError
dataset/ETHPy150Open farrokhi/dnstools/dnstraceroute.py/main
6,201
def execute(self, metadata, connection, filter_values): try: table = metadata.tables[self.table_name] except __HOLE__: raise TableNotFoundException("Unable to query table, table not found: %s" % self.table_name) return connection.execute(self._build_query(table, filter_values)).fetchall()
KeyError
dataset/ETHPy150Open dimagi/commcare-hq/custom/care_pathways/sqldata.py/CareQueryMeta.execute
6,202
def get_class(self, name): """Given a class name in the given modules returns the class.""" klass = None for m in self.modules: if hasattr(m, name): return getattr(m, name) if hasattr(__builtin__, name): klass = getattr(__builtin__, name) if not klass: try: klass = self.nodes[name].klass except __HOLE__: raise KeyError, "Cannot find class of name %s"%name return klass
KeyError
dataset/ETHPy150Open VisTrails/VisTrails/contrib/titan/class_tree.py/ClassTree.get_class
6,203
def begin_text_resource(self, resource, text): """ Replace @import statements with {% include %} statements. """ if not resource.source_file.kind == 'styl': return def import_to_include(match): """ Converts a css import statement to include statement. """ if not match.lastindex: return '' path = match.groups(1)[0] first_child = resource.source_file.parent.child(path) afile = File(File(first_child).fully_expanded_path) if len(afile.kind.strip()) == 0: afile = File(afile.path + '.styl') ref = self.site.content.resource_from_path(afile.path) if not ref: try: include = self.settings.args.include except __HOLE__: include = False if not include: raise HydeException( "Cannot import from path [%s]" % afile.path) else: ref.is_processable = False return "\n" + \ self.template.get_include_statement(ref.relative_path) + \ "\n" return '@import "' + path + '"\n' text = self.import_finder.sub(import_to_include, text) return text
AttributeError
dataset/ETHPy150Open hyde/hyde/hyde/ext/plugins/css.py/StylusPlugin.begin_text_resource
6,204
@property def defaults(self): """ Returns `compress` if not in development mode. """ try: mode = self.site.config.mode except __HOLE__: mode = "production" defaults = {"compress": ""} if mode.startswith('dev'): defaults = {} return defaults
AttributeError
dataset/ETHPy150Open hyde/hyde/hyde/ext/plugins/css.py/StylusPlugin.defaults
6,205
def __init__(self, site): super(CleverCSSPlugin, self).__init__(site) try: import clevercss except __HOLE__ as e: raise HydeException('Unable to import CleverCSS: ' + e.message) else: self.clevercss = clevercss
ImportError
dataset/ETHPy150Open hyde/hyde/hyde/ext/plugins/css.py/CleverCSSPlugin.__init__
6,206
def __init__(self, site): super(SassyCSSPlugin, self).__init__(site) try: import scss except __HOLE__ as e: raise HydeException('Unable to import pyScss: ' + e.message) else: self.scss = scss
ImportError
dataset/ETHPy150Open hyde/hyde/hyde/ext/plugins/css.py/SassyCSSPlugin.__init__
6,207
@property def options(self): """ Returns options depending on development mode """ try: mode = self.site.config.mode except __HOLE__: mode = "production" debug = mode.startswith('dev') opts = {'compress': not debug, 'debug_info': debug} site_opts = self.settings.get('options', {}) opts.update(site_opts) return opts
AttributeError
dataset/ETHPy150Open hyde/hyde/hyde/ext/plugins/css.py/SassyCSSPlugin.options
6,208
def __init__(self, site): super(SassPlugin, self).__init__(site) try: import sass except __HOLE__ as e: raise HydeException('Unable to import libsass: ' + e.message) else: self.sass = sass self.resources = []
ImportError
dataset/ETHPy150Open hyde/hyde/hyde/ext/plugins/css.py/SassPlugin.__init__
6,209
@property def options(self): """ Returns options depending on development mode """ try: mode = self.site.config.mode except __HOLE__: mode = "production" if 'sass' in self.site.config and \ 'output_style' in self.site.config.sass: output_style = self.site.config.sass.output_style else: debug = mode.startswith('dev') output_style = 'compressed' if not debug else 'nested' opts = {'output_style': output_style} site_opts = self.settings.get('options', {}) opts.update(site_opts) return opts
AttributeError
dataset/ETHPy150Open hyde/hyde/hyde/ext/plugins/css.py/SassPlugin.options
6,210
def copy_image(self, content_path, image): if image.startswith('http'): return if image.startswith('/'): image = image[1:] src = os.path.join(os.path.dirname(self.SVN_REPO_PATH), image) else: src = os.path.join(self.SVN_REPO_PATH, content_path, image) dst = os.path.join(settings.MEDIA_ROOT, 'pages', image) try: os.makedirs(os.path.dirname(dst)) except __HOLE__: pass try: shutil.copyfile(src, dst) except Exception as e: pass
OSError
dataset/ETHPy150Open python/pythondotorg/pages/management/commands/import_pages_from_svn.py/Command.copy_image
6,211
def do_write(self, obuf): my_flock = flock try: my_flock(self.log, LOCK_EX) except __HOLE__, e: # Catch ENOTSUP if e.args[0] != 45: raise e my_flock = lambda x, y: None try: self.log.write(obuf) self.log.flush() except: pass my_flock(self.log, LOCK_UN)
IOError
dataset/ETHPy150Open sippy/b2bua/sippy/SipLogger.py/AsyncLogger.do_write
6,212
def getConnectError(e): """Given a socket exception, return connection error.""" try: number, string = e except __HOLE__: return ConnectError(string=e) if hasattr(socket, 'gaierror') and isinstance(e, socket.gaierror): # only works in 2.2 klass = UnknownHostError else: klass = errnoMapping.get(number, ConnectError) return klass(number, string)
ValueError
dataset/ETHPy150Open nlloyd/SubliminalCollaborator/libs/twisted/internet/error.py/getConnectError
6,213
def startElement(self, name, attrs): if self.first_elm: # Output the xml declaration prior to the first element, # done here instead of in startDocument to allow us to avoid # outputting the declaration when we try and parse non XML content # as can happen when we parse all files in a zip archive self.first_elm = False output('<?xml version="1.0" ?>') try: name = name.split(':')[1] except __HOLE__: pass # Determine if we are interested # in starting to record the raw # XML string so we can prepare # the feature when the feature ends if name in self.preparer.feat_types: self.buffer = [] self.recording = True # Process the attributes tmp = '<' + name for (name, value) in attrs.items(): try: name = name.split(':')[1] except IndexError: pass tmp += ' %s=%s' % (name, saxutils.quoteattr(value)) tmp += '>' if self.recording: self.buffer.append(tmp) else: output(tmp) return
IndexError
dataset/ETHPy150Open AstunTechnology/Loader/python/prepgml4ogr.py/gmlhandler.startElement
6,214
def endElement(self, name): try: name = name.split(':')[1] except __HOLE__: pass if self.recording: self.buffer.append('</' + name + '>') else: output('</' + name + '>') if name in self.preparer.feat_types: self.recording = False output(self.preparer.prepare_feature(''.join(self.buffer))) self.buffer = []
IndexError
dataset/ETHPy150Open AstunTechnology/Loader/python/prepgml4ogr.py/gmlhandler.endElement
6,215
def main(): if len(sys.argv) < 2: print('usage: python prepgml4ogr.py file [[prep_module.]prep_class]') sys.exit(1) inputfile = sys.argv[1] if os.path.exists(inputfile): # Create an instance of a preparer # class which is used to prepare # features as they are read prep_class = 'prep_gml' try: prep_class = sys.argv[2] except __HOLE__: pass prep_class = get_preparer(prep_class) preparer = prep_class(inputfile) parser = make_parser() parser.setContentHandler(gmlhandler(preparer)) if os.path.splitext(inputfile)[1].lower() == '.zip': archive = zipfile.ZipFile(inputfile, 'r') for filename in archive.namelist(): file = archive.open(filename) try: parser.parse(file) except: # Ignore any files that can't be parsed pass else: if os.path.splitext(inputfile)[1].lower() == '.gz': file = gzip.open(inputfile, 'r') else: # Assume non compressed gml, xml or no extension file = open(inputfile, 'r') parser.parse(file) else: print('Could not find input file: ' + inputfile)
IndexError
dataset/ETHPy150Open AstunTechnology/Loader/python/prepgml4ogr.py/main
6,216
def source(self): firstCommit = self.args[0] lastCommit = self.args[1] ref = self.args[2] if ref.find("refs/heads/") == 0: branch = ref[11:] else: syslog(LOG_WARNING, "Branch name could not be parsed from '%s' for '%s'" % (ref, self.config.repoPath)) return [] try: self.config.branches[branch] except __HOLE__: syslog(LOG_INFO, "No section in config for branch '%s' in '%s'" % (branch, self.config.repoPath)) return [] try: gitRepo = git.Git(self.config.repoPath) commits = gitRepo.getLog(since=firstCommit, until=lastCommit, branch=branch) except git.GitException as e: syslog(LOG_WARNING, "Git log could not be fetched: '%s'" % (e,)) return [] for commit in commits: commit.status = "deployment_queued" return commits
KeyError
dataset/ETHPy150Open seoester/Git-Deployment-Handler/gitdh/modules/postreceivesource.py/PostReceiveSource.source
6,217
def pep8(self, *args): del sys.stdout[:], sys.stderr[:] sys.argv[1:] = args try: pep8._main() errorcode = None except __HOLE__: errorcode = sys.exc_info()[1].code return sys.stdout.getvalue(), sys.stderr.getvalue(), errorcode
SystemExit
dataset/ETHPy150Open PyCQA/pycodestyle/testsuite/test_shell.py/ShellTestCase.pep8
6,218
def render(self, name, value, attrs=None): try: value = datetime.date(*map(int, value.split('-'))) year_val, month_val, day_val = value.year, value.month, value.day except (__HOLE__, TypeError, ValueError): year_val = month_val = day_val = None output = [] month_choices = MONTHS.items() month_choices.sort() select_html = Select(choices=month_choices).render(self.month_field % name, month_val) output.append(select_html) day_choices = [(i, i) for i in range(1, 32)] select_html = Select(choices=day_choices).render(self.day_field % name, day_val) output.append(select_html) year_choices = [(i, i) for i in self.years] select_html = Select(choices=year_choices).render(self.year_field % name, year_val) output.append(select_html) return u'\n'.join(output)
AttributeError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-0.96/django/newforms/extras/widgets.py/SelectDateWidget.render
6,219
def has_unconditional_transfer(self): """Returns True if there is an unconditional transfer to an other block at the end of this block. This means there is no risk for the bytecode executer to go past this block's bytecode.""" try: op, arg = self.insts[-1] except (__HOLE__, ValueError): return return op in self._uncond_transfer
IndexError
dataset/ETHPy150Open azoft-dev-team/imagrium/env/Lib/compiler/pyassem.py/Block.has_unconditional_transfer
6,220
def makeByteCode(self): assert self.stage == CONV self.lnotab = lnotab = LineAddrTable() for t in self.insts: opname = t[0] if len(t) == 1: lnotab.addCode(self.opnum[opname]) else: oparg = t[1] if opname == "SET_LINENO": lnotab.nextLine(oparg) continue hi, lo = twobyte(oparg) try: lnotab.addCode(self.opnum[opname], lo, hi) except __HOLE__: print opname, oparg print self.opnum[opname], lo, hi raise self.stage = DONE
ValueError
dataset/ETHPy150Open azoft-dev-team/imagrium/env/Lib/compiler/pyassem.py/PyFlowGraph.makeByteCode
6,221
def __getitem__(self, key): """ Returns the value of the config variable. Builds the cache if it does not exist. """ try: return self._cache[key] except KeyError: raise ConfigNotFound(_('The config variable %s was not found.') % key) except __HOLE__: self.setup_cache() return self[key]
AttributeError
dataset/ETHPy150Open OpenSlides/OpenSlides/openslides/core/config.py/ConfigHandler.__getitem__
6,222
def __setitem__(self, key, value): """ Sets the new value. First it validates the input. """ # Check if the variable is defined. try: config_variable = config.get_config_variables()[key] except KeyError: raise ConfigNotFound(_('The config variable %s was not found.') % key) # Validate datatype and run validators. expected_type = INPUT_TYPE_MAPPING[config_variable.input_type] # Try to convert value into the expected datatype try: value = expected_type(value) except __HOLE__: raise ConfigError(_('Wrong datatype. Expected %(expected_type)s, got %(got_type)s.') % { 'expected_type': expected_type, 'got_type': type(value)}) if config_variable.input_type == 'choice' and value not in map(lambda choice: choice['value'], config_variable.choices): raise ConfigError(_('Invalid input. Choice does not match.')) for validator in config_variable.validators: try: validator(value) except DjangoValidationError as e: raise ConfigError(e.messages[0]) # Save the new value to the database. config_store, created = ConfigStore.objects.get_or_create(key=key, defaults={'value': value}) if not created: config_store.value = value config_store.save() # Update cache. if hasattr(self, '_cache'): self._cache[key] = value # Call on_change callback. if config_variable.on_change: config_variable.on_change()
ValueError
dataset/ETHPy150Open OpenSlides/OpenSlides/openslides/core/config.py/ConfigHandler.__setitem__
6,223
def _get_editable(self, request): """ Get the dictionary of editable settings for a given request. Settings are fetched from the database once per request and then stored in ``_editable_caches``, a WeakKeyDictionary that will automatically discard each entry when no more references to the request exist. """ try: editable_settings = self._editable_caches[request] except __HOLE__: editable_settings = self._editable_caches[request] = self._load() return editable_settings
KeyError
dataset/ETHPy150Open stephenmcd/mezzanine/mezzanine/conf/__init__.py/Settings._get_editable
6,224
@classmethod def _to_python(cls, setting, raw_value): """ Convert a value stored in the database for a particular setting to its correct type, as determined by ``register_setting()``. """ type_fn = cls.TYPE_FUNCTIONS.get(setting["type"], setting["type"]) try: value = type_fn(raw_value) except __HOLE__: # Shouldn't occur, but just a safeguard in case # the db value somehow ended up as an invalid type. warn("The setting %s should be of type %s, but the value " "retrieved from the database (%s) could not be converted. " "Using the default instead: %s" % (setting["name"], setting["type"].__name__, repr(raw_value), repr(setting["default"]))) value = setting["default"] return value
ValueError
dataset/ETHPy150Open stephenmcd/mezzanine/mezzanine/conf/__init__.py/Settings._to_python
6,225
def _load(self): """ Load editable settings from the database and return them as a dict. Delete any settings from the database that are no longer registered, and emit a warning if there are settings that are defined in both settings.py and the database. """ from mezzanine.conf.models import Setting removed_settings = [] conflicting_settings = [] new_cache = {} for setting_obj in Setting.objects.all(): # Check that the Setting object corresponds to a setting that has # been declared in code using ``register_setting()``. If not, add # it to a list of items to be deleted from the database later. try: setting = registry[setting_obj.name] except __HOLE__: removed_settings.append(setting_obj.name) continue # Convert a string from the database to the correct Python type. setting_value = self._to_python(setting, setting_obj.value) # If a setting is defined both in the database and in settings.py, # raise a warning and use the value defined in settings.py. if hasattr(django_settings, setting["name"]): if setting_value != setting["default"]: conflicting_settings.append(setting_obj.name) continue # If nothing went wrong, use the value from the database! new_cache[setting["name"]] = setting_value if removed_settings: Setting.objects.filter(name__in=removed_settings).delete() if conflicting_settings: warn("These settings are defined in both settings.py and " "the database: %s. The settings.py values will be used." % ", ".join(conflicting_settings)) return new_cache
KeyError
dataset/ETHPy150Open stephenmcd/mezzanine/mezzanine/conf/__init__.py/Settings._load
6,226
def __getattr__(self, name): # If this setting isn't registered, defer to Django's settings object try: setting = registry[name] except __HOLE__: return getattr(django_settings, name) # If the setting is editable, try the Django setting, then a value # fetched from the database, then the registered default. if setting["editable"]: editable_cache = self._get_editable(request=self._current_request) return getattr(django_settings, name, editable_cache.get(name, setting["default"])) # If if isn't editable, just try Django and then default. return getattr(django_settings, name, setting["default"])
KeyError
dataset/ETHPy150Open stephenmcd/mezzanine/mezzanine/conf/__init__.py/Settings.__getattr__
6,227
def get_filename(self): try: name = self.file_name or self.file_obj.name except __HOLE__: name = '' if not name and self.get_revision(): name = self.get_current_file_revision_data()['name'] return name
AttributeError
dataset/ETHPy150Open adlibre/Adlibre-DMS/adlibre_dms/apps/core/models.py/Document.get_filename
6,228
def get_revision(self): r = self.revision if r: try: r = int(r) except __HOLE__: raise # or r = None, I'm not sure which is more correct behaviour return r
ValueError
dataset/ETHPy150Open adlibre/Adlibre-DMS/adlibre_dms/apps/core/models.py/Document.get_revision
6,229
def get_caller_module_dict(levels): try: raise RuntimeError except __HOLE__: e,b,t = sys.exc_info() f = t.tb_frame while levels > 0: f = f.f_back levels -= 1 ldict = f.f_globals.copy() if f.f_globals != f.f_locals: ldict.update(f.f_locals) return ldict # ----------------------------------------------------------------------------- # _funcs_to_names() # # Given a list of regular expression functions, this converts it to a list # suitable for output to a table file # -----------------------------------------------------------------------------
RuntimeError
dataset/ETHPy150Open 0x-omicron/py-dcpu-c-compiler/external/ply/lex.py/get_caller_module_dict
6,230
def validate_literals(self): try: for c in self.literals: if not isinstance(c,StringTypes) or len(c) > 1: self.log.error("Invalid literal %s. Must be a single character", repr(c)) self.error = 1 continue except __HOLE__: self.log.error("Invalid literals specification. literals must be a sequence of characters") self.error = 1
TypeError
dataset/ETHPy150Open 0x-omicron/py-dcpu-c-compiler/external/ply/lex.py/LexerReflect.validate_literals
6,231
def validate_file(self,filename): import os.path base,ext = os.path.splitext(filename) if ext != '.py': return # No idea what the file is. Return OK try: f = open(filename) lines = f.readlines() f.close() except __HOLE__: return # Couldn't find the file. Don't worry about it fre = re.compile(r'\s*def\s+(t_[a-zA-Z_0-9]*)\(') sre = re.compile(r'\s*(t_[a-zA-Z_0-9]*)\s*=') counthash = { } linen = 1 for l in lines: m = fre.match(l) if not m: m = sre.match(l) if m: name = m.group(1) prev = counthash.get(name) if not prev: counthash[name] = linen else: self.log.error("%s:%d: Rule %s redefined. Previously defined on line %d",filename,linen,name,prev) self.error = 1 linen += 1 # ----------------------------------------------------------------------------- # lex(module) # # Build all of the regular expression rules from definitions in the supplied module # -----------------------------------------------------------------------------
IOError
dataset/ETHPy150Open 0x-omicron/py-dcpu-c-compiler/external/ply/lex.py/LexerReflect.validate_file
6,232
def lex(module=None,object=None,debug=0,optimize=0,lextab="lextab",reflags=0,nowarn=0,outputdir="", debuglog=None, errorlog=None): global lexer ldict = None stateinfo = { 'INITIAL' : 'inclusive'} lexobj = Lexer() lexobj.lexoptimize = optimize global token,input if errorlog is None: errorlog = PlyLogger(sys.stderr) if debug: if debuglog is None: debuglog = PlyLogger(sys.stderr) # Get the module dictionary used for the lexer if object: module = object if module: _items = [(k,getattr(module,k)) for k in dir(module)] ldict = dict(_items) else: ldict = get_caller_module_dict(2) # Collect parser information from the dictionary linfo = LexerReflect(ldict,log=errorlog,reflags=reflags) linfo.get_all() if not optimize: if linfo.validate_all(): raise SyntaxError("Can't build lexer") if optimize and lextab: try: lexobj.readtab(lextab,ldict) token = lexobj.token input = lexobj.input lexer = lexobj return lexobj except __HOLE__: pass # Dump some basic debugging information if debug: debuglog.info("lex: tokens = %r", linfo.tokens) debuglog.info("lex: literals = %r", linfo.literals) debuglog.info("lex: states = %r", linfo.stateinfo) # Build a dictionary of valid token names lexobj.lextokens = { } for n in linfo.tokens: lexobj.lextokens[n] = 1 # Get literals specification if isinstance(linfo.literals,(list,tuple)): lexobj.lexliterals = type(linfo.literals[0])().join(linfo.literals) else: lexobj.lexliterals = linfo.literals # Get the stateinfo dictionary stateinfo = linfo.stateinfo regexs = { } # Build the master regular expressions for state in stateinfo: regex_list = [] # Add rules defined by functions first for fname, f in linfo.funcsym[state]: line = func_code(f).co_firstlineno file = func_code(f).co_filename regex_list.append("(?P<%s>%s)" % (fname,f.__doc__)) if debug: debuglog.info("lex: Adding rule %s -> '%s' (state '%s')",fname,f.__doc__, state) # Now add all of the simple rules for name,r in linfo.strsym[state]: regex_list.append("(?P<%s>%s)" % (name,r)) if debug: debuglog.info("lex: Adding rule %s -> '%s' (state '%s')",name,r, state) regexs[state] = regex_list # Build the master regular expressions if debug: debuglog.info("lex: ==== MASTER REGEXS FOLLOW ====") for state in regexs: lexre, re_text, re_names = _form_master_re(regexs[state],reflags,ldict,linfo.toknames) lexobj.lexstatere[state] = lexre lexobj.lexstateretext[state] = re_text lexobj.lexstaterenames[state] = re_names if debug: for i in range(len(re_text)): debuglog.info("lex: state '%s' : regex[%d] = '%s'",state, i, re_text[i]) # For inclusive states, we need to add the regular expressions from the INITIAL state for state,stype in stateinfo.items(): if state != "INITIAL" and stype == 'inclusive': lexobj.lexstatere[state].extend(lexobj.lexstatere['INITIAL']) lexobj.lexstateretext[state].extend(lexobj.lexstateretext['INITIAL']) lexobj.lexstaterenames[state].extend(lexobj.lexstaterenames['INITIAL']) lexobj.lexstateinfo = stateinfo lexobj.lexre = lexobj.lexstatere["INITIAL"] lexobj.lexretext = lexobj.lexstateretext["INITIAL"] lexobj.lexreflags = reflags # Set up ignore variables lexobj.lexstateignore = linfo.ignore lexobj.lexignore = lexobj.lexstateignore.get("INITIAL","") # Set up error functions lexobj.lexstateerrorf = linfo.errorf lexobj.lexerrorf = linfo.errorf.get("INITIAL",None) if not lexobj.lexerrorf: errorlog.warning("No t_error rule is defined") # Check state information for ignore and error rules for s,stype in stateinfo.items(): if stype == 'exclusive': if not s in linfo.errorf: errorlog.warning("No error rule is defined for exclusive state '%s'", s) if not s in linfo.ignore and lexobj.lexignore: errorlog.warning("No ignore rule is defined for exclusive state '%s'", s) elif stype == 'inclusive': if not s in linfo.errorf: linfo.errorf[s] = linfo.errorf.get("INITIAL",None) if not s in linfo.ignore: linfo.ignore[s] = linfo.ignore.get("INITIAL","") # Create global versions of the token() and input() functions token = lexobj.token input = lexobj.input lexer = lexobj # If in optimize mode, we write the lextab if lextab and optimize: lexobj.writetab(lextab,outputdir) return lexobj # ----------------------------------------------------------------------------- # runmain() # # This runs the lexer as a main program # -----------------------------------------------------------------------------
ImportError
dataset/ETHPy150Open 0x-omicron/py-dcpu-c-compiler/external/ply/lex.py/lex
6,233
def runmain(lexer=None,data=None): if not data: try: filename = sys.argv[1] f = open(filename) data = f.read() f.close() except __HOLE__: sys.stdout.write("Reading from standard input (type EOF to end):\n") data = sys.stdin.read() if lexer: _input = lexer.input else: _input = input _input(data) if lexer: _token = lexer.token else: _token = token while 1: tok = _token() if not tok: break sys.stdout.write("(%s,%r,%d,%d)\n" % (tok.type, tok.value, tok.lineno,tok.lexpos)) # ----------------------------------------------------------------------------- # @TOKEN(regex) # # This decorator function can be used to set the regex expression on a function # when its docstring might need to be set in an alternative way # -----------------------------------------------------------------------------
IndexError
dataset/ETHPy150Open 0x-omicron/py-dcpu-c-compiler/external/ply/lex.py/runmain
6,234
@method_decorator(login_possibly_required) @method_decorator(permissions_possibly_required) def get(self, request, request_id): silk_request = Request.objects.get(pk=request_id) query_params = None if silk_request.query_params: query_params = json.loads(silk_request.query_params) body = silk_request.raw_body try: body = json.loads(body) # Incase encoded as JSON except (__HOLE__, TypeError): pass context = { 'silk_request': silk_request, 'curl': curl_cmd(url=request.build_absolute_uri(silk_request.path), method=silk_request.method, query_params=query_params, body=body, content_type=silk_request.content_type), 'query_params': json.dumps(query_params, sort_keys=True, indent=4) if query_params else None, 'client': gen(path=silk_request.path, method=silk_request.method, query_params=query_params, data=body, content_type=silk_request.content_type), 'request': request } return render_to_response('silk/request.html', context)
ValueError
dataset/ETHPy150Open django-silk/silk/silk/views/request_detail.py/RequestView.get
6,235
def gem(b, r): logging.info('searching for Ruby gems') # Precompile a pattern for extracting the version of Ruby that was used # to install the gem. pattern = re.compile(r'gems/([^/]+)/gems') # Look for gems in all the typical places. This is easier than looking # for `gem` commands, which may or may not be on `PATH`. for globname in ('/usr/lib/ruby/gems/*/gems', '/usr/local/lib/ruby/gems/*/gems', '/var/lib/gems/*/gems'): for dirname in glob.glob(globname): # The `ruby1.9.1` (really 1.9.2) package on Maverick begins # including RubyGems in the `ruby1.9.1` package and marks the # `rubygems1.9.1` package as virtual. So for Maverick and # newer, the manager is actually `ruby1.9.1`. match = pattern.search(dirname) if '1.9.1' == match.group(1) and util.rubygems_virtual(): manager = 'ruby{0}'.format(match.group(1)) # Oneiric and RPM-based distros just have one RubyGems package. elif util.rubygems_unversioned(): manager = 'rubygems' # Debian-based distros qualify the package name with the version # of Ruby it will use. else: manager = 'rubygems{0}'.format(match.group(1)) for entry in os.listdir(dirname): try: package, version = entry.rsplit('-', 1) except __HOLE__: logging.warning('skipping questionably named gem {0}'. format(entry)) continue if not r.ignore_package(manager, package): b.add_package(manager, package, version)
ValueError
dataset/ETHPy150Open devstructure/blueprint/blueprint/backend/gem.py/gem
6,236
def test_set_fullscreen(self): self.w = w = window.Window(200, 200) w.push_handlers(self) w.push_handlers(WindowEventLogger()) self.on_expose() try: while not w.has_exit: w.dispatch_events() w.close() except __HOLE__: # Child process on linux calls sys.exit(0) when it's done. pass
SystemExit
dataset/ETHPy150Open ardekantur/pyglet/tests/window/WINDOW_FULLSCREEN_SIZE.py/WINDOW_SET_FULLSCREEN.test_set_fullscreen
6,237
def handle_data(self): if self.path == '/': p = '/html/swfu.html' elif self.path.endswith('upload.html'): self.handleUpload() return else: p = self.path path = self.translate_path(p) if not os.path.exists(path): p = '/html'+p path = self.translate_path(p) ctype = self.guess_type(path) try: f = open(path) except __HOLE__: print 'File not found %s' % path self.send_error(404, 'File not found') return self.send_response(200) self.send_header('Content-type', ctype) self.send_header('Last-Modified', self.date_time_string()) self.end_headers() self.copyfile(f, self.wfile) f.close()
IOError
dataset/ETHPy150Open anandology/pyjamas/examples/misc/swfupload/server.py/TestRequestHandler.handle_data
6,238
def makedirs(path, mode=0o777): try: os.makedirs(path, mode) except __HOLE__: pass
OSError
dataset/ETHPy150Open 55minutes/python-epo-ops-client/epo_ops/utils.py/makedirs
6,239
def validate_date(date): if date is None or date == '': return '' try: datetime.strptime(date, '%Y%m%d') return date except __HOLE__: raise InvalidDate('{0} is not a valid YYYYMMDD date.'.format(date))
ValueError
dataset/ETHPy150Open 55minutes/python-epo-ops-client/epo_ops/utils.py/validate_date
6,240
def _to_python(self, value): if isinstance(value, util.strbase): try: value = date(*strptime(value, '%Y-%m-%d')[:3]) except __HOLE__: raise ValueError('Invalid ISO date %r' % value) return value
ValueError
dataset/ETHPy150Open djc/couchdb-python/couchdb/mapping.py/DateField._to_python
6,241
def _to_python(self, value): if isinstance(value, util.strbase): try: value = value.split('.', 1)[0] # strip out microseconds value = value.rstrip('Z') # remove timezone separator value = datetime(*strptime(value, '%Y-%m-%dT%H:%M:%S')[:6]) except __HOLE__: raise ValueError('Invalid ISO date/time %r' % value) return value
ValueError
dataset/ETHPy150Open djc/couchdb-python/couchdb/mapping.py/DateTimeField._to_python
6,242
def _to_python(self, value): if isinstance(value, util.strbase): try: value = value.split('.', 1)[0] # strip out microseconds value = time(*strptime(value, '%H:%M:%S')[3:6]) except __HOLE__: raise ValueError('Invalid ISO time %r' % value) return value
ValueError
dataset/ETHPy150Open djc/couchdb-python/couchdb/mapping.py/TimeField._to_python
6,243
def get_key(self, key): url = '%(base)s/%(key)s' % { 'base': self.url, 'key': key } self.logger.debug('Getting url: ' + url) response = requests.get(url) self.logger.debug('Response: ' + response.text) res = json.loads(response.text) if isinstance(res, list): raise ValueError('Key "%s" is a directory, expecting leaf (use \ list_directory() to get directory listing).' % key) #Check to see if Etcd returned an error if 'errorCode' in res: raise EtcdError(res['errorCode'], res['message']) try: return str(res['node']['value']) except __HOLE__: #Fallback on v1 functionality return str(res['value'])
KeyError
dataset/ETHPy150Open cholcombe973/autodock/etcd.py/Etcd.get_key
6,244
def list_directory(self, path): url = '%(base)s/%(path)s' % { 'base': self.url, 'path': path } response = requests.get(url) if response.status_code == requests.codes.ok: directory_list = [] json_txt = json.loads(response.text) try: for entry in json_txt['node']['nodes']: directory_list.append(str(entry['key'])) return directory_list except __HOLE__: self.logger.error("Key ['node']['nodes'] not found in %(data)s" %{ 'data': json_txt }) else: response.raise_for_status() return None
KeyError
dataset/ETHPy150Open cholcombe973/autodock/etcd.py/Etcd.list_directory
6,245
def run(self): prop_path = self.arguments[0] module_path, model_name, prop_name = prop_path.rsplit('.', 2) try: module = importlib.import_module(module_path) except __HOLE__: pass model = getattr(module, model_name, None) if model is None: pass if type(model) != Viewable: pass model_obj = model() prop = getattr(model_obj.__class__, prop_name) type_info = self._get_type_info(prop) rst_text = PROP_TEMPLATE.render( name=prop_name, module=module_path, type_info=type_info, doc="" if prop.__doc__ is None else textwrap.dedent(prop.__doc__), ) result = ViewList() for line in rst_text.split("\n"): result.append(line, "<bokeh-prop>") node = nodes.paragraph() node.document = self.state.document nested_parse_with_titles(self.state, result, node) return node.children
ImportError
dataset/ETHPy150Open bokeh/bokeh/bokeh/sphinxext/bokeh_prop.py/BokehPropDirective.run
6,246
def __init__(self, environ): script_name = base.get_script_name(environ) path_info = base.get_path_info(environ) if not path_info: # Sometimes PATH_INFO exists, but is empty (e.g. accessing # the SCRIPT_NAME URL without a trailing slash). We really need to # operate as if they'd requested '/'. Not amazingly nice to force # the path like this, but should be harmless. path_info = '/' self.environ = environ self.path_info = path_info self.path = '%s/%s' % (script_name.rstrip('/'), path_info.lstrip('/')) self.META = environ self.META['PATH_INFO'] = path_info self.META['SCRIPT_NAME'] = script_name self.method = environ['REQUEST_METHOD'].upper() _, content_params = self._parse_content_type(self.META.get('CONTENT_TYPE', '')) if 'charset' in content_params: try: codecs.lookup(content_params['charset']) except LookupError: pass else: self.encoding = content_params['charset'] self._post_parse_error = False try: content_length = int(self.environ.get('CONTENT_LENGTH')) except (__HOLE__, TypeError): content_length = 0 self._stream = LimitedStream(self.environ['wsgi.input'], content_length) self._read_started = False self.resolver_match = None
ValueError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/Django-1.6.10/django/core/handlers/wsgi.py/WSGIRequest.__init__
6,247
def __call__(self, environ, start_response): # Set up middleware if needed. We couldn't do this earlier, because # settings weren't available. if self._request_middleware is None: with self.initLock: try: # Check that middleware is still uninitialised. if self._request_middleware is None: self.load_middleware() except: # Unload whatever middleware we got self._request_middleware = None raise set_script_prefix(base.get_script_name(environ)) signals.request_started.send(sender=self.__class__) try: request = self.request_class(environ) except __HOLE__: logger.warning('Bad Request (UnicodeDecodeError)', exc_info=sys.exc_info(), extra={ 'status_code': 400, } ) response = http.HttpResponseBadRequest() else: response = self.get_response(request) response._handler_class = self.__class__ status = '%s %s' % (response.status_code, response.reason_phrase) response_headers = [(str(k), str(v)) for k, v in response.items()] for c in response.cookies.values(): response_headers.append((str('Set-Cookie'), str(c.output(header='')))) start_response(force_str(status), response_headers) return response
UnicodeDecodeError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/Django-1.6.10/django/core/handlers/wsgi.py/WSGIHandler.__call__
6,248
def build_extension(bld, extension, env=None): builder = bld.builders["pyext"] try: if env is None: env = {"PYEXT_CPPPATH": extension.include_dirs} else: val = env.get("PYEXT_CPPPATH", []) val.extend(extension.include_dirs) tasks = builder.extension(extension.name, extension.sources, env) if len(tasks) > 1: outputs = tasks[0].gen.outputs else: outputs = [] return [n.bldpath() for n in outputs] except __HOLE__: e = extract_exception() msg = "Building extension %s failed: %s" % \ (extension.name, str(e)) raise CommandExecutionFailure(msg)
RuntimeError
dataset/ETHPy150Open cournape/Bento/bento/commands/build_yaku.py/build_extension
6,249
def build_compiled_library(bld, clib, env=None): builder = bld.builders["ctasks"] try: for p in clib.include_dirs: builder.env["CPPPATH"].insert(0, p) outputs = builder.static_library(clib.name, clib.sources, env) return [n.bldpath() for n in outputs] except __HOLE__: e = extract_exception() msg = "Building library %s failed: %s" % (clib.name, str(e)) raise CommandExecutionFailure(msg)
RuntimeError
dataset/ETHPy150Open cournape/Bento/bento/commands/build_yaku.py/build_compiled_library
6,250
def beacon(config): ''' Monitor the disk usage of the minion Specify thresholds for each disk and only emit a beacon if any of them are exceeded. .. code-block:: yaml beacons: diskusage: - /: 63% - /mnt/nfs: 50% Windows drives must be quoted to avoid yaml syntax errors .. code-block:: yaml beacons: diskusage: - interval: 120 - 'c:\': 90% - 'd:\': 50% ''' ret = [] for diskusage in config: mount = diskusage.keys()[0] try: _current_usage = psutil.disk_usage(mount) except __HOLE__: # Ensure a valid mount point log.error('{0} is not a valid mount point, skipping.'.format(mount)) continue current_usage = _current_usage.percent monitor_usage = diskusage[mount] if '%' in monitor_usage: monitor_usage = re.sub('%', '', monitor_usage) monitor_usage = float(monitor_usage) if current_usage >= monitor_usage: ret.append({'diskusage': current_usage, 'mount': mount}) return ret
OSError
dataset/ETHPy150Open saltstack/salt/salt/beacons/diskusage.py/beacon
6,251
@auth def _GET(self, *param, **params): try: conf = env.get('KARESANSUI_CONF') _K2V = K2V(conf) config = _K2V.read() self.view.mail = get_view_mail(config) return True except __HOLE__, kge: self.logger.debug(kge) raise KaresansuiGadgetException, kge
IOError
dataset/ETHPy150Open karesansui/karesansui/karesansui/gadget/mail.py/Mail._GET
6,252
@auth def _PUT(self, *param, **params): if not validates_mail(self): return web.badrequest(self.view.alert) try: conf = env.get('KARESANSUI_CONF') _K2V = K2V(conf) config = _K2V.read() config['application.mail.server'] = self.input.server config['application.mail.port'] = self.input.port config['application.mail.email'] = self.input.email _K2V.write(config) self.view.mail = get_view_mail(config) return True except __HOLE__, kge: self.logger.debug(kge) raise KaresansuiGadgetException, kge
IOError
dataset/ETHPy150Open karesansui/karesansui/karesansui/gadget/mail.py/Mail._PUT
6,253
def _get_scheduler_cmds(args): cmds = {"slurm": _get_slurm_cmds, "sge": _get_sge_cmds, "lsf": _get_lsf_cmds, "torque": _get_torque_cmds, "pbspro": _get_torque_cmds} try: return cmds[args.scheduler](args) except __HOLE__: raise NotImplementedError("Batch script preparation for %s not yet supported" % args.scheduler)
KeyError
dataset/ETHPy150Open chapmanb/bcbio-nextgen-vm/bcbiovm/ipython/batchprep.py/_get_scheduler_cmds
6,254
@register.filter def field_attrs(field_inst, **kwargs): """Adds html attributes to django form fields""" for k, v in kwargs.items(): if v is not None: field_inst.field.widget.attrs[k] = v else: try: del field_inst.field.widget.attrs[k] except __HOLE__: pass return field_inst
KeyError
dataset/ETHPy150Open jbalogh/jingo/jingo/helpers.py/field_attrs
6,255
def stringified_dict_contains_value(key, value, str_dict): """Checks if dict in for of string like "{'test': 5}" contains key/value pair. This works faster, then creating actual dict from string since this operation is called for each task in case of kwargs search.""" value = str(value) try: # + 3 for key right quote, one for colon and one for space key_index = str_dict.index(key) + len(key) + 3 except ValueError: return False try: comma_index = str_dict.index(',', key_index) except __HOLE__: # last value in dict comma_index = str_dict.index('}', key_index) return str(value) == str_dict[key_index:comma_index].strip('"\'')
ValueError
dataset/ETHPy150Open mher/flower/flower/utils/search.py/stringified_dict_contains_value
6,256
def prepared_options(self): prepared_opts = ['%sx%s' % tuple(self['size'])] subsampling = six.text_type(self['subsampling']) if subsampling == '2': subsampling_text = '' else: subsampling_text = 'ss%s' % subsampling prepared_opts.append('q%s%s' % (self['quality'], subsampling_text)) for key, value in sorted(six.iteritems(self)): if key == key.upper(): # Uppercase options aren't used by prepared options (a primary # use of prepared options is to generate the filename -- these # options don't alter the filename). continue if not value or key in ('size', 'quality', 'subsampling'): continue if value is True: prepared_opts.append(key) continue if not isinstance(value, six.string_types): try: value = ','.join([six.text_type(item) for item in value]) except __HOLE__: value = six.text_type(value) prepared_opts.append('%s-%s' % (key, value)) return prepared_opts
TypeError
dataset/ETHPy150Open SmileyChris/easy-thumbnails/easy_thumbnails/options.py/ThumbnailOptions.prepared_options
6,257
def main(): try: params = {} r_api_key = None opts, args = getopt.getopt(sys.argv[1:], 'hta:') for opt, val in opts: if opt == '-h': usage() sys.exit(0) if opt == '-t': params['import_time_entries'] = True if opt == '-a': r_api_key = val if r_api_key: r_url, y_url, y_user, y_password = args[:4] project_ids = args[4:] redmine_importer = RedmineImporter( r_api_key, r_url, None, None, y_url, y_user, y_password, params) else: r_url, r_user, r_password, y_url, y_user, y_password = args[:6] project_ids = args[6:] redmine_importer = RedmineImporter( None, r_url, r_user, r_password, y_url, y_user, y_password, params) except getopt.GetoptError, e: print e usage() sys.exit(1) except __HOLE__, e: print 'Not enough arguments' usage() sys.exit(1) redmine_importer.do_import(project_ids)
ValueError
dataset/ETHPy150Open JetBrains/youtrack-rest-python-library/python/redmine2youtrack.py/main
6,258
def _get_projects(self, project_ids=None, by_internal_id=False): if by_internal_id: by = 'by_iid' else: by = 'by_pid' if self._projects is None: self._projects = {'by_iid': {}, 'by_pid': {}} if project_ids: new_projects = [pid for pid in project_ids if pid not in self._projects[by]] else: new_projects = None if new_projects is None or new_projects: for project in self._source.get_projects(new_projects): project.identifier = re.sub('\W', '', project.identifier) self._projects['by_iid'][project.id] = project self._projects['by_pid'][project.identifier] = project if project_ids: result = {} for pid in [re.sub('\W', '', p) for p in project_ids]: try: result[pid] = self._projects[by][pid] except __HOLE__: raise redmine.RedmineException( "Project '%s' doesn't exist in Redmine" % pid) return self._projects[by]
KeyError
dataset/ETHPy150Open JetBrains/youtrack-rest-python-library/python/redmine2youtrack.py/RedmineImporter._get_projects
6,259
def _to_yt_user(self, redmine_user): if isinstance(redmine_user, basestring): user_id = redmine_user else: user_id = redmine_user.id if user_id not in self._users: redmine_user = self._source.get_user(user_id) user = youtrack.User() try: user.email = redmine_user.mail except __HOLE__: pass try: # In some cases redmine user login can be empty or missing. # So, both cases should be handled. user.login = redmine_user.login except AttributeError: pass if not hasattr(user, 'login') or not user.login: if hasattr(user, 'email'): user.login = user.email else: user.login = 'guest' print 'Cannot get login for user id=%s, set it to "%s"' % \ (user_id, user.login) #user.login = redmine_user.login or 'guest' #user.email = redmine_user.mail or '[email protected]' if user.login != 'guest': if redmine_user.firstname is None and redmine_user.lastname is None: user.fullName = user.login elif redmine_user.firstname is None: user.fullName = redmine_user.lastname elif redmine_user.lastname is None: user.fullName = redmine_user.firstname else: user.fullName = redmine_user.firstname + ' ' + redmine_user.lastname else: user.created = True if hasattr(redmine_user, 'groups'): user.groups = [self._to_yt_group(g) for g in redmine_user.groups] self._users[user_id] = user return self._users[user_id]
AttributeError
dataset/ETHPy150Open JetBrains/youtrack-rest-python-library/python/redmine2youtrack.py/RedmineImporter._to_yt_user
6,260
def _apply_relations(self, limit=CHUNK_SIZE): links = [] for link_type, ids in self._relations.items(): for from_id, to_ids in ids.items(): for to_id in to_ids: link = youtrack.Link() link.typeName = link_type try: link.source = self._to_yt_issue_id(from_id) link.target = self._to_yt_issue_id(to_id) except __HOLE__, e: print "Cannot apply link (%s) to issues: %d and %d" % \ (link_type, from_id, to_id) print "Some issues were not imported to YouTrack" raise e links.append(link) if len(links) >= limit: self._target.importLinks(links) del links[0:] if links: self._target.importLinks(links)
KeyError
dataset/ETHPy150Open JetBrains/youtrack-rest-python-library/python/redmine2youtrack.py/RedmineImporter._apply_relations
6,261
@classmethod def XXXgetCssBoxShadowValues(cls, color, x=None, y=None, blur=None, inout=None): outcolor = '' if color == 'none': return None, 0, 0, 0, 0 if isinstance(color, (list, tuple)): if len(color) == 4: outcolor, x, y, blur = color elif len(color) == 5: outcolor, x, y, blur, inout = color else: try: preformat = re.compile('(\w?) ?(\d+)px (\d+)px (\d+)px #(.+)') preformatrgb = re.compile('(.*)rgba\((.+)\)(.*)') m = preformatrgb.match(color) if m: outcolor = 'rgba(' + m.group(2) + ')' color = color.replace(outcolor, '') m = preformat.match(color) if m: inout = m.group(1) or None x = int(m.group(2)) y = int(m.group(3)) blur = int(m.group(4)) outcolor = '#' + m.group(5) else: vals = color.split(' ') if len(vals) > 1: result = [] for s in color.split(' '): s = s.strip() if s.endswith('px'): result.append(int(s[:-2])) elif s.startswith('#'): outcolor = s elif s in ('inset', 'outset'): inout = s x, y, blur = result except __HOLE__: print '### [CssCanvas.getCssBoxShadowValues] Cannot process "%s" */' % result return 'black', 0, 0, False, inout return outcolor, x, y, blur, inout
ValueError
dataset/ETHPy150Open petrvanblokland/Xierpa3/xierpa3/builders/sassbuilder.py/SassBuilder.XXXgetCssBoxShadowValues
6,262
def worker_input_generator(self): '''Only step our workers once.''' try: self.input_index, next_input = self.input_queue.get() yield next_input except __HOLE__: return
TypeError
dataset/ETHPy150Open gatoatigrado/vimap/vimap/testing.py/SerialWorkerRoutine.worker_input_generator
6,263
def generate_constraints(solution, output_path=None, specifier='max', techs=None, constraints=None, include_transmission=True, transmission_techs=None, transmission_constraints=['e_cap'], fillna=None, map_numeric=None, map_any=None): """ Generate constraints from a given solution. If ``output_path`` is specified, write the resulting YAML to disk, else return the YAML string. Can use ``techs``, ``locations``, ``constraints``, ``include_transmission``, ``transmission_techs``, ``transmission_constraints`` to specify only a subset of the solution to be turned into constraints. If ``fillna`` set to something other than None, NA values will be replaced with the given value. Use ``map_numeric`` and ``map_any`` to give functions that will be applied to numeric or any value to modify it. """ # TODO: add unit tests excluded_vars = ['e_cap_net'] def _setkey(d, key, value): # fillna, round, multiply passed implicitly if fillna is not None and np.isnan(value): value = fillna if map_numeric: try: # TypeError if not a number, we don't want to multiply strings value = map_numeric(value) except __HOLE__: pass # Ignore if not a number if map_any: value = map_any(value) d.set_key(key, value) d = utils.AttrDict() # Get a list of default constraints, so that we know which constraints # exist in a form that includes sub-constraints (like '.max') o = solution.config_model possible_constraints = list(o.techs.defaults.constraints.keys()) default_constraints = list(o.techs.defaults.constraints.keys_nested()) max_min_equals_constraints = set([c.split('.')[0] for c in default_constraints if '.max' in c]) # Set up the list of locations, techs, constraints locations = solution.coords['x'].values techs_in_solution = [i for i in solution.coords['y'].values if ':' not in i] if not techs: techs = techs_in_solution if not constraints: constraints = [i for i in possible_constraints if i in solution.data_vars] # Non-transmission techs # FIXME only include techs that are allowed by the model_config at # a given location key_string = 'locations.{0}.override.{1}.constraints.{2}' for x in locations: for y in techs: for var in [v for v in constraints if v not in excluded_vars]: key = key_string.format(x, y, var) if var in max_min_equals_constraints: key += '.{}'.format(specifier) value = solution[var].loc[dict(x=x, y=y)].item() if not np.isnan(value): _setkey(d, key, value) # Transmission techs if include_transmission: transmission_techs_in_sol = [i for i in solution.coords['y'].values if ':' in i] if not transmission_techs: transmission_techs = set([i.split(':')[0] for i in transmission_techs_in_sol]) if not transmission_constraints: transmission_constraints = [i for i in possible_constraints if i in solution.data_vars] d.links = utils.AttrDict() t_key_string = 'links.{0}.{1}.constraints.{2}' for x in locations: for y in transmission_techs_in_sol: for var in [v for v in transmission_constraints if v not in excluded_vars]: value = solution[var].loc[dict(x=x, y=y)].item() y_bare, x_rem = y.split(':') if x_rem == x: continue g = lambda x: o.links.get_key(x, default=False) if (g('{},{}.{}'.format(x, x_rem, y_bare)) is not False or g('{},{}.{}'.format(x_rem, x, y_bare)) is not False): exists = True else: exists = False if exists and y_bare in transmission_techs and not np.isnan(value): key = t_key_string.format(x + ',' + x_rem, y_bare, var) if var in max_min_equals_constraints: key += '.{}'.format(specifier) _setkey(d, key, value) if output_path is not None: d.to_yaml(output_path) else: return d
TypeError
dataset/ETHPy150Open calliope-project/calliope/calliope/output.py/generate_constraints
6,264
def test_errors(self): self.assertRaises(TypeError, grp.getgrgid) self.assertRaises(TypeError, grp.getgrnam) self.assertRaises(TypeError, grp.getgrall, 42) # try to get some errors bynames = {} bygids = {} for (n, p, g, mem) in grp.getgrall(): if not n or n == '+': continue # skip NIS entries etc. bynames[n] = g bygids[g] = n allnames = bynames.keys() namei = 0 fakename = allnames[namei] while fakename in bynames: chars = map(None, fakename) for i in xrange(len(chars)): if chars[i] == 'z': chars[i] = 'A' break elif chars[i] == 'Z': continue else: chars[i] = chr(ord(chars[i]) + 1) break else: namei = namei + 1 try: fakename = allnames[namei] except __HOLE__: # should never happen... if so, just forget it break fakename = ''.join(map(None, chars)) self.assertRaises(KeyError, grp.getgrnam, fakename) # Choose a non-existent gid. fakegid = 4127 while fakegid in bygids: fakegid = (fakegid * 3) % 0x10000 self.assertRaises(KeyError, grp.getgrgid, fakegid)
IndexError
dataset/ETHPy150Open babble/babble/include/jython/Lib/test/test_grp.py/GroupDatabaseTestCase.test_errors
6,265
def reverse_for_language(viewname, lang, urlconf=None, args=None, kwargs=None, prefix=None, current_app=None): # Based on code in Django 1.1.1 in reverse and RegexURLResolver.reverse # in django.core.urlresolvers. args = args or [] kwargs = kwargs or {} if prefix is None: prefix = get_script_prefix() resolver = get_resolver(urlconf, lang) if not isinstance(viewname, basestring): view = viewname else: parts = viewname.split(':') parts.reverse() view = parts[0] path = parts[1:] resolved_path = [] while path: ns = path.pop() # Lookup the name to see if it could be an app identifier try: app_list = resolver.app_dict[ns] # Yes! Path part matches an app in the current Resolver if current_app and current_app in app_list: # If we are reversing for a particular app, use that namespace ns = current_app elif ns not in app_list: # The name isn't shared by one of the instances (i.e., the default) # so just pick the first instance as the default. ns = app_list[0] except __HOLE__: pass try: extra, resolver = resolver.namespace_dict[ns] resolved_path.append(ns) prefix = prefix + extra except KeyError, key: if resolved_path: raise NoReverseMatch("%s is not a registered namespace inside '%s'" % (key, ':'.join(resolved_path))) else: raise NoReverseMatch("%s is not a registered namespace" % key) if args and kwargs: raise ValueError("Don't mix *args and **kwargs in call to reverse()!") try: lookup_view = get_callable(view, True) except (ImportError, AttributeError), e: raise NoReverseMatch("Error importing '%s': %s." % (lookup_view, e)) if hasattr(resolver, 'get_reverse_dict'): possibilities = resolver.get_reverse_dict(lang).getlist(lookup_view) else: possibilities = resolver.reverse_dict.getlist(lookup_view) for possibility, pattern in possibilities: for result, params in possibility: if args: if len(args) != len(params): continue unicode_args = [force_unicode(val) for val in args] candidate = result % dict(zip(params, unicode_args)) else: if set(kwargs.keys()) != set(params): continue unicode_kwargs = dict([(k, force_unicode(v)) for (k, v) in kwargs.items()]) candidate = result % unicode_kwargs if re.search(u'^%s' % pattern, candidate, re.UNICODE): iri = u'%s%s' % (prefix, candidate) # If we have a separate domain for lang, put that in the iri domain = transurlvania.settings.LANGUAGE_DOMAINS.get(lang, None) if domain: iri = u'http://%s%s' % (domain[0], iri) return iri_to_uri(iri) # lookup_view can be URL label, or dotted path, or callable, Any of # these can be passed in at the top, but callables are not friendly in # error messages. m = getattr(lookup_view, '__module__', None) n = getattr(lookup_view, '__name__', None) if m is not None and n is not None: lookup_view_s = "%s.%s" % (m, n) else: lookup_view_s = lookup_view raise NoReverseMatch("Reverse for '%s' with arguments '%s' and keyword " "arguments '%s' not found." % (lookup_view_s, args, kwargs))
KeyError
dataset/ETHPy150Open trapeze/transurlvania/transurlvania/urlresolvers.py/reverse_for_language
6,266
def color_font(name, code_point): in_name = 'bitmaps/strike1/uni{}.png'.format(code_point) out_name = 'out/unicode/{}.png'.format(code_point) try: shutil.copyfile(in_name, out_name) except __HOLE__: raise MissingGlyphError('name: %r code_point: %r' % (name, code_point))
IOError
dataset/ETHPy150Open zulip/zulip/tools/emoji_dump/emoji_dump.py/color_font
6,267
@property
def summoner(self):
    """
    Returns:
        Summoner: the summoner represented by this entry. None if this entry is for a team
    """
    if not self.data.playerOrTeamId:
        return None

    try:
        id_ = int(self.data.playerOrTeamId)
        return cassiopeia.riotapi.get_summoner_by_id(id_)
    except __HOLE__:
        return None
ValueError
dataset/ETHPy150Open meraki-analytics/cassiopeia/cassiopeia/type/core/league.py/Entry.summoner
6,268
@property
def team(self):
    """
    Returns:
        Team: the team represented by this entry. None if this entry is for a summoner
    """
    if not self.data.playerOrTeamId:
        return None

    try:
        int(self.data.playerOrTeamId)
        return None
    except __HOLE__:
        return cassiopeia.riotapi.get_team_by_id(self.data.playerOrTeamId)
ValueError
dataset/ETHPy150Open meraki-analytics/cassiopeia/cassiopeia/type/core/league.py/Entry.team
6,269
@property
def summoner_name(self):
    """
    Returns:
        str: the name of the summoner represented by this entry. An empty string if this entry is for a team
    """
    if not self.data.playerOrTeamId:
        return ""

    try:
        int(self.data.playerOrTeamId)
        return self.data.playerOrTeamName
    except __HOLE__:
        return ""
ValueError
dataset/ETHPy150Open meraki-analytics/cassiopeia/cassiopeia/type/core/league.py/Entry.summoner_name
6,270
@property
def team_name(self):
    """
    Returns:
        str: the name of the team represented by this entry. An empty string if this entry is for a summoner
    """
    try:
        int(self.data.playerOrTeamId)
        return ""
    except __HOLE__:
        return self.data.playerOrTeamName
ValueError
dataset/ETHPy150Open meraki-analytics/cassiopeia/cassiopeia/type/core/league.py/Entry.team_name
6,271
@property
def summoner(self):
    """
    Returns:
        Summoner: the relevant summoner that is a member of this league. Only present when full league is requested so that participant's entry can be identified. None when individual entry is requested or the participant is a team.
    """
    if not self.data.participantId:
        return None

    try:
        id_ = int(self.data.participantId)
        return cassiopeia.riotapi.get_summoner_by_id(id_)
    except __HOLE__:
        return None
ValueError
dataset/ETHPy150Open meraki-analytics/cassiopeia/cassiopeia/type/core/league.py/League.summoner
6,272
@property
def team(self):
    """
    Returns:
        Team: the relevant team that is a member of this league. Only present when full league is requested so that participant's entry can be identified. None when individual entry is requested or the participant is a summoner.
    """
    if not self.data.participantId:
        return None

    try:
        int(self.data.participantId)
        return None
    except __HOLE__:
        return cassiopeia.riotapi.get_team_by_id(self.data.participantId)
ValueError
dataset/ETHPy150Open meraki-analytics/cassiopeia/cassiopeia/type/core/league.py/League.team
6,273
def process_view(self, request, callback, callback_args, callback_kwargs):
    if getattr(request, 'csrf_processing_done', False):
        return None

    try:
        csrf_token = _sanitize_token(
            request.COOKIES[settings.CSRF_COOKIE_NAME])
        # Use same token next time
        request.META['CSRF_COOKIE'] = csrf_token
    except KeyError:
        csrf_token = None

    # Wait until request.META["CSRF_COOKIE"] has been manipulated before
    # bailing out, so that get_token still works
    if getattr(callback, 'csrf_exempt', False):
        return None

    # Assume that anything not defined as 'safe' by RFC2616 needs protection
    if request.method not in ('GET', 'HEAD', 'OPTIONS', 'TRACE'):
        if getattr(request, '_dont_enforce_csrf_checks', False):
            # Mechanism to turn off CSRF checks for test suite.
            # It comes after the creation of CSRF cookies, so that
            # everything else continues to work exactly the same
            # (e.g. cookies are sent, etc.), but before any
            # branches that call reject().
            return self._accept(request)

        if request.is_secure():
            # Suppose user visits http://example.com/
            # An active network attacker (man-in-the-middle, MITM) sends a
            # POST form that targets https://example.com/detonate-bomb/ and
            # submits it via JavaScript.
            #
            # The attacker will need to provide a CSRF cookie and token, but
            # that's no problem for a MITM and the session-independent
            # nonce we're using. So the MITM can circumvent the CSRF
            # protection. This is true for any HTTP connection, but anyone
            # using HTTPS expects better! For this reason, for
            # https://example.com/ we need additional protection that treats
            # http://example.com/ as completely untrusted. Under HTTPS,
            # Barth et al. found that the Referer header is missing for
            # same-domain requests in only about 0.2% of cases or less, so
            # we can use strict Referer checking.
            referer = force_text(
                request.META.get('HTTP_REFERER'),
                strings_only=True,
                errors='replace'
            )
            if referer is None:
                return self._reject(request, REASON_NO_REFERER)

            referer = urlparse(referer)

            # Make sure we have a valid URL for Referer.
            if '' in (referer.scheme, referer.netloc):
                return self._reject(request, REASON_MALFORMED_REFERER)

            # Ensure that our Referer is also secure.
            if referer.scheme != 'https':
                return self._reject(request, REASON_INSECURE_REFERER)

            # If there isn't a CSRF_COOKIE_DOMAIN, assume we need an exact
            # match on host:port. If not, obey the cookie rules.
            if settings.CSRF_COOKIE_DOMAIN is None:
                # request.get_host() includes the port.
                good_referer = request.get_host()
            else:
                good_referer = settings.CSRF_COOKIE_DOMAIN
                server_port = request.get_port()
                if server_port not in ('443', '80'):
                    good_referer = '%s:%s' % (good_referer, server_port)

            # Here we generate a list of all acceptable HTTP referers,
            # including the current host since that has been validated
            # upstream.
            good_hosts = list(settings.CSRF_TRUSTED_ORIGINS)
            good_hosts.append(good_referer)

            if not any(is_same_domain(referer.netloc, host) for host in good_hosts):
                reason = REASON_BAD_REFERER % referer.geturl()
                return self._reject(request, reason)

        if csrf_token is None:
            # No CSRF cookie. For POST requests, we insist on a CSRF cookie,
            # and in this way we can avoid all CSRF attacks, including login
            # CSRF.
            return self._reject(request, REASON_NO_CSRF_COOKIE)

        # Check non-cookie token for match.
        request_csrf_token = ""
        if request.method == "POST":
            try:
                request_csrf_token = request.POST.get('csrfmiddlewaretoken', '')
            except __HOLE__:
                # Handle a broken connection before we've completed reading
                # the POST data. process_view shouldn't raise any
                # exceptions, so we'll ignore and serve the user a 403
                # (assuming they're still listening, which they probably
                # aren't because of the error).
                pass

        if request_csrf_token == "":
            # Fall back to X-CSRFToken, to make things easier for AJAX,
            # and possible for PUT/DELETE.
            request_csrf_token = request.META.get(settings.CSRF_HEADER_NAME, '')

        if not constant_time_compare(request_csrf_token, csrf_token):
            return self._reject(request, REASON_BAD_TOKEN)

    return self._accept(request)
IOError
dataset/ETHPy150Open django/django/django/middleware/csrf.py/CsrfViewMiddleware.process_view
6,274
def start_response(self, status, headers, exc_info=None):
    if exc_info:
        try:
            if self.status and self.headers_sent:
                reraise(exc_info[0], exc_info[1], exc_info[2])
        finally:
            exc_info = None
    elif self.status is not None:
        raise AssertionError("Response headers already set!")

    self.status = status

    # get the status code from the response here so we can use it to check
    # the need for the connection header later without parsing the string
    # each time.
    try:
        self.status_code = int(self.status.split()[0])
    except __HOLE__:
        self.status_code = None

    self.process_headers(headers)
    self.chunked = self.is_chunked()
    return self.write
ValueError
dataset/ETHPy150Open RoseOu/flasky/venv/lib/python2.7/site-packages/gunicorn/http/wsgi.py/Response.start_response
6,275
def sendfile(self, respiter):
    if self.cfg.is_ssl or not self.can_sendfile():
        return False

    if not util.has_fileno(respiter.filelike):
        return False

    fileno = respiter.filelike.fileno()
    try:
        offset = os.lseek(fileno, 0, os.SEEK_CUR)
        if self.response_length is None:
            filesize = os.fstat(fileno).st_size

            # The file may be special and sendfile will fail.
            # It may also be zero-length, but that is okay.
            if filesize == 0:
                return False

            nbytes = filesize - offset
        else:
            nbytes = self.response_length
    except (__HOLE__, io.UnsupportedOperation):
        return False

    self.send_headers()

    if self.is_chunked():
        chunk_size = "%X\r\n" % nbytes
        self.sock.sendall(chunk_size.encode('utf-8'))

    sockno = self.sock.fileno()
    sent = 0

    while sent != nbytes:
        count = min(nbytes - sent, BLKSIZE)
        sent += sendfile(sockno, fileno, offset + sent, count)

    if self.is_chunked():
        self.sock.sendall(b"\r\n")

    os.lseek(fileno, offset, os.SEEK_SET)

    return True
OSError
dataset/ETHPy150Open RoseOu/flasky/venv/lib/python2.7/site-packages/gunicorn/http/wsgi.py/Response.sendfile
6,276
def unpack_integer_range(integerrange):
    """Input an integer range spec like "200,205-207" and return a list of
    integers like [200, 205, 206, 207]

    :param integerrange: The range specification as a string
    :return: Sorted integers in a list
    """
    integers = []  # To hold the eventual result
    valid_chars = re.compile("^[0-9\-, ]+$")
    if re.match(valid_chars, integerrange) is None:
        assert False, "Number range %s in the feature file is invalid. Must " \
                      "contain just numbers, commas, and hyphens" % integerrange
    integerrange.replace(" ", "")
    rangeparts = integerrange.split(',')  # One or more integer ranges
                                          # separated by commas
    for rangepart in rangeparts:
        rangemaxmin = rangepart.split('-')  # Range is defined with a hyphen
        if len(rangemaxmin) == 1:  # This was a single value
            try:
                integers.extend([int(rangemaxmin[0])])
            except ValueError:
                assert False, "Number range %s in the feature file is " \
                              "invalid. Must be integers separated with commas and " \
                              "hyphens" % integerrange
        elif len(rangemaxmin) == 2:  # It was a range of values
            try:
                rangemin = int(rangemaxmin[0])
                rangemax = int(rangemaxmin[1]) + 1
            except __HOLE__:
                assert False, "Number range %s in the feature file is " \
                              "invalid. Must be integers separated with commas and " \
                              "hyphens" % integerrange
            if rangemin >= rangemax:
                assert False, "Number range %s in the feature file is " \
                              "invalid. Range minimum is more than " \
                              "maximum" % integerrange
            integers.extend(range(rangemin, rangemax))
        else:  # Range specifier was not of the form x-y
            assert False, "Number range %s in the feature file is invalid. " \
                          "Incorrect range specifier" % integerrange
    return sorted(integers)
ValueError
dataset/ETHPy150Open F-Secure/mittn/mittn/httpfuzzer/number_ranges.py/unpack_integer_range
6,277
def makedirs(path, mode=0o777):
    """
    Create a directory if it doesn't already exist (keeping concurrency in mind).

    :param path: The pathname of the directory to create (a string).
    :param mode: The mode to apply to newly created directories (an integer,
                 defaults to the octal number ``0777``).
    :returns: :data:`True` when the directory was created, :data:`False` if it
              already existed.
    :raises: Any exceptions raised by :func:`os.makedirs()` except for
             :data:`errno.EEXIST` (this error is swallowed and :data:`False` is
             returned instead).
    """
    try:
        os.makedirs(path, mode)
        return True
    except __HOLE__ as e:
        if e.errno != errno.EEXIST:
            # We don't want to swallow errors other than EEXIST,
            # because we could be obscuring a real problem.
            raise
        return False
OSError
dataset/ETHPy150Open paylogic/pip-accel/pip_accel/utils.py/makedirs
6,278
def same_directories(path1, path2):
    """
    Check if two pathnames refer to the same directory.

    :param path1: The first pathname (a string).
    :param path2: The second pathname (a string).
    :returns: :data:`True` if both pathnames refer to the same directory,
              :data:`False` otherwise.
    """
    if all(os.path.isdir(p) for p in (path1, path2)):
        try:
            return os.path.samefile(path1, path2)
        except __HOLE__:
            # On Windows and Python 2 os.path.samefile() is unavailable.
            return os.path.realpath(path1) == os.path.realpath(path2)
    else:
        return False
AttributeError
dataset/ETHPy150Open paylogic/pip-accel/pip_accel/utils.py/same_directories
6,279
def replace_file(src, dst):
    """
    Overwrite a file (in an atomic fashion when possible).

    :param src: The pathname of the source file (a string).
    :param dst: The pathname of the destination file (a string).
    """
    # Try os.replace() which was introduced in Python 3.3
    # (this should work on POSIX as well as Windows systems).
    try:
        os.replace(src, dst)
        return
    except __HOLE__:
        pass
    # Try os.rename() which is atomic on UNIX but refuses to overwrite existing
    # files on Windows.
    try:
        os.rename(src, dst)
        return
    except OSError as e:
        if e.errno != errno.EEXIST:
            raise
    # Finally we fall back to the dumb approach required only on Windows.
    # See https://bugs.python.org/issue8828 for a long winded discussion.
    os.remove(dst)
    os.rename(src, dst)
AttributeError
dataset/ETHPy150Open paylogic/pip-accel/pip_accel/utils.py/replace_file
6,280
def _get_go_files_to_mtime(self):
    """Returns a dict mapping all Go files to their mtimes.

    Returns:
        A dict mapping the path relative to the application root of every .go
        file in the application root, or any of its subdirectories, to the
        file's modification time.
    """
    go_file_to_mtime = {}
    for root, _, file_names in os.walk(
            self._server_configuration.application_root):
        for file_name in file_names:
            if not file_name.endswith('.go'):
                continue
            full_path = os.path.join(root, file_name)
            rel_path = os.path.relpath(
                full_path, self._server_configuration.application_root)
            if self._server_configuration.skip_files.match(rel_path):
                continue
            if self._server_configuration.nobuild_files.match(rel_path):
                continue
            try:
                go_file_to_mtime[rel_path] = os.path.getmtime(full_path)
            except __HOLE__ as e:
                # Ignore deleted files.
                if e.errno != errno.ENOENT:
                    raise
    return go_file_to_mtime
OSError
dataset/ETHPy150Open AppScale/appscale/AppServer/google/appengine/tools/devappserver2/go_application.py/GoApplication._get_go_files_to_mtime
6,281
def _get_max_instances_per_host(self, host_state, spec_obj):
    aggregate_vals = utils.aggregate_values_from_key(
        host_state, 'max_instances_per_host')
    try:
        value = utils.validate_num_values(
            aggregate_vals, CONF.max_instances_per_host, cast_to=int)
    except __HOLE__ as e:
        LOG.warning(_LW("Could not decode max_instances_per_host: '%s'"), e)
        value = CONF.max_instances_per_host
    return value
ValueError
dataset/ETHPy150Open BU-NU-CLOUD-SP16/Trusted-Platform-Module-nova/nova/scheduler/filters/num_instances_filter.py/AggregateNumInstancesFilter._get_max_instances_per_host
6,282
def get_alert_period_timedelta(self, attribute_str):
    if getattr(self, attribute_str) and len(getattr(self, attribute_str)) >= 2:
        period_str = getattr(self, attribute_str)[-1]
        num_str = getattr(self, attribute_str)[:-1]
        if period_str in ('h', 'd', 'w',):
            try:
                num_int = int(num_str)
                if period_str == 'h':
                    return datetime.timedelta(0, 0, 0, 0, 0, num_int)
                if period_str == 'd':
                    return datetime.timedelta(num_int)
                if period_str == 'w':
                    return datetime.timedelta(0, 0, 0, 0, 0, 0, num_int)
            except __HOLE__:
                return None
        else:
            return None
    else:
        return None
ValueError
dataset/ETHPy150Open holgerd77/django-dynamic-scraper/dynamic_scraper/models.py/Scraper.get_alert_period_timedelta
6,283
def __init__(self, hosts, replicas=None):
    """Create a new hash ring across the specified hosts.

    :param hosts: an iterable of hosts which will be mapped.
    :param replicas: number of hosts to map to each hash partition,
                     or len(hosts), which ever is lesser.
                     Default: CONF.hash_distribution_replicas
    """
    if replicas is None:
        replicas = CONF.hash_distribution_replicas

    try:
        self.hosts = set(hosts)
        self.replicas = replicas if replicas <= len(hosts) else len(hosts)
    except __HOLE__:
        raise exception.Invalid(
            _("Invalid hosts supplied when building HashRing."))

    self._host_hashes = {}
    for host in hosts:
        key = str(host).encode('utf8')
        key_hash = hashlib.md5(key)
        for p in range(2 ** CONF.hash_partition_exponent):
            key_hash.update(key)
            hashed_key = self._hash2int(key_hash)
            self._host_hashes[hashed_key] = host

    # Gather the (possibly colliding) resulting hashes into a bisectable
    # list.
    self._partitions = sorted(self._host_hashes.keys())
TypeError
dataset/ETHPy150Open openstack/ironic/ironic/common/hash_ring.py/HashRing.__init__
6,284
def _get_partition(self, data):
    try:
        if six.PY3 and data is not None:
            data = data.encode('utf-8')
        key_hash = hashlib.md5(data)
        hashed_key = self._hash2int(key_hash)
        position = bisect.bisect(self._partitions, hashed_key)
        return position if position < len(self._partitions) else 0
    except __HOLE__:
        raise exception.Invalid(
            _("Invalid data supplied to HashRing.get_hosts."))
TypeError
dataset/ETHPy150Open openstack/ironic/ironic/common/hash_ring.py/HashRing._get_partition
6,285
def __getitem__(self, driver_name):
    try:
        return self.ring[driver_name]
    except __HOLE__:
        raise exception.DriverNotFound(
            _("The driver '%s' is unknown.") % driver_name)
KeyError
dataset/ETHPy150Open openstack/ironic/ironic/common/hash_ring.py/HashRingManager.__getitem__
6,286
def lineReceived(self, line):
    if self.state == "firstline":
        while line.startswith("\n") or line.startswith("\r"):
            line = line[1:]
        if not line:
            return
        try:
            a, b, c = line.split(" ", 2)
        except __HOLE__:
            self.invalidMessage()
            return
        if a == "SIP/2.0" and self.acceptResponses:
            # response
            try:
                code = int(b)
            except ValueError:
                self.invalidMessage()
                return
            self.message = Response(code, c)
        elif c == "SIP/2.0" and self.acceptRequests:
            self.message = Request(a, b)
        else:
            self.invalidMessage()
            return
        self.state = "headers"
        return
    else:
        assert self.state == "headers"
    if line:
        # multiline header
        if line.startswith(" ") or line.startswith("\t"):
            name, value = self.header
            self.header = name, (value + line.lstrip())
        else:
            # new header
            if self.header:
                self.message.addHeader(*self.header)
                self.header = None
            try:
                name, value = line.split(":", 1)
            except ValueError:
                self.invalidMessage()
                return
            self.header = name, value.lstrip()
            # XXX we assume content-length won't be multiline
            if name.lower() == "content-length":
                try:
                    self.length = int(value.lstrip())
                except ValueError:
                    self.invalidMessage()
                    return
    else:
        # CRLF, we now have message body until self.length bytes,
        # or if no length was given, until there is no more data
        # from the connection sending us data.
        self.state = "body"
        if self.header:
            self.message.addHeader(*self.header)
            self.header = None
        if self.length == 0:
            self.messageDone()
            return
        self.setRawMode()
ValueError
dataset/ETHPy150Open nlloyd/SubliminalCollaborator/libs/twisted/protocols/sip.py/MessagesParser.lineReceived
6,287
def decode(self, response):
    response = ' '.join(response.splitlines())
    parts = response.split(',')
    auth = dict([(k.strip(), unq(v.strip())) for (k, v) in
                 [p.split('=', 1) for p in parts]])
    try:
        username = auth['username']
    except __HOLE__:
        raise SIPError(401)
    try:
        return DigestedCredentials(username, auth, self.outstanding)
    except:
        raise SIPError(400)
KeyError
dataset/ETHPy150Open nlloyd/SubliminalCollaborator/libs/twisted/protocols/sip.py/DigestAuthorizer.decode
6,288
def unregister(self, message, toURL, contact):
    try:
        expires = int(message.headers["expires"][0])
    except __HOLE__:
        self.deliverResponse(self.responseFromRequest(400, message))
    else:
        if expires == 0:
            if contact == "*":
                contactURL = "*"
            else:
                name, contactURL, params = parseAddress(contact)
            d = self.registry.unregisterAddress(message.uri, toURL, contactURL)
            d.addCallback(self._cbUnregister, message
                ).addErrback(self._ebUnregister, message
                )
ValueError
dataset/ETHPy150Open nlloyd/SubliminalCollaborator/libs/twisted/protocols/sip.py/RegisterProxy.unregister
6,289
def _expireRegistration(self, username):
    try:
        dc, url = self.users[username]
    except __HOLE__:
        return defer.fail(LookupError("no such user"))
    else:
        dc.cancel()
        del self.users[username]
    return defer.succeed(Registration(0, url))
KeyError
dataset/ETHPy150Open nlloyd/SubliminalCollaborator/libs/twisted/protocols/sip.py/InMemoryRegistry._expireRegistration
6,290
def cleanUpModules(self):
    modules = self.getModules()
    modules.sort()
    modules.reverse()
    for module in modules:
        try:
            del sys.modules[module]
        except __HOLE__:
            pass
KeyError
dataset/ETHPy150Open nlloyd/SubliminalCollaborator/libs/twisted/trial/test/packages.py/PackageTest.cleanUpModules
6,291
def calculate_packing(request, id, quantity=None, with_properties=False, as_string=False, template_name="lfs/catalog/packing_result.html"):
    """Calculates the actual amount of pieces to buy on base on packing
    information.
    """
    product = Product.objects.get(pk=id)

    if quantity is None:
        try:
            quantity = request.POST.get("quantity")
            if isinstance(quantity, unicode):
                # atof() on unicode string fails in some environments, like Czech
                quantity = quantity.encode("utf-8")
            quantity = locale.atof(quantity)
        except (AttributeError, TypeError, ValueError):
            quantity = 1

    packing_amount, packing_unit = product.get_packing_info()

    try:
        packs = math.ceil(quantity / packing_amount)
        real_quantity = packs * packing_amount
        price = product.get_price_gross(request, with_properties=with_properties, amount=quantity)
        price += _calculate_property_price(request)
        price *= real_quantity
    except __HOLE__:
        packs = 0.0
        real_quantity = 0.0
        price = 0.0

    html = render_to_string(template_name, RequestContext(request, {
        "price": price,
        "product": product,
        "packs": int(packs),
        "real_quantity": real_quantity,
        "unit": packing_unit,
    }))

    if as_string:
        return html

    result = json.dumps({
        "html": html,
    }, cls=LazyEncoder)

    return HttpResponse(result, content_type='application/json')
TypeError
dataset/ETHPy150Open diefenbach/django-lfs/lfs/catalog/views.py/calculate_packing
6,292
def set_price_filter(request, category_slug):
    """Saves the given price filter to session. Redirects to the category
    with given slug.
    """
    req = request.POST if request.method == 'POST' else request.GET
    try:
        min_val = lfs.core.utils.atof(req.get("min", "0"))
    except (ValueError):
        min_val = 0

    try:
        max_val = lfs.core.utils.atof(req.get("max", "99999"))
    except:
        max_val = 0

    try:
        float(min_val)
    except (TypeError, __HOLE__):
        min_val = "0"

    try:
        float(max_val)
    except (TypeError, ValueError):
        max_val = "0"

    request.session["price-filter"] = {"min": min_val, "max": max_val}

    url = reverse("lfs_category", kwargs={"slug": category_slug})
    return HttpResponseRedirect(url)
ValueError
dataset/ETHPy150Open diefenbach/django-lfs/lfs/catalog/views.py/set_price_filter
6,293
def set_manufacturer_filter(request, category_slug, manufacturer_id):
    """
    Saves the given manufacturer filter to session. Redirects to the category
    with given slug.
    """
    try:
        manufacturer_id = int(manufacturer_id)
        if Manufacturer.objects.filter(pk=manufacturer_id).exists():
            mf = request.session.get("manufacturer-filter", [])
            if manufacturer_id not in mf:
                mf.append(manufacturer_id)
            request.session["manufacturer-filter"] = mf
    except (__HOLE__, TypeError) as e:
        pass

    url = reverse("lfs_category", kwargs={"slug": category_slug})
    return HttpResponseRedirect(url)
ValueError
dataset/ETHPy150Open diefenbach/django-lfs/lfs/catalog/views.py/set_manufacturer_filter
6,294
def reset_number_filter(request, category_slug, property_group_id, property_id):
    """Resets product filter with given property id. Redirects to the category
    with given slug.
    """
    key = '{0}_{1}'.format(property_group_id, property_id)
    try:
        product_filter = request.session.get("product-filter")
        del product_filter["number-filter"][key]
    except __HOLE__:
        pass
    else:
        if product_filter["number-filter"] == {}:
            del product_filter["number-filter"]
        request.session["product-filter"] = product_filter

    url = reverse("lfs_category", kwargs={"slug": category_slug})
    return HttpResponseRedirect(url)
KeyError
dataset/ETHPy150Open diefenbach/django-lfs/lfs/catalog/views.py/reset_number_filter
6,295
def category_products(request, slug, start=1, template_name="lfs/catalog/categories/product/default.html"):
    """Displays the products of the category with passed slug.

    This view is called if the user chooses a template that is situated in
    settings.PRODUCT_PATH ".
    """
    # Resets the product filters if the user navigates to another category.
    # TODO: Is this what a customer would expect?
    last_category = request.session.get("last_category")
    if (last_category is None) or (last_category.slug != slug):
        if "product-filter" in request.session:
            del request.session["product-filter"]
        if "price-filter" in request.session:
            del request.session["price-filter"]
        if "manufacturer-filter" in request.session:
            del request.session["manufacturer-filter"]

    try:
        default_sorting = settings.LFS_PRODUCTS_SORTING
    except __HOLE__:
        default_sorting = "effective_price"
    sorting = request.session.get("sorting", default_sorting)

    product_filter = request.session.get("product-filter", {})

    cache_key = "%s-category-products-2-%s" % (settings.CACHE_MIDDLEWARE_KEY_PREFIX, slug)
    sub_cache_key = "%s-2-start-%s-sorting-%s" % (settings.CACHE_MIDDLEWARE_KEY_PREFIX, start, sorting)

    filter_key = ["%s-%s" % (i[0], i[1]) for i in product_filter.items()]
    if filter_key:
        sub_cache_key += "-%s" % "-".join(filter_key)

    price_filter = request.session.get("price-filter")
    if price_filter:
        sub_cache_key += "-%s-%s" % (price_filter["min"], price_filter["max"])

    manufacturer_filter = request.session.get("manufacturer-filter")
    if manufacturer_filter:
        sub_cache_key += "-%s" % ','.join(map(str, manufacturer_filter))

    temp = cache.get(cache_key)
    if temp is not None:
        try:
            return temp[sub_cache_key]
        except KeyError:
            pass
    else:
        temp = dict()

    category = lfs_get_object_or_404(Category, slug=slug)

    # Calculates parameters for display.
    try:
        start = int(start)
    except (ValueError, TypeError):
        start = 1

    format_info = category.get_format_info()
    amount_of_rows = format_info["product_rows"]
    amount_of_cols = format_info["product_cols"]
    amount = amount_of_rows * amount_of_cols

    all_products = lfs.catalog.utils.get_filtered_products_for_category(
        category, product_filter, price_filter, sorting, manufacturer_filter)
    all_products = all_products.select_related('parent')

    # prepare paginator
    paginator = Paginator(all_products, amount)

    try:
        current_page = paginator.page(start)
    except (EmptyPage, InvalidPage):
        current_page = paginator.page(paginator.num_pages)

    # Calculate products
    row = []
    products = []
    for i, product in enumerate(current_page.object_list):
        if product.is_product_with_variants():
            default_variant = product.get_variant_for_category(request)
            if default_variant:
                product = default_variant

        image = None
        product_image = product.get_image()
        if product_image:
            image = product_image.image
        row.append({
            "obj": product,
            "slug": product.slug,
            "name": product.get_name(),
            "image": image,
            "price_unit": product.get_price_unit(),
            "price_includes_tax": product.price_includes_tax(request),
        })
        if (i + 1) % amount_of_cols == 0:
            products.append(row)
            row = []

    if len(row) > 0:
        products.append(row)

    amount_of_products = all_products.count()

    # Calculate urls
    pagination_data = lfs_pagination(request, current_page, url=category.get_absolute_url())

    pagination_data['total_text'] = ungettext('%(count)d product',
                                              '%(count)d products',
                                              amount_of_products) % {'count': amount_of_products}

    render_template = category.get_template_name()
    if render_template is not None:
        template_name = render_template

    template_data = {
        "category": category,
        "products": products,
        "amount_of_products": amount_of_products,
        "pagination": pagination_data
    }

    result_html = render_to_string(template_name, RequestContext(request, template_data))

    result = {'pagination_data': pagination_data, 'html': result_html}
    temp[sub_cache_key] = result
    cache.set(cache_key, temp)

    return result
AttributeError
dataset/ETHPy150Open diefenbach/django-lfs/lfs/catalog/views.py/category_products
6,296
def _calculate_property_price(request):
    """
    Calculates the price of the currently selected properties.
    """
    property_price = 0
    for key, option_id in request.POST.items():
        if key.startswith("property"):
            try:
                property_group_id, property_id = map(int, key.split('-')[1:])
                prop = Property.objects.get(pk=property_id)
                if prop.is_select_field:
                    po = PropertyOption.objects.get(property=property, pk=option_id)
                    if prop.add_price:
                        po_price = float(po.price)
                        property_price += po_price
            except (__HOLE__, ValueError, TypeError, PropertyOption.DoesNotExist, Property.DoesNotExist):
                pass
    return property_price
IndexError
dataset/ETHPy150Open diefenbach/django-lfs/lfs/catalog/views.py/_calculate_property_price
6,297
def upload_to_s3(self, source_path, bucket_name):
    """
    Given a file, upload it to S3.
    Credentials should be stored in environment variables or
    ~/.aws/credentials (%USERPROFILE%\.aws\credentials on Windows).

    Returns True on success, false on failure.
    """
    s3 = self.boto_session.resource('s3')

    # If this bucket doesn't exist, make it.
    # Will likely fail, but that's apparently the best way to check
    # it exists, since boto3 doesn't expose a better check.
    try:
        s3.create_bucket(Bucket=bucket_name)
    except Exception as e:  # pragma: no cover
        pass

    if not os.path.isfile(source_path) or os.stat(source_path).st_size == 0:
        print("Problem with source file {}".format(source_path))
        return False

    dest_path = os.path.split(source_path)[1]
    try:
        source_size = os.stat(source_path).st_size
        print("Uploading zip (" + str(self.human_size(source_size)) + ")...")
        progress = tqdm(total=float(os.path.getsize(source_path)), unit_scale=True)

        # Attempt to upload to S3 using the S3 meta client with the progress bar.
        # If we're unable to do that, try one more time using a session client,
        # which cannot use the progress bar.
        # Related: https://github.com/boto/boto3/issues/611
        try:
            s3.meta.client.upload_file(
                source_path, bucket_name, dest_path,
                Callback=progress.update
            )
        except Exception as e:  # pragma: no cover
            s3 = self.boto_session.client('s3')
            s3.upload_file(source_path, bucket_name, dest_path)

        progress.close()
    except (KeyboardInterrupt, __HOLE__):  # pragma: no cover
        raise
    except Exception as e:  # pragma: no cover
        print(e)
        return False
    return True
SystemExit
dataset/ETHPy150Open Miserlou/Zappa/zappa/zappa.py/Zappa.upload_to_s3
6,298
def dump(file, data):
    """
    Recursively dump the given packets (or packet) to given file.
    """
    for packet in data:
        try:
            dump(file, packet)
        except __HOLE__:
            dump_packet(file, packet)
TypeError
dataset/ETHPy150Open braiden/python-ant-downloader/antd/garmin.py/dump
6,299
def extract_runs(protocols, get_runs_pkts):
    """
    Given garmin packets which are result of A1000 (get_runs)
    Return an object tree runs->laps->points for easier processing.
    """
    runs, laps, trks = get_runs_pkts
    runs = [r.data for r in runs.by_pid[protocols.link_proto.PID_RUN]]
    laps = [l.data for l in laps.by_pid[protocols.link_proto.PID_LAP]]
    _log.debug("extract_runs: found %d run(s)", len(runs))
    for run_num, run in enumerate(runs):
        run.laps = [l for l in laps if run.first_lap_index <= l.index <= run.last_lap_index]
        run.time.time = run.laps[0].start_time.time
        run.wpts = list(extract_wpts(protocols, trks, run.track_index))
        _log.debug("extract_runs: run %d has: %d lap(s), %d wpt(s)",
                   run_num + 1, len(run.laps), len(run.wpts))
        for lap in run.laps:
            lap.wpts = []
        lap_num = 0
        for wpt in run.wpts:
            try:
                while wpt.time.time >= run.laps[lap_num + 1].start_time.time:
                    _log.debug("extract_runs: run %d lap %d has: %d wpt(s)",
                               run_num + 1, lap_num + 1, len(run.laps[lap_num].wpts))
                    lap_num += 1
            except __HOLE__:
                pass
            run.laps[lap_num].wpts.append(wpt)
        all_wpt_in_laps = sum(len(lap.wpts) for lap in run.laps)
        if len(run.wpts) != all_wpt_in_laps:
            _log.warning("extract_runs: run %d waypoint mismatch: total(%d) != wpt_in_laps(%d)",
                         run_num + 1, len(run.wpts), all_wpt_in_laps)
    return runs
IndexError
dataset/ETHPy150Open braiden/python-ant-downloader/antd/garmin.py/extract_runs