Dataset columns:

    Unnamed: 0    int64             0 to 10k
    function      string (lengths)  79 to 138k
    label         string (classes)  20 values
    info          string (lengths)  42 to 261
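Each record below consists of four fields in order: the row index, a Python function whose exception type has been masked with the placeholder __HOLE__, the label naming the exception class that fills the hole, and an info path pointing to the function's origin in the ETHPy150Open corpus. As a minimal, hedged sketch of how such a table might be inspected, the snippet below assumes the data has been exported to a CSV file named exception_rows.csv and that pandas is available; both the file name and the loading method are assumptions, not part of this dump.

import pandas as pd

# Hypothetical file name; the dump above does not specify how the table is stored.
df = pd.read_csv("exception_rows.csv")

# One record: masked function text, the exception class to predict, and its source path.
row = df.iloc[0]
print(row["label"])              # e.g. "ValueError"
print(row["info"])               # e.g. "dataset/ETHPy150Open .../util.py/CommandResult.parse_item"
print(row["function"][:120])     # start of the function containing __HOLE__

# Frequency of the 20 exception classes used as labels.
print(df["label"].value_counts())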
5,600
def parse_item(self, item):
    """
    :param item: str, json-encoded string
    """
    item = item.decode("utf-8")
    try:
        parsed_item = json.loads(item)
    except __HOLE__:
        parsed_item = None
    else:
        # append here just in case .get bellow fails
        self._parsed_logs.append(parsed_item)

    # make sure the json is a dictionary object
    if isinstance(parsed_item, dict):
        line = parsed_item.get("stream", "")
    else:
        parsed_item = None
        line = item

    for l in line.splitlines():
        l = l.strip()
        if l:
            logger.debug(l)

    self._logs.append(item)

    if parsed_item is not None:
        self._error = parsed_item.get("error", None)
        self._error_detail = parsed_item.get("errorDetail", None)
        if self._error:
            logger.error(item.strip())
ValueError
dataset/ETHPy150Open projectatomic/atomic-reactor/atomic_reactor/util.py/CommandResult.parse_item
5,601
def wait_for_command(logs_generator):
    """
    using given generator, wait for it to raise StopIteration, which
    indicates that docker has finished with processing

    :return: list of str, logs
    """
    logger.info("wait_for_command")
    cr = CommandResult()
    while True:
        try:
            item = next(logs_generator)  # py2 & 3 compat
            cr.parse_item(item)
        except __HOLE__:
            logger.info("no more logs")
            break
    return cr
StopIteration
dataset/ETHPy150Open projectatomic/atomic-reactor/atomic_reactor/util.py/wait_for_command
5,602
def clone_git_repo(git_url, target_dir, commit=None):
    """
    clone provided git repo to target_dir, optionally checkout provided commit

    :param git_url: str, git repo to clone
    :param target_dir: str, filesystem path where the repo should be cloned
    :param commit: str, commit to checkout, SHA-1 or ref
    :return: str, commit ID of HEAD
    """
    commit = commit or "master"
    logger.info("cloning git repo '%s'", git_url)
    logger.debug("url = '%s', dir = '%s', commit = '%s'", git_url, target_dir, commit)
    cmd = ["git", "clone", "-b", commit, "--depth", "1", git_url, quote(target_dir)]
    logger.debug("doing a shallow clone '%s'", cmd)
    try:
        subprocess.check_call(cmd)
    except subprocess.CalledProcessError as ex:
        logger.warning(repr(ex))
        # http://stackoverflow.com/questions/1911109/clone-a-specific-git-branch/4568323#4568323
        # -b takes only refs, not SHA-1
        cmd = ["git", "clone", "-b", commit, "--single-branch", git_url, quote(target_dir)]
        logger.debug("cloning single branch '%s'", cmd)
        try:
            subprocess.check_call(cmd)
        except subprocess.CalledProcessError as ex:
            logger.warning(repr(ex))
            # let's try again with plain `git clone $url && git checkout`
            cmd = ["git", "clone", git_url, quote(target_dir)]
            logger.debug("cloning '%s'", cmd)
            subprocess.check_call(cmd)
            cmd = ["git", "reset", "--hard", commit]
            logger.debug("checking out branch '%s'", cmd)
            subprocess.check_call(cmd, cwd=target_dir)
    cmd = ["git", "rev-parse", "HEAD"]
    logger.debug("getting SHA-1 of provided ref '%s'", cmd)
    try:
        commit_id = subprocess.check_output(cmd, cwd=target_dir)  # py 2.7
    except __HOLE__:
        commit_id = backported_check_output(cmd, cwd=target_dir)  # py 2.6
    commit_id = commit_id.strip()
    logger.info("commit ID = %s", commit_id)
    return commit_id
AttributeError
dataset/ETHPy150Open projectatomic/atomic-reactor/atomic_reactor/util.py/clone_git_repo
5,603
def escape_dollar(v):
    try:
        str_type = unicode
    except __HOLE__:
        str_type = str
    if isinstance(v, str_type):
        return v.replace('$', r'\$')
    else:
        return v
NameError
dataset/ETHPy150Open projectatomic/atomic-reactor/atomic_reactor/util.py/escape_dollar
5,604
def _process_plugin_substitution(mapping, key_parts, value):
    try:
        plugin_type, plugin_name, arg_name = key_parts
    except __HOLE__:
        logger.error("invalid absolute path '%s': it requires exactly three parts: "
                     "plugin type, plugin name, argument name (dot separated)",
                     key_parts)
        raise ValueError("invalid absolute path to plugin, it should be "
                         "plugin_type.plugin_name.argument_name")

    logger.debug("getting plugin conf for '%s' with type '%s'", plugin_name, plugin_type)
    plugins_of_a_type = mapping.get(plugin_type, None)
    if plugins_of_a_type is None:
        logger.warning("there are no plugins with type '%s'", plugin_type)
        return
    plugin_conf = [x for x in plugins_of_a_type if x['name'] == plugin_name]
    plugins_num = len(plugin_conf)
    if plugins_num == 1:
        if arg_name not in plugin_conf[0]['args']:
            logger.warning("no configuration value '%s' for plugin '%s', skipping",
                           arg_name, plugin_name)
            return
        logger.info("changing value '%s' of plugin '%s': '%s' -> '%s'",
                    arg_name, plugin_name, plugin_conf[0]['args'][arg_name], value)
        plugin_conf[0]['args'][arg_name] = value
    elif plugins_num <= 0:
        logger.warning("there is no configuration for plugin '%s', skipping substitution",
                       plugin_name)
    else:
        logger.error("there is no configuration for plugin '%s'", plugin_name)
        raise RuntimeError("plugin '%s' was specified multiple (%d) times, can't pick one",
                           plugin_name, plugins_num)
ValueError
dataset/ETHPy150Open projectatomic/atomic-reactor/atomic_reactor/util.py/_process_plugin_substitution
5,605
def get_version_of_tools():
    """
    get versions of tools reactor is using (specified in constants.TOOLS_USED)

    :returns list of dicts, [{"name": "docker-py", "version": "1.2.3"}, ...]
    """
    response = []
    for tool in TOOLS_USED:
        pkg_name = tool["pkg_name"]
        try:
            tool_module = import_module(pkg_name)
        except __HOLE__ as ex:
            logger.warning("can't import module %s: %r", pkg_name, ex)
        else:
            version = getattr(tool_module, "__version__", None)
            if version is None:
                logger.warning("tool %s doesn't have __version__", pkg_name)
            else:
                response.append({
                    "name": tool.get("display_name", pkg_name),
                    "version": version,
                    "path": tool_module.__file__,
                })
    return response
ImportError
dataset/ETHPy150Open projectatomic/atomic-reactor/atomic_reactor/util.py/get_version_of_tools
5,606
def get_build_json():
    try:
        return json.loads(os.environ["BUILD"])
    except __HOLE__:
        logger.error("No $BUILD env variable. Probably not running in build container")
        raise


# copypasted and slightly modified from
# http://stackoverflow.com/questions/1094841/reusable-library-to-get-human-readable-version-of-file-size/1094933#1094933
KeyError
dataset/ETHPy150Open projectatomic/atomic-reactor/atomic_reactor/util.py/get_build_json
5,607
def decode_eventdata(sensor_type, offset, eventdata, sdr): """Decode extra event data from an alert or log Provide a textual summary of eventdata per descriptions in Table 42-3 of the specification. This is for sensor specific offset events only. :param sensor_type: The sensor type number from the event :param offset: Sensor specific offset :param eventdata: The three bytes from the log or alert """ if sensor_type == 5 and offset == 4: # link loss, indicates which port return 'Port {0}'.format(eventdata[1]) elif sensor_type == 8 and offset == 6: # PSU cfg error errtype = eventdata[2] & 0b1111 return psucfg_errors.get(errtype, 'Unknown') elif sensor_type == 0xc and offset == 8: # Memory spare return 'Module {0}'.format(eventdata[2]) elif sensor_type == 0xf: if offset == 0: # firmware error return firmware_errors.get(eventdata[1], 'Unknown') elif offset in (1, 2): return firmware_progress.get(eventdata[1], 'Unknown') elif sensor_type == 0x10: if offset == 0: # Correctable error logging on a specific memory part return 'Module {0}'.format(eventdata[1]) elif offset == 1: return 'Reading type {0:02X}h, offset {1:02X}h'.format( eventdata[1], eventdata[2] & 0b1111) elif offset == 5: return '{0}%'.format(eventdata[2]) elif offset == 6: return 'Processor {0}'.format(eventdata[1]) elif sensor_type == 0x12: if offset == 3: action = (eventdata[1] & 0b1111000) >> 4 return auxlog_actions.get(action, 'Unknown') elif offset == 4: sysactions = [] if eventdata[1] & 0b1 << 5: sysactions.append('NMI') if eventdata[1] & 0b1 << 4: sysactions.append('OEM action') if eventdata[1] & 0b1 << 3: sysactions.append('Power Cycle') if eventdata[1] & 0b1 << 2: sysactions.append('Reset') if eventdata[1] & 0b1 << 1: sysactions.append('Power Down') if eventdata[1] & 0b1: sysactions.append('Alert') return ','.join(sysactions) elif offset == 5: # Clock change event, either before or after if eventdata[1] & 0b10000000: return 'After' else: return 'Before' elif sensor_type == 0x19 and offset == 0: return 'Requested {0] while {1}'.format(eventdata[1], eventdata[2]) elif sensor_type == 0x1d and offset == 7: return restart_causes.get(eventdata[1], 'Unknown') elif sensor_type == 0x21 and offset == 0x9: return '{0} {1}'.format(slot_types.get(eventdata[1], 'Unknown'), eventdata[2]) elif sensor_type == 0x23: phase = eventdata[1] & 0b1111 return watchdog_boot_phases.get(phase, 'Unknown') elif sensor_type == 0x28: if offset == 4: return 'Sensor {0}'.format(eventdata[1]) elif offset == 5: islogical = (eventdata[1] & 0b10000000) if islogical: if eventdata[2] in sdr.fru: return sdr.fru[eventdata[2]].fru_name else: return 'FRU {0}'.format(eventdata[2]) elif sensor_type == 0x2a and offset == 3: return 'User {0}'.format(eventdata[1]) elif sensor_type == 0x2b: return version_changes.get(eventdata[1], 'Unknown') elif sensor_type == 0x2c: cause = (eventdata[1] & 0b11110000) >> 4 cause = fru_states.get(cause, 'Unknown') oldstate = eventdata[1] & 0b1111 if oldstate != offset: try: cause += '(change from {0})'.format( ipmiconst.sensor_type_offsets[0x2c][oldstate]['desc']) except __HOLE__: pass
KeyError
dataset/ETHPy150Open openstack/pyghmi/pyghmi/ipmi/events.py/decode_eventdata
5,608
def _populate_event(self, deassertion, event, event_data, event_type, sensor_type, sensorid): event['component_id'] = sensorid try: event['component'] = self._sdr.sensors[sensorid].name except __HOLE__: if sensorid == 0: event['component'] = None else: event['component'] = 'Sensor {0}'.format(sensorid) event['deassertion'] = deassertion event['event_data_bytes'] = event_data byte2type = (event_data[0] & 0b11000000) >> 6 byte3type = (event_data[0] & 0b110000) >> 4 if byte2type == 1: event['triggered_value'] = event_data[1] evtoffset = event_data[0] & 0b1111 event['event_type_byte'] = event_type if event_type <= 0xc: event['component_type_id'] = sensor_type event['event_id'] = '{0}.{1}'.format(event_type, evtoffset) # use generic offset decode for event description event['component_type'] = ipmiconst.sensor_type_codes.get( sensor_type, '') evreading = ipmiconst.generic_type_offsets.get( event_type, {}).get(evtoffset, {}) if event['deassertion']: event['event'] = evreading.get('deassertion_desc', '') event['severity'] = evreading.get( 'deassertion_severity', pygconst.Health.Ok) else: event['event'] = evreading.get('desc', '') event['severity'] = evreading.get( 'severity', pygconst.Health.Ok) elif event_type == 0x6f: event['component_type_id'] = sensor_type event['event_id'] = '{0}.{1}'.format(event_type, evtoffset) event['component_type'] = ipmiconst.sensor_type_codes.get( sensor_type, '') evreading = ipmiconst.sensor_type_offsets.get( sensor_type, {}).get(evtoffset, {}) if event['deassertion']: event['event'] = evreading.get('deassertion_desc', '') event['severity'] = evreading.get( 'deassertion_severity', pygconst.Health.Ok) else: event['event'] = evreading.get('desc', '') event['severity'] = evreading.get( 'severity', pygconst.Health.Ok) if event_type == 1: # threshold if byte3type == 1: event['threshold_value'] = event_data[2] if 3 in (byte2type, byte3type) or event_type == 0x6f: # sensor specific decode, see sdr module... # 2 - 0xc: generic discrete, 0x6f, sensor specific additionaldata = decode_eventdata( sensor_type, evtoffset, event_data, self._sdr) if additionaldata: event['event_data'] = additionaldata
KeyError
dataset/ETHPy150Open openstack/pyghmi/pyghmi/ipmi/events.py/EventHandler._populate_event
5,609
def _get_user_model():
    """
    Get the User Document class user for MongoEngine authentication.

    Use the model defined in SOCIAL_AUTH_USER_MODEL if defined, or
    defaults to MongoEngine's configured user document class.
    """
    custom_model = getattr(settings, setting_name('USER_MODEL'), None)
    if custom_model:
        return module_member(custom_model)

    try:
        # Custom user model support with MongoEngine 0.8
        from mongoengine.django.mongo_auth.models import get_user_document
        return get_user_document()
    except __HOLE__:
        return module_member('mongoengine.django.auth.User')
ImportError
dataset/ETHPy150Open omab/python-social-auth/social/apps/django_app/me/models.py/_get_user_model
5,610
def __init__(self, url, serverName, transporter, entitlementDir, callLog):
    try:
        util.ServerProxy.__init__(self, url=url, transport=transporter)
    except __HOLE__, e:
        raise errors.OpenError('Error occurred opening repository '
                               '%s: %s' % (url, e))
    self._serverName = serverName
    self._protocolVersion = CLIENT_VERSIONS[-1]
    self._entitlementDir = entitlementDir
    self._callLog = callLog
IOError
dataset/ETHPy150Open sassoftware/conary/conary/repository/netclient.py/ServerProxy.__init__
5,611
def iterFilesInTrove(self, troveName, version, flavor, sortByPath = False, withFiles = False, capsules = False): # XXX this code should most likely go away, and anything that # uses it should be written to use other functions l = [(troveName, (None, None), (version, flavor), True)] cs = self._getChangeSet(l, recurse = False, withFiles = True, withFileContents = False) try: trvCs = cs.getNewTroveVersion(troveName, version, flavor) except __HOLE__: raise StopIteration t = trv_mod.Trove(trvCs, skipIntegrityChecks = not withFiles) # if we're sorting, we'll need to pull out all the paths ahead # of time. We'll use a generator that returns the items # in the same order as iterFileList() to reuse code. if sortByPath: pathDict = {} for pathId, path, fileId, version in t.iterFileList( capsules = capsules, members = not capsules): pathDict[path] = (pathId, fileId, version) paths = pathDict.keys() paths.sort() def rearrange(paths, pathDict): for path in paths: (pathId, fileId, version) = pathDict[path] yield (pathId, path, fileId, version) generator = rearrange(paths, pathDict) else: generator = t.iterFileList(capsules = capsules, members = not capsules) for pathId, path, fileId, version in generator: if withFiles: fileStream = files.ThawFile(cs.getFileChange(None, fileId), pathId) yield (pathId, path, fileId, version, fileStream) else: yield (pathId, path, fileId, version)
KeyError
dataset/ETHPy150Open sassoftware/conary/conary/repository/netclient.py/NetworkRepositoryClient.iterFilesInTrove
5,612
@api.publicApi
def getTroves(self, troves, withFiles = True, callback = None):
    """
    @param troves: List of troves to be retrieved
    @type troves: list
    @param withFiles: If set (default), retrieve files.
    @type withFiles: bool
    @raise RepositoryError: if a repository error occurred.
    """
    if not troves:
        return []
    chgSetList = []
    for (name, version, flavor) in troves:
        chgSetList.append((name, (None, None), (version, flavor), True))

    cs = self._getChangeSet(chgSetList, recurse = False,
                            withFiles = withFiles,
                            withFileContents = False,
                            callback = callback)

    l = []
    # walk the list so we can return the troves in the same order
    for (name, version, flavor) in troves:
        try:
            troveCs = cs.getNewTroveVersion(name, version, flavor)
        except __HOLE__:
            l.append(None)
            continue

        # trove integrity checks don't work when file information is
        # excluded
        t = trv_mod.Trove(troveCs, skipIntegrityChecks = not withFiles)
        l.append(t)

    return l
KeyError
dataset/ETHPy150Open sassoftware/conary/conary/repository/netclient.py/NetworkRepositoryClient.getTroves
5,613
def _getChangeSet(self, chgSetList, recurse = True, withFiles = True, withFileContents = True, target = None, excludeAutoSource = False, primaryTroveList = None, callback = None, forceLocalGeneration = False, changesetVersion = None, mirrorMode = False): # This is a bit complicated due to servers not wanting to talk # to other servers. To make this work, we do this: # # 1. Split the list of change set requests into ones for # remote servers (by server) and ones we need to generate # locally # # 2. Get the changesets from the remote servers. This also # gives us lists of other changesets we need (which need # to be locally generated, or the repository server would # have created them for us). # # 3. Create the local changesets. Doing this could well # result in our needing changesets which we're better off # generating on a server. # # 4. If more changesets are needed (from step 3) go to # step 2. # # 5. Download any extra files (and create any extra diffs) # which step 2 couldn't do for us. def _separateJobList(jobList, removedList, forceLocalGeneration, mirrorMode): if forceLocalGeneration: return {}, jobList serverJobs = {} ourJobList = [] for (troveName, (old, oldFlavor), (new, newFlavor), absolute) in \ jobList: if not new: # XXX does doing this on the client get recursion right? ourJobList.append((troveName, (old, oldFlavor), (new, newFlavor), absolute)) continue serverName = new.getHost() if old and mirrorMode and \ self.c[serverName].getProtocolVersion() < 49: # old clients don't support mirrorMode argument; force # local changeset generation (but only for relative # change sets) ourJobList.append((troveName, (old, oldFlavor), (new, newFlavor), absolute)) elif old: if self.c.singleServer(old, new): l = serverJobs.setdefault(serverName, []) l.append((troveName, (self.fromVersion(old), self.fromFlavor(oldFlavor)), (self.fromVersion(new), self.fromFlavor(newFlavor)), absolute)) else: ourJobList.append((troveName, (old, oldFlavor), (new, newFlavor), absolute)) else: l = serverJobs.setdefault(serverName, []) l.append((troveName, (0, 0), (self.fromVersion(new), self.fromFlavor(newFlavor)), absolute)) ourJobList += removedList return (serverJobs, ourJobList) def _getLocalTroves(troveList): if not self.localRep or not troveList: return [ None ] * len(troveList) return self.localRep.getTroves(troveList, pristine=True) def _getCsFromRepos(target, cs, server, job, recurse, withFiles, withFileContents, excludeAutoSource, filesNeeded, chgSetList, removedList, changesetVersion, mirrorMode): if callback: callback.requestingChangeSet() server.setAbortCheck(None) args = (job, recurse, withFiles, withFileContents, excludeAutoSource) kwargs = {} serverVersion = server.getProtocolVersion() if mirrorMode and serverVersion >= 49: if not changesetVersion: changesetVersion = \ filecontainer.FILE_CONTAINER_VERSION_LATEST args += (changesetVersion, mirrorMode, ) elif changesetVersion and serverVersion > 47: args += (changesetVersion, ) # seek to the end of the file outFile.seek(0, 2) start = resume = outFile.tell() attempts = max(1, self.cfg.downloadAttempts) while attempts > 0: if resume - start: assert serverVersion >= 73 outFile.seek(resume) kwargs['resumeOffset'] = resume - start if callback: callback.warning("Changeset download was interrupted. 
" "Attempting to resume where it left off.") try: (sizes, extraTroveList, extraFileList, removedTroveList, extra,) = _getCsOnce(serverVersion, args, kwargs) break except errors.TruncatedResponseError: attempts -= 1 if not attempts or serverVersion < 73: raise # Figure out how many bytes were downloaded, then trim off # a bit to ensure any garbage (e.g. a proxy error page) is # discarded. keep = max(resume, outFile.tell() - self.cfg.downloadRetryTrim) if self.cfg.downloadRetryTrim and ( keep - resume > self.cfg.downloadRetryThreshold): attempts = max(1, self.cfg.downloadAttempts) resume = keep chgSetList += self.toJobList(extraTroveList) filesNeeded.update(self.toFilesNeeded(extraFileList)) removedList += self.toJobList(removedTroveList) for size in sizes: f = util.SeekableNestedFile(outFile, size, start) try: newCs = changeset.ChangeSetFromFile(f) except IOError, err: assert False, 'IOError in changeset (%s); args = %r' % ( str(err), args,) if not cs: cs = newCs else: cs.merge(newCs) start += size return (cs, self.toJobList(extraTroveList), self.toFilesNeeded(extraFileList)) def _getCsOnce(serverVersion, args, kwargs): l = server.getChangeSet(*args, **kwargs) extra = {} if serverVersion >= 50: url = l[0] sizes = [ x[0] for x in l[1] ] extraTroveList = [ x for x in itertools.chain( *[ x[1] for x in l[1] ] ) ] extraFileList = [ x for x in itertools.chain( *[ x[2] for x in l[1] ] ) ] removedTroveList = [ x for x in itertools.chain( *[ x[3] for x in l[1] ] ) ] if serverVersion >= 73: extra = l[2] elif serverVersion < 38: (url, sizes, extraTroveList, extraFileList) = l removedTroveList = [] else: (url, sizes, extraTroveList, extraFileList, removedTroveList) = l # ensure that sizes are integers. protocol version 44 and # later sends them as strings instead of ints due to the 2 # GiB limitation sizes = [ int(x) for x in sizes ] if hasattr(url, 'read'): # Nested changeset file in a multi-part response inF = url elif os.path.exists(url): # attempt to remove temporary local files # possibly created by a shim client inF = open(url, 'rb') try: os.unlink(url) except OSError, err: if err.args[0] != errno.EPERM: raise else: # "forceProxy" here makes sure that multi-part requests go back # through the same proxy on subsequent requests. forceProxy = server.usedProxy() headers = [('X-Conary-Servername', server._serverName)] try: inF = transport.ConaryURLOpener(proxyMap=self.c.proxyMap ).open(url, forceProxy=forceProxy, headers=headers) except transport.TransportError, e: raise errors.RepositoryError(str(e)) if callback: wrapper = callbacks.CallbackRateWrapper( callback, callback.downloadingChangeSet, sum(sizes)) copyCallback = wrapper.callback abortCheck = callback.checkAbort else: copyCallback = None abortCheck = None resumeOffset = kwargs.get('resumeOffset') or 0 # Start the total at resumeOffset so that progress callbacks # continue where they left off. 
copied = util.copyfileobj(inF, outFile, callback=copyCallback, abortCheck=abortCheck, rateLimit=self.downloadRateLimit, total=resumeOffset) if copied is None: raise errors.RepositoryError("Unknown error downloading changeset") totalSize = copied + resumeOffset if hasattr(inF, 'headers') and 'content-length' in inF.headers: expectSize = resumeOffset + long(inF.headers['content-length']) if totalSize != expectSize: raise errors.TruncatedResponseError(expectSize, totalSize) assert sum(sizes) == expectSize elif totalSize != sum(sizes): raise errors.TruncatedResponseError(sum(sizes), totalSize) inF.close() return (sizes, extraTroveList, extraFileList, removedTroveList, extra) def _getCsFromShim(target, cs, server, job, recurse, withFiles, withFileContents, excludeAutoSource, filesNeeded, chgSetList, removedList): (newCs, extraTroveList, extraFileList, removedList) = \ server.getChangeSetObj(job, recurse, withFiles, withFileContents, excludeAutoSource) if not cs: cs = newCs else: cs.merge(newCs) return cs, extraTroveList, extraFileList if not chgSetList: # no need to work hard to find this out return changeset.ReadOnlyChangeSet() # make sure the absolute flag isn't set for any differential change # sets assert(not [ x for x in chgSetList if (x[1][0] and x[-1]) ]) cs = None internalCs = None filesNeeded = set() removedList = [] if target: try: outFile = util.ExtendedFile(target, "w+", buffering = False) except __HOLE__, e: strerr = "Error writing to file %s: %s" % (e.filename, e.strerror) raise errors.FilesystemError(e.errno, e.filename, e.strerror, strerr) else: (outFd, tmpName) = util.mkstemp(suffix = '.ccs') outFile = util.ExtendedFile(tmpName, "w+", buffering = False) os.close(outFd) os.unlink(tmpName) if primaryTroveList is None: # (name, version, release) list. removed troves aren't primary primaryTroveList = [ (x[0], x[2][0], x[2][1]) for x in chgSetList if x[2][0] is not None ] while chgSetList or removedList: (serverJobs, ourJobList) = _separateJobList(chgSetList, removedList, forceLocalGeneration, mirrorMode) chgSetList = [] removedList = [] for serverName, job in serverJobs.iteritems(): server = self.c[serverName] args = (target, cs, server, job, recurse, withFiles, withFileContents, excludeAutoSource, filesNeeded, chgSetList, removedList) try: if server.__class__ == ServerProxy: # this is a XML-RPC proxy for a remote repository rc = _getCsFromRepos(*(args + (changesetVersion, mirrorMode))) else: # assume we are a shim repository rc = _getCsFromShim(*args) cs, extraTroveList, extraFileList = rc except Exception: if target and os.path.exists(target): os.unlink(target) elif os.path.exists(tmpName): os.unlink(tmpName) raise chgSetList += extraTroveList filesNeeded.update(extraFileList) if (ourJobList or filesNeeded) and not internalCs: internalCs = changeset.ChangeSet() # Handle everything in ourJobList which is just a deletion. We # need timestamped versions for this; only go the repository # to get those if the ones we have are not versioned. 
delList = [] timesNeeded = [ ] for i, (troveName, (oldVersion, oldFlavor), (newVersion, newFlavor), absolute) in enumerate(ourJobList): if not newVersion: delList.append(((troveName, oldVersion, oldFlavor), i)) if not sum(oldVersion.timeStamps()): timesNeeded.append(delList[-1]) # XXX this is an expensive way to get a version w/ timestamps, but # it's easier than other approaches :-( trvs = self.getTroves([ x[0] for x in timesNeeded ], withFiles = False) timeDict = dict(zip([ x[0] for x in timesNeeded ], [ x.getVersion() for x in trvs ])) # this lets us remove from ourJobList from back to front, keeping # our indices valid delList.reverse() for trvInfo, i in delList: ver = timeDict.get(trvInfo, trvInfo[1]) internalCs.oldTrove(trvInfo[0], ver, trvInfo[2]) del ourJobList[i] del delList # generate this change set, and put any recursive generation # which is needed onto the chgSetList for the next pass allTrovesNeeded = [] for (troveName, (oldVersion, oldFlavor), (newVersion, newFlavor), absolute) in ourJobList: if oldVersion is not None: allTrovesNeeded.append((troveName, oldVersion, oldFlavor)) allTrovesNeeded.append((troveName, newVersion, newFlavor)) troves = _getLocalTroves(allTrovesNeeded) remoteTrovesNeeded = [] indices = [] for i, (trove, req) in enumerate(zip(troves, allTrovesNeeded)): # don't ask for local troves from a remote server if trove is None and not req[1].isOnLocalHost(): remoteTrovesNeeded.append(req) indices.append(i) remoteTroves = self.getTroves(remoteTrovesNeeded) for i, trove in zip(indices, remoteTroves): troves[i] = trove del allTrovesNeeded, remoteTrovesNeeded, indices, remoteTroves i = 0 for (troveName, (oldVersion, oldFlavor), (newVersion, newFlavor), absolute) in ourJobList: if oldVersion is not None: old = troves[i] i += 1 else: old = None new = troves[i] i += 1 # if the old version is marked removed, pretend as though # it doesn't exist. 
if old and old.isRemoved(): old = None (troveChgSet, newFilesNeeded, pkgsNeeded) = \ new.diff(old, absolute = absolute) # newFilesNeeded = [ (pathId, oldFileVersion, newFileVersion) ] filesNeeded.update( ( (x[0], troveName, (oldVersion, oldFlavor, x[1], x[2]), (newVersion, newFlavor, x[3], x[4])) for x in newFilesNeeded ) ) if recurse: for (otherTroveName, (otherOldVersion, otherOldFlavor), (otherNewVersion, otherNewFlavor), otherIsAbsolute) in pkgsNeeded: chgSetList.append((otherTroveName, (otherOldVersion, otherOldFlavor), (otherNewVersion, otherNewFlavor), absolute)) internalCs.newTrove(troveChgSet) # Files that are missing from upstream missingFiles = [] if withFiles and filesNeeded: need = [] for (pathId, troveName, (oldTroveVersion, oldTroveFlavor, oldFileId, oldFileVersion), (newTroveVersion, newTroveFlavor, newFileId, newFileVersion)) \ in filesNeeded: if oldFileVersion: need.append((pathId, oldFileId, oldFileVersion)) need.append((pathId, newFileId, newFileVersion)) # If a callback was passed in, then allow for missing files fileObjs = self.getFileVersions(need, lookInLocal = True, allowMissingFiles = bool(callback)) fileDict = {} for ((pathId, fileId, fileVersion), fileObj, error) in zip( need, fileObjs, fileObjs.errors): fileDict[(pathId, fileId)] = fileObj, error del fileObj, fileObjs, need, fileId contentsNeeded = [] fileJob = [] for (pathId, troveName, (oldTroveVersion, oldTroveF, oldFileId, oldFileVersion), (newTroveVersion, newTroveF, newFileId, newFileVersion)) \ in filesNeeded: if oldFileVersion: oldFileObj, _ = fileDict[(pathId, oldFileId)] else: oldFileObj = None newFileObj, newFileError = fileDict[(pathId, newFileId)] if newFileObj is None: # File missing from server missingFiles.append((troveName, newTroveVersion, newTroveF, newFileError, pathId, newFileId, newFileVersion)) continue forceAbsolute = mirrorMode and oldFileObj and ( oldFileId != newFileId or oldFileVersion.getHost() != newFileVersion.getHost() ) if forceAbsolute: (filecs, hash) = changeset.fileChangeSet(pathId, None, newFileObj) else: (filecs, hash) = changeset.fileChangeSet(pathId, oldFileObj, newFileObj) internalCs.addFile(oldFileId, newFileId, filecs) if not withFileContents: continue if excludeAutoSource and newFileObj.flags.isAutoSource(): continue if hash or (forceAbsolute and newFileObj.hasContents): # pull contents from the trove it was originally # built in fetchItems = [] needItems = [] if (not mirrorMode and changeset.fileContentsUseDiff(oldFileObj, newFileObj)): fetchItems.append( (oldFileId, oldFileVersion, oldFileObj) ) needItems.append( (pathId, None, oldFileObj) ) if not newFileObj.flags.isEncapsulatedContent(): fetchItems.append( (newFileId, newFileVersion, newFileObj) ) contentsNeeded += fetchItems needItems.append( (pathId, newFileId, newFileObj) ) fileJob.extend([ needItems ]) contentList = self.getFileContents(contentsNeeded, tmpFile = outFile, lookInLocal = True, callback = callback, compressed = True) i = 0 for item in fileJob: pathId, fileId, fileObj = item[0] contents = contentList[i] i += 1 if len(item) == 1: internalCs.addFileContents(pathId, fileId, changeset.ChangedFileTypes.file, contents, fileObj.flags.isConfig(), compressed = True) else: # Don't bother with diffs. Clients can reconstruct them for # installs and they're just a pain to assemble here anyway. 
fileId = item[1][1] newFileObj = item[1][2] newContents = contentList[i] i += 1 (contType, cont) = changeset.fileContentsDiff(None, None, newFileObj, newContents, mirrorMode = mirrorMode) internalCs.addFileContents(pathId, fileId, contType, cont, True, compressed = True) if not cs and internalCs: cs = internalCs internalCs = None elif cs and internalCs: cs.merge(internalCs) # convert the versions in here to ones w/ timestamps cs.setPrimaryTroveList([]) oldTroveSet = dict([ (x,x) for x in cs.getOldTroveList() ] ) for (name, version, flavor) in primaryTroveList: if cs.hasNewTrove(name, version, flavor): trove = cs.getNewTroveVersion(name, version, flavor) cs.addPrimaryTrove(name, trove.getNewVersion(), flavor) else: cs.addPrimaryTrove(*oldTroveSet[(name, version,flavor)]) if missingFiles: mfs = [] for mf in missingFiles: trvName, trvVersion, trvFlavor, error = mf[:4] trv = cs.getNewTroveVersion(trvName, trvVersion, trvFlavor) # Find the file path associated with this missing file for pathId, path, fileId, version in trv.getNewFileList(): if (pathId, fileId, version) == mf[4:]: break else: # for # Unable to find this file raise Exception("Cannot find file in changeset") mfs.append((trvName, trvVersion, trvFlavor, pathId, path, fileId, version, error)) # The test for the presence of the callback is redundant, if we # have missing files we should have a callback, otherwise # getFileVersions would have raised an exception because of the # allowMissingFiles flag. ret = False if callback: assert(hasattr(callback, 'missingFiles')) ret = callback.missingFiles(mfs) # If the callback returns False, or no callback is present, # keep the old behavior of raising the exception # Note that the callback can choose to raise an exception itself, # in which case this code will not get executed. if not ret: # Grab just the first file mf = mfs[0] fileId, error = mf[5], mf[7] if error: error.throw() else: raise errors.FileStreamMissing(fileId) if target and cs: if cs.oldTroves or cs.newTroves: os.unlink(target) cs.writeToFile(target, versionOverride = changesetVersion) cs = None elif target: os.unlink(target) return cs
IOError
dataset/ETHPy150Open sassoftware/conary/conary/repository/netclient.py/NetworkRepositoryClient._getChangeSet
5,614
def __init__(self, *args, **kwargs):
    attrs = kwargs.setdefault('attrs', {})
    try:
        attrs['class'] = "%s autoresize" % (attrs['class'],)
    except __HOLE__:
        attrs['class'] = 'autoresize'
    attrs.setdefault('cols', 80)
    attrs.setdefault('rows', 5)
    super(AutoResizeTextarea, self).__init__(*args, **kwargs)
KeyError
dataset/ETHPy150Open carljm/django-form-utils/form_utils/widgets.py/AutoResizeTextarea.__init__
5,615
def __init__(self, *args, **kwargs):
    attrs = kwargs.setdefault('attrs', {})
    try:
        attrs['class'] = "%s inline" % (attrs['class'],)
    except __HOLE__:
        attrs['class'] = 'inline'
    attrs.setdefault('cols', 40)
    attrs.setdefault('rows', 2)
    super(InlineAutoResizeTextarea, self).__init__(*args, **kwargs)
KeyError
dataset/ETHPy150Open carljm/django-form-utils/form_utils/widgets.py/InlineAutoResizeTextarea.__init__
5,616
def run (self): self.daemon.daemonise() # Make sure we create processes once we have closed file descriptor # unfortunately, this must be done before reading the configuration file # so we can not do it with dropped privileges self.processes = Processes(self) # we have to read the configuration possibly with root privileges # as we need the MD5 information when we bind, and root is needed # to bind to a port < 1024 # this is undesirable as : # - handling user generated data as root should be avoided # - we may not be able to reload the configuration once the privileges are dropped # but I can not see any way to avoid it if not self.load(): return False try: self.listener = Listener() if self.ip: self.listener.listen(IP.create(self.ip),IP.create('0.0.0.0'),self.port,None,None) self.logger.reactor('Listening for BGP session(s) on %s:%d' % (self.ip,self.port)) for neighbor in self.configuration.neighbors.values(): if neighbor.listen: self.listener.listen(neighbor.md5_ip,neighbor.peer_address,neighbor.listen,neighbor.md5_password,neighbor.ttl_in) self.logger.reactor('Listening for BGP session(s) on %s:%d%s' % (neighbor.md5_ip,neighbor.listen,' with MD5' if neighbor.md5_password else '')) except NetworkError,exc: self.listener = None if os.geteuid() != 0 and self.port <= 1024: self.logger.reactor('Can not bind to %s:%d, you may need to run ExaBGP as root' % (self.ip,self.port),'critical') else: self.logger.reactor('Can not bind to %s:%d (%s)' % (self.ip,self.port,str(exc)),'critical') self.logger.reactor('unset exabgp.tcp.bind if you do not want listen for incoming connections','critical') self.logger.reactor('and check that no other daemon is already binding to port %d' % self.port,'critical') sys.exit(1) if not self.early_drop: self.processes.start() if not self.daemon.drop_privileges(): self.logger.reactor('Could not drop privileges to \'%s\' refusing to run as root' % self.daemon.user,'critical') self.logger.reactor('Set the environmemnt value exabgp.daemon.user to change the unprivileged user','critical') return if self.early_drop: self.processes.start() # This is required to make sure we can write in the log location as we now have dropped root privileges if not self.logger.restart(): self.logger.reactor('Could not setup the logger, aborting','critical') return if not self.daemon.savepid(): self.logger.reactor('could not update PID, not starting','error') # did we complete the run of updates caused by the last SIGUSR1/SIGUSR2 ? 
reload_completed = True wait = environment.settings().tcp.delay if wait: sleeptime = (wait * 60) - int(time.time()) % (wait * 60) self.logger.reactor('waiting for %d seconds before connecting' % sleeptime) time.sleep(float(sleeptime)) while True: try: while self.peers: start = time.time() end = start+self.max_loop_time if self._shutdown: self._shutdown = False self.shutdown() elif self._reload and reload_completed: self._reload = False self.load() self.processes.start(self._reload_processes) self._reload_processes = False elif self._restart: self._restart = False self.restart() elif self.route_update: self.route_update = False self.route_send() ios = {} keys = set(self.peers.keys()) while start < time.time() < end: for key in list(keys): peer = self.peers[key] action = peer.run() # .run() returns an ACTION enum: # * immediate if it wants to be called again # * later if it should be called again but has no work atm # * close if it is finished and is closing down, or restarting if action == ACTION.CLOSE: self.unschedule(peer) keys.discard(key) # we are loosing this peer, not point to schedule more process work elif action == ACTION.LATER: for io in peer.sockets(): ios[io] = key # no need to come back to it before a a full cycle keys.discard(key) if not self.schedule() and not keys: ready = self.ready(ios.keys() + self.processes.fds(),end-time.time()) for io in ready: if io in ios: keys.add(ios[io]) del ios[io] if not keys: reload_completed = True # RFC state that we MUST not send more than one KEEPALIVE / sec # And doing less could cause the session to drop if self.listener: for connection in self.listener.connected(): # found # * False, not peer found for this TCP connection # * True, peer found # * None, conflict found for this TCP connections found = False for key in self.peers: peer = self.peers[key] neighbor = peer.neighbor # XXX: FIXME: Inet can only be compared to Inet if connection.local == str(neighbor.peer_address) and connection.peer == str(neighbor.local_address): if peer.incoming(connection): found = True break found = None break if found: self.logger.reactor('accepted connection from %s - %s' % (connection.local,connection.peer)) elif found is False: self.logger.reactor('no session configured for %s - %s' % (connection.local,connection.peer)) connection.notification(6,3,'no session configured for the peer') connection.close() elif found is None: self.logger.reactor('connection refused (already connected to the peer) %s - %s' % (connection.local,connection.peer)) connection.notification(6,5,'could not accept the connection') connection.close() self.processes.terminate() self.daemon.removepid() break except KeyboardInterrupt: while True: try: self._shutdown = True self.logger.reactor('^C received') break except KeyboardInterrupt: pass except SystemExit: while True: try: self._shutdown = True self.logger.reactor('exiting') break except KeyboardInterrupt: pass except IOError: while True: try: self._shutdown = True self.logger.reactor('I/O Error received, most likely ^C during IO','warning') break except KeyboardInterrupt: pass except ProcessError: while True: try: self._shutdown = True self.logger.reactor('Problem when sending message(s) to helper program, stopping','error') break except KeyboardInterrupt: pass except select.error: while True: try: self._shutdown = True self.logger.reactor('problem using select, stopping','error') break except __HOLE__: pass # from exabgp.leak import objgraph # print objgraph.show_most_common_types(limit=20) # import random # obj = 
objgraph.by_type('Route')[random.randint(0,2000)] # objgraph.show_backrefs([obj], max_depth=10)
KeyboardInterrupt
dataset/ETHPy150Open Exa-Networks/exabgp/lib/exabgp/reactor/loop.py/Reactor.run
5,617
def schedule (self):
    try:
        # read at least on message per process if there is some and parse it
        for service,command in self.processes.received():
            self.api.text(self,service,command)

        # if we have nothing to do, return or save the work
        if not self._running:
            if not self._pending:
                return False
            self._running,name = self._pending.popleft()
            self.logger.reactor('callback | installing %s' % name)

        if self._running:
            # run it
            try:
                self.logger.reactor('callback | running')
                self._running.next()  # run
                # should raise StopIteration in most case
                # and prevent us to have to run twice to run one command
                self._running.next()  # run
            except StopIteration:
                self._running = None
                self.logger.reactor('callback | removing')
            return True

    except StopIteration:
        pass
    except __HOLE__:
        self._shutdown = True
        self.logger.reactor('^C received','error')
KeyboardInterrupt
dataset/ETHPy150Open Exa-Networks/exabgp/lib/exabgp/reactor/loop.py/Reactor.schedule
5,618
def celery_teardown_request(error=None):
    if error is not None:
        _local.queue = []
        return
    try:
        if queue():
            if settings.USE_CELERY:
                group(queue()).apply_async()
            else:
                for task in queue():
                    task.apply()
    except __HOLE__:
        if not settings.DEBUG_MODE:
            logger.error('Task queue not initialized')
AttributeError
dataset/ETHPy150Open CenterForOpenScience/osf.io/framework/celery_tasks/handlers.py/celery_teardown_request
5,619
def _get_checksum(path):
    """ Generates a md5 checksum of the file at the specified path.

        `path`
            Path to file for checksum.

        Returns string or ``None``
        """
    # md5 uses a 512-bit digest blocks, let's scale by defined block_size
    _md5 = hashlib.md5()
    chunk_size = 128 * _md5.block_size

    try:
        with open(path, 'rb') as _file:
            for chunk in iter(lambda: _file.read(chunk_size), ''):
                _md5.update(chunk)
        return _md5.hexdigest()

    except __HOLE__:
        return None
IOError
dataset/ETHPy150Open xtrementl/focus/focus/plugin/modules/apps.py/_get_checksum
5,620
def _get_user_processes():
    """ Gets process information owned by the current user.

        Returns generator of tuples: (``psutil.Process`` instance, path).
        """
    uid = os.getuid()

    for proc in psutil.process_iter():
        try:
            # yield processes that match current user
            if proc.uids.real == uid:
                yield (proc, proc.exe)

        except psutil.AccessDenied:
            # work around for suid/sguid processes and MacOS X restrictions
            try:
                path = common.which(proc.name)

                # psutil doesn't support MacOS X relative paths,
                # let's use a workaround to merge working directory with
                # process relative path
                if not path and common.IS_MACOSX:
                    cwd = _get_process_cwd(proc.pid)
                    if not cwd:
                        continue
                    path = os.path.join(cwd, proc.cmdline[0])

                yield (proc, path)

            except (psutil.AccessDenied, __HOLE__):
                pass

        except psutil.NoSuchProcess:
            pass
OSError
dataset/ETHPy150Open xtrementl/focus/focus/plugin/modules/apps.py/_get_user_processes
5,621
def mkdir_p(path):
    """Create potentially nested directories as required.

    Does nothing if the path already exists and is a directory.
    """
    try:
        os.makedirs(path)
    except __HOLE__ as e:
        if e.errno == errno.EEXIST and os.path.isdir(path):
            pass
        else:
            raise
OSError
dataset/ETHPy150Open radhermit/vimball/vimball/base.py/mkdir_p
5,622
def is_vimball(fd):
    """Test for vimball archive format compliance.

    Simple check to see if the first line of the file starts with standard
    vimball archive header.
    """
    fd.seek(0)
    try:
        header = fd.readline()
    except __HOLE__:
        # binary files will raise exceptions when trying to decode raw bytes to
        # str objects in our readline() wrapper
        return False
    if re.match('^" Vimball Archiver', header) is not None:
        return True
    return False
UnicodeDecodeError
dataset/ETHPy150Open radhermit/vimball/vimball/base.py/is_vimball
5,623
def __del__(self):
    try:
        self.fd.close()
    except __HOLE__:
        return
AttributeError
dataset/ETHPy150Open radhermit/vimball/vimball/base.py/Vimball.__del__
5,624
@property
def files(self):
    """Yields archive file information."""
    # try new file header format first, then fallback on old
    for header in (r"(.*)\t\[\[\[1\n", r"^(\d+)\n$"):
        header = re.compile(header)
        filename = None

        self.fd.seek(0)
        line = self.readline()
        while line:
            m = header.match(line)
            if m is not None:
                filename = m.group(1)
                try:
                    filelines = int(self.readline().rstrip())
                except __HOLE__:
                    raise ArchiveError('invalid archive format')
                filestart = self.fd.tell()
                yield (filename, filelines, filestart)
            line = self.readline()
        if filename is not None:
            break
ValueError
dataset/ETHPy150Open radhermit/vimball/vimball/base.py/Vimball.files
5,625
def extract(self, extractdir=None, verbose=False):
    """Extract archive files to a directory."""
    if extractdir is None:
        filebase, ext = os.path.splitext(self.path)
        if ext in ('.gz', '.bz2', '.xz'):
            filebase, _ext = os.path.splitext(filebase)
        extractdir = os.path.basename(filebase)
        if os.path.exists(extractdir):
            tempdir = tempfile.mkdtemp(prefix='vimball-', dir=os.getcwd())
            extractdir = os.path.join(tempdir.split('/')[-1], extractdir)

    self.fd.seek(0)
    for filename, lines, offset in self.files:
        filepath = os.path.join(extractdir, filename)
        try:
            directory = os.path.dirname(filepath)
            mkdir_p(directory)
        except __HOLE__ as e:
            raise ArchiveError("failed creating directory '{}': {}".format(
                directory, os.strerror(e.errno)))
        with open(filepath, 'w') as f:
            if verbose:
                print(filepath)
            self.fd.seek(offset)
            for i in range(lines):
                f.write(self.readline())
OSError
dataset/ETHPy150Open radhermit/vimball/vimball/base.py/Vimball.extract
5,626
def doctree_resolved(app, doctree, docname): # replace numfig nodes with links if app.builder.name in ('html', 'singlehtml', 'epub'): env = app.builder.env docname_figs = getattr(env, 'docname_figs', {}) docnames_by_figname = env.docnames_by_figname figids = getattr(env, 'figids', {}) secnums = [] fignames_by_secnum = {} for figdocname, figurelist in env.docname_figs.iteritems(): if figdocname not in env.toc_secnumbers: continue secnum = env.toc_secnumbers[figdocname][''] secnums.append(secnum) fignames_by_secnum[secnum] = figurelist last_secnum = 0 secnums = sorted(secnums) figid = 1 for secnum in secnums: if secnum[0] != last_secnum: figid = 1 for figname, subfigs in fignames_by_secnum[secnum].iteritems(): figids[figname] = str(secnum[0]) + '.' + str(figid) for i, subfigname in enumerate(subfigs): subfigid = figids[figname] + chr(ord('a') + i) figids[subfigname] = subfigid figid += 1 last_secnum = secnum[0] env.figids = figids for figure_info in doctree.traverse(lambda n: isinstance(n, nodes.figure) or \ isinstance(n, subfig.subfigend) or \ isinstance(n, figtable.figtable)): try: id = figure_info['ids'][0] fignum = figids[id] except (__HOLE__, KeyError): continue for cap in figure_info.traverse(nodes.caption): cap.insert(1, nodes.Text(" %s" % cap[0])) if fignum[-1] in map(str, range(10)): boldcaption = "%s %s:" % (app.config.figure_caption_prefix, fignum) else: boldcaption = "(%s)" % fignum[-1] cap[0] = nodes.strong('', boldcaption) for ref_info in doctree.traverse(num_ref): if '#' in ref_info['reftarget']: label, target = ref_info['reftarget'].split('#') labelfmt = label + " %s" else: labelfmt = '%s' target = ref_info['reftarget'] if target not in docnames_by_figname: app.warn('Target figure not found: %s' % target) link = "#%s" % target linktext = target else: target_doc = docnames_by_figname[target] if app.builder.name == 'singlehtml': link = "#%s" % target else: link = "%s#%s"%(app.builder.get_relative_uri(docname, target_doc), target) if target in figids: linktext = labelfmt % figids[target] else: linktext = target html = '<a href="%s">%s</a>' % (link, linktext) ref_info.replace_self(nodes.raw(html, html, format='html'))
IndexError
dataset/ETHPy150Open xraypy/xraylarch/doc/sphinx/ext/numfig.py/doctree_resolved
5,627
@pytest.fixture
def NINLayer_c01b(self):
    try:
        from lasagne.layers.cuda_convnet import NINLayer_c01b
    except __HOLE__:
        pytest.skip("cuda_convnet not available")
    return NINLayer_c01b
ImportError
dataset/ETHPy150Open Lasagne/Lasagne/lasagne/tests/layers/test_dense.py/TestNINLayer_c01b.NINLayer_c01b
5,628
def RenderAFF4Object(obj, args=None): """Renders given AFF4 object into JSON-friendly data structure.""" args = args or [] cache_key = obj.__class__.__name__ try: candidates = RENDERERS_CACHE[cache_key] except __HOLE__: candidates = [] for candidate in ApiAFF4ObjectRendererBase.classes.values(): if candidate.aff4_type: candidate_class = aff4.AFF4Object.classes[candidate.aff4_type] else: continue if aff4.issubclass(obj.__class__, candidate_class): candidates.append(candidate) if not candidates: raise RuntimeError("No renderer found for object %s." % obj.__class__.__name__) # Ensure that the renderers order is stable. candidates = sorted(candidates, key=lambda cls: cls.__name__) RENDERERS_CACHE[cache_key] = candidates result = {} for candidate in candidates: api_renderer_args = None for arg in args: if candidate.args_type and isinstance(arg, candidate.args_type): api_renderer_args = arg if api_renderer_args is None and candidate.args_type is not None: api_renderer_args = candidate.args_type() api_renderer = candidate() renderer_output = api_renderer.RenderObject(obj, api_renderer_args) for k, v in renderer_output.items(): result[k] = v return result
KeyError
dataset/ETHPy150Open google/grr/grr/gui/api_aff4_object_renderers.py/RenderAFF4Object
5,629
def is_string(var):
    try:
        return isinstance(var, basestring)
    except __HOLE__:
        return isinstance(var, str)
NameError
dataset/ETHPy150Open joke2k/faker/faker/utils/__init__.py/is_string
5,630
def autotype(s):
    '''Automatively detect the type (int, float or string) of `s` and convert
    `s` into it.'''

    if not isinstance(s, str):
        return s

    if s.isdigit():
        return int(s)

    try:
        return float(s)
    except __HOLE__:
        return s
ValueError
dataset/ETHPy150Open moskytw/clime/clime/util.py/autotype
5,631
@register.assignment_tag(name='webpack')
def webpack_template_tag(path_to_config):
    """
    A template tag that will output a webpack bundle.

    Usage:

        {% load webpack %}

        {% webpack 'path/to/webpack.config.js' as bundle %}

        {{ bundle.render_css|safe }}

        {{ bundle.render_js|safe }}
    """
    # TODO: allow selection of entries
    # Django's template system silently fails on some exceptions
    try:
        return webpack(path_to_config)
    except (AttributeError, __HOLE__) as e:
        raise six.reraise(BundlingError, BundlingError(*e.args), sys.exc_info()[2])
ValueError
dataset/ETHPy150Open markfinger/python-webpack/webpack/templatetags/webpack.py/webpack_template_tag
5,632
def main(): sc_prefs = SCPreferences() from optparse import OptionParser parser = OptionParser(__doc__.strip()) parser.add_option('--enable', dest='enable', action="store_true", default=True, help='Enable proxy for the specified protocol' ) parser.add_option('--disable', dest='enable', action='store_false', help='Disable proxy for the specified protocol' ) parser.add_option('--protocol', choices=sc_prefs.proxy_protocols, metavar='PROTOCOL', help='Specify the protocol (%s)' % ", ".join(sc_prefs.proxy_protocols) ) parser.add_option('--server', metavar='SERVER', help="Specify the proxy server's hostname" ) parser.add_option('--port', type='int', metavar='PORT', help="Specify the proxy server's port" ) (options, args) = parser.parse_args() # optparser inexplicably lacks a require option due to extreme # pedanticism but it's not worth switching to argparse: if not options.protocol: print >> sys.stderr, "ERROR: You must specify a protocol to %s" % ("enable" if options.enable else "disable") sys.exit(1) if options.enable and not ( options.server and options.port ): print >> sys.stderr, "ERROR: You must specify a %s proxy server and port" % options.protocol sys.exit(1) if options.server: try: gethostbyname(options.server) except gaierror, exc: print >> sys.stderr, "ERROR: couldn't resolve server hostname %s: %s" % (options.server, exc.args[1]) # e.message is broken in the standard socket.gaierror! sys.exit(1) try: sc_prefs.set_proxy(enable=options.enable, protocol=options.protocol, server=options.server, port=options.port) sc_prefs.save() except __HOLE__, exc: print >> sys.stderr, exc.message
RuntimeError
dataset/ETHPy150Open MacSysadmin/pymacadmin/bin/set-proxy.py/main
5,633
def add_prefix(self): """ Add prefix according to the specification. The following keys can be used: vrf ID of VRF to place the prefix in prefix the prefix to add if already known family address family (4 or 6) description A short description expires Expiry time of assignment comment Longer comment node Hostname of node type Type of prefix; reservation, assignment, host status Status of prefix; assigned, reserved, quarantine pool ID of pool country Country where the prefix is used order_id Order identifier customer_id Customer identifier vlan VLAN ID alarm_priority Alarm priority of prefix monitor If the prefix should be monitored or not from-prefix A prefix the prefix is to be allocated from from-pool A pool (ID) the prefix is to be allocated from prefix_length Prefix length of allocated prefix """ p = Prefix() # Sanitize input parameters if 'vrf' in request.params: try: if request.params['vrf'] is None or len(request.params['vrf']) == 0: p.vrf = None else: p.vrf = VRF.get(int(request.params['vrf'])) except __HOLE__: return json.dumps({'error': 1, 'message': "Invalid VRF ID '%s'" % request.params['vrf']}) except NipapError, e: return json.dumps({'error': 1, 'message': e.args, 'type': type(e).__name__}) if 'description' in request.params: if request.params['description'].strip() != '': p.description = request.params['description'].strip() if 'expires' in request.params: if request.params['expires'].strip() != '': p.expires = request.params['expires'].strip(' "') if 'comment' in request.params: if request.params['comment'].strip() != '': p.comment = request.params['comment'].strip() if 'node' in request.params: if request.params['node'].strip() != '': p.node = request.params['node'].strip() if 'status' in request.params: p.status = request.params['status'].strip() if 'type' in request.params: p.type = request.params['type'].strip() if 'pool' in request.params: if request.params['pool'].strip() != '': try: p.pool = Pool.get(int(request.params['pool'])) except NipapError, e: return json.dumps({'error': 1, 'message': e.args, 'type': type(e).__name__}) if 'country' in request.params: if request.params['country'].strip() != '': p.country = request.params['country'].strip() if 'order_id' in request.params: if request.params['order_id'].strip() != '': p.order_id = request.params['order_id'].strip() if 'customer_id' in request.params: if request.params['customer_id'].strip() != '': p.customer_id = request.params['customer_id'].strip() if 'alarm_priority' in request.params: p.alarm_priority = request.params['alarm_priority'].strip() if 'monitor' in request.params: if request.params['monitor'] == 'true': p.monitor = True else: p.monitor = False if 'vlan' in request.params: if request.params['vlan'].strip() != '': p.vlan = request.params['vlan'] if 'tags' in request.params: p.tags = json.loads(request.params['tags']) if 'avps' in request.params: p.avps = json.loads(request.params['avps']) # arguments args = {} if 'from_prefix[]' in request.params: args['from-prefix'] = request.params.getall('from_prefix[]') if 'from_pool' in request.params: try: args['from-pool'] = Pool.get(int(request.params['from_pool'])) except NipapError, e: return json.dumps({'error': 1, 'message': e.args, 'type': type(e).__name__}) if 'family' in request.params: args['family'] = request.params['family'] if 'prefix_length' in request.params: args['prefix_length'] = request.params['prefix_length'] # manual allocation? 
if args == {}: if 'prefix' in request.params: p.prefix = request.params['prefix'] try: p.save(args) except NipapError, e: return json.dumps({'error': 1, 'message': e.args, 'type': type(e).__name__}) return json.dumps(p, cls=NipapJSONEncoder)
ValueError
dataset/ETHPy150Open SpriteLink/NIPAP/nipap-www/nipapwww/controllers/xhr.py/XhrController.add_prefix
5,634
def edit_prefix(self, id): """ Edit a prefix. """ try: p = Prefix.get(int(id)) # extract attributes if 'prefix' in request.params: p.prefix = request.params['prefix'] if 'type' in request.params: p.type = request.params['type'].strip() if 'description' in request.params: if request.params['description'].strip() == '': p.description = None else: p.description = request.params['description'].strip() if 'expires' in request.params: if request.params['expires'].strip() == '': p.expires = None else: p.expires = request.params['expires'].strip(' "') if 'comment' in request.params: if request.params['comment'].strip() == '': p.comment = None else: p.comment = request.params['comment'].strip() if 'node' in request.params: if request.params['node'].strip() == '': p.node = None else: p.node = request.params['node'].strip() if 'status' in request.params: p.status = request.params['status'].strip() if 'pool' in request.params: if request.params['pool'].strip() == '': p.pool = None else: try: p.pool = Pool.get(int(request.params['pool'])) except NipapError, e: return json.dumps({'error': 1, 'message': e.args, 'type': type(e).__name__}) if 'alarm_priority' in request.params: p.alarm_priority = request.params['alarm_priority'].strip() if 'monitor' in request.params: if request.params['monitor'] == 'true': p.monitor = True else: p.monitor = False if 'country' in request.params: if request.params['country'].strip() == '': p.country = None else: p.country = request.params['country'].strip() if 'order_id' in request.params: if request.params['order_id'].strip() == '': p.order_id = None else: p.order_id = request.params['order_id'].strip() if 'customer_id' in request.params: if request.params['customer_id'].strip() == '': p.customer_id = None else: p.customer_id = request.params['customer_id'].strip() if 'vrf' in request.params: try: if request.params['vrf'] is None or len(request.params['vrf']) == 0: p.vrf = None else: p.vrf = VRF.get(int(request.params['vrf'])) except __HOLE__: return json.dumps({'error': 1, 'message': "Invalid VRF ID '%s'" % request.params['vrf']}) except NipapError, e: return json.dumps({'error': 1, 'message': e.args, 'type': type(e).__name__}) if 'vlan' in request.params: if request.params['vlan'].strip() != '': p.vlan = request.params['vlan'] if 'tags' in request.params: p.tags = json.loads(request.params['tags']) if 'avps' in request.params: p.avps = json.loads(request.params['avps']) p.save() except NipapError, e: return json.dumps({'error': 1, 'message': e.args, 'type': type(e).__name__}) return json.dumps(p, cls=NipapJSONEncoder)
ValueError
dataset/ETHPy150Open SpriteLink/NIPAP/nipap-www/nipapwww/controllers/xhr.py/XhrController.edit_prefix
5,635
def test(webapp):
    try:
        Root().register(webapp)
        urlopen(webapp.server.http.base)
    except __HOLE__ as e:
        assert e.code == 500
    else:
        assert False
HTTPError
dataset/ETHPy150Open circuits/circuits/tests/web/test_request_failure.py/test
5,636
def _get_module_name(self, options):
    try:
        module_name = options.module_name
    except __HOLE__:  # pragma: no cover
        module_name = options.model_name
    return module_name
AttributeError
dataset/ETHPy150Open ellmetha/django-machina/tests/functional/admin/test_forum.py/TestForumAdmin._get_module_name
5,637
def emit(self, record):
    try:
        if getattr(record, 'append', False):
            if self.appending:
                self.stream.write(record.getMessage())
            else:
                self.stream.write(self.format(record))
            self.appending = True
        else:
            terminator = getattr(self, 'terminator', '\n')
            if self.appending:
                self.stream.write(terminator)
            self.stream.write(self.format(record))
            self.stream.write(terminator)
            self.appending = False
        self.flush()
    except (__HOLE__, SystemExit):
        raise
    except Exception:
        self.handleError(record)
KeyboardInterrupt
dataset/ETHPy150Open boto/requestbuilder/requestbuilder/logging.py/ProgressiveStreamHandler.emit
5,638
def configure_root_logger(use_color=False):
    logfmt = '%(asctime)s %(levelname)-7s %(name)s %(message)s'
    rootlogger = logging.getLogger('')
    handler = ProgressiveStreamHandler()
    if use_color:
        formatter = ColoringFormatter(logfmt)
    else:
        formatter = logging.Formatter(logfmt)
    handler.setFormatter(formatter)
    rootlogger.addHandler(handler)
    rootlogger.setLevel(100)
    # Attempt to have logging capture warnings as well (requires 2.7)
    try:
        logging.captureWarnings(True)
    except __HOLE__:
        pass
AttributeError
dataset/ETHPy150Open boto/requestbuilder/requestbuilder/logging.py/configure_root_logger
5,639
def stop(so, out, err): basedir = so["basedir"] dbfile = os.path.join(basedir, "petmail.db") if not (os.path.isdir(basedir) and os.path.exists(dbfile)): print >>err, "'%s' doesn't look like a Petmail basedir, quitting" % basedir return 1 print >>out, "STOPPING", basedir pidfile = os.path.join(basedir, "twistd.pid") if not os.path.exists(pidfile): print >>err, "%s does not look like a running node directory (no twistd.pid)" % basedir # we define rc=2 to mean "nothing is running, but it wasn't me who # stopped it" return 2 pid = open(pidfile, "r").read() pid = int(pid) # kill it hard (SIGKILL), delete the twistd.pid file, then wait for the # process itself to go away. If it hasn't gone away after 20 seconds, warn # the user but keep waiting until they give up. try: os.kill(pid, signal.SIGKILL) except OSError, oserr: if oserr.errno == 3: print >>err, oserr.strerror # the process didn't exist, so wipe the pid file os.remove(pidfile) return 2 else: raise try: os.remove(pidfile) except EnvironmentError: pass start = time.time() time.sleep(0.1) wait = 40 first_time = True while True: # poll once per second until we see the process is no longer running try: os.kill(pid, 0) except __HOLE__: print >>out, "process %d is dead" % pid return wait -= 1 if wait < 0: if first_time: print >>err, ("It looks like pid %d is still running " "after %d seconds" % (pid, (time.time() - start))) print >>err, "I will keep watching it until you interrupt me." wait = 10 first_time = False else: print >>err, "pid %d still running after %d seconds" % \ (pid, (time.time() - start)) wait = 10 time.sleep(1) # we define rc=1 to mean "I think something is still running, sorry" return 1
OSError
dataset/ETHPy150Open warner/petmail/petmail/scripts/startstop.py/stop
5,640
def _cursor(self): settings_dict = self.settings_dict if self.connection is None or connection_pools[self.alias]['settings'] != settings_dict: # Is this the initial use of the global connection_pools dictionary for # this python interpreter? Build a ThreadedConnectionPool instance and # add it to the dictionary if so. if self.alias not in connection_pools or connection_pools[self.alias]['settings'] != settings_dict: if not settings_dict['NAME']: from django.core.exceptions import ImproperlyConfigured raise ImproperlyConfigured( "settings.DATABASES is improperly configured. " "Please supply the NAME value.") conn_params = { 'database': settings_dict['NAME'], } conn_params.update(settings_dict['OPTIONS']) for extra in ['autocommit'] + pool_config_defaults.keys(): if extra in conn_params: del conn_params[extra] if settings_dict['USER']: conn_params['user'] = settings_dict['USER'] if settings_dict['PASSWORD']: conn_params['password'] = force_str(settings_dict['PASSWORD']) if settings_dict['HOST']: conn_params['host'] = settings_dict['HOST'] if settings_dict['PORT']: conn_params['port'] = settings_dict['PORT'] self.create_connection_pool(conn_params) self.connection = PooledConnection(connection_pools[self.alias]['pool'], test_query=self._test_on_borrow_query) self.connection.set_client_encoding('UTF8') tz = 'UTC' if settings.USE_TZ else settings_dict.get('TIME_ZONE') if tz: try: get_parameter_status = self.connection.get_parameter_status except __HOLE__: # psycopg2 < 2.0.12 doesn't have get_parameter_status conn_tz = None else: conn_tz = get_parameter_status('TimeZone') if conn_tz != tz: # Set the time zone in autocommit mode (see #17062) self.connection.set_isolation_level( psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT) self.connection.cursor().execute( self.ops.set_time_zone_sql(), [tz]) self.connection.set_isolation_level(self.isolation_level) self._get_pg_version() connection_created.send(sender=self.__class__, connection=self) cursor = self.connection.cursor() cursor.tzinfo_factory = utc_tzinfo_factory if settings.USE_TZ else None return CursorWrapper(cursor)
AttributeError
dataset/ETHPy150Open gmcguire/django-db-pool/dbpool/db/backends/postgresql_psycopg2/base.py/DatabaseWrapper14and15._cursor
5,641
def parse_address(rest):
    if rest.startswith('['):
        # remove first [] for ip
        rest = rest.replace('[', '', 1).replace(']', '', 1)

    pos = 0
    while (pos < len(rest) and
           not (rest[pos] == 'R' or rest[pos] == '/')):
        pos += 1
    address = rest[:pos]
    rest = rest[pos:]

    port_start = address.rfind(':')
    if port_start == -1:
        raise ValueError('Invalid port in add value')
    ip = address[:port_start]
    try:
        port = int(address[(port_start + 1):])
    except (__HOLE__, ValueError):
        raise ValueError(
            'Invalid port %s in add value' % address[port_start:])
    # if this is an ipv6 address then we want to convert it
    # to all lowercase and use its fully expanded representation
    # to make searches easier
    ip = validate_and_normalize_ip(ip)
    return (ip, port, rest)
TypeError
dataset/ETHPy150Open openstack/swift/swift/common/ring/utils.py/parse_address
5,642
def to_pydot(N, strict=True): """Return a pydot graph from a NetworkX graph N. Parameters ---------- N : NetworkX graph A graph created with NetworkX Examples -------- >>> K5 = nx.complete_graph(5) >>> P = nx.nx_pydot.to_pydot(K5) Notes ----- """ import pydotplus # set Graphviz graph type if N.is_directed(): graph_type='digraph' else: graph_type='graph' strict=N.number_of_selfloops()==0 and not N.is_multigraph() name = N.name graph_defaults=N.graph.get('graph',{}) if name is '': P = pydotplus.Dot('', graph_type=graph_type, strict=strict, **graph_defaults) else: P = pydotplus.Dot('"%s"'%name, graph_type=graph_type, strict=strict, **graph_defaults) try: P.set_node_defaults(**N.graph['node']) except __HOLE__: pass try: P.set_edge_defaults(**N.graph['edge']) except KeyError: pass for n,nodedata in N.nodes(data=True): str_nodedata=dict((k,make_str(v)) for k,v in nodedata.items()) p=pydotplus.Node(make_str(n),**str_nodedata) P.add_node(p) if N.is_multigraph(): for u,v,key,edgedata in N.edges(data=True,keys=True): str_edgedata=dict((k,make_str(v)) for k,v in edgedata.items() if k != 'key') edge=pydotplus.Edge(make_str(u), make_str(v), key=make_str(key), **str_edgedata) P.add_edge(edge) else: for u,v,edgedata in N.edges(data=True): str_edgedata=dict((k,make_str(v)) for k,v in edgedata.items()) edge=pydotplus.Edge(make_str(u),make_str(v),**str_edgedata) P.add_edge(edge) return P
KeyError
dataset/ETHPy150Open networkx/networkx/networkx/drawing/nx_pydot.py/to_pydot
5,643
def setup_module(module):
    from nose import SkipTest
    try:
        import pydotplus
    except __HOLE__:
        raise SkipTest("pydotplus not available")
ImportError
dataset/ETHPy150Open networkx/networkx/networkx/drawing/nx_pydot.py/setup_module
5,644
def parse (self, vstring):
    # I've given up on thinking I can reconstruct the version string
    # from the parsed tuple -- so I just store the string here for
    # use by __str__
    self.vstring = vstring
    components = filter(lambda x: x and x != '.',
                        self.component_re.split(vstring))
    for i in range(len(components)):
        try:
            components[i] = int(components[i])
        except __HOLE__:
            pass

    self.version = components
ValueError
dataset/ETHPy150Open azoft-dev-team/imagrium/env/Lib/distutils/version.py/LooseVersion.parse
5,645
def include(self,tokens):
    # Try to extract the filename and then process an include file
    if not tokens:
        return
    if tokens:
        if tokens[0].value != '<' and tokens[0].type != self.t_STRING:
            tokens = self.expand_macros(tokens)

        if tokens[0].value == '<':
            # Include <...>
            i = 1
            while i < len(tokens):
                if tokens[i].value == '>':
                    break
                i += 1
            else:
                print "Malformed #include <...>"
                return
            filename = "".join([x.value for x in tokens[1:i]])
            path = self.path + [""] + self.temp_path
        elif tokens[0].type == self.t_STRING:
            filename = tokens[0].value[1:-1]
            path = self.temp_path + [""] + self.path
        else:
            print "Malformed #include statement"
            return
    for p in path:
        iname = os.path.join(p,filename)
        try:
            data = open(iname,"r").read()
            dname = os.path.dirname(iname)
            if dname:
                self.temp_path.insert(0,dname)
            for tok in self.parsegen(data,filename):
                yield tok
            if dname:
                del self.temp_path[0]
            break
        except __HOLE__,e:
            pass
    else:
        print "Couldn't find '%s'" % filename

# ----------------------------------------------------------------------
# define()
#
# Define a new macro
# ----------------------------------------------------------------------
IOError
dataset/ETHPy150Open CountZer0/PipelineConstructionSet/python/maya/site-packages/pymel-1.0.3/pymel/util/external/ply/cpp.py/Preprocessor.include
5,646
def token(self):
    try:
        while True:
            tok = self.parser.next()
            if tok.type not in self.ignore:
                return tok
    except __HOLE__:
        self.parser = None
        return None
StopIteration
dataset/ETHPy150Open CountZer0/PipelineConstructionSet/python/maya/site-packages/pymel-1.0.3/pymel/util/external/ply/cpp.py/Preprocessor.token
5,647
def start(self):
    """Start collecting trace information."""
    if self._collectors:
        self._collectors[-1].pause()
    self._collectors.append(self)
    #print("Started: %r" % self._collectors, file=sys.stderr)

    # Check to see whether we had a fullcoverage tracer installed.
    traces0 = []
    if hasattr(sys, "gettrace"):
        fn0 = sys.gettrace()
        if fn0:
            tracer0 = getattr(fn0, '__self__', None)
            if tracer0:
                traces0 = getattr(tracer0, 'traces', [])

    # Install the tracer on this thread.
    fn = self._start_tracer()

    for args in traces0:
        (frame, event, arg), lineno = args
        try:
            fn(frame, event, arg, lineno=lineno)
        except __HOLE__:
            raise Exception(
                "fullcoverage must be run with the C trace function."
            )

    # Install our installation tracer in threading, to jump start other
    # threads.
    threading.settrace(self._installation_trace)
TypeError
dataset/ETHPy150Open RoseOu/flasky/venv/lib/python2.7/site-packages/coverage/collector.py/Collector.start
5,648
def BuildTable(self, start_row, end_row, request):
    """Builds table of ClientCrash'es."""
    crashes_urn = str(self.state.get("crashes_urn") or
                      request.REQ.get("crashes_urn"))

    try:
        collection = aff4.FACTORY.Open(crashes_urn,
                                       aff4_type="PackedVersionedCollection",
                                       token=request.token)
    except __HOLE__:
        return

    for row_index, value in enumerate(collection):
        if row_index < start_row:
            continue
        row_index += 1
        if row_index > end_row:
            # Indicate that there are more rows.
            return True

        self.AddCell(row_index, "Client Id", value.client_id)
        self.AddCell(row_index, "Crash Details", value)
IOError
dataset/ETHPy150Open google/grr/grr/gui/plugins/crash_view.py/ClientCrashCollectionRenderer.BuildTable
5,649
def _get_failed_targets(self, tests_and_targets):
    """Return a mapping of target -> set of individual test cases that failed.

    Targets with no failed tests are omitted.

    Analyzes JUnit XML files to figure out which test had failed.

    The individual test cases are formatted strings of the form
    org.foo.bar.classname#methodName.

    :tests_and_targets: {test: target} mapping.
    """

    def get_test_filename(test_class_name):
        return os.path.join(self.workdir, 'TEST-{0}.xml'.format(test_class_name.replace('$', '-')))

    xml_filenames_to_targets = defaultdict()
    for test, target in tests_and_targets.items():
        if target is None:
            self.context.log.warn('Unknown target for test %{0}'.format(test))

        # Look for a TEST-*.xml file that matches the classname or a containing classname
        test_class_name = test
        for _part in test.split('$'):
            filename = get_test_filename(test_class_name)
            if os.path.exists(filename):
                xml_filenames_to_targets[filename] = target
                break
            else:
                test_class_name = test_class_name.rsplit('$', 1)[0]

    failed_targets = defaultdict(set)
    for xml_filename, target in xml_filenames_to_targets.items():
        try:
            xml = XmlParser.from_file(xml_filename)
            failures = int(xml.get_attribute('testsuite', 'failures'))
            errors = int(xml.get_attribute('testsuite', 'errors'))
            if target and (failures or errors):
                for testcase in xml.parsed.getElementsByTagName('testcase'):
                    test_failed = testcase.getElementsByTagName('failure')
                    test_errored = testcase.getElementsByTagName('error')
                    if test_failed or test_errored:
                        failed_targets[target].add('{testclass}#{testname}'.format(
                            testclass=testcase.getAttribute('classname'),
                            testname=testcase.getAttribute('name'),
                        ))
        except (XmlParser.XmlError, __HOLE__) as e:
            self.context.log.error('Error parsing test result file {0}: {1}'.format(xml_filename, e))

    return dict(failed_targets)
ValueError
dataset/ETHPy150Open pantsbuild/pants/src/python/pants/backend/jvm/tasks/junit_run.py/JUnitRun._get_failed_targets
5,650
def __new__(cls, obj, sentinel=''):
    try:
        iterator = iter(obj)
    except __HOLE__:
        return IterI(obj, sentinel)
    return IterO(iterator, sentinel)
TypeError
dataset/ETHPy150Open Eforcers/gae-flask-todo/lib/werkzeug/contrib/iterio.py/IterIO.__new__
5,651
def seek(self, pos, mode=0):
    if self.closed:
        raise ValueError('I/O operation on closed file')
    if mode == 1:
        pos += self.pos
    elif mode == 2:
        self.read()
        self.pos = min(self.pos, self.pos + pos)
        return
    elif mode != 0:
        raise IOError('Invalid argument')
    buf = []
    try:
        tmp_end_pos = len(self._buf)
        while pos > tmp_end_pos:
            item = self._gen.next()
            tmp_end_pos += len(item)
            buf.append(item)
    except __HOLE__:
        pass
    if buf:
        self._buf_append(_mixed_join(buf, self.sentinel))
    self.pos = max(0, pos)
StopIteration
dataset/ETHPy150Open Eforcers/gae-flask-todo/lib/werkzeug/contrib/iterio.py/IterO.seek
5,652
def read(self, n=-1):
    if self.closed:
        raise ValueError('I/O operation on closed file')
    if n < 0:
        self._buf_append(_mixed_join(self._gen, self.sentinel))
        result = self._buf[self.pos:]
        self.pos += len(result)
        return result
    new_pos = self.pos + n
    buf = []
    try:
        tmp_end_pos = 0 if self._buf is None else len(self._buf)
        while new_pos > tmp_end_pos or (self._buf is None and not buf):
            item = next(self._gen)
            tmp_end_pos += len(item)
            buf.append(item)
    except __HOLE__:
        pass
    if buf:
        self._buf_append(_mixed_join(buf, self.sentinel))
    if self._buf is None:
        return self.sentinel
    new_pos = max(0, new_pos)
    try:
        return self._buf[self.pos:new_pos]
    finally:
        self.pos = min(new_pos, len(self._buf))
StopIteration
dataset/ETHPy150Open Eforcers/gae-flask-todo/lib/werkzeug/contrib/iterio.py/IterO.read
5,653
def readline(self, length=None):
    if self.closed:
        raise ValueError('I/O operation on closed file')
    nl_pos = -1
    if self._buf:
        nl_pos = self._buf.find(_newline(self._buf), self.pos)
    buf = []
    try:
        pos = self.pos
        while nl_pos < 0:
            item = next(self._gen)
            local_pos = item.find(_newline(item))
            buf.append(item)
            if local_pos >= 0:
                nl_pos = pos + local_pos
                break
            pos += len(item)
    except __HOLE__:
        pass
    if buf:
        self._buf_append(_mixed_join(buf, self.sentinel))
    if self._buf is None:
        return self.sentinel
    if nl_pos < 0:
        new_pos = len(self._buf)
    else:
        new_pos = nl_pos + 1
    if length is not None and self.pos + length < new_pos:
        new_pos = self.pos + length
    try:
        return self._buf[self.pos:new_pos]
    finally:
        self.pos = min(new_pos, len(self._buf))
StopIteration
dataset/ETHPy150Open Eforcers/gae-flask-todo/lib/werkzeug/contrib/iterio.py/IterO.readline
5,654
@classmethod
def _validate(cls, value):
    if not isinstance(value, basestring):
        raise ValueError(value)
    try:
        return unicode(value)
    except __HOLE__:
        return unicode(value, 'utf-8')
UnicodeDecodeError
dataset/ETHPy150Open stepank/pyws/src/pyws/functions/args/types/simple.py/String._validate
5,655
@classmethod
def _validate(cls, value):
    value, offset = cls.get_offset(value)
    tz = cls.get_tzinfo(offset)
    try:
        return cls._parse(value).replace(tzinfo=tz)
    except __HOLE__:
        mo = re.search('\\.\d+$', value)
        if not mo:
            raise
        ms = mo.group(0)
        value = value.replace(ms, '')
        return cls._parse(value, cls.format).\
            replace(tzinfo=tz, microsecond=int(ms[1:].ljust(6, '0')))
ValueError
dataset/ETHPy150Open stepank/pyws/src/pyws/functions/args/types/simple.py/DateTime._validate
5,656
def run_only_if_boto_is_available(func):
    try:
        import boto
    except __HOLE__:
        boto = None
    pred = lambda: boto is not None
    return run_only(func, pred)
ImportError
dataset/ETHPy150Open BrightcoveOS/Diamond/src/collectors/elb/test/testelb.py/run_only_if_boto_is_available
5,657
def __getitem__(self, index):
    """
    Retrieve a specific `BoundColumn` object.

    *index* can either be 0-indexed or the name of a column

    .. code-block:: python

        columns['speed']  # returns a bound column with name 'speed'
        columns[0]        # returns the first column
    """
    if isinstance(index, int):
        try:
            return next(islice(self.iterall(), index, index + 1))
        except __HOLE__:
            raise IndexError
    elif isinstance(index, six.string_types):
        for column in self.iterall():
            if column.name == index:
                return column
        raise KeyError("Column with name '%s' does not exist; "
                       "choices are: %s" % (index, self.names()))
    else:
        raise TypeError('Column indices must be integers or str, not %s'
                        % type(index).__name__)
StopIteration
dataset/ETHPy150Open bradleyayers/django-tables2/django_tables2/columns/base.py/BoundColumns.__getitem__
5,658
@classmethod
def read(cls, reader, dump=None):
    code = reader.read_u1()

    # Create an index of all known opcodes.
    if Opcode.opcodes is None:
        Opcode.opcodes = {}
        for name in globals():
            klass = globals()[name]
            try:
                if name != 'Opcode' and issubclass(klass, Opcode):
                    Opcode.opcodes[klass.code] = klass
            except __HOLE__:
                pass

    instance = Opcode.opcodes[code].read_extra(reader, dump)
    if dump:
        reader.debug(" " * dump, '%3d: %s' % (reader.offset, instance))
    return instance
TypeError
dataset/ETHPy150Open pybee/voc/voc/java/opcodes.py/Opcode.read
5,659
def tearDown(self):
    try:
        os.unlink(self.fname1)
    except OSError:
        pass
    try:
        os.unlink(self.fname2)
    except __HOLE__:
        pass
OSError
dataset/ETHPy150Open azoft-dev-team/imagrium/env/Lib/test/test_binhex.py/BinHexTestCase.tearDown
5,660
@classmethod
def GetFilter(cls, filter_name):
    """Return an initialized filter. Only initialize filters once.

    Args:
      filter_name: The name of the filter, as a string.

    Returns:
      an initialized instance of the filter.

    Raises:
      DefinitionError if the type of filter has not been defined.
    """
    # Check if the filter is defined in the registry.
    try:
        filt_cls = cls.GetPlugin(filter_name)
    except __HOLE__:
        raise DefinitionError("Filter %s does not exist." % filter_name)
    # Return an initialized filter, after initializing it in cls.filters if it
    # doesn't exist.
    return cls.filters.setdefault(filter_name, filt_cls())
KeyError
dataset/ETHPy150Open google/grr/grr/lib/checks/filters.py/Filter.GetFilter
5,661
def Validate(self, expression): """Validates that a parsed rule entry is valid for fschecker. Args: expression: A rule expression. Raises: DefinitionError: If the filter definition could not be validated. Returns: True if the expression validated OK. """ parsed = self._Load(expression) if not parsed: raise DefinitionError("Empty StatFilter expression.") bad_keys = set(parsed) - self._KEYS if bad_keys: raise DefinitionError("Invalid parameters: %s" % ",".join(bad_keys)) if self.cfg.mask and not self.cfg.mode: raise DefinitionError("mode can only be set when mask is also defined.") if self.cfg.mask: if len(self.cfg.mask) > 1: raise DefinitionError("Too many mask values defined.") if not self._PERM_RE.match(self.cfg.mask[0]): raise DefinitionError("mask=%s is not octal, e.g. 0600" % self.cfg.mask) if self.cfg.mode: if len(self.cfg.mode) > 1: raise DefinitionError("Too many mode values defined.") if not self._PERM_RE.match(self.cfg.mode[0]): raise DefinitionError("mode=%s is not octal, e.g. 0600" % self.cfg.mode) if self.cfg.gid: for gid in self.cfg.gid: matched = self._UID_GID_RE.match(gid) if not matched: raise DefinitionError("gid: %s is not an integer preceded by " "!, >, < or =." % gid) if self.cfg.uid: for uid in self.cfg.uid: matched = self._UID_GID_RE.match(uid) if not matched: raise DefinitionError("uid: %s is not an integer preceded by " "!, >, < or =." % uid) if self.cfg.file_re: if len(self.cfg.file_re) > 1: raise DefinitionError( "Too many regexes defined: %s" % self.cfg.file_re) try: self.file_re = re.compile(self.cfg.file_re[0]) except (re.error, TypeError) as e: raise DefinitionError("Invalid file regex: %s" % e) if self.cfg.path_re: if len(self.cfg.path_re) > 1: raise DefinitionError( "Too many regexes defined: %s" % self.cfg.path_re) try: self.path_re = re.compile(self.cfg.path_re[0]) except (re.error, __HOLE__) as e: raise DefinitionError("Invalid path regex: %s" % e) if self.cfg.file_type: if len(self.cfg.file_type) > 1: raise DefinitionError( "Too many file types defined: %s" % self.cfg.file_type) file_type = self.cfg.file_type[0].upper() if file_type not in self._TYPES: raise DefinitionError("Unsupported file type %s" % file_type) self._Initialize() if not self.matchers: raise DefinitionError("StatFilter has no actions: %s" % expression) return True
TypeError
dataset/ETHPy150Open google/grr/grr/lib/checks/filters.py/StatFilter.Validate
5,662
def py_encode_basestring_ascii(s):
    if isinstance(s, str) and HAS_UTF8.search(s) is not None:
        s = s.decode('utf-8')
    def replace(match):
        s = match.group(0)
        try:
            return ESCAPE_DCT[s]
        except __HOLE__:
            n = ord(s)
            if n < 0x10000:
                return '\\u{0:04x}'.format(n)
            else:
                # surrogate pair
                n -= 0x10000
                s1 = 0xd800 | ((n >> 10) & 0x3ff)
                s2 = 0xdc00 | (n & 0x3ff)
                return '\\u{0:04x}\\u{1:04x}'.format(s1, s2)
    return '"' + str(ESCAPE_ASCII.sub(replace, s)) + '"'
KeyError
dataset/ETHPy150Open deanhiller/databus/webapp/play1.3.x/python/Lib/json/encoder.py/py_encode_basestring_ascii
5,663
def get_installed_libraries():
    """
    Return the built-in template tag libraries and those from installed
    applications. Libraries are stored in a dictionary where keys are the
    individual module names, not the full module paths. Example:
    django.templatetags.i18n is stored as i18n.
    """
    libraries = {}
    candidates = ['django.templatetags']
    candidates.extend(
        '%s.templatetags' % app_config.name
        for app_config in apps.get_app_configs())

    for candidate in candidates:
        try:
            pkg = import_module(candidate)
        except __HOLE__:
            # No templatetags package defined. This is safe to ignore.
            continue

        if hasattr(pkg, '__path__'):
            for name in get_package_libraries(pkg):
                libraries[name[len(candidate) + 1:]] = name

    return libraries
ImportError
dataset/ETHPy150Open django/django/django/template/backends/django.py/get_installed_libraries
5,664
def get_package_libraries(pkg):
    """
    Recursively yield template tag libraries defined in submodules of a
    package.
    """
    for entry in walk_packages(pkg.__path__, pkg.__name__ + '.'):
        try:
            module = import_module(entry[1])
        except __HOLE__ as e:
            raise InvalidTemplateLibrary(
                "Invalid template library specified. ImportError raised when "
                "trying to load '%s': %s" % (entry[1], e)
            )

        if hasattr(module, 'register'):
            yield entry[1]
ImportError
dataset/ETHPy150Open django/django/django/template/backends/django.py/get_package_libraries
5,665
def on_query_completions(self, view, prefix, locations):
    results = []
    try:
        pos = locations[0]
        env = getenv()
        p = openProcess(["gocode", "-sock=tcp", "-addr=localhost:37777",
                         "-f=json", "autocomplete",
                         view.file_name().encode('utf-8'), str(pos)],
                        env=env,
                        stdin=subprocess.PIPE,
                        stdout=subprocess.PIPE,
                        stderr=subprocess.PIPE)
        src = view.substr(sublime.Region(0, view.size()))
        stdout, stderr = p.communicate(input=src.encode())
        if p.returncode != 0:
            err = stderr.decode('utf-8')
            print("gocode failed: %s" % (err))
            return []

        try:
            j = json.loads(stdout.decode("utf8"))
        except __HOLE__:
            print("Not valid JSON")
            return []

        if len(j) == 2:
            for r in j[1]:
                #results.append(['trigger': r['name'], 'contents': r['name']])
                results.append([ r['name'], r['name']])
    except Exception as e:
        print(e)
    return results
ValueError
dataset/ETHPy150Open newhook/gomode/gomode.py/GoModeAutocomplete.on_query_completions
5,666
def skip_if_not_available(modules=None, datasets=None, configurations=None):
    """Raises a SkipTest exception when requirements are not met.

    Parameters
    ----------
    modules : list
        A list of strings of module names. If one of the modules fails to
        import, the test will be skipped.
    datasets : list
        A list of strings of folder names. If the data path is not
        configured, or the folder does not exist, the test is skipped.
    configurations : list
        A list of of strings of configuration names. If this configuration
        is not set and does not have a default, the test will be skipped.

    """
    if modules is None:
        modules = []
    if datasets is None:
        datasets = []
    if configurations is None:
        configurations = []
    for module in modules:
        try:
            import_module(module)
        except Exception:
            raise SkipTest
    if datasets and not hasattr(config, 'data_path'):
        raise SkipTest
    for dataset in datasets:
        try:
            find_in_data_path(dataset)
        except __HOLE__:
            raise SkipTest
    for configuration in configurations:
        if not hasattr(config, configuration):
            raise SkipTest
IOError
dataset/ETHPy150Open mila-udem/fuel/tests/__init__.py/skip_if_not_available
5,667
def __new__(cls, library_path=''):
    if library_path == '':
        errs = []
        for path in cls.get_library_paths():
            try:
                return cls(path)
            except __HOLE__ as e:
                logger.debug('Could not open VISA library %s: %s', path, str(e))
                errs.append(str(e))
            except Exception as e:
                errs.append(str(e))
        else:
            raise OSError('Could not open VISA library:\n' + '\n'.join(errs))

    if (cls, library_path) in cls._registry:
        return cls._registry[(cls, library_path)]

    obj = super(VisaLibraryBase, cls).__new__(cls)

    obj.library_path = library_path
    obj._logging_extra = {'library_path': obj.library_path}

    obj._init()

    # Create instance specific registries.
    #: Error codes on which to issue a warning.
    obj.issue_warning_on = set(errors.default_warnings)
    obj._last_status_in_session = dict()
    obj._ignore_warning_in_session = defaultdict(set)
    obj.handlers = defaultdict(list)

    logger.debug('Created library wrapper for %s', library_path)

    cls._registry[(cls, library_path)] = obj

    return obj
OSError
dataset/ETHPy150Open hgrecco/pyvisa/pyvisa/highlevel.py/VisaLibraryBase.__new__
5,668
def get_last_status_in_session(self, session):
    """Last status in session.

    Helper function to be called by resources properties.
    """
    try:
        return self._last_status_in_session[session]
    except __HOLE__:
        raise errors.Error('The session %r does not seem to be valid as it does not have any last status' % session)
KeyError
dataset/ETHPy150Open hgrecco/pyvisa/pyvisa/highlevel.py/VisaLibraryBase.get_last_status_in_session
5,669
def install_visa_handler(self, session, event_type, handler, user_handle=None):
    """Installs handlers for event callbacks.

    :param session: Unique logical identifier to a session.
    :param event_type: Logical event identifier.
    :param handler: Interpreted as a valid reference to a handler to be installed by a client application.
    :param user_handle: A value specified by an application that can be used for identifying handlers
                        uniquely for an event type.
    :returns: user handle (a ctypes object)
    """
    try:
        new_handler = self.install_handler(session, event_type, handler, user_handle)
    except __HOLE__ as e:
        raise errors.VisaTypeError(str(e))

    self.handlers[session].append(new_handler + (event_type,))
    return new_handler[1]
TypeError
dataset/ETHPy150Open hgrecco/pyvisa/pyvisa/highlevel.py/VisaLibraryBase.install_visa_handler
5,670
def parse_resource_extended(self, session, resource_name):
    """Parse a resource string to get extended interface information.

    Corresponds to viParseRsrcEx function of the VISA library.

    :param session: Resource Manager session (should always be the Default Resource Manager for VISA
                    returned from open_default_resource_manager()).
    :param resource_name: Unique symbolic name of a resource.
    :return: Resource information, return value of the library call.
    :rtype: :class:`pyvisa.highlevel.ResourceInfo`, :class:`pyvisa.constants.StatusCode`
    """
    try:
        parsed = rname.parse_resource_name(resource_name)

        return (ResourceInfo(parsed.interface_type_const,
                             parsed.board,
                             parsed.resource_class, None, None),
                constants.StatusCode.success)
    except __HOLE__:
        return 0, constants.StatusCode.error_invalid_resource_name
ValueError
dataset/ETHPy150Open hgrecco/pyvisa/pyvisa/highlevel.py/VisaLibraryBase.parse_resource_extended
5,671
def get_wrapper_class(backend_name):
    """Return the WRAPPER_CLASS for a given backend.

    :rtype: pyvisa.highlevel.VisaLibraryBase
    """
    try:
        return _WRAPPERS[backend_name]
    except __HOLE__:
        if backend_name == 'ni':
            from .ctwrapper import NIVisaLibrary
            _WRAPPERS['ni'] = NIVisaLibrary
            return NIVisaLibrary

        for pkgname in list_backends():
            if pkgname.endswith('-' + backend_name):
                pkg = __import__(pkgname)
                _WRAPPERS[backend_name] = cls = pkg.WRAPPER_CLASS
                return cls
        else:
            raise ValueError('Wrapper not found: No package named pyvisa-%s' % backend_name)
KeyError
dataset/ETHPy150Open hgrecco/pyvisa/pyvisa/highlevel.py/get_wrapper_class
5,672
def open_visa_library(specification):
    """Helper function to create a VISA library wrapper.

    In general, you should not use the function directly. The VISA library
    wrapper will be created automatically when you create a ResourceManager object.
    """
    try:
        argument, wrapper = specification.split('@')
    except __HOLE__:
        argument = specification
        wrapper = 'ni'

    cls = get_wrapper_class(wrapper)

    try:
        return cls(argument)
    except Exception as e:
        logger.debug('Could not open VISA wrapper %s: %s\n%s', cls, str(argument), e)
        raise
ValueError
dataset/ETHPy150Open hgrecco/pyvisa/pyvisa/highlevel.py/open_visa_library
5,673
def open_resource(self, resource_name,
                  access_mode=constants.AccessModes.no_lock,
                  open_timeout=constants.VI_TMO_IMMEDIATE,
                  resource_pyclass=None,
                  **kwargs):
    """Return an instrument for the resource name.

    :param resource_name: name or alias of the resource to open.
    :param access_mode: access mode.
    :type access_mode: :class:`pyvisa.constants.AccessModes`
    :param open_timeout: time out to open.
    :param resource_pyclass: resource python class to use to instantiate the Resource.
                             Defaults to None: select based on the resource name.
    :param kwargs: keyword arguments to be used to change instrument attributes
                   after construction.

    :rtype: :class:`pyvisa.resources.Resource`
    """
    if resource_pyclass is None:
        info = self.resource_info(resource_name, extended=True)

        try:
            resource_pyclass = self._resource_classes[(info.interface_type, info.resource_class)]
        except KeyError:
            resource_pyclass = self._resource_classes[(constants.InterfaceType.unknown, '')]
            logger.warning('There is no class defined for %r. Using Resource',
                           (info.interface_type, info.resource_class))

    res = resource_pyclass(self, resource_name)
    for key in kwargs.keys():
        try:
            getattr(res, key)
            present = True
        except __HOLE__:
            present = False
        except errors.InvalidSession:
            present = True

        if not present:
            raise ValueError('%r is not a valid attribute for type %s' % (key, res.__class__.__name__))

    res.open(access_mode, open_timeout)

    for key, value in kwargs.items():
        setattr(res, key, value)

    return res

#: For backwards compatibility
AttributeError
dataset/ETHPy150Open hgrecco/pyvisa/pyvisa/highlevel.py/ResourceManager.open_resource
5,674
def script_paths(self, script_name):
    """Returns the sys.path prefix appropriate for this script.

    Args:
      script_name: the basename of the script, for example 'appcfg.py'.
    """
    try:
        return self._script_to_paths[script_name]
    except __HOLE__:
        raise KeyError('Script name %s not recognized' % script_name)
KeyError
dataset/ETHPy150Open GoogleCloudPlatform/python-compat-runtime/appengine-compat/exported_appengine_sdk/wrapper_util.py/Paths.script_paths
5,675
def test_makepyfile_unicode(testdir):
    global unichr
    try:
        unichr(65)
    except __HOLE__:
        unichr = chr
    testdir.makepyfile(unichr(0xfffd))
NameError
dataset/ETHPy150Open pytest-dev/pytest/testing/test_pytester.py/test_makepyfile_unicode
5,676
@register.tag(name='change_currency')
def change_currency(parser, token):
    try:
        tag_name, current_price, new_currency = token.split_contents()
    except __HOLE__:
        raise template.TemplateSyntaxError, \
            '%r tag requires exactly two arguments' % token.contents.split()[0]
    return ChangeCurrencyNode(current_price, new_currency)
ValueError
dataset/ETHPy150Open hassanch/django-currencies/currencies/templatetags/currency.py/change_currency
5,677
def can_user_vote(self, user):
    """
    Test whether the passed user is allowed to vote on this review
    """
    if not user.is_authenticated():
        return False, _(u"Only signed in users can vote")
    vote = self.votes.model(review=self, user=user, delta=1)
    try:
        vote.full_clean()
    except __HOLE__ as e:
        return False, u"%s" % e
    return True, ""
ValidationError
dataset/ETHPy150Open django-oscar/django-oscar/src/oscar/apps/catalogue/reviews/abstract_models.py/AbstractProductReview.can_user_vote
5,678
def switch(name, ip=None, netmask=None, gateway=None, dhcp=None, password=None, snmp=None): ''' Manage switches in a Dell Chassis. name The switch designation (e.g. switch-1, switch-2) ip The Static IP Address of the switch netmask The netmask for the static IP gateway The gateway for the static IP dhcp True: Enable DHCP False: Do not change DHCP setup (disabling DHCP is automatic when a static IP is set) password The access (root) password for the switch snmp The SNMP community string for the switch Example: .. code-block:: yaml my-dell-chassis: dellchassis.switch: - switch: switch-1 - ip: 192.168.1.1 - netmask: 255.255.255.0 - gateway: 192.168.1.254 - dhcp: True - password: secret - snmp: public ''' ret = {'name': name, 'result': True, 'changes': {}, 'comment': ''} current_nic = __salt__['chassis.cmd']('network_info', module=name) try: if current_nic.get('retcode', 0) != 0: ret['result'] = False ret['comment'] = current_nic['stdout'] return ret if ip or netmask or gateway: if not ip: ip = current_nic['Network']['IP Address'] if not netmask: ip = current_nic['Network']['Subnet Mask'] if not gateway: ip = current_nic['Network']['Gateway'] if current_nic['Network']['DHCP Enabled'] == '0' and dhcp: ret['changes'].update({'DHCP': {'Old': {'DHCP Enabled': current_nic['Network']['DHCP Enabled']}, 'New': {'DHCP Enabled': dhcp}}}) if ((ip or netmask or gateway) and not dhcp and (ip != current_nic['Network']['IP Address'] or netmask != current_nic['Network']['Subnet Mask'] or gateway != current_nic['Network']['Gateway'])): ret['changes'].update({'IP': {'Old': current_nic['Network'], 'New': {'IP Address': ip, 'Subnet Mask': netmask, 'Gateway': gateway}}}) if password: if 'New' not in ret['changes']: ret['changes']['New'] = {} ret['changes']['New'].update({'Password': '*****'}) if snmp: if 'New' not in ret['changes']: ret['changes']['New'] = {} ret['changes']['New'].update({'SNMP': '*****'}) if ret['changes'] == {}: ret['comment'] = 'Switch ' + name + ' is already in desired state' return ret except __HOLE__: ret['changes'] = {} ret['comment'] = 'Something went wrong retrieving the switch details' return ret if __opts__['test']: ret['result'] = None ret['comment'] = 'Switch ' + name + ' configuration will change' return ret # Finally, set the necessary configurations on the chassis. dhcp_ret = net_ret = password_ret = snmp_ret = True if dhcp: dhcp_ret = __salt__['chassis.cmd']('set_niccfg', module=name, dhcp=dhcp) if ip or netmask or gateway: net_ret = __salt__['chassis.cmd']('set_niccfg', ip, netmask, gateway, module=name) if password: password_ret = __salt__['chassis.cmd']('deploy_password', 'root', password, module=name) if snmp: snmp_ret = __salt__['chassis.cmd']('deploy_snmp', snmp, module=name) if any([password_ret, snmp_ret, net_ret, dhcp_ret]) is False: ret['result'] = False ret['comment'] = 'There was an error setting the switch {0}.'.format(name) ret['comment'] = 'Dell chassis switch {0} was updated.'.format(name) return ret
AttributeError
dataset/ETHPy150Open saltstack/salt/salt/states/dellchassis.py/switch
5,679
def update_ca_bundle( target=None, source=None, opts=None, merge_files=None, ): ''' Attempt to update the CA bundle file from a URL If not specified, the local location on disk (``target``) will be auto-detected, if possible. If it is not found, then a new location on disk will be created and updated. The default ``source`` is: http://curl.haxx.se/ca/cacert.pem This is based on the information at: http://curl.haxx.se/docs/caextract.html A string or list of strings representing files to be appended to the end of the CA bundle file may also be passed through as ``merge_files``. ''' if opts is None: opts = {} if target is None: target = get_ca_bundle(opts) if target is None: log.error('Unable to detect location to write CA bundle to') return if source is None: source = opts.get('ca_bundle_url', 'http://curl.haxx.se/ca/cacert.pem') log.debug('Attempting to download {0} to {1}'.format(source, target)) query( source, text=True, decode=False, headers=False, status=False, text_out=target ) if merge_files is not None: if isinstance(merge_files, six.string_types): merge_files = [merge_files] if not isinstance(merge_files, list): log.error('A value was passed as merge_files which was not either ' 'a string or a list') return merge_content = '' for cert_file in merge_files: if os.path.exists(cert_file): log.debug( 'Queueing up {0} to be appended to {1}'.format( cert_file, target ) ) try: with salt.utils.fopen(cert_file, 'r') as fcf: merge_content = '\n'.join((merge_content, fcf.read())) except __HOLE__ as exc: log.error( 'Reading from {0} caused the following error: {1}'.format( cert_file, exc ) ) if merge_content: log.debug('Appending merge_files to {0}'.format(target)) try: with salt.utils.fopen(target, 'a') as tfp: tfp.write('\n') tfp.write(merge_content) except IOError as exc: log.error( 'Writing to {0} caused the following error: {1}'.format( target, exc ) )
IOError
dataset/ETHPy150Open saltstack/salt/salt/utils/http.py/update_ca_bundle
5,680
def _get_datastore(self, datastore_name=None):
    """
    Returns the couch datastore instance and datastore name.
    This caches the datastore instance to avoid an explicit lookup to save on http request.
    The consequence is that if another process deletes the datastore in the meantime, we will fail later.
    """
    ds_name = self._get_datastore_name(datastore_name)
    if ds_name in self._datastore_cache:
        return self._datastore_cache[ds_name], ds_name

    try:
        if not self.datastore_exists(datastore_name):
            raise NotFound("Datastore '%s' does not exist" % ds_name)

        ds = self.server[ds_name]   # Note: causes http lookup
        self._datastore_cache[ds_name] = ds
        return ds, ds_name
    except __HOLE__:
        raise BadRequest("Datastore name '%s' invalid" % ds_name)
ValueError
dataset/ETHPy150Open ooici/pyon/pyon/datastore/couchbase/base_store.py/CouchbaseDataStore._get_datastore
5,681
def _cache_morlist_process(self, instance):
    """ Empties the self.morlist_raw by popping items and running asynchronously
    the _cache_morlist_process_atomic operation that will get the available
    metrics for this MOR and put it in self.morlist
    """
    i_key = self._instance_key(instance)
    if i_key not in self.morlist:
        self.morlist[i_key] = {}

    batch_size = self.init_config.get('batch_morlist_size', BATCH_MORLIST_SIZE)
    for i in xrange(batch_size):
        try:
            mor = self.morlist_raw[i_key].pop()
            self.pool.apply_async(self._cache_morlist_process_atomic, args=(instance, mor))
        except (IndexError, __HOLE__):
            self.log.debug("No more work to process in morlist_raw")
            return
KeyError
dataset/ETHPy150Open serverdensity/sd-agent/checks.d/vsphere.py/VSphereCheck._cache_morlist_process
5,682
def get_i18n_js(self):
    """Generates a Javascript body for i18n in Javascript.

    If you want to load these javascript code from a static HTML file,
    you need to create another handler which just returns the code
    generated by this function.

    Returns:
        Actual javascript code for functions and translation messages
        for i18n.
    """
    try:
        js_translations = gettext.translation(
            'jsmessages', 'locales', fallback=False,
            languages=self.request.environ[
                'i18n_utils.preferred_languages'],
            codeset='utf-8')
    except __HOLE__:
        template = self.jinja2_env.get_template('null_i18n_js.jinja2')
        return template.render()
    translations_dict = convert_translations_to_dict(js_translations)
    template = self.jinja2_env.get_template('i18n_js.jinja2')
    return template.render(
        {'translations': json.dumps(translations_dict, indent=1)})
IOError
dataset/ETHPy150Open GoogleCloudPlatform/python-docs-samples/appengine/i18n/i18n_utils.py/BaseHandler.get_i18n_js
5,683
def escapeChar(match):
    c=match.group(0)
    try:
        replacement = CharReplacements[c]
        return replacement
    except __HOLE__:
        d = ord(c)
        if d < 32:
            return '\\u%04x' % d
        else:
            return c
KeyError
dataset/ETHPy150Open blinktrade/bitex/libs/jsonrpc/json.py/escapeChar
5,684
def loads(s): stack = [] chars = iter(s) value = None currCharIsNext=False try: while(1): skip = False if not currCharIsNext: c = chars.next() while(c in [' ', '\t', '\r','\n']): c = chars.next() currCharIsNext=False if c=='"': value = '' try: c=chars.next() while c != '"': if c == '\\': c=chars.next() try: value+=EscapeCharToChar[c] except KeyError: if c == 'u': hexCode = chars.next() + chars.next() + chars.next() + chars.next() value += unichr(int(hexCode,16)) else: raise JSONDecodeException("Bad Escape Sequence Found") else: value+=c c=chars.next() except StopIteration: raise JSONDecodeException("Expected end of String") elif c == '{': stack.append({}) skip=True elif c =='}': value = stack.pop() elif c == '[': stack.append([]) skip=True elif c == ']': value = stack.pop() elif c in [',',':']: skip=True elif c in Digits or c == '-': digits=[c] c = chars.next() numConv = int try: while c in Digits: digits.append(c) c = chars.next() if c == ".": numConv=float digits.append(c) c = chars.next() while c in Digits: digits.append(c) c = chars.next() if c.upper() == 'E': digits.append(c) c = chars.next() if c in ['+','-']: digits.append(c) c = chars.next() while c in Digits: digits.append(c) c = chars.next() else: raise JSONDecodeException("Expected + or -") except StopIteration: pass value = numConv("".join(digits)) currCharIsNext=True elif c in ['t','f','n']: kw = c+ chars.next() + chars.next() + chars.next() if kw == 'null': value = None elif kw == 'true': value = True elif kw == 'fals' and chars.next() == 'e': value = False else: raise JSONDecodeException('Expected Null, False or True') else: raise JSONDecodeException('Expected []{}," or Number, Null, False or True') if not skip: if len(stack): top = stack[-1] if type(top) is ListType: top.append(value) elif type(top) is DictionaryType: stack.append(value) elif type(top) in StringTypes: key = stack.pop() stack[-1][key] = value else: raise JSONDecodeException("Expected dictionary key, or start of a value") else: return value except __HOLE__: raise JSONDecodeException("Unexpected end of JSON source")
StopIteration
dataset/ETHPy150Open blinktrade/bitex/libs/jsonrpc/json.py/loads
5,685
def downloadBooks(self,titles=None,formats=None): #titles= list('C# tutorial', 'c++ Tutorial') ; format=tuple('pdf','mobi','epub','code') try: #download ebook if formats is None: formats=('pdf','mobi','epub','code') if titles is not None: tempBookData = [data for i,data in enumerate(self.bookData) if any(data['title']==title for title in titles) ] else: tempBookData=self.bookData nrOfBooksDownloaded=0 for i, book in enumerate(tempBookData): for format in formats: #print(format) if format in list(tempBookData[i]['downloadUrls'].keys()): if format == 'code': fileType='zip' else: fileType = format formattedTitle= ''.join(list(map(str.capitalize, tempBookData[i]['title'].split(' ')))) for ch in ['?',':','*','/']: if ch in formattedTitle: formattedTitle=formattedTitle.replace(ch,'_') fullFilePath=os.path.join(self.downloadFolderPath,formattedTitle+'.'+fileType) if(os.path.isfile(fullFilePath)): print(fullFilePath+" already exists") pass else: if format == 'code': print("downloading code for eBook: '"+tempBookData[i]['title']+ "'...") else: print("downloading eBook: '"+tempBookData[i]['title']+"' in '."+format+ "' format...") r = self.session.get(self.packtPubUrl+tempBookData[i]['downloadUrls'][format],timeout=100) if(r.status_code is 200): with open(fullFilePath,'wb') as f: f.write(r.content) if format == 'code': print("[SUCCESS] code for eBook: '"+tempBookData[i]['title']+"' downloaded succesfully!") else: print("[SUCCESS] eBook: '"+tempBookData[i]['title']+'.'+format+"' downloaded succesfully!") nrOfBooksDownloaded=i+1 else: raise requests.exceptions.RequestException("Cannot download "+tempBookData[i]['title']) print(str(nrOfBooksDownloaded)+" eBooks have been downloaded !") except requests.exceptions.RequestException as exception: print("[ERROR] - Exception occured during GET request%s "%exception ) except __HOLE__ as exception: print("[ERROR] - Exception occured durin opening file %s "%exception )
IOError
dataset/ETHPy150Open igbt6/Packt-Publishing-Free-Learning/packtFreeBookDownloader.py/MyPacktPublishingBooksDownloader.downloadBooks
5,686
def process(self, event):
    if self.field in event:
        data = event[self.field]
        try:
            fields = self._decode(data)
            self.logger.debug('syslog decoded: %s' % fields)
            event.update(fields)
            if self.consume:
                del event[self.field]
            return True
        except __HOLE__ as ex:
            self._error(event, ex)
            return False
ValueError
dataset/ETHPy150Open artirix/logcabin/logcabin/filters/syslog.py/Syslog.process
5,687
def _decode(self, data):
    m = self.re_syslog.match(data)
    if m:
        d = m.groupdict()
        prio = int(d.pop('prio'))
        try:
            d['facility'] = self.facilities[prio >> 3]
        except __HOLE__:
            d['facility'] = 'unknown'
        d['severity'] = self.severities[prio & 7]
        timestamp = d.pop('timestamp')
        d['timestamp'] = dateutil.parser.parse(timestamp)
        return d
    else:
        raise ValueError('invalid syslog')
IndexError
dataset/ETHPy150Open artirix/logcabin/logcabin/filters/syslog.py/Syslog._decode
5,688
def store_assertion(self, assertion, to_sign):
    self.assertion[assertion.id] = (assertion, to_sign)
    key = sha1(code(assertion.subject.name_id)).hexdigest()
    try:
        self.authn[key].append(assertion.authn_statement)
    except __HOLE__:
        self.authn[key] = [assertion.authn_statement]
KeyError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/pysaml2-2.4.0/src/saml2/sdb.py/SessionStorage.store_assertion
5,689
def get_authn_statements(self, name_id, session_index=None,
                         requested_context=None):
    """

    :param name_id:
    :param session_index:
    :param requested_context:
    :return:
    """
    result = []
    key = sha1(code(name_id)).hexdigest()
    try:
        statements = self.authn[key]
    except __HOLE__:
        logger.info("Unknown subject %s" % name_id)
        return []

    for statement in statements:
        if session_index:
            if statement.session_index != session_index:
                continue
        if requested_context:
            if not context_match(requested_context,
                                 statement[0].authn_context):
                continue
        result.append(statement)

    return result
KeyError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/pysaml2-2.4.0/src/saml2/sdb.py/SessionStorage.get_authn_statements
5,690
def removeObserver(self, observer):
    """
    Unregisters an observer with this publisher.

    @param observer: An L{ILogObserver} to remove.
    """
    try:
        self._observers.remove(observer)
    except __HOLE__:
        pass
ValueError
dataset/ETHPy150Open twisted/twisted/twisted/logger/_observer.py/LogPublisher.removeObserver
5,691
@staticmethod
def coerce(a, b):
    if isinstance(a, string_types) and isinstance(b, string_types):
        return sort_normalize_string(a), sort_normalize_string(b)
    if type(a) is type(b):
        return a, b
    if isinstance(a, Undefined) or isinstance(b, Undefined):
        if isinstance(a, Undefined):
            a = None
        if isinstance(b, Undefined):
            b = None
        return a, b
    if isinstance(a, (int, long, float)):
        try:
            return a, type(a)(b)
        except (__HOLE__, TypeError, OverflowError):
            pass
    if isinstance(b, (int, long, float)):
        try:
            return type(b)(a), b
        except (ValueError, TypeError, OverflowError):
            pass
    return a, b
ValueError
dataset/ETHPy150Open lektor/lektor/lektor/db.py/_CmpHelper.coerce
5,692
def __lt__(self, other):
    a, b = self.coerce(self.value, other.value)
    try:
        if self.reverse:
            return b < a
        return a < b
    except __HOLE__:
        # Put None at the beginning if reversed, else at the end.
        if self.reverse:
            return a is not None
        return a is None
TypeError
dataset/ETHPy150Open lektor/lektor/lektor/db.py/_CmpHelper.__lt__
5,693
def __eval__(self, record):
    try:
        return record[self.__field]
    except __HOLE__:
        return Undefined(obj=record, name=self.__field)
KeyError
dataset/ETHPy150Open lektor/lektor/lektor/db.py/_RecordQueryField.__eval__
5,694
def __getitem__(self, name):
    try:
        return self.__getattr__(name)
    except __HOLE__:
        raise KeyError(name)
AttributeError
dataset/ETHPy150Open lektor/lektor/lektor/db.py/_RecordQueryProxy.__getitem__
5,695
@cached_property
def _siblings(self):
    parent = self.parent
    pagination_enabled = parent.datamodel.pagination_config.enabled

    # Don't track dependencies for this part.
    with Context(pad=self.pad):
        if pagination_enabled:
            pagination = parent.pagination
            siblings = list(pagination.config.get_pagination_query(parent))
        else:
            siblings = list(parent.children)

        prev_item, next_item = None, None
        try:
            me = siblings.index(self)
        except __HOLE__:
            # Self not in parents.children or not in parents.pagination.
            pass
        else:
            if me > 0:
                prev_item = siblings[me - 1]

            if me + 1 < len(siblings):
                next_item = siblings[me + 1]

    return prev_item, next_item
ValueError
dataset/ETHPy150Open lektor/lektor/lektor/db.py/Page._siblings
5,696
def load_raw_data(self, path, alt=PRIMARY_ALT, cls=None, fallback=True): """Internal helper that loads the raw record data. This performs very little data processing on the data. """ path = cleanup_path(path) if cls is None: cls = dict fn_base = self.to_fs_path(path) rv = cls() rv_type = None choiceiter = _iter_filename_choices(fn_base, [alt], self.config, fallback=fallback) for fs_path, source_alt, is_attachment in choiceiter: # If we already determined what our return value is but the # type mismatches what we try now, we have to abort. Eg: # a page can not become an attachment or the other way round. if rv_type is not None and rv_type != is_attachment: break try: with open(fs_path, 'rb') as f: if rv_type is None: rv_type = is_attachment for key, lines in metaformat.tokenize(f, encoding='utf-8'): if key not in rv: rv[key] = u''.join(lines) except __HOLE__ as e: if e.errno not in (errno.ENOTDIR, errno.ENOENT): raise if not is_attachment or not os.path.isfile(fs_path[:-3]): continue # Special case: we are loading an attachment but the meta # data file does not exist. In that case we still want to # record that we're loading an attachment. elif is_attachment: rv_type = True if '_source_alt' not in rv: rv['_source_alt'] = source_alt if rv_type is None: return rv['_path'] = path rv['_id'] = posixpath.basename(path) rv['_gid'] = hashlib.md5(path.encode('utf-8')).hexdigest() rv['_alt'] = alt if rv_type: rv['_attachment_for'] = posixpath.dirname(path) return rv
IOError
dataset/ETHPy150Open lektor/lektor/lektor/db.py/Database.load_raw_data
5,697
def iter_items(self, path, alt=PRIMARY_ALT): """Iterates over all items below a path and yields them as tuples in the form ``(id, alt, is_attachment)``. """ fn_base = self.to_fs_path(path) if alt is None: alts = self.config.list_alternatives() single_alt = False else: alts = [alt] single_alt = True choiceiter = _iter_filename_choices(fn_base, alts, self.config) for fs_path, actual_alt, is_attachment in choiceiter: if not os.path.isfile(fs_path): continue # This path is actually for an attachment, which means that we # cannot have any items below it and will just abort with an # empty iterator. if is_attachment: break try: dir_path = os.path.dirname(fs_path) for filename in os.listdir(dir_path): if not isinstance(filename, text_type): try: filename = filename.decode(fs_enc) except UnicodeError: continue if filename.endswith('.lr') or \ self.env.is_uninteresting_source_name(filename): continue # We found an attachment. Attachments always live # below the primary alt, so we report it as such. if os.path.isfile(os.path.join(dir_path, filename)): yield filename, PRIMARY_ALT, True # We found a directory, let's make sure it contains a # contents.lr file (or a contents+alt.lr file). else: for content_alt in _iter_content_files( os.path.join(dir_path, filename), alts): yield filename, content_alt, False # If we want a single alt, we break here so # that we only produce a single result. # Otherwise this would also return the primary # fallback here. if single_alt: break except __HOLE__ as e: if e.errno != errno.ENOENT: raise continue # If we reach this point, we found our parent, so we can stop # searching for more at this point. break
IOError
dataset/ETHPy150Open lektor/lektor/lektor/db.py/Database.iter_items
5,698
def persist(self, record):
    """Persists a record.  This will put it into the persistent cache."""
    cache_key = self._get_cache_key(record)
    self.persistent[cache_key] = record
    try:
        del self.ephemeral[cache_key]
    except __HOLE__:
        pass
KeyError
dataset/ETHPy150Open lektor/lektor/lektor/db.py/RecordCache.persist
5,699
def kill(self, sig):
    """Attempt to kill the process if it still seems to be running.
    """
    if self._exit_status is not None:
        raise OSError(errno.ESRCH, os.strerror(errno.ESRCH))
    try:
        result = os.kill(self.pid(), sig)
    except __HOLE__, e:
        # not much we can do at this point.
        print "WTF? Kill threw an error:", e
OSError
dataset/ETHPy150Open boakley/robotframework-workbench/rwb/runner/tsubprocess.py/Process.kill